2 from __future__ import absolute_import
3 import glob, os, subprocess, sys, tempfile
4 from bup import bloom, git, midx
5 from bup.git import MissingObject, walk_object
6 from bup.helpers import Nonlocal, log, progress, qprogress
7 from os.path import basename
9 # This garbage collector uses a Bloom filter to track the live objects
10 # during the mark phase. This means that the collection is
11 # probabilistic; it may retain some (known) percentage of garbage, but
12 # it can also work within a reasonable, fixed RAM budget for any
13 # particular percentage and repository size.
15 # The collection proceeds as follows:
17 # - Scan all live objects by walking all of the refs, and insert
18 # every hash encountered into a new Bloom "liveness" filter.
19 # Compute the size of the liveness filter based on the total
20 # number of objects in the repository. This is the "mark phase".
22 # - Clear the data that's dependent on the repository's object set,
23 # i.e. the reflog, the normal Bloom filter, and the midxes.
25 # - Traverse all of the pack files, consulting the liveness filter
26 # to decide which objects to keep.
28 # For each pack file, rewrite it iff it probably contains more
29 # than (currently) 10% garbage (computed by an initial traversal
30 # of the packfile in consultation with the liveness filter). To
31 # rewrite, traverse the packfile (again) and write each hash that
32 # tests positive against the liveness filter to a packwriter.
34 # During the traversal of all of the packfiles, delete redundant,
35 # old packfiles only after the packwriter has finished the pack
36 # that contains all of their live objects.
38 # The current code unconditionally tracks the set of tree hashes seen
39 # during the mark phase, and skips any that have already been visited.
40 # This should decrease the IO load at the cost of increased RAM use.
42 # FIXME: add a bloom filter tuning parameter?
def count_objects(dir, verbosity):
    """Return the total number of objects in the pack indexes under dir.

    dir is a directory containing git pack *.idx files; when verbosity
    is true, per-index progress is written via log().
    """
    # For now we'll just use open_idx(), but we could probably be much
    # more efficient since all we need is a single integer (the last
    # fanout entry) from each index.
    object_count = 0
    indexes = glob.glob(os.path.join(dir, '*.idx'))
    for i, idx_name in enumerate(indexes):
        if verbosity:
            log('found %d objects (%d/%d %s)\r'
                % (object_count, i + 1, len(indexes), basename(idx_name)))
        idx = git.open_idx(idx_name)
        # len(idx) is the number of objects in this pack index.
        object_count += len(idx)
    return object_count
def report_live_item(n, total, ref_name, ref_id, item, verbosity):
    """Log the progress of the liveness scan for one walked item.

    n of total objects have been seen so far; ref_id is the binary hash
    of the ref being walked, and item is a walk_object() item.  How much
    is printed (and whether via log() or qprogress()) depends on
    verbosity.
    """
    status = 'scanned %02.2f%%' % (n * 100.0 / total)
    hex_id = ref_id.encode('hex')
    dirslash = '/' if item.type == 'tree' else ''
    chunk_path = item.chunk_path
    if chunk_path:
        # Chunked-file interiors are only interesting at high verbosity.
        if verbosity < 4:
            return
        ps = '/'.join(item.path)
        chunk_ps = '/'.join(chunk_path)
        log('%s %s:%s/%s%s\n' % (status, hex_id, ps, chunk_ps, dirslash))
        return
    # Top commit, for example has none.
    demangled = git.demangle_name(item.path[-1], item.mode)[0] if item.path \
                else None
    # Don't print mangled paths unless the verbosity is over 3.
    if demangled:
        ps = '/'.join(item.path[:-1] + [demangled])
        if verbosity == 1:
            qprogress('%s %s:%s%s\r' % (status, hex_id, ps, dirslash))
        elif (verbosity > 1 and item.type == 'tree') \
             or (verbosity > 2 and item.type == 'blob'):
            log('%s %s:%s%s\n' % (status, hex_id, ps, dirslash))
    elif verbosity > 3:
        # No demangled name (e.g. the top commit); show the raw path.
        ps = '/'.join(item.path)
        log('%s %s:%s%s\n' % (status, hex_id, ps, dirslash))
91 def find_live_objects(existing_count, cat_pipe, verbosity=0):
92 prune_visited_trees = True # In case we want a command line option later
93 pack_dir = git.repo('objects/pack')
94 ffd, bloom_filename = tempfile.mkstemp('.bloom', 'tmp-gc-', pack_dir)
96 # FIXME: allow selection of k?
97 # FIXME: support ephemeral bloom filters (i.e. *never* written to disk)
98 live_objs = bloom.create(bloom_filename, expected=existing_count, k=None)
99 # live_objs will hold on to the fd until close or exit
100 os.unlink(bloom_filename)
101 stop_at, trees_visited = None, None
102 if prune_visited_trees:
103 trees_visited = set()
104 stop_at = lambda (x): x.decode('hex') in trees_visited
105 approx_live_count = 0
106 for ref_name, ref_id in git.list_refs():
107 for item in walk_object(cat_pipe, ref_id.encode('hex'),
112 report_live_item(approx_live_count, existing_count,
113 ref_name, ref_id, item, verbosity)
114 if trees_visited is not None and item.type == 'tree':
115 trees_visited.add(item.oid)
117 if not live_objs.exists(item.oid):
118 live_objs.add(item.oid)
119 approx_live_count += 1
121 live_objs.add(item.oid)
124 log('expecting to retain about %.2f%% unnecessary objects\n'
125 % live_objs.pfalse_positive())
def sweep(live_objects, existing_count, cat_pipe, threshold, compression,
          verbosity):
    """Rewrite or delete packs based on the live_objects Bloom filter.

    Packs with no live objects are deleted; packs with more than
    threshold percent garbage are rewritten (live objects only, at the
    given compression level); the rest are kept as-is.
    """
    # Traverse all the packs, saving the (probably) live data.

    ns = Nonlocal()
    ns.stale_files = []

    def remove_stale_files(new_pack_prefix):
        # on_pack_finish callback: delete superseded packs once the pack
        # holding their live objects has been safely written.
        if verbosity and new_pack_prefix:
            log('created ' + basename(new_pack_prefix) + '\n')
        for p in ns.stale_files:
            if new_pack_prefix and p.startswith(new_pack_prefix):
                continue # Don't remove the new pack file
            if verbosity:
                log('removing ' + basename(p) + '\n')
            os.unlink(p)
        if ns.stale_files: # So git cat-pipe will close them
            cat_pipe.restart()
        ns.stale_files = []

    writer = git.PackWriter(objcache_maker=None,
                            compression_level=compression,
                            run_midx=False,
                            on_pack_finish=remove_stale_files)

    # FIXME: sanity check .idx names vs .pack names?
    collect_count = 0
    for idx_name in glob.glob(os.path.join(git.repo('objects/pack'), '*.idx')):
        if verbosity:
            qprogress('preserving live data (%d%% complete)\r'
                      % ((float(collect_count) / existing_count) * 100))
        idx = git.open_idx(idx_name)

        # First pass: count this pack's live objects.
        idx_live_count = 0
        for i in xrange(0, len(idx)):
            sha = idx.shatable[i * 20 : (i + 1) * 20]
            if live_objects.exists(sha):
                idx_live_count += 1

        collect_count += idx_live_count
        if idx_live_count == 0:
            # Nothing live here; drop the whole pack.
            if verbosity:
                log('deleting %s\n'
                    % git.repo_rel(basename(idx_name)))
            ns.stale_files.append(idx_name)
            ns.stale_files.append(idx_name[:-3] + 'pack')
            continue

        live_frac = idx_live_count / float(len(idx))
        if live_frac > ((100 - threshold) / 100.0):
            # Not enough garbage to justify a rewrite.
            if verbosity:
                log('keeping %s (%d%% live)\n' % (git.repo_rel(basename(idx_name)),
                                                  live_frac * 100))
            continue

        if verbosity:
            log('rewriting %s (%.2f%% live)\n' % (basename(idx_name),
                                                  live_frac * 100))
        # Second pass: copy each live object into the new pack.
        for i in xrange(0, len(idx)):
            sha = idx.shatable[i * 20 : (i + 1) * 20]
            if live_objects.exists(sha):
                item_it = cat_pipe.get(sha.encode('hex'))
                _, typ, _ = next(item_it)
                writer.just_write(sha, typ, ''.join(item_it))

        ns.stale_files.append(idx_name)
        ns.stale_files.append(idx_name[:-3] + 'pack')
        writer.breakpoint()

    if verbosity:
        progress('preserving live data (%d%% complete)\n'
                 % ((float(collect_count) / existing_count) * 100))

    # Nothing should have recreated midx/bloom yet.
    pack_dir = git.repo('objects/pack')
    assert(not os.path.exists(os.path.join(pack_dir, 'bup.bloom')))
    assert(not glob.glob(os.path.join(pack_dir, '*.midx')))

    # try/catch should call writer.abort()?
    # This will finally run midx.
    writer.close() # Can only change refs (if needed) after this.
    remove_stale_files(None) # In case we didn't write to the writer.

    if verbosity:
        log('discarded %d%% of objects\n'
            % ((existing_count - count_objects(pack_dir, verbosity))
               / float(existing_count) * 100))
216 def bup_gc(threshold=10, compression=1, verbosity=0):
218 existing_count = count_objects(git.repo('objects/pack'), verbosity)
220 log('found %d objects\n' % existing_count)
221 if not existing_count:
223 log('nothing to collect\n')
226 live_objects = find_live_objects(existing_count, cat_pipe,
228 except MissingObject as ex:
229 log('bup: missing object %r \n' % ex.oid.encode('hex'))
232 # FIXME: just rename midxes and bloom, and restore them at the end if
233 # we didn't change any packs?
234 packdir = git.repo('objects/pack')
235 if verbosity: log('clearing midx files\n')
236 midx.clear_midxes(packdir)
237 if verbosity: log('clearing bloom filter\n')
238 bloom.clear_bloom(packdir)
239 if verbosity: log('clearing reflog\n')
240 expirelog_cmd = ['git', 'reflog', 'expire', '--all', '--expire=all']
241 expirelog = subprocess.Popen(expirelog_cmd, preexec_fn = git._gitenv())
242 git._git_wait(' '.join(expirelog_cmd), expirelog)
243 if verbosity: log('removing unreachable data\n')
244 sweep(live_objects, existing_count, cat_pipe,
245 threshold, compression,