from collections import namedtuple
from itertools import islice
-from bup import _helpers, path, midx, bloom, xstat
+from bup import _helpers, hashsplit, path, midx, bloom, xstat
from bup.helpers import (Sha1, add_error, chunkyreader, debug1, debug2,
fdatasync,
hostname, localtime, log, merge_iter,
mmap_read, mmap_readwrite,
- progress, qprogress, unlink, username, userfullname,
+ parse_num,
+ progress, qprogress, stat_if_exists,
+ unlink, username, userfullname,
utc_offset_str)
-
-max_pack_size = 1000*1000*1000 # larger packs will slow down pruning
-max_pack_objects = 200*1000 # cache memory usage is about 83 bytes per object
-
verbose = 0
ignore_midx = 0
-repodir = None
+repodir = None # The default repository, once initialized
_typemap = { 'blob':3, 'tree':2, 'commit':1, 'tag':4 }
_typermap = { 3:'blob', 2:'tree', 1:'commit', 4:'tag' }

class GitError(Exception):
    pass

+def _git_wait(cmd, p):
+ rv = p.wait()
+ if rv != 0:
+ raise GitError('%s returned %d' % (cmd, rv))
+
+def _git_capture(argv):
+ p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv())
+ r = p.stdout.read()
+ _git_wait(repr(argv), p)
+ return r
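+
+# Note (argv illustrative): _git_capture returns the command's entire stdout
+# as a string, raising GitError on a nonzero exit:
+#   head = _git_capture(['git', 'rev-parse', 'HEAD']).strip()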
+
+def git_config_get(option, repo_dir=None):
+    """Return the raw stdout of `git config --get <option>` (including any
+    trailing newline), or None when the option isn't set."""
+ cmd = ('git', 'config', '--get', option)
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ preexec_fn=_gitenv(repo_dir=repo_dir))
+ r = p.stdout.read()
+ rc = p.wait()
+ if rc == 0:
+ return r
+ if rc != 1:
+ raise GitError('%s returned %d' % (cmd, rc))
+ return None
+
+
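+# Hedged example: _example_packsize_limit is hypothetical, not part of bup's
+# API; it just shows the calling convention -- git_config_get returns raw
+# `git config` output, which callers parse themselves.
+def _example_packsize_limit(repo_dir=None):
+    val = git_config_get('pack.packSizeLimit', repo_dir=repo_dir)
+    return parse_num(val) if val is not None else None
+
+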
def parse_tz_offset(s):
    """Return the UTC offset in seconds for a string like '-0500'."""
    tz_off = (int(s[1:3]) * 60 * 60) + (int(s[3:5]) * 60)
    if s[0] == '-':
        return - tz_off
    return tz_off
def _encode_packobj(type, content, compression_level=1):
+ if compression_level not in (0, 1, 2, 3, 4, 5, 6, 7, 8, 9):
+ raise ValueError('invalid compression level %s' % compression_level)
szout = ''
sz = len(content)
    szbits = (sz & 0x0f) | (_typemap[type]<<4)
    sz >>= 4
    while 1:
        if sz: szbits |= 0x80
        szout += chr(szbits)
        if not sz: break
        szbits = sz & 0x7f
        sz >>= 7
- if compression_level > 9:
- compression_level = 9
- elif compression_level < 0:
- compression_level = 0
z = zlib.compressobj(compression_level)
yield szout
    yield z.compress(content)
    yield z.flush()
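
# Hedged sketch: _example_encode_blob is hypothetical; joining the generator
# yields a complete on-disk pack object (the varint size/type header followed
# by the zlib stream).
def _example_encode_blob(data):
    return ''.join(_encode_packobj('blob', data, compression_level=1))
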
def _make_objcache():
return PackIdxList(repo('objects/pack'))
+# bup-gc assumes that it can disable all PackWriter activities
+# (bloom/midx/cache) via the constructor and close() arguments.
+
class PackWriter:
"""Writes Git objects inside a pack file."""
- def __init__(self, objcache_maker=_make_objcache, compression_level=1):
+ def __init__(self, objcache_maker=_make_objcache, compression_level=1,
+ run_midx=True, on_pack_finish=None,
+ max_pack_size=None, max_pack_objects=None):
+ self.repo_dir = repo()
self.file = None
self.parentfd = None
self.count = 0
self.objcache_maker = objcache_maker
self.objcache = None
self.compression_level = compression_level
+        self.run_midx = run_midx
+ self.on_pack_finish = on_pack_finish
+ if not max_pack_size:
+ max_pack_size = git_config_get('pack.packSizeLimit',
+ repo_dir=self.repo_dir)
+ if max_pack_size is not None:
+ max_pack_size = parse_num(max_pack_size)
+ if not max_pack_size:
+ # larger packs slow down pruning
+ max_pack_size = 1000 * 1000 * 1000
+ self.max_pack_size = max_pack_size
+ # cache memory usage is about 83 bytes per object
+ self.max_pack_objects = max_pack_objects if max_pack_objects \
+ else max(1, self.max_pack_size // 5000)
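+        # Worked example: with no arguments and no pack.packSizeLimit set,
+        # max_pack_size is 1000*1000*1000 and max_pack_objects is
+        # 1000*1000*1000 // 5000 == 200000, matching the old module-level
+        # constants.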
def __del__(self):
self.close()
def _open(self):
if not self.file:
- objdir = dir=repo('objects')
+            objdir = os.path.join(self.repo_dir, 'objects')
fd, name = tempfile.mkstemp(suffix='.pack', dir=objdir)
try:
self.file = os.fdopen(fd, 'w+b')
size, crc = self._raw_write(_encode_packobj(type, content,
self.compression_level),
sha=sha)
- if self.outbytes >= max_pack_size or self.count >= max_pack_objects:
+ if self.outbytes >= self.max_pack_size \
+ or self.count >= self.max_pack_objects:
self.breakpoint()
return sha
def breakpoint(self):
"""Clear byte and object counts and return the last processed id."""
- id = self._end()
+ id = self._end(self.run_midx)
self.outbytes = self.count = 0
return id
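
    # Note: _write() calls breakpoint() automatically once outbytes reaches
    # max_pack_size or count reaches max_pack_objects, so a single logical
    # save may be split across several packs.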
self._require_objcache()
return self.objcache.exists(id, want_source=want_source)
+    def just_write(self, sha, type, content):
+        """Write an object to the pack file, bypassing the objcache.  Unlike
+        maybe_write(), this performs no duplicate suppression; the caller is
+        responsible for ensuring sha isn't already present."""
+        self._write(sha, type, content)
+
def maybe_write(self, type, content):
"""Write an object to the pack file if not present and return its id."""
sha = calc_hash(type, content)
if not self.exists(sha):
- self._write(sha, type, content)
+ self.just_write(sha, type, content)
self._require_objcache()
self.objcache.add(sha)
return sha
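
    # Usage note: maybe_write() is the normal entry point and dedups via the
    # objcache; just_write() suits callers that have already established that
    # the object is absent and want to skip the exists() lookup.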
f.close()
obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin)
-
- nameprefix = repo('objects/pack/pack-%s' % obj_list_sha)
+ nameprefix = os.path.join(self.repo_dir,
+ 'objects/pack/pack-' + obj_list_sha)
if os.path.exists(self.filename + '.map'):
os.unlink(self.filename + '.map')
os.rename(self.filename + '.pack', nameprefix + '.pack')
os.close(self.parentfd)
if run_midx:
- auto_midx(repo('objects/pack'))
+ auto_midx(os.path.join(self.repo_dir, 'objects/pack'))
+
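+        # Note: nameprefix omits the .pack/.idx extension (it's
+        # .../objects/pack/pack-<sha>), so the callback can derive either
+        # file's path from it.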
+ if self.on_pack_finish:
+ self.on_pack_finish(nameprefix)
+
return nameprefix
def close(self, run_midx=True):
return env
-def list_refs(refname=None, repo_dir=None,
+def list_refs(refnames=None, repo_dir=None,
limit_to_heads=False, limit_to_tags=False):
- """Yield (refname, hash) tuples for all repository refs unless a ref
- name is specified. Given a ref name, only include tuples for that
- particular ref. The limits restrict the result items to
- refs/heads or refs/tags. If both limits are specified, items from
- both sources will be included.
+ """Yield (refname, hash) tuples for all repository refs unless
+ refnames are specified. In that case, only include tuples for
+ those refs. The limits restrict the result items to refs/heads or
+ refs/tags. If both limits are specified, items from both sources
+ will be included.
"""
argv = ['git', 'show-ref']
if limit_to_tags:
argv.append('--tags')
argv.append('--')
- if refname:
- argv += [refname]
+ if refnames:
+ argv += refnames
p = subprocess.Popen(argv,
preexec_fn = _gitenv(repo_dir),
stdout = subprocess.PIPE)
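
# Hedged usage sketch (the branch name is illustrative): list_refs is a
# generator of (refname, sha) pairs, so filters compose naturally:
#   heads = dict(list_refs(refnames=['refs/heads/master'],
#                          limit_to_heads=True))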
def read_ref(refname, repo_dir = None):
"""Get the commit id of the most recent commit made on a given ref."""
- refs = list_refs(refname, repo_dir=repo_dir, limit_to_heads=True)
+ refs = list_refs(refnames=[refname], repo_dir=repo_dir, limit_to_heads=True)
l = tuple(islice(refs, 2))
if l:
assert(len(l) == 1)
def check_repo_or_die(path=None):
- """Make sure a bup repository exists, and abort if not.
- If the path to a particular repository was not specified, this function
- initializes the default repository automatically.
- """
+    """Check whether a bup repository probably exists, and abort if not."""
guess_repo(path)
- try:
- os.stat(repo('objects/pack/.'))
- except OSError as e:
- if e.errno == errno.ENOENT:
- log('error: %r is not a bup repository; run "bup init"\n'
- % repo())
+ top = repo()
+ pst = stat_if_exists(top + '/objects/pack')
+ if pst and stat.S_ISDIR(pst.st_mode):
+ return
+ if not pst:
+ top_st = stat_if_exists(top)
+ if not top_st:
+ log('error: repository %r does not exist (see "bup help init")\n'
+ % top)
sys.exit(15)
- else:
- log('error: %s\n' % e)
- sys.exit(14)
+ log('error: %r is not a repository\n' % top)
+ sys.exit(14)
_ver = None
return _ver
-def _git_wait(cmd, p):
- rv = p.wait()
- if rv != 0:
- raise GitError('%s returned %d' % (cmd, rv))
-
-
-def _git_capture(argv):
- p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv())
- r = p.stdout.read()
- _git_wait(repr(argv), p)
- return r
-
-
class _AbortableIter:
def __init__(self, it, onabort = None):
self.it = it
self.abort()
+class MissingObject(KeyError):
+ def __init__(self, id):
+ self.id = id
+ KeyError.__init__(self, 'object %r is missing' % id.encode('hex'))
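+
+# Hedged usage sketch (some_id is a placeholder): a CatPipe reads the reply
+# header lazily, so MissingObject surfaces on the generator's first next(),
+# not at the get() call itself:
+#   it = cp().get(some_id)
+#   try:
+#       typ = it.next()
+#   except MissingObject as ex:
+#       add_error('missing object %r' % ex.id.encode('hex'))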
+
+
_ver_warned = 0
class CatPipe:
"""Link to 'git cat-file' that is used to retrieve blob data."""
self.repo_dir = repo_dir
wanted = ('1','5','6')
if ver() < wanted:
- if not _ver_warned:
- log('warning: git version < %s; bup will be slow.\n'
- % '.'.join(wanted))
- _ver_warned = 1
- self.get = self._slow_get
- else:
- self.p = self.inprogress = None
- self.get = self._fast_get
+ log('error: git version must be at least 1.5.6\n')
+ sys.exit(1)
+ self.p = self.inprogress = None
def _abort(self):
if self.p:
self.p = None
self.inprogress = None
- def _restart(self):
+ def restart(self):
self._abort()
self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
stdin=subprocess.PIPE,
bufsize = 4096,
preexec_fn = _gitenv(self.repo_dir))
- def _fast_get(self, id):
+ def get(self, id, size=False):
+        """Yield the object's type, then the chunks of data the id ref
+        refers to. If size is true, yield (obj_type, obj_size) instead of
+        just the type.
+
+        """
if not self.p or self.p.poll() != None:
- self._restart()
+ self.restart()
assert(self.p)
poll_result = self.p.poll()
assert(poll_result == None)
if self.inprogress:
- log('_fast_get: opening %r while %r is open\n'
- % (id, self.inprogress))
+ log('get: opening %r while %r is open\n' % (id, self.inprogress))
assert(not self.inprogress)
assert(id.find('\n') < 0)
assert(id.find('\r') < 0)
hdr = self.p.stdout.readline()
if hdr.endswith(' missing\n'):
self.inprogress = None
- raise KeyError('blob %r is missing' % id)
+ raise MissingObject(id.decode('hex'))
spl = hdr.split(' ')
if len(spl) != 3 or len(spl[0]) != 40:
            raise GitError('expected object (id, type, size), got %r' % spl)
- (hex, type, size) = spl
-
- it = _AbortableIter(chunkyreader(self.p.stdout, int(spl[2])),
- onabort = self._abort)
+ hex, typ, sz = spl
+ sz = int(sz)
+ it = _AbortableIter(chunkyreader(self.p.stdout, sz),
+ onabort=self._abort)
try:
- yield type
+ if size:
+ yield typ, sz
+ else:
+ yield typ
for blob in it:
yield blob
readline_result = self.p.stdout.readline()
it.abort()
raise
- def _slow_get(self, id):
- assert(id.find('\n') < 0)
- assert(id.find('\r') < 0)
- assert(id[0] != '-')
- type = _git_capture(['git', 'cat-file', '-t', id]).strip()
- yield type
-
- p = subprocess.Popen(['git', 'cat-file', type, id],
- stdout=subprocess.PIPE,
- preexec_fn = _gitenv(self.repo_dir))
- for blob in chunkyreader(p.stdout):
- yield blob
- _git_wait('git cat-file', p)
-
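    # Hedged usage sketch (ref is a placeholder): take the header first, then
    # drain the same generator for the data:
    #   it = self.get(ref, size=True)
    #   typ, sz = it.next()
    #   data = ''.join(it)
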
def _join(self, it):
type = it.next()
if type == 'blob':
def cp(repo_dir=None):
"""Create a CatPipe object or reuse the already existing one."""
- global _cp
+ global _cp, repodir
if not repo_dir:
- repo_dir = repo()
+ repo_dir = repodir or repo()
repo_dir = os.path.abspath(repo_dir)
cp = _cp.get(repo_dir)
if not cp:
tags[c] = []
tags[c].append(name) # more than one tag can point at 'c'
return tags
+
+
+WalkItem = namedtuple('WalkItem', ['id', 'type', 'mode',
+ 'path', 'chunk_path', 'data'])
+# The path is the mangled path, and if an item represents a fragment
+# of a chunked file, the chunk_path will be the chunked subtree path
+# for the chunk, i.e. ['', '2d3115e', ...]. The top-level path for a
+# chunked file will have a chunk_path of ['']. So some chunk subtree
+# of the file '/foo/bar/baz' might look like this:
+#
+# item.path = ['foo', 'bar', 'baz.bup']
+# item.chunk_path = ['', '2d3115e', '016b097']
+# item.type = 'tree'
+# ...
+
+
+def walk_object(cat_pipe, id,
+ stop_at=None,
+ include_data=None):
+ """Yield everything reachable from id via cat_pipe as a WalkItem,
+ stopping whenever stop_at(id) returns true. Throw MissingObject
+ if a hash encountered is missing from the repository, and don't
+ read or return blob content in the data field unless include_data
+ is set.
+ """
+ # Maintain the pending stack on the heap to avoid stack overflow
+ pending = [(id, [], [], None)]
+ while len(pending):
+ id, parent_path, chunk_path, mode = pending.pop()
+ if stop_at and stop_at(id):
+ continue
+
+ if (not include_data) and mode and stat.S_ISREG(mode):
+ # If the object is a "regular file", then it's a leaf in
+ # the graph, so we can skip reading the data if the caller
+ # hasn't requested it.
+ yield WalkItem(id=id, type='blob',
+ chunk_path=chunk_path, path=parent_path,
+ mode=mode,
+ data=None)
+ continue
+
+ item_it = cat_pipe.get(id)
+ type = item_it.next()
+ if type not in ('blob', 'commit', 'tree'):
+ raise Exception('unexpected repository object type %r' % type)
+
+ # FIXME: set the mode based on the type when the mode is None
+ if type == 'blob' and not include_data:
+ # Dump data until we can ask cat_pipe not to fetch it
+ for ignored in item_it:
+ pass
+ data = None
+ else:
+ data = ''.join(item_it)
+
+ yield WalkItem(id=id, type=type,
+ chunk_path=chunk_path, path=parent_path,
+ mode=mode,
+ data=(data if include_data else None))
+
+ if type == 'commit':
+ commit_items = parse_commit(data)
+ for pid in commit_items.parents:
+ pending.append((pid, parent_path, chunk_path, mode))
+ pending.append((commit_items.tree, parent_path, chunk_path,
+ hashsplit.GIT_MODE_TREE))
+ elif type == 'tree':
+ for mode, name, ent_id in tree_decode(data):
+ demangled, bup_type = demangle_name(name, mode)
+ if chunk_path:
+ sub_path = parent_path
+ sub_chunk_path = chunk_path + [name]
+ else:
+ sub_path = parent_path + [name]
+ if bup_type == BUP_CHUNKED:
+ sub_chunk_path = ['']
+ else:
+ sub_chunk_path = chunk_path
+ pending.append((ent_id.encode('hex'), sub_path, sub_chunk_path,
+ mode))
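+
+
+# Hedged usage sketch: names are illustrative. stop_at lets a caller prune
+# the traversal, e.g. to visit each object exactly once:
+#   seen = set()
+#   start = read_ref('refs/heads/master').encode('hex')
+#   for item in walk_object(cp(), start, stop_at=lambda id: id in seen):
+#       seen.add(item.id)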