from collections import namedtuple
from itertools import islice
-from bup import _helpers, path, midx, bloom, xstat
+from bup import _helpers, hashsplit, path, midx, bloom, xstat
from bup.helpers import (Sha1, add_error, chunkyreader, debug1, debug2,
- hostname, log, merge_iter, mmap_read, mmap_readwrite,
- progress, qprogress, unlink, username, userfullname,
+ fdatasync,
+ hostname, localtime, log, merge_iter,
+ mmap_read, mmap_readwrite,
+ progress, qprogress, stat_if_exists,
+ unlink, username, userfullname,
utc_offset_str)
return parse_commit(commit_content)
+def _local_git_date_str(epoch_sec):
+ return '%d %s' % (epoch_sec, utc_offset_str(epoch_sec))
+
+
+def _git_date_str(epoch_sec, tz_offset_sec):
+ offs = tz_offset_sec // 60
+ return '%d %s%02d%02d' \
+ % (epoch_sec,
+ '+' if offs >= 0 else '-',
+ abs(offs) // 60,
+ abs(offs) % 60)
+
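+# For example:
+#   _git_date_str(1419865000, -18000) -> '1419865000 -0500'
+#   _git_date_str(1419865000, 0) -> '1419865000 +0000'
+# (illustrative values; any epoch-seconds/offset pair works the same way)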
+
def repo(sub = '', repo_dir=None):
"""Get the path to the git repository or one of its subdirectories."""
global repodir
def _encode_packobj(type, content, compression_level=1):
+ if compression_level not in (0, 1, 2, 3, 4, 5, 6, 7, 8, 9):
+ raise ValueError('invalid compression level %s' % compression_level)
szout = ''
sz = len(content)
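+    # Git pack object header: the first byte carries the object type in
+    # bits 4-6 and the low 4 bits of the size; any remaining size bits
+    # follow in 7-bit chunks, with the high bit set on every byte except
+    # the last.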
szbits = (sz & 0x0f) | (_typemap[type]<<4)
break
szbits = sz & 0x7f
sz >>= 7
- if compression_level > 9:
- compression_level = 9
- elif compression_level < 0:
- compression_level = 0
z = zlib.compressobj(compression_level)
yield szout
yield z.compress(content)
def _make_objcache():
return PackIdxList(repo('objects/pack'))
+# bup-gc assumes that it can disable all PackWriter activities
+# (bloom/midx/cache) via the constructor and close() arguments.
+
class PackWriter:
"""Writes Git objects inside a pack file."""
- def __init__(self, objcache_maker=_make_objcache, compression_level=1):
+ def __init__(self, objcache_maker=_make_objcache, compression_level=1,
+ run_midx=True, on_pack_finish=None):
+ self.file = None
+ self.parentfd = None
self.count = 0
self.outbytes = 0
self.filename = None
- self.file = None
self.idx = None
self.objcache_maker = objcache_maker
self.objcache = None
self.compression_level = compression_level
+        self.run_midx = run_midx
+ self.on_pack_finish = on_pack_finish
def __del__(self):
self.close()
def _open(self):
if not self.file:
- (fd,name) = tempfile.mkstemp(suffix='.pack', dir=repo('objects'))
- self.file = os.fdopen(fd, 'w+b')
+            objdir = repo('objects')
+ fd, name = tempfile.mkstemp(suffix='.pack', dir=objdir)
+ try:
+ self.file = os.fdopen(fd, 'w+b')
+ except:
+ os.close(fd)
+ raise
+ try:
+ self.parentfd = os.open(objdir, os.O_RDONLY)
+ except:
+ f = self.file
+ self.file = None
+ f.close()
+ raise
assert(name.endswith('.pack'))
self.filename = name[:-5]
self.file.write('PACK\0\0\0\2\0\0\0\0')
def breakpoint(self):
"""Clear byte and object counts and return the last processed id."""
- id = self._end()
+ id = self._end(self.run_midx)
self.outbytes = self.count = 0
return id
self._require_objcache()
return self.objcache.exists(id, want_source=want_source)
+ def just_write(self, sha, type, content):
+ """Write an object to the pack file, bypassing the objcache. Fails if
+ sha exists()."""
+ self._write(sha, type, content)
+
def maybe_write(self, type, content):
"""Write an object to the pack file if not present and return its id."""
sha = calc_hash(type, content)
if not self.exists(sha):
- self._write(sha, type, content)
+ self.just_write(sha, type, content)
self._require_objcache()
self.objcache.add(sha)
return sha
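+    # Illustrative use: maybe_write() consults the objcache and skips
+    # duplicates, so it's the normal entry point; just_write() is for
+    # callers like bup-gc that manage duplication themselves:
+    #   sha = w.maybe_write('blob', data)  # w: a hypothetical PackWriter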
content = tree_encode(shalist)
return self.maybe_write('tree', content)
- def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
+ def new_commit(self, tree, parent,
+ author, adate_sec, adate_tz,
+ committer, cdate_sec, cdate_tz,
+ msg):
+ """Create a commit object in the pack. The date_sec values must be
+ epoch-seconds, and if a tz is None, the local timezone is assumed."""
+        if adate_tz is not None:
+ adate_str = _git_date_str(adate_sec, adate_tz)
+ else:
+ adate_str = _local_git_date_str(adate_sec)
+        if cdate_tz is not None:
+ cdate_str = _git_date_str(cdate_sec, cdate_tz)
+ else:
+ cdate_str = _local_git_date_str(cdate_sec)
l = []
if tree: l.append('tree %s' % tree.encode('hex'))
if parent: l.append('parent %s' % parent.encode('hex'))
- if author: l.append('author %s %s' % (author, _git_date(adate)))
- if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
+ if author: l.append('author %s %s' % (author, adate_str))
+ if committer: l.append('committer %s %s' % (committer, cdate_str))
l.append('')
l.append(msg)
return self.maybe_write('commit', '\n'.join(l))
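+    # A hypothetical call, assuming binary tree/parent shas and a
+    # PackWriter w:
+    #   w.new_commit(tree_sha, parent_sha,
+    #                'A U Thor <a@example.com>', 1419865000, -18000,
+    #                'A U Thor <a@example.com>', 1419865000, None,
+    #                'commit message')
+    # The None cdate_tz makes the committer date use the local timezone.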
- def new_commit(self, parent, tree, date, msg):
- """Create a commit object in the pack."""
- userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
- commit = self._new_commit(tree, parent,
- userline, date, userline, date,
- msg)
- return commit
-
def abort(self):
"""Remove the pack file from disk."""
f = self.file
if f:
- self.idx = None
+ pfd = self.parentfd
self.file = None
+ self.parentfd = None
+ self.idx = None
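+            # Nested finallys below: unlink the pack, then close the
+            # file, then release the directory fd, even if an earlier
+            # step throws.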
try:
- os.unlink(self.filename + '.pack')
+ try:
+ os.unlink(self.filename + '.pack')
+ finally:
+ f.close()
finally:
- f.close()
+ if pfd is not None:
+ os.close(pfd)
def _end(self, run_midx=True):
f = self.file
sum.update(b)
packbin = sum.digest()
f.write(packbin)
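+            # Flush the pack contents to disk before the idx is
+            # written and the files are renamed into place.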
+ fdatasync(f.fileno())
finally:
f.close()
os.unlink(self.filename + '.map')
os.rename(self.filename + '.pack', nameprefix + '.pack')
os.rename(self.filename + '.idx', nameprefix + '.idx')
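+        # Sync the fd we hold on the objects directory so the rename
+        # and unlink metadata above reaches disk.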
+ try:
+ os.fsync(self.parentfd)
+ finally:
+ os.close(self.parentfd)
if run_midx:
auto_midx(repo('objects/pack'))
+
+ if self.on_pack_finish:
+ self.on_pack_finish(nameprefix)
+
return nameprefix
def close(self, run_midx=True):
idx_f = open(filename, 'w+b')
try:
idx_f.truncate(index_len)
+ fdatasync(idx_f.fileno())
idx_map = mmap_readwrite(idx_f, close=False)
- count = _helpers.write_idx(filename, idx_map, idx, self.count)
- assert(count == self.count)
+ try:
+ count = _helpers.write_idx(filename, idx_map, idx, self.count)
+ assert(count == self.count)
+ idx_map.flush()
+ finally:
+ idx_map.close()
finally:
- if idx_map: idx_map.close()
idx_f.close()
idx_f = open(filename, 'a+b')
for b in chunkyreader(idx_f):
idx_sum.update(b)
idx_f.write(idx_sum.digest())
+ fdatasync(idx_f.fileno())
return namebase
finally:
idx_f.close()
-def _git_date(date):
- return '%d %s' % (date, utc_offset_str(date))
-
-
def _gitenv(repo_dir = None):
if not repo_dir:
repo_dir = repo()
return env
-def list_refs(refname=None, repo_dir=None,
+def list_refs(refnames=None, repo_dir=None,
limit_to_heads=False, limit_to_tags=False):
- """Yield (refname, hash) tuples for all repository refs unless a ref
- name is specified. Given a ref name, only include tuples for that
- particular ref. The limits restrict the result items to
- refs/heads or refs/tags. If both limits are specified, items from
- both sources will be included.
+ """Yield (refname, hash) tuples for all repository refs unless
+ refnames are specified. In that case, only include tuples for
+ those refs. The limits restrict the result items to refs/heads or
+ refs/tags. If both limits are specified, items from both sources
+ will be included.
"""
argv = ['git', 'show-ref']
if limit_to_tags:
argv.append('--tags')
argv.append('--')
- if refname:
- argv += [refname]
+ if refnames:
+ argv += refnames
p = subprocess.Popen(argv,
preexec_fn = _gitenv(repo_dir),
stdout = subprocess.PIPE)
def read_ref(refname, repo_dir = None):
"""Get the commit id of the most recent commit made on a given ref."""
- refs = list_refs(refname, repo_dir=repo_dir, limit_to_heads=True)
+ refs = list_refs(refnames=[refname], repo_dir=repo_dir, limit_to_heads=True)
l = tuple(islice(refs, 2))
if l:
assert(len(l) == 1)
_git_wait('git update-ref', p)
-def delete_ref(refname):
- """Delete a repository reference."""
+def delete_ref(refname, oldvalue=None):
+ """Delete a repository reference (see git update-ref(1))."""
assert(refname.startswith('refs/'))
- p = subprocess.Popen(['git', 'update-ref', '-d', refname],
+ oldvalue = [] if not oldvalue else [oldvalue]
+ p = subprocess.Popen(['git', 'update-ref', '-d', refname] + oldvalue,
preexec_fn = _gitenv())
_git_wait('git update-ref', p)
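+# For example, to delete a ref only if it still points where we think
+# (git update-ref -d <ref> <oldvalue> refuses when the values differ):
+#   delete_ref('refs/heads/scratch', oldvalue=known_hex_sha)  # hypothetical names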
def check_repo_or_die(path=None):
- """Make sure a bup repository exists, and abort if not.
- If the path to a particular repository was not specified, this function
- initializes the default repository automatically.
- """
+ """Check to see if a bup repository probably exists, and abort if not."""
guess_repo(path)
- try:
- os.stat(repo('objects/pack/.'))
- except OSError as e:
- if e.errno == errno.ENOENT:
- log('error: %r is not a bup repository; run "bup init"\n'
- % repo())
+ top = repo()
+ pst = stat_if_exists(top + '/objects/pack')
+ if pst and stat.S_ISDIR(pst.st_mode):
+ return
+ if not pst:
+ top_st = stat_if_exists(top)
+ if not top_st:
+ log('error: repository %r does not exist (see "bup help init")\n'
+ % top)
sys.exit(15)
- else:
- log('error: %s\n' % e)
- sys.exit(14)
+ log('error: %r is not a repository\n' % top)
+ sys.exit(14)
_ver = None
self.abort()
+class MissingObject(KeyError):
+ def __init__(self, id):
+ self.id = id
+ KeyError.__init__(self, 'object %r is missing' % id.encode('hex'))
+
+
_ver_warned = 0
class CatPipe:
"""Link to 'git cat-file' that is used to retrieve blob data."""
self.p = None
self.inprogress = None
- def _restart(self):
+ def restart(self):
self._abort()
self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
stdin=subprocess.PIPE,
def _fast_get(self, id):
if not self.p or self.p.poll() != None:
- self._restart()
+ self.restart()
assert(self.p)
poll_result = self.p.poll()
assert(poll_result == None)
hdr = self.p.stdout.readline()
if hdr.endswith(' missing\n'):
self.inprogress = None
- raise KeyError('blob %r is missing' % id)
+ raise MissingObject(id.decode('hex'))
spl = hdr.split(' ')
if len(spl) != 3 or len(spl[0]) != 40:
raise GitError('expected blob, got %r' % spl)
tags[c] = []
tags[c].append(name) # more than one tag can point at 'c'
return tags
+
+
+WalkItem = namedtuple('WalkItem', ['id', 'type', 'mode',
+ 'path', 'chunk_path', 'data'])
+# The path is the mangled path, and if an item represents a fragment
+# of a chunked file, the chunk_path will be the chunked subtree path
+# for the chunk, i.e. ['', '2d3115e', ...]. The top-level path for a
+# chunked file will have a chunk_path of ['']. So some chunk subtree
+# of the file '/foo/bar/baz' might look like this:
+#
+# item.path = ['foo', 'bar', 'baz.bup']
+# item.chunk_path = ['', '2d3115e', '016b097']
+# item.type = 'tree'
+# ...
+
+
+def walk_object(cat_pipe, id,
+ stop_at=None,
+ include_data=None):
+ """Yield everything reachable from id via cat_pipe as a WalkItem,
+ stopping whenever stop_at(id) returns true. Throw MissingObject
+ if a hash encountered is missing from the repository, and don't
+ read or return blob content in the data field unless include_data
+ is set.
+ """
+ # Maintain the pending stack on the heap to avoid stack overflow
+ pending = [(id, [], [], None)]
+    while pending:
+ id, parent_path, chunk_path, mode = pending.pop()
+ if stop_at and stop_at(id):
+ continue
+
+ if (not include_data) and mode and stat.S_ISREG(mode):
+ # If the object is a "regular file", then it's a leaf in
+ # the graph, so we can skip reading the data if the caller
+ # hasn't requested it.
+ yield WalkItem(id=id, type='blob',
+ chunk_path=chunk_path, path=parent_path,
+ mode=mode,
+ data=None)
+ continue
+
+ item_it = cat_pipe.get(id)
+ type = item_it.next()
+ if type not in ('blob', 'commit', 'tree'):
+ raise Exception('unexpected repository object type %r' % type)
+
+ # FIXME: set the mode based on the type when the mode is None
+ if type == 'blob' and not include_data:
+ # Dump data until we can ask cat_pipe not to fetch it
+ for ignored in item_it:
+ pass
+ data = None
+ else:
+ data = ''.join(item_it)
+
+ yield WalkItem(id=id, type=type,
+ chunk_path=chunk_path, path=parent_path,
+ mode=mode,
+ data=(data if include_data else None))
+
+ if type == 'commit':
+ commit_items = parse_commit(data)
+ for pid in commit_items.parents:
+ pending.append((pid, parent_path, chunk_path, mode))
+ pending.append((commit_items.tree, parent_path, chunk_path,
+ hashsplit.GIT_MODE_TREE))
+ elif type == 'tree':
+ for mode, name, ent_id in tree_decode(data):
+ demangled, bup_type = demangle_name(name, mode)
+ if chunk_path:
+ sub_path = parent_path
+ sub_chunk_path = chunk_path + [name]
+ else:
+ sub_path = parent_path + [name]
+ if bup_type == BUP_CHUNKED:
+ sub_chunk_path = ['']
+ else:
+ sub_chunk_path = chunk_path
+ pending.append((ent_id.encode('hex'), sub_path, sub_chunk_path,
+ mode))
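+
+
+# Minimal usage sketch (cat_pipe and commit_hex are hypothetical):
+#   cat_pipe = CatPipe()
+#   reachable = set(item.id for item in walk_object(cat_pipe, commit_hex))
+# With include_data unset, blob contents aren't fetched and item.data
+# is always None.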