X-Git-Url: https://arthur.barton.de/gitweb/?a=blobdiff_plain;f=lib%2Fbup%2Fgit.py;h=4c1a95c52a985e5eaee1bb36ef4ca61651559c78;hb=20db61170c8a8fd4b452ec29f6a3133385c4fdad;hp=ff48da7113ed7e4bd5cd363f3807bd7ffa674e5c;hpb=b2f7ccd162e988a8785771a116c83b04f9ea51ce;p=bup.git diff --git a/lib/bup/git.py b/lib/bup/git.py index ff48da7..4c1a95c 100644 --- a/lib/bup/git.py +++ b/lib/bup/git.py @@ -3,31 +3,41 @@ bup repositories are in Git format. This library allows us to interact with the Git data structures. """ -from __future__ import absolute_import -import errno, os, sys, zlib, time, subprocess, struct, stat, re, tempfile, glob +from __future__ import absolute_import, print_function +import os, sys, zlib, subprocess, struct, stat, re, tempfile, glob +from array import array +from binascii import hexlify, unhexlify from collections import namedtuple from itertools import islice -from numbers import Integral -from bup import _helpers, compat, hashsplit, path, midx, bloom, xstat -from bup.compat import range +from bup import _helpers, hashsplit, path, midx, bloom, xstat +from bup.compat import (buffer, + byte_int, bytes_from_byte, bytes_from_uint, + environ, + items, + pending_raise, + range, + reraise) +from bup.io import path_msg from bup.helpers import (Sha1, add_error, chunkyreader, debug1, debug2, + exo, fdatasync, - hostname, localtime, log, + finalized, + log, merge_dict, merge_iter, mmap_read, mmap_readwrite, - parse_num, - progress, qprogress, shstr, stat_if_exists, - unlink, username, userfullname, + progress, qprogress, stat_if_exists, + unlink, utc_offset_str) + verbose = 0 -ignore_midx = 0 repodir = None # The default repository, once initialized -_typemap = { 'blob':3, 'tree':2, 'commit':1, 'tag':4 } -_typermap = { 3:'blob', 2:'tree', 1:'commit', 4:'tag' } +_typemap = {b'blob': 3, b'tree': 2, b'commit': 1, b'tag': 4} +_typermap = {v: k for k, v in items(_typemap)} + _total_searches = 0 _total_steps = 0 @@ -40,61 +50,99 @@ class GitError(Exception): def _gitenv(repo_dir=None): if not repo_dir: repo_dir = repo() - return merge_dict(os.environ, {'GIT_DIR': os.path.abspath(repo_dir)}) + return merge_dict(environ, {b'GIT_DIR': os.path.abspath(repo_dir)}) def _git_wait(cmd, p): rv = p.wait() if rv != 0: - raise GitError('%s returned %d' % (shstr(cmd), rv)) - -def _git_capture(argv): - p = subprocess.Popen(argv, stdout=subprocess.PIPE, env=_gitenv()) - r = p.stdout.read() - _git_wait(repr(argv), p) - return r - -def git_config_get(option, repo_dir=None): - cmd = ('git', 'config', '--get', option) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - env=_gitenv(repo_dir=repo_dir)) - r = p.stdout.read() + raise GitError('%r returned %d' % (cmd, rv)) + +def _git_exo(cmd, **kwargs): + kwargs['check'] = False + result = exo(cmd, **kwargs) + _, _, proc = result + if proc.returncode != 0: + raise GitError('%r returned %d' % (cmd, proc.returncode)) + return result + +def git_config_get(option, repo_dir=None, opttype=None, cfg_file=None): + assert not (repo_dir and cfg_file), "repo_dir and cfg_file cannot both be used" + cmd = [b'git', b'config', b'--null'] + if cfg_file: + cmd.extend([b'--file', cfg_file]) + if opttype == 'int': + cmd.extend([b'--int']) + elif opttype == 'bool': + cmd.extend([b'--bool']) + else: + assert opttype is None + cmd.extend([b'--get', option]) + env=None + if repo_dir: + env = _gitenv(repo_dir=repo_dir) + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env, + close_fds=True) + # with --null, git writes out a trailing \0 after the value + r = p.stdout.read()[:-1] rc = 
p.wait()
     if rc == 0:
+        if opttype == 'int':
+            return int(r)
+        elif opttype == 'bool':
+            # git converts to 'true' or 'false'
+            return r == b'true'
         return r
     if rc != 1:
-        raise GitError('%s returned %d' % (cmd, rc))
+        raise GitError('%r returned %d' % (cmd, rc))
     return None


 def parse_tz_offset(s):
     """UTC offset in seconds."""
     tz_off = (int(s[1:3]) * 60 * 60) + (int(s[3:5]) * 60)
-    if s[0] == '-':
+    if bytes_from_byte(s[0]) == b'-':
         return - tz_off
     return tz_off

+def parse_commit_gpgsig(sig):
+    """Return the original signature bytes.
+
+    i.e. with the "gpgsig " header and the leading space character on
+    each continuation line removed.
+
+    """
+    if not sig:
+        return None
+    assert sig.startswith(b'gpgsig ')
+    sig = sig[7:]
+    return sig.replace(b'\n ', b'\n')

 # FIXME: derived from http://git.rsbx.net/Documents/Git_Data_Formats.txt
 # Make sure that's authoritative.
-_start_end_char = r'[^ .,:;<>"\'\0\n]'
-_content_char = r'[^\0\n<>]'
-_safe_str_rx = '(?:%s{1,2}|(?:%s%s*%s))' \
+
+# See also
+# https://github.com/git/git/blob/master/Documentation/technical/signature-format.txt
+# The continuation lines have only one leading space.
+
+_start_end_char = br'[^ .,:;<>"\'\0\n]'
+_content_char = br'[^\0\n<>]'
+_safe_str_rx = br'(?:%s{1,2}|(?:%s%s*%s))' \
     % (_start_end_char, _start_end_char, _content_char, _start_end_char)
-_tz_rx = r'[-+]\d\d[0-5]\d'
-_parent_rx = r'(?:parent [abcdefABCDEF0123456789]{40}\n)'
+_tz_rx = br'[-+]\d\d[0-5]\d'
+_parent_rx = br'(?:parent [abcdefABCDEF0123456789]{40}\n)'
 # Assumes every following line starting with a space is part of the
 # mergetag. Is there a formal commit blob spec?
-_mergetag_rx = r'(?:\nmergetag object [abcdefABCDEF0123456789]{40}(?:\n [^\0\n]*)*)'
-_commit_rx = re.compile(r'''tree (?P<tree>[abcdefABCDEF0123456789]{40})
+_mergetag_rx = br'(?:\nmergetag object [abcdefABCDEF0123456789]{40}(?:\n [^\0\n]*)*)'
+_commit_rx = re.compile(br'''tree (?P<tree>[abcdefABCDEF0123456789]{40})
 (?P<parents>%s*)author (?P<author_name>%s) <(?P<author_mail>%s)> (?P<asec>\d+) (?P<atz>%s)
 committer (?P<committer_name>%s) <(?P<committer_mail>%s)> (?P<csec>\d+) (?P<ctz>%s)(?P<mergetag>%s?)
-
+(?P<gpgsig>gpgsig .*\n(?: .*\n)*)?
 (?P<message>(?:.|\n)*)''' % (_parent_rx, _safe_str_rx, _safe_str_rx, _tz_rx,
                              _safe_str_rx, _safe_str_rx, _tz_rx, _mergetag_rx))
-_parent_hash_rx = re.compile(r'\s*parent ([abcdefABCDEF0123456789]{40})\s*')
+_parent_hash_rx = re.compile(br'\s*parent ([abcdefABCDEF0123456789]{40})\s*')

 # Note that the author_sec and committer_sec values are (UTC) epoch
 # seconds, and for now the mergetag is not included.
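The parse_commit_gpgsig() helper added above mirrors the commit-object encoding noted in git's signature-format document: a "gpgsig " header introduces the signature, and each continuation line carries exactly one leading space. A minimal standalone sketch of that round trip follows; the signature bytes are invented stand-ins, not a real PGP block:

    def parse_commit_gpgsig(sig):  # same body as the hunk above
        if not sig:
            return None
        assert sig.startswith(b'gpgsig ')
        sig = sig[7:]                      # drop the 'gpgsig ' header
        return sig.replace(b'\n ', b'\n')  # strip one-space continuation indents

    # Invented sample data, shaped like git's commit-object encoding.
    raw = (b'gpgsig -----BEGIN PGP SIGNATURE-----\n'
           b' \n'
           b' iQEcBAABAgAGBQJ...\n'
           b' -----END PGP SIGNATURE-----')
    assert parse_commit_gpgsig(raw) == (b'-----BEGIN PGP SIGNATURE-----\n'
                                        b'\n'
                                        b'iQEcBAABAgAGBQJ...\n'
                                        b'-----END PGP SIGNATURE-----')
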
@@ -103,6 +151,7 @@ CommitInfo = namedtuple('CommitInfo', ['tree', 'parents', 'author_sec', 'author_offset', 'committer_name', 'committer_mail', 'committer_sec', 'committer_offset', + 'gpgsig', 'message']) def parse_commit(content): @@ -120,6 +169,7 @@ def parse_commit(content): committer_mail=matches['committer_mail'], committer_sec=int(matches['csec']), committer_offset=parse_tz_offset(matches['ctz']), + gpgsig=parse_commit_gpgsig(matches['gpgsig']), message=matches['message']) @@ -127,65 +177,61 @@ def get_cat_data(cat_iterator, expected_type): _, kind, _ = next(cat_iterator) if kind != expected_type: raise Exception('expected %r, saw %r' % (expected_type, kind)) - return ''.join(cat_iterator) + return b''.join(cat_iterator) def get_commit_items(id, cp): - return parse_commit(get_cat_data(cp.get(id), 'commit')) + return parse_commit(get_cat_data(cp.get(id), b'commit')) def _local_git_date_str(epoch_sec): - return '%d %s' % (epoch_sec, utc_offset_str(epoch_sec)) + return b'%d %s' % (epoch_sec, utc_offset_str(epoch_sec)) def _git_date_str(epoch_sec, tz_offset_sec): offs = tz_offset_sec // 60 - return '%d %s%02d%02d' \ + return b'%d %s%02d%02d' \ % (epoch_sec, - '+' if offs >= 0 else '-', + b'+' if offs >= 0 else b'-', abs(offs) // 60, abs(offs) % 60) -def repo(sub = '', repo_dir=None): +def repo(sub = b'', repo_dir=None): """Get the path to the git repository or one of its subdirectories.""" repo_dir = repo_dir or repodir if not repo_dir: raise GitError('You should call check_repo_or_die()') # If there's a .git subdirectory, then the actual repo is in there. - gd = os.path.join(repo_dir, '.git') + gd = os.path.join(repo_dir, b'.git') if os.path.exists(gd): repo_dir = gd return os.path.join(repo_dir, sub) +_shorten_hash_rx = \ + re.compile(br'([^0-9a-z]|\b)([0-9a-z]{7})[0-9a-z]{33}([^0-9a-z]|\b)') + def shorten_hash(s): - return re.sub(r'([^0-9a-z]|\b)([0-9a-z]{7})[0-9a-z]{33}([^0-9a-z]|\b)', - r'\1\2*\3', s) + return _shorten_hash_rx.sub(br'\1\2*\3', s) def repo_rel(path): full = os.path.abspath(path) - fullrepo = os.path.abspath(repo('')) - if not fullrepo.endswith('/'): - fullrepo += '/' + fullrepo = os.path.abspath(repo(b'')) + if not fullrepo.endswith(b'/'): + fullrepo += b'/' if full.startswith(fullrepo): path = full[len(fullrepo):] - if path.startswith('index-cache/'): - path = path[len('index-cache/'):] + if path.startswith(b'index-cache/'): + path = path[len(b'index-cache/'):] return shorten_hash(path) -def all_packdirs(): - paths = [repo('objects/pack')] - paths += glob.glob(repo('index-cache/*/.')) - return paths - - def auto_midx(objdir): - args = [path.exe(), 'midx', '--auto', '--dir', objdir] + args = [path.exe(), b'midx', b'--auto', b'--dir', objdir] try: - rv = subprocess.call(args, stdout=open('/dev/null', 'w')) + rv = subprocess.call(args, stdout=open(os.devnull, 'w')) except OSError as e: # make sure 'args' gets printed to help with debugging add_error('%r: exception: %s' % (args, e)) @@ -193,9 +239,9 @@ def auto_midx(objdir): if rv: add_error('%r: returned %d' % (args, rv)) - args = [path.exe(), 'bloom', '--dir', objdir] + args = [path.exe(), b'bloom', b'--dir', objdir] try: - rv = subprocess.call(args, stdout=open('/dev/null', 'w')) + rv = subprocess.call(args, stdout=open(os.devnull, 'w')) except OSError as e: # make sure 'args' gets printed to help with debugging add_error('%r: exception: %s' % (args, e)) @@ -212,9 +258,9 @@ def mangle_name(name, mode, gitmode): """ if stat.S_ISREG(mode) and not stat.S_ISREG(gitmode): assert(stat.S_ISDIR(gitmode)) - return name + 
'.bup' - elif name.endswith('.bup') or name[:-1].endswith('.bup'): - return name + '.bupl' + return name + b'.bup' + elif name.endswith(b'.bup') or name[:-1].endswith(b'.bup'): + return name + b'.bupl' else: return name @@ -231,20 +277,19 @@ def demangle_name(name, mode): For more information on the name mangling algorithm, see mangle_name() """ - if name.endswith('.bupl'): + if name.endswith(b'.bupl'): return (name[:-5], BUP_NORMAL) - elif name.endswith('.bup'): + elif name.endswith(b'.bup'): return (name[:-4], BUP_CHUNKED) - elif name.endswith('.bupm'): + elif name.endswith(b'.bupm'): return (name[:-5], BUP_CHUNKED if stat.S_ISDIR(mode) else BUP_NORMAL) - else: - return (name, BUP_NORMAL) + return (name, BUP_NORMAL) def calc_hash(type, content): """Calculate some content's hash in the Git fashion.""" - header = '%s %d\0' % (type, len(content)) + header = b'%s %d\0' % (type, len(content)) sum = Sha1(header) sum.update(content) return sum.digest() @@ -254,7 +299,7 @@ def shalist_item_sort_key(ent): (mode, name, id) = ent assert(mode+0 == mode) if stat.S_ISDIR(mode): - return name + '/' + return name + b'/' else: return name @@ -268,19 +313,19 @@ def tree_encode(shalist): assert(mode+0 == mode) assert(name) assert(len(bin) == 20) - s = '%o %s\0%s' % (mode,name,bin) - assert(s[0] != '0') # 0-padded octal is not acceptable in a git tree + s = b'%o %s\0%s' % (mode,name,bin) + assert s[0] != b'0' # 0-padded octal is not acceptable in a git tree l.append(s) - return ''.join(l) + return b''.join(l) def tree_decode(buf): """Generate a list of (mode,name,hash) from the git tree object in buf.""" ofs = 0 while ofs < len(buf): - z = buf.find('\0', ofs) + z = buf.find(b'\0', ofs) assert(z > ofs) - spl = buf[ofs:z].split(' ', 1) + spl = buf[ofs:z].split(b' ', 1) assert(len(spl) == 2) mode,name = spl sha = buf[z+1:z+1+20] @@ -291,13 +336,13 @@ def tree_decode(buf): def _encode_packobj(type, content, compression_level=1): if compression_level not in (0, 1, 2, 3, 4, 5, 6, 7, 8, 9): raise ValueError('invalid compression level %s' % compression_level) - szout = '' + szout = b'' sz = len(content) szbits = (sz & 0x0f) | (_typemap[type]<<4) sz >>= 4 while 1: if sz: szbits |= 0x80 - szout += chr(szbits) + szout += bytes_from_uint(szbits) if not sz: break szbits = sz & 0x7f @@ -308,37 +353,16 @@ def _encode_packobj(type, content, compression_level=1): yield z.flush() -def _encode_looseobj(type, content, compression_level=1): - z = zlib.compressobj(compression_level) - yield z.compress('%s %d\0' % (type, len(content))) - yield z.compress(content) - yield z.flush() - - -def _decode_looseobj(buf): - assert(buf); - s = zlib.decompress(buf) - i = s.find('\0') - assert(i > 0) - l = s[:i].split(' ') - type = l[0] - sz = int(l[1]) - content = s[i+1:] - assert(type in _typemap) - assert(sz == len(content)) - return (type, content) - - def _decode_packobj(buf): assert(buf) - c = ord(buf[0]) + c = byte_int(buf[0]) type = _typermap[(c & 0x70) >> 4] sz = c & 0x0f shift = 4 i = 0 while c & 0x80: i += 1 - c = ord(buf[i]) + c = byte_int(buf[i]) sz |= (c & 0x7f) << shift shift += 7 if not (c & 0x80): @@ -363,21 +387,18 @@ class PackIdx: return want_source and os.path.basename(self.name) or True return None - def __len__(self): - return int(self.fanout[255]) - def _idx_from_hash(self, hash): global _total_searches, _total_steps _total_searches += 1 assert(len(hash) == 20) - b1 = ord(hash[0]) + b1 = byte_int(hash[0]) start = self.fanout[b1-1] # range -1..254 end = self.fanout[b1] # range 0..255 - want = str(hash) + want = hash 
_total_steps += 1 # lookup table is a step while start < end: _total_steps += 1 - mid = start + (end-start)/2 + mid = start + (end - start) // 2 v = self._idx_to_hash(mid) if v < want: start = mid+1 @@ -394,26 +415,47 @@ class PackIdxV1(PackIdx): self.name = filename self.idxnames = [self.name] self.map = mmap_read(f) - self.fanout = list(struct.unpack('!256I', buffer(self.map, 0, 256 * 4))) + # Min size for 'L' is 4, which is sufficient for struct's '!I' + self.fanout = array('L', struct.unpack('!256I', self.map)) self.fanout.append(0) # entry "-1" - nsha = self.fanout[255] - self.sha_ofs = 256*4 - self.shatable = buffer(self.map, self.sha_ofs, nsha*24) + self.nsha = self.fanout[255] + self.sha_ofs = 256 * 4 + # Avoid slicing shatable for individual hashes (very high overhead) + self.shatable = buffer(self.map, self.sha_ofs, self.nsha * 24) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + with pending_raise(value, rethrow=False): + self.close() + + def __len__(self): + return int(self.nsha) # int() from long for python 2 def _ofs_from_idx(self, idx): - ofs = idx * 24 - return struct.unpack('!I', self.shatable[ofs : ofs + 4])[0] + if idx >= self.nsha or idx < 0: + raise IndexError('invalid pack index index %d' % idx) + ofs = self.sha_ofs + idx * 24 + return struct.unpack_from('!I', self.map, offset=ofs)[0] def _idx_to_hash(self, idx): - ofs = idx * 24 + 4 - return self.shatable[ofs : ofs + 20] + if idx >= self.nsha or idx < 0: + raise IndexError('invalid pack index index %d' % idx) + ofs = self.sha_ofs + idx * 24 + 4 + return self.map[ofs : ofs + 20] def __iter__(self): - count = self.fanout[255] - start = 256 * 4 + 4 - for ofs in range(start, start + (24 * count), 24): + start = self.sha_ofs + 4 + for ofs in range(start, start + 24 * self.nsha, 24): yield self.map[ofs : ofs + 20] + def close(self): + if self.map is not None: + self.shatable = None + self.map.close() + self.map = None + class PackIdxV2(PackIdx): """Object representation of a Git pack index (version 2) file.""" @@ -422,40 +464,58 @@ class PackIdxV2(PackIdx): self.idxnames = [self.name] self.map = mmap_read(f) assert self.map[0:8] == b'\377tOc\0\0\0\2' - self.fanout = list(struct.unpack('!256I', - buffer(self.map[8 : 8 + 256 * 4]))) - self.fanout.append(0) # entry "-1" - nsha = self.fanout[255] + # Min size for 'L' is 4, which is sufficient for struct's '!I' + self.fanout = array('L', struct.unpack_from('!256I', self.map, offset=8)) + self.fanout.append(0) + self.nsha = self.fanout[255] self.sha_ofs = 8 + 256*4 - self.shatable = buffer(self.map, self.sha_ofs, nsha*20) - self.ofstable = buffer(self.map, - self.sha_ofs + nsha*20 + nsha*4, - nsha*4) - self.ofs64table = buffer(self.map, - 8 + 256*4 + nsha*20 + nsha*4 + nsha*4) + self.ofstable_ofs = self.sha_ofs + self.nsha * 20 + self.nsha * 4 + self.ofs64table_ofs = self.ofstable_ofs + self.nsha * 4 + # Avoid slicing this for individual hashes (very high overhead) + self.shatable = buffer(self.map, self.sha_ofs, self.nsha*20) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + with pending_raise(value, rethrow=False): + self.close() + + def __len__(self): + return int(self.nsha) # int() from long for python 2 def _ofs_from_idx(self, idx): - i = idx * 4 - ofs = struct.unpack('!I', self.ofstable[i : i + 4])[0] + if idx >= self.nsha or idx < 0: + raise IndexError('invalid pack index index %d' % idx) + ofs_ofs = self.ofstable_ofs + idx * 4 + ofs = struct.unpack_from('!I', self.map, offset=ofs_ofs)[0] if 
ofs & 0x80000000: idx64 = ofs & 0x7fffffff - idx64_i = idx64 * 8 - ofs = struct.unpack('!Q', self.ofs64table[idx64_i : idx64_i + 8])[0] + ofs64_ofs = self.ofs64table_ofs + idx64 * 8 + ofs = struct.unpack_from('!Q', self.map, offset=ofs64_ofs)[0] return ofs def _idx_to_hash(self, idx): - return self.shatable[idx * 20 : (idx + 1) * 20] + if idx >= self.nsha or idx < 0: + raise IndexError('invalid pack index index %d' % idx) + ofs = self.sha_ofs + idx * 20 + return self.map[ofs : ofs + 20] def __iter__(self): - count = self.fanout[255] - start = 8 + 256 * 4 - for ofs in range(start, start + (20 * count), 20): + start = self.sha_ofs + for ofs in range(start, start + 20 * self.nsha, 20): yield self.map[ofs : ofs + 20] + def close(self): + if self.map is not None: + self.shatable = None + self.map.close() + self.map = None + _mpi_count = 0 class PackIdxList: - def __init__(self, dir): + def __init__(self, dir, ignore_midx=False): global _mpi_count assert(_mpi_count == 0) # these things suck tons of VM; don't waste it _mpi_count += 1 @@ -464,6 +524,7 @@ class PackIdxList: self.packs = [] self.do_bloom = False self.bloom = None + self.ignore_midx = ignore_midx self.refresh() def __del__(self): @@ -489,7 +550,7 @@ class PackIdxList: else: _total_searches -= 1 # was counted by bloom return None - for i in xrange(len(self.packs)): + for i in range(len(self.packs)): p = self.packs[i] _total_searches -= 1 # will be incremented by sub-pack ix = p.exists(hash, want_source=want_source) @@ -509,30 +570,44 @@ class PackIdxList: If skip_midx is True, all work on .midx files will be skipped and .midx files will be removed from the list. - The module-global variable 'ignore_midx' can force this function to + The instance variable 'ignore_midx' can force this function to always act as if skip_midx was True. 
""" + if self.bloom is not None: + self.bloom.close() self.bloom = None # Always reopen the bloom as it may have been relaced self.do_bloom = False - skip_midx = skip_midx or ignore_midx + skip_midx = skip_midx or self.ignore_midx d = dict((p.name, p) for p in self.packs if not skip_midx or not isinstance(p, midx.PackMidx)) if os.path.exists(self.dir): if not skip_midx: midxl = [] + midxes = set(glob.glob(os.path.join(self.dir, b'*.midx'))) + # remove any *.midx files from our list that no longer exist + for ix in list(d.values()): + if not isinstance(ix, midx.PackMidx): + continue + if ix.name in midxes: + continue + # remove the midx + del d[ix.name] + ix.close() + self.packs.remove(ix) for ix in self.packs: if isinstance(ix, midx.PackMidx): for name in ix.idxnames: d[os.path.join(self.dir, name)] = ix - for full in glob.glob(os.path.join(self.dir,'*.midx')): + for full in midxes: if not d.get(full): mx = midx.PackMidx(full) (mxd, mxf) = os.path.split(mx.name) broken = False for n in mx.idxnames: if not os.path.exists(os.path.join(mxd, n)): - log(('warning: index %s missing\n' + - ' used by %s\n') % (n, mxf)) + log(('warning: index %s missing\n' + ' used by %s\n') + % (path_msg(n), path_msg(mxf))) broken = True if broken: mx.close() @@ -556,10 +631,10 @@ class PackIdxList: d[os.path.join(self.dir, name)] = ix elif not ix.force_keep: debug1('midx: removing redundant: %s\n' - % os.path.basename(ix.name)) + % path_msg(os.path.basename(ix.name))) ix.close() unlink(ix.name) - for full in glob.glob(os.path.join(self.dir,'*.idx')): + for full in glob.glob(os.path.join(self.dir, b'*.idx')): if not d.get(full): try: ix = open_idx(full) @@ -567,7 +642,7 @@ class PackIdxList: add_error(e) continue d[full] = ix - bfull = os.path.join(self.dir, 'bup.bloom') + bfull = os.path.join(self.dir, b'bup.bloom') if self.bloom is None and os.path.exists(bfull): self.bloom = bloom.ShaBloom(bfull) self.packs = list(set(d.values())) @@ -585,21 +660,22 @@ class PackIdxList: def open_idx(filename): - if filename.endswith('.idx'): + if filename.endswith(b'.idx'): f = open(filename, 'rb') header = f.read(8) - if header[0:4] == '\377tOc': + if header[0:4] == b'\377tOc': version = struct.unpack('!I', header[4:8])[0] if version == 2: return PackIdxV2(filename, f) else: raise GitError('%s: expected idx file version 2, got %d' - % (filename, version)) - elif len(header) == 8 and header[0:4] < '\377tOc': + % (path_msg(filename), version)) + elif len(header) == 8 and header[0:4] < b'\377tOc': return PackIdxV1(filename, f) else: - raise GitError('%s: unrecognized idx file header' % filename) - elif filename.endswith('.midx'): + raise GitError('%s: unrecognized idx file header' + % path_msg(filename)) + elif filename.endswith(b'.midx'): return midx.PackMidx(filename) else: raise GitError('idx filenames must end with .idx or .midx') @@ -617,8 +693,30 @@ def idxmerge(idxlist, final_progress=True): return merge_iter(idxlist, 10024, pfunc, pfinal) +def create_commit_blob(tree, parent, + author, adate_sec, adate_tz, + committer, cdate_sec, cdate_tz, + msg): + if adate_tz is not None: + adate_str = _git_date_str(adate_sec, adate_tz) + else: + adate_str = _local_git_date_str(adate_sec) + if cdate_tz is not None: + cdate_str = _git_date_str(cdate_sec, cdate_tz) + else: + cdate_str = _local_git_date_str(cdate_sec) + l = [] + if tree: l.append(b'tree %s' % hexlify(tree)) + if parent: l.append(b'parent %s' % hexlify(parent)) + if author: l.append(b'author %s %s' % (author, adate_str)) + if committer: l.append(b'committer %s %s' % 
(committer, cdate_str)) + l.append(b'') + l.append(msg) + return b'\n'.join(l) + + def _make_objcache(): - return PackIdxList(repo('objects/pack')) + return PackIdxList(repo(b'objects/pack')) # bup-gc assumes that it can disable all PackWriter activities # (bloom/midx/cache) via the constructor and close() arguments. @@ -641,10 +739,9 @@ class PackWriter: self.run_midx=run_midx self.on_pack_finish = on_pack_finish if not max_pack_size: - max_pack_size = git_config_get('pack.packSizeLimit', - repo_dir=self.repo_dir) - if max_pack_size is not None: - max_pack_size = parse_num(max_pack_size) + max_pack_size = git_config_get(b'pack.packSizeLimit', + repo_dir=self.repo_dir, + opttype='int') if not max_pack_size: # larger packs slow down pruning max_pack_size = 1000 * 1000 * 1000 @@ -653,19 +750,17 @@ class PackWriter: self.max_pack_objects = max_pack_objects if max_pack_objects \ else max(1, self.max_pack_size // 5000) - def __del__(self): - self.close() - def __enter__(self): return self def __exit__(self, type, value, traceback): - self.close() + with pending_raise(value, rethrow=False): + self.close() def _open(self): if not self.file: - objdir = dir = os.path.join(self.repo_dir, 'objects') - fd, name = tempfile.mkstemp(suffix='.pack', dir=objdir) + objdir = dir = os.path.join(self.repo_dir, b'objects') + fd, name = tempfile.mkstemp(suffix=b'.pack', dir=objdir) try: self.file = os.fdopen(fd, 'w+b') except: @@ -678,10 +773,10 @@ class PackWriter: self.file = None f.close() raise - assert(name.endswith('.pack')) + assert name.endswith(b'.pack') self.filename = name[:-5] - self.file.write('PACK\0\0\0\2\0\0\0\0') - self.idx = list(list() for i in xrange(256)) + self.file.write(b'PACK\0\0\0\2\0\0\0\0') + self.idx = PackIdxV2Writer() def _raw_write(self, datalist, sha): self._open() @@ -691,11 +786,11 @@ class PackWriter: # all-or-nothing. (The blob shouldn't be very big anyway, thanks # to our hashsplit algorithm.) f.write() does its own buffering, # but that's okay because we'll flush it in _end(). - oneblob = ''.join(datalist) + oneblob = b''.join(datalist) try: f.write(oneblob) except IOError as e: - raise GitError, e, sys.exc_info()[2] + reraise(GitError(e)) nw = len(oneblob) crc = zlib.crc32(oneblob) & 0xffffffff self._update_idx(sha, crc, nw) @@ -706,7 +801,7 @@ class PackWriter: def _update_idx(self, sha, crc, size): assert(sha) if self.idx: - self.idx[ord(sha[0])].append((sha, crc, self.file.tell() - size)) + self.idx.add(sha, crc, self.file.tell() - size) def _write(self, sha, type, content): if verbose: @@ -721,12 +816,6 @@ class PackWriter: self.breakpoint() return sha - def breakpoint(self): - """Clear byte and object counts and return the last processed id.""" - id = self._end(self.run_midx) - self.outbytes = self.count = 0 - return id - def _require_objcache(self): if self.objcache is None and self.objcache_maker: self.objcache = self.objcache_maker() @@ -756,12 +845,12 @@ class PackWriter: def new_blob(self, blob): """Create a blob object in the pack with the supplied content.""" - return self.maybe_write('blob', blob) + return self.maybe_write(b'blob', blob) def new_tree(self, shalist): """Create a tree object in the pack.""" content = tree_encode(shalist) - return self.maybe_write('tree', content) + return self.maybe_write(b'tree', content) def new_commit(self, tree, parent, author, adate_sec, adate_tz, @@ -769,53 +858,32 @@ class PackWriter: msg): """Create a commit object in the pack. 
The date_sec values must be epoch-seconds, and if a tz is None, the local timezone is assumed.""" - if adate_tz: - adate_str = _git_date_str(adate_sec, adate_tz) - else: - adate_str = _local_git_date_str(adate_sec) - if cdate_tz: - cdate_str = _git_date_str(cdate_sec, cdate_tz) - else: - cdate_str = _local_git_date_str(cdate_sec) - l = [] - if tree: l.append('tree %s' % tree.encode('hex')) - if parent: l.append('parent %s' % parent.encode('hex')) - if author: l.append('author %s %s' % (author, adate_str)) - if committer: l.append('committer %s %s' % (committer, cdate_str)) - l.append('') - l.append(msg) - return self.maybe_write('commit', '\n'.join(l)) + content = create_commit_blob(tree, parent, + author, adate_sec, adate_tz, + committer, cdate_sec, cdate_tz, + msg) + return self.maybe_write(b'commit', content) + + def _end(self, run_midx=True, abort=False): + # Ignores run_midx during abort + if not self.file: + return None + self.file, f = None, self.file + self.idx, idx = None, self.idx + self.parentfd, pfd, = None, self.parentfd + self.objcache = None - def abort(self): - """Remove the pack file from disk.""" - f = self.file - if f: - pfd = self.parentfd - self.file = None - self.parentfd = None - self.idx = None - try: - try: - os.unlink(self.filename + '.pack') - finally: - f.close() - finally: - if pfd is not None: - os.close(pfd) + with finalized(pfd, lambda x: x is not None and os.close(x)), \ + f: - def _end(self, run_midx=True): - f = self.file - if not f: return None - self.file = None - try: - self.objcache = None - idx = self.idx - self.idx = None + if abort: + os.unlink(self.filename + b'.pack') + return None # update object count f.seek(8) cp = struct.pack('!i', self.count) - assert(len(cp) == 4) + assert len(cp) == 4 f.write(cp) # calculate the pack sha1sum @@ -825,37 +893,52 @@ class PackWriter: sum.update(b) packbin = sum.digest() f.write(packbin) + f.flush() fdatasync(f.fileno()) - finally: f.close() - obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin) - nameprefix = os.path.join(self.repo_dir, - 'objects/pack/pack-' + obj_list_sha) - if os.path.exists(self.filename + '.map'): - os.unlink(self.filename + '.map') - os.rename(self.filename + '.pack', nameprefix + '.pack') - os.rename(self.filename + '.idx', nameprefix + '.idx') - try: - os.fsync(self.parentfd) - finally: - os.close(self.parentfd) - - if run_midx: - auto_midx(os.path.join(self.repo_dir, 'objects/pack')) + idx.write(self.filename + b'.idx', packbin) + nameprefix = os.path.join(self.repo_dir, + b'objects/pack/pack-' + hexlify(packbin)) + if os.path.exists(self.filename + b'.map'): + os.unlink(self.filename + b'.map') + os.rename(self.filename + b'.pack', nameprefix + b'.pack') + os.rename(self.filename + b'.idx', nameprefix + b'.idx') + os.fsync(pfd) + if run_midx: + auto_midx(os.path.join(self.repo_dir, b'objects/pack')) + if self.on_pack_finish: + self.on_pack_finish(nameprefix) + return nameprefix - if self.on_pack_finish: - self.on_pack_finish(nameprefix) + def abort(self): + """Remove the pack file from disk.""" + self._end(abort=True) - return nameprefix + def breakpoint(self): + """Clear byte and object counts and return the last processed id.""" + id = self._end(self.run_midx) + self.outbytes = self.count = 0 + return id def close(self, run_midx=True): """Close the pack file and move it to its definitive path.""" return self._end(run_midx=run_midx) - def _write_pack_idx_v2(self, filename, idx, packbin): + +class PackIdxV2Writer: + def __init__(self): + self.idx = 
list(list() for i in range(256)) + self.count = 0 + + def add(self, sha, crc, offs): + assert(sha) + self.count += 1 + self.idx[byte_int(sha[0])].append((sha, crc, offs)) + + def write(self, filename, packbin): ofs64_count = 0 - for section in idx: + for section in self.idx: for entry in section: if entry[2] >= 2**31: ofs64_count += 1 @@ -869,7 +952,8 @@ class PackWriter: fdatasync(idx_f.fileno()) idx_map = mmap_readwrite(idx_f, close=False) try: - count = _helpers.write_idx(filename, idx_map, idx, self.count) + count = _helpers.write_idx(filename, idx_map, self.idx, + self.count) assert(count == self.count) idx_map.flush() finally: @@ -885,17 +969,13 @@ class PackWriter: b = idx_f.read(8 + 4*256) idx_sum.update(b) - obj_list_sum = Sha1() - for b in chunkyreader(idx_f, 20*self.count): + for b in chunkyreader(idx_f, 20 * self.count): idx_sum.update(b) - obj_list_sum.update(b) - namebase = obj_list_sum.hexdigest() for b in chunkyreader(idx_f): idx_sum.update(b) idx_f.write(idx_sum.digest()) fdatasync(idx_f.fileno()) - return namebase finally: idx_f.close() @@ -909,23 +989,24 @@ def list_refs(patterns=None, repo_dir=None, limits are specified, items from both sources will be included. """ - argv = ['git', 'show-ref'] + argv = [b'git', b'show-ref'] if limit_to_heads: - argv.append('--heads') + argv.append(b'--heads') if limit_to_tags: - argv.append('--tags') - argv.append('--') + argv.append(b'--tags') + argv.append(b'--') if patterns: argv.extend(patterns) - p = subprocess.Popen(argv, env=_gitenv(repo_dir), stdout=subprocess.PIPE) + p = subprocess.Popen(argv, env=_gitenv(repo_dir), stdout=subprocess.PIPE, + close_fds=True) out = p.stdout.read().strip() rv = p.wait() # not fatal if rv: assert(not out) if out: - for d in out.split('\n'): - (sha, name) = d.split(' ', 1) - yield (name, sha.decode('hex')) + for d in out.split(b'\n'): + sha, name = d.split(b' ', 1) + yield name, unhexlify(sha) def read_ref(refname, repo_dir = None): @@ -939,27 +1020,23 @@ def read_ref(refname, repo_dir = None): return None -def rev_list_invocation(ref_or_refs, count=None, format=None): - if isinstance(ref_or_refs, compat.str_type): +def rev_list_invocation(ref_or_refs, format=None): + if isinstance(ref_or_refs, bytes): refs = (ref_or_refs,) else: refs = ref_or_refs - argv = ['git', 'rev-list'] - if isinstance(count, Integral): - argv.extend(['-n', str(count)]) - elif count: - raise ValueError('unexpected count argument %r' % count) + argv = [b'git', b'rev-list'] if format: - argv.append('--pretty=format:' + format) + argv.append(b'--pretty=format:' + format) for ref in refs: - assert not ref.startswith('-') + assert not ref.startswith(b'-') argv.append(ref) - argv.append('--') + argv.append(b'--') return argv -def rev_list(ref_or_refs, count=None, parse=None, format=None, repo_dir=None): +def rev_list(ref_or_refs, parse=None, format=None, repo_dir=None): """Yield information about commits as per "git rev-list". If a format is not provided, yield one hex hash at a time. 
If a format is provided, pass it to rev-list and call parse(git_stdout) for each @@ -969,10 +1046,11 @@ def rev_list(ref_or_refs, count=None, parse=None, format=None, repo_dir=None): """ assert bool(parse) == bool(format) - p = subprocess.Popen(rev_list_invocation(ref_or_refs, count=count, + p = subprocess.Popen(rev_list_invocation(ref_or_refs, format=format), env=_gitenv(repo_dir), - stdout = subprocess.PIPE) + stdout = subprocess.PIPE, + close_fds=True) if not format: for line in p.stdout: yield line.strip() @@ -980,8 +1058,8 @@ def rev_list(ref_or_refs, count=None, parse=None, format=None, repo_dir=None): line = p.stdout.readline() while line: s = line.strip() - if not s.startswith('commit '): - raise Exception('unexpected line ' + s) + if not s.startswith(b'commit '): + raise Exception('unexpected line ' + repr(s)) s = s[7:] assert len(s) == 40 yield s, parse(p.stdout) @@ -989,18 +1067,7 @@ def rev_list(ref_or_refs, count=None, parse=None, format=None, repo_dir=None): rv = p.wait() # not fatal if rv: - raise GitError, 'git rev-list returned error %d' % rv - - -def get_commit_dates(refs, repo_dir=None): - """Get the dates for the specified commit refs. For now, every unique - string in refs must resolve to a different commit or this - function will fail.""" - result = [] - for ref in refs: - commit = get_commit_items(ref, cp(repo_dir)) - result.append(commit.author_sec) - return result + raise GitError('git rev-list returned error %d' % rv) def rev_parse(committish, repo_dir=None): @@ -1013,14 +1080,14 @@ def rev_parse(committish, repo_dir=None): """ head = read_ref(committish, repo_dir=repo_dir) if head: - debug2("resolved from ref: commit = %s\n" % head.encode('hex')) + debug2("resolved from ref: commit = %s\n" % hexlify(head)) return head - pL = PackIdxList(repo('objects/pack', repo_dir=repo_dir)) + pL = PackIdxList(repo(b'objects/pack', repo_dir=repo_dir)) if len(committish) == 40: try: - hash = committish.decode('hex') + hash = unhexlify(committish) except TypeError: return None @@ -1033,21 +1100,23 @@ def rev_parse(committish, repo_dir=None): def update_ref(refname, newval, oldval, repo_dir=None): """Update a repository reference.""" if not oldval: - oldval = '' - assert(refname.startswith('refs/heads/') \ - or refname.startswith('refs/tags/')) - p = subprocess.Popen(['git', 'update-ref', refname, - newval.encode('hex'), oldval.encode('hex')], - env=_gitenv(repo_dir)) - _git_wait('git update-ref', p) + oldval = b'' + assert refname.startswith(b'refs/heads/') \ + or refname.startswith(b'refs/tags/') + p = subprocess.Popen([b'git', b'update-ref', refname, + hexlify(newval), hexlify(oldval)], + env=_gitenv(repo_dir), + close_fds=True) + _git_wait(b'git update-ref', p) def delete_ref(refname, oldvalue=None): """Delete a repository reference (see git update-ref(1)).""" - assert(refname.startswith('refs/')) + assert refname.startswith(b'refs/') oldvalue = [] if not oldvalue else [oldvalue] - p = subprocess.Popen(['git', 'update-ref', '-d', refname] + oldvalue, - env=_gitenv()) + p = subprocess.Popen([b'git', b'update-ref', b'-d', refname] + oldvalue, + env=_gitenv(), + close_fds=True) _git_wait('git update-ref', p) @@ -1062,9 +1131,9 @@ def guess_repo(path=None): if path: repodir = path if not repodir: - repodir = os.environ.get('BUP_DIR') + repodir = environ.get(b'BUP_DIR') if not repodir: - repodir = os.path.expanduser('~/.bup') + repodir = os.path.expanduser(b'~/.bup') def init_repo(path=None): @@ -1073,20 +1142,22 @@ def init_repo(path=None): d = repo() # appends a / to the path 
parent = os.path.dirname(os.path.dirname(d)) if parent and not os.path.exists(parent): - raise GitError('parent directory "%s" does not exist\n' % parent) - if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')): - raise GitError('"%s" exists but is not a directory\n' % d) - p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr, - env=_gitenv()) + raise GitError('parent directory "%s" does not exist\n' + % path_msg(parent)) + if os.path.exists(d) and not os.path.isdir(os.path.join(d, b'.')): + raise GitError('"%s" exists but is not a directory\n' % path_msg(d)) + p = subprocess.Popen([b'git', b'--bare', b'init'], stdout=sys.stderr, + env=_gitenv(), + close_fds=True) _git_wait('git init', p) # Force the index version configuration in order to ensure bup works # regardless of the version of the installed Git binary. - p = subprocess.Popen(['git', 'config', 'pack.indexVersion', '2'], - stdout=sys.stderr, env=_gitenv()) + p = subprocess.Popen([b'git', b'config', b'pack.indexVersion', '2'], + stdout=sys.stderr, env=_gitenv(), close_fds=True) _git_wait('git config', p) # Enable the reflog - p = subprocess.Popen(['git', 'config', 'core.logAllRefUpdates', 'true'], - stdout=sys.stderr, env=_gitenv()) + p = subprocess.Popen([b'git', b'config', b'core.logAllRefUpdates', b'true'], + stdout=sys.stderr, env=_gitenv(), close_fds=True) _git_wait('git config', p) @@ -1094,7 +1165,7 @@ def check_repo_or_die(path=None): """Check to see if a bup repository probably exists, and abort if not.""" guess_repo(path) top = repo() - pst = stat_if_exists(top + '/objects/pack') + pst = stat_if_exists(top + b'/objects/pack') if pst and stat.S_ISDIR(pst.st_mode): return if not pst: @@ -1103,35 +1174,58 @@ def check_repo_or_die(path=None): log('error: repository %r does not exist (see "bup help init")\n' % top) sys.exit(15) - log('error: %r is not a repository\n' % top) + log('error: %s is not a repository\n' % path_msg(top)) sys.exit(14) -_ver = None -def ver(): - """Get Git's version and ensure a usable version is installed. - - The returned version is formatted as an ordered tuple with each position - representing a digit in the version tag. For example, the following tuple - would represent version 1.6.6.9: +def is_suitable_git(ver_str): + if not ver_str.startswith(b'git version '): + return 'unrecognized' + ver_str = ver_str[len(b'git version '):] + if ver_str.startswith(b'0.'): + return 'insufficient' + if ver_str.startswith(b'1.'): + if re.match(br'1\.[012345]rc', ver_str): + return 'insufficient' + if re.match(br'1\.[01234]\.', ver_str): + return 'insufficient' + if re.match(br'1\.5\.[012345]($|\.)', ver_str): + return 'insufficient' + if re.match(br'1\.5\.6-rc', ver_str): + return 'insufficient' + return 'suitable' + if re.match(br'[0-9]+(\.|$)?', ver_str): + return 'suitable' + sys.exit(13) + +_git_great = None + +def require_suitable_git(ver_str=None): + """Raise GitError if the version of git isn't suitable. + + Rely on ver_str when provided, rather than invoking the git in the + path. 
- ('1', '6', '6', '9') """ - global _ver - if not _ver: - p = subprocess.Popen(['git', '--version'], - stdout=subprocess.PIPE) - gvs = p.stdout.read() - _git_wait('git --version', p) - m = re.match(r'git version (\S+.\S+)', gvs) - if not m: - raise GitError('git --version weird output: %r' % gvs) - _ver = tuple(m.group(1).split('.')) - needed = ('1','5', '3', '1') - if _ver < needed: - raise GitError('git version %s or higher is required; you have %s' - % ('.'.join(needed), '.'.join(_ver))) - return _ver + global _git_great + if _git_great is not None: + return + if environ.get(b'BUP_GIT_VERSION_IS_FINE', b'').lower() \ + in (b'yes', b'true', b'1'): + _git_great = True + return + if not ver_str: + ver_str, _, _ = _git_exo([b'git', b'--version']) + status = is_suitable_git(ver_str) + if status == 'unrecognized': + raise GitError('Unexpected git --version output: %r' % ver_str) + if status == 'insufficient': + log('error: git version must be at least 1.5.6\n') + sys.exit(1) + if status == 'suitable': + _git_great = True + return + assert False class _AbortableIter: @@ -1143,7 +1237,7 @@ class _AbortableIter: def __iter__(self): return self - def next(self): + def __next__(self): try: return next(self.it) except StopIteration as e: @@ -1153,6 +1247,8 @@ class _AbortableIter: self.abort() raise + next = __next__ + def abort(self): """Abort iteration and call the abortion callback, if needed.""" if not self.done: @@ -1164,28 +1260,28 @@ class _AbortableIter: self.abort() -_ver_warned = 0 class CatPipe: """Link to 'git cat-file' that is used to retrieve blob data.""" def __init__(self, repo_dir = None): - global _ver_warned + require_suitable_git() self.repo_dir = repo_dir - wanted = ('1','5','6') - if ver() < wanted: - log('error: git version must be at least 1.5.6\n') - sys.exit(1) self.p = self.inprogress = None - def _abort(self): - if self.p: - self.p.stdout.close() - self.p.stdin.close() + def close(self, wait=False): + p = self.p + if p: + p.stdout.close() + p.stdin.close() self.p = None self.inprogress = None + if wait: + p.wait() + return p.returncode + return None def restart(self): - self._abort() - self.p = subprocess.Popen(['git', 'cat-file', '--batch'], + self.close() + self.p = subprocess.Popen([b'git', b'cat-file', b'--batch'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds = True, @@ -1205,30 +1301,33 @@ class CatPipe: if self.inprogress: log('get: opening %r while %r is open\n' % (ref, self.inprogress)) assert(not self.inprogress) - assert(ref.find('\n') < 0) - assert(ref.find('\r') < 0) - assert(not ref.startswith('-')) + assert ref.find(b'\n') < 0 + assert ref.find(b'\r') < 0 + assert not ref.startswith(b'-') self.inprogress = ref - self.p.stdin.write('%s\n' % ref) + self.p.stdin.write(ref + b'\n') self.p.stdin.flush() hdr = self.p.stdout.readline() - if hdr.endswith(' missing\n'): + if not hdr: + raise GitError('unexpected cat-file EOF (last request: %r, exit: %s)' + % (ref, self.p.poll() or 'none')) + if hdr.endswith(b' missing\n'): self.inprogress = None yield None, None, None return - info = hdr.split(' ') + info = hdr.split(b' ') if len(info) != 3 or len(info[0]) != 40: raise GitError('expected object (id, type, size), got %r' % info) oidx, typ, size = info size = int(size) it = _AbortableIter(chunkyreader(self.p.stdout, size), - onabort=self._abort) + onabort=self.close) try: yield oidx, typ, size for blob in it: yield blob readline_result = self.p.stdout.readline() - assert(readline_result == '\n') + assert readline_result == b'\n' self.inprogress = None 
except Exception as e: it.abort() @@ -1236,17 +1335,17 @@ class CatPipe: def _join(self, it): _, typ, _ = next(it) - if typ == 'blob': + if typ == b'blob': for blob in it: yield blob - elif typ == 'tree': - treefile = ''.join(it) + elif typ == b'tree': + treefile = b''.join(it) for (mode, name, sha) in tree_decode(treefile): - for blob in self.join(sha.encode('hex')): + for blob in self.join(hexlify(sha)): yield blob - elif typ == 'commit': - treeline = ''.join(it).split('\n')[0] - assert(treeline.startswith('tree ')) + elif typ == b'commit': + treeline = b''.join(it).split(b'\n')[0] + assert treeline.startswith(b'tree ') for blob in self.join(treeline[5:]): yield blob else: @@ -1259,11 +1358,8 @@ class CatPipe: or a commit. The content of all blobs that can be seen from trees or commits will be added to the list. """ - try: - for d in self._join(self.get(id)): - yield d - except StopIteration: - log('booger!\n') + for d in self._join(self.get(id)): + yield d _cp = {} @@ -1281,11 +1377,18 @@ def cp(repo_dir=None): return cp +def close_catpipes(): + # FIXME: chain exceptions + while _cp: + _, cp = _cp.popitem() + cp.close(wait=True) + + def tags(repo_dir = None): """Return a dictionary of all tags in the form {hash: [tag_names, ...]}.""" tags = {} for n, c in list_refs(repo_dir = repo_dir, limit_to_tags=True): - assert(n.startswith('refs/tags/')) + assert n.startswith(b'refs/tags/') name = n[10:] if not c in tags: tags[c] = [] @@ -1296,7 +1399,7 @@ def tags(repo_dir = None): class MissingObject(KeyError): def __init__(self, oid): self.oid = oid - KeyError.__init__(self, 'object %r is missing' % oid.encode('hex')) + KeyError.__init__(self, 'object %r is missing' % hexlify(oid)) WalkItem = namedtuple('WalkItem', ['oid', 'type', 'mode', @@ -1325,7 +1428,7 @@ def walk_object(get_ref, oidx, stop_at=None, include_data=None): pending = [(oidx, [], [], None)] while len(pending): oidx, parent_path, chunk_path, mode = pending.pop() - oid = oidx.decode('hex') + oid = unhexlify(oidx) if stop_at and stop_at(oidx): continue @@ -1333,7 +1436,7 @@ def walk_object(get_ref, oidx, stop_at=None, include_data=None): # If the object is a "regular file", then it's a leaf in # the graph, so we can skip reading the data if the caller # hasn't requested it. 
- yield WalkItem(oid=oid, type='blob', + yield WalkItem(oid=oid, type=b'blob', chunk_path=chunk_path, path=parent_path, mode=mode, data=None) @@ -1342,31 +1445,31 @@ def walk_object(get_ref, oidx, stop_at=None, include_data=None): item_it = get_ref(oidx) get_oidx, typ, _ = next(item_it) if not get_oidx: - raise MissingObject(oidx.decode('hex')) - if typ not in ('blob', 'commit', 'tree'): + raise MissingObject(unhexlify(oidx)) + if typ not in (b'blob', b'commit', b'tree'): raise Exception('unexpected repository object type %r' % typ) # FIXME: set the mode based on the type when the mode is None - if typ == 'blob' and not include_data: + if typ == b'blob' and not include_data: # Dump data until we can ask cat_pipe not to fetch it for ignored in item_it: pass data = None else: - data = ''.join(item_it) + data = b''.join(item_it) yield WalkItem(oid=oid, type=typ, chunk_path=chunk_path, path=parent_path, mode=mode, data=(data if include_data else None)) - if typ == 'commit': + if typ == b'commit': commit_items = parse_commit(data) for pid in commit_items.parents: pending.append((pid, parent_path, chunk_path, mode)) pending.append((commit_items.tree, parent_path, chunk_path, hashsplit.GIT_MODE_TREE)) - elif typ == 'tree': + elif typ == b'tree': for mode, name, ent_id in tree_decode(data): demangled, bup_type = demangle_name(name, mode) if chunk_path: @@ -1375,8 +1478,8 @@ def walk_object(get_ref, oidx, stop_at=None, include_data=None): else: sub_path = parent_path + [name] if bup_type == BUP_CHUNKED: - sub_chunk_path = [''] + sub_chunk_path = [b''] else: sub_chunk_path = chunk_path - pending.append((ent_id.encode('hex'), sub_path, sub_chunk_path, + pending.append((hexlify(ent_id), sub_path, sub_chunk_path, mode))
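
Taken together, these hunks move the module's public surface to bytes throughout: refs, config options, object types, and pack paths are byte strings, with hexlify()/unhexlify() replacing the removed encode('hex')/decode('hex') idioms. A rough end-to-end sketch of the ported API as changed here; the repository path and branch name are invented, and error handling is omitted:

    from binascii import hexlify

    from bup import git

    git.check_repo_or_die(b'/srv/bup-repo')     # invented repository location
    cp = git.cp()                               # cached CatPipe for that repo
    oid = git.rev_parse(b'refs/heads/main')     # invented branch; bytes or None
    if oid:
        commit = git.get_commit_items(hexlify(oid), cp)
        print('author epoch:', commit.author_sec,
              'signed:', commit.gpgsig is not None)
        # The commit's tree id is 40 hex bytes; list its entries.
        tree_data = git.get_cat_data(cp.get(commit.tree), b'tree')
        for mode, name, sha in git.tree_decode(tree_data):
            print(mode, git.demangle_name(name, mode)[0], hexlify(sha))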