1 """Git interaction library.
2 bup repositories are in Git format. This library allows us to
3 interact with the Git data structures.
5 import os, sys, zlib, time, subprocess, struct, stat, re, tempfile, glob
6 from bup.helpers import *
7 from bup import _helpers, path, midx, bloom
9 SEEK_END=2 # os.SEEK_END is not defined in python 2.4
13 home_repodir = os.path.expanduser('~/.bup')
16 _typemap = { 'blob':3, 'tree':2, 'commit':1, 'tag':4 }
17 _typermap = { 3:'blob', 2:'tree', 1:'commit', 4:'tag' }
23 class GitError(Exception):
28 """Get the path to the git repository or one of its subdirectories."""
31 raise GitError('You should call check_repo_or_die()')
33 # If there's a .git subdirectory, then the actual repo is in there.
34 gd = os.path.join(repodir, '.git')
35 if os.path.exists(gd):
38 return os.path.join(repodir, sub)
42 return re.sub(r'([^0-9a-z]|\b)([0-9a-z]{7})[0-9a-z]{33}([^0-9a-z]|\b)',
47 full = os.path.abspath(path)
48 fullrepo = os.path.abspath(repo(''))
49 if not fullrepo.endswith('/'):
51 if full.startswith(fullrepo):
52 path = full[len(fullrepo):]
53 if path.startswith('index-cache/'):
54 path = path[len('index-cache/'):]
55 return shorten_hash(path)
59 paths = [repo('objects/pack')]
60 paths += glob.glob(repo('index-cache/*/.'))
# Refresh the .midx and bloom indexes for a pack directory by shelling out
# to 'bup midx --auto' and 'bup bloom'.  Failures are recorded with
# add_error() so index maintenance stays best-effort for the caller.
# NOTE(review): this excerpt is missing the try/except and 'if rv' lines
# that bind 'e' and test 'rv' — confirm against the full file.
64 def auto_midx(objdir):
65 args = [path.exe(), 'midx', '--auto', '--dir', objdir]
67 rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
69 # make sure 'args' gets printed to help with debugging
70 add_error('%r: exception: %s' % (args, e))
73 add_error('%r: returned %d' % (args, rv))
75 args = [path.exe(), 'bloom', '--dir', objdir]
77 rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
79 # make sure 'args' gets printed to help with debugging
80 add_error('%r: exception: %s' % (args, e))
83 add_error('%r: returned %d' % (args, rv))
86 def mangle_name(name, mode, gitmode):
87 """Mangle a file name to present an abstract name for segmented files.
88 Mangled file names will have the ".bup" extension added to them. If a
89 file's name already ends with ".bup", a ".bupl" extension is added to
90 disambiguate normal files from segmented ones.
# A regular file stored with a non-regular git mode means it was chunked
# into a tree of blobs, so advertise it with the '.bup' suffix.
92 if stat.S_ISREG(mode) and not stat.S_ISREG(gitmode):
# name[:-1] also catches names ending in '.bup' plus one extra character.
94 elif name.endswith('.bup') or name[:-1].endswith('.bup'):
# Mode constants returned by demangle_name() (see below).
100 (BUP_NORMAL, BUP_CHUNKED) = (0,1)
101 def demangle_name(name):
102 """Remove name mangling from a file name, if necessary.
104 The return value is a tuple (demangled_filename,mode), where mode is one of
107 * BUP_NORMAL : files that should be read as-is from the repository
108 * BUP_CHUNKED : files that were chunked and need to be assembled
110 For more information on the name mangling algorithm, see mangle_name()
# '.bupl' marks a normal file whose real name ended in '.bup'; strip the
# suffix.  '.bup' marks a chunked file.  Anything else is untouched.
112 if name.endswith('.bupl'):
113 return (name[:-5], BUP_NORMAL)
114 elif name.endswith('.bup'):
115 return (name[:-4], BUP_CHUNKED)
117 return (name, BUP_NORMAL)
# Generator: yield the packfile-encoded form of an object — a varint-style
# header (low 4 bits of size + 3-bit type tag, continuation bit 0x80)
# followed by the zlib-compressed content.
# NOTE(review): the loop that emits the remaining size bytes and the final
# z.flush() are missing from this excerpt.
120 def _encode_packobj(type, content):
123 szbits = (sz & 0x0f) | (_typemap[type]<<4)
126 if sz: szbits |= 0x80
132 z = zlib.compressobj(1)
134 yield z.compress(content)
# Generator: yield the zlib-compressed loose-object encoding of 'content':
# the standard git "<type> <len>\0" header followed by the content itself.
# NOTE(review): the trailing 'yield z.flush()' is missing from this excerpt.
138 def _encode_looseobj(type, content):
139 z = zlib.compressobj(1)
140 yield z.compress('%s %d\0' % (type, len(content)))
141 yield z.compress(content)
# Inverse of _encode_looseobj(): decompress 'buf', split off the
# "<type> <size>\0" header, and return (type, content).  The asserts
# validate the parsed header against the payload.
145 def _decode_looseobj(buf):
147 s = zlib.decompress(buf)
154 assert(type in _typemap)
155 assert(sz == len(content))
156 return (type, content)
# Inverse of _encode_packobj(): parse the type tag from the first header
# byte, accumulate the 7-bits-per-byte size, and decompress the remainder.
# (The declared size itself is not used beyond parsing here.)
159 def _decode_packobj(buf):
162 type = _typermap[(c & 0x70) >> 4]
169 sz |= (c & 0x7f) << shift
173 return (type, zlib.decompress(buf[i+1:]))
180 def find_offset(self, hash):
181 """Get the offset of an object inside the index file."""
# Binary-search for the sha, then map the entry index to a pack offset.
# NOTE(review): the 'if idx is not None' guard is missing from this excerpt.
182 idx = self._idx_from_hash(hash)
184 return self._ofs_from_idx(idx)
187 def exists(self, hash, want_source=False):
188 """Return nonempty if the object exists in this index."""
# With want_source=True the (truthy) idx file basename is returned so the
# caller can tell which index matched; otherwise just True.
189 if hash and (self._idx_from_hash(hash) != None):
190 return want_source and os.path.basename(self.name) or True
194 return int(self.fanout[255])
# Binary-search the sorted sha table for a 20-byte binary 'hash'.
# The fanout table narrows the search to the entries whose first byte
# matches; returns the entry index or (presumably) None when not found —
# the loop/return lines are missing from this excerpt.
196 def _idx_from_hash(self, hash):
197 global _total_searches, _total_steps
199 assert(len(hash) == 20)
201 start = self.fanout[b1-1] # range -1..254
202 end = self.fanout[b1] # range 0..255
204 _total_steps += 1 # lookup table is a step
207 mid = start + (end-start)/2
208 v = self._idx_to_hash(mid)
218 class PackIdxV1(PackIdx):
219 """Object representation of a Git pack index (version 1) file."""
220 def __init__(self, filename, f):
222 self.idxnames = [self.name]
# mmap the whole idx file; v1 layout: 256-entry fanout of 4-byte
# big-endian counts, then 24-byte entries (4-byte offset + 20-byte sha).
223 self.map = mmap_read(f)
224 self.fanout = list(struct.unpack('!256I',
225 str(buffer(self.map, 0, 256*4))))
226 self.fanout.append(0) # entry "-1"
227 nsha = self.fanout[255]
229 self.shatable = buffer(self.map, self.sha_ofs, nsha*24)
# Each 24-byte entry starts with the 4-byte pack offset...
231 def _ofs_from_idx(self, idx):
232 return struct.unpack('!I', str(self.shatable[idx*24 : idx*24+4]))[0]
# ...followed by the 20-byte binary sha.
234 def _idx_to_hash(self, idx):
235 return str(self.shatable[idx*24+4 : idx*24+24])
# Iterate all shas as zero-copy buffer slices, in index order.
238 for i in xrange(self.fanout[255]):
239 yield buffer(self.map, 256*4 + 24*i + 4, 20)
242 class PackIdxV2(PackIdx):
243 """Object representation of a Git pack index (version 2) file."""
244 def __init__(self, filename, f):
246 self.idxnames = [self.name]
247 self.map = mmap_read(f)
# v2 magic '\377tOc' plus version 2.
248 assert(str(self.map[0:8]) == '\377tOc\0\0\0\2')
249 self.fanout = list(struct.unpack('!256I',
250 str(buffer(self.map, 8, 256*4))))
251 self.fanout.append(0) # entry "-1"
252 nsha = self.fanout[255]
# v2 layout after the fanout: sha table (20*n), crc table (4*n),
# 32-bit offset table (4*n), then the 64-bit offset overflow table.
253 self.sha_ofs = 8 + 256*4
254 self.shatable = buffer(self.map, self.sha_ofs, nsha*20)
255 self.ofstable = buffer(self.map,
256 self.sha_ofs + nsha*20 + nsha*4,
258 self.ofs64table = buffer(self.map,
259 8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
261 def _ofs_from_idx(self, idx):
262 ofs = struct.unpack('!I', str(buffer(self.ofstable, idx*4, 4)))[0]
# High bit set means the low 31 bits index into the 64-bit table.
264 idx64 = ofs & 0x7fffffff
265 ofs = struct.unpack('!Q',
266 str(buffer(self.ofs64table, idx64*8, 8)))[0]
269 def _idx_to_hash(self, idx):
270 return str(self.shatable[idx*20:(idx+1)*20])
# Iterate all shas as zero-copy buffer slices, in index order.
273 for i in xrange(self.fanout[255]):
274 yield buffer(self.map, 8 + 256*4 + 20*i, 20)
# Build the merged index list for one pack directory.  Only one
# PackIdxList may exist at a time (enforced by the _mpi_count assert)
# because the mmapped indexes are memory-hungry.
279 def __init__(self, dir):
281 assert(_mpi_count == 0) # these things suck tons of VM; don't waste it
# Bloom filtering is enabled later by refresh() when a valid filter covers
# all packs.
286 self.do_bloom = False
293 assert(_mpi_count == 0)
296 return iter(idxmerge(self.packs))
299 return sum(len(pack) for pack in self.packs)
301 def exists(self, hash, want_source=False):
302 """Return nonempty if the object exists in the index files."""
303 global _total_searches
# Objects added via add() are checked first.
305 if hash in self.also:
# Bloom filter fast path: a negative answer is definitive; on a positive
# (possibly false) hit, fall through to the real per-pack search.
307 if self.do_bloom and self.bloom:
308 if self.bloom.exists(hash):
309 self.do_bloom = False
311 _total_searches -= 1 # was counted by bloom
313 for i in xrange(len(self.packs)):
315 _total_searches -= 1 # will be incremented by sub-pack
316 ix = p.exists(hash, want_source=want_source)
318 # reorder so most recently used packs are searched first
319 self.packs = [p] + self.packs[:i] + self.packs[i+1:]
324 def refresh(self, skip_midx = False):
325 """Refresh the index list.
326 This method verifies if .midx files were superseded (e.g. all of its
327 contents are in another, bigger .midx file) and removes the superseded
330 If skip_midx is True, all work on .midx files will be skipped and .midx
331 files will be removed from the list.
333 The module-global variable 'ignore_midx' can force this function to
334 always act as if skip_midx was True.
336 self.bloom = None # Always reopen the bloom as it may have been replaced
337 self.do_bloom = False
338 skip_midx = skip_midx or ignore_midx
# Keep already-open indexes (keyed by filename) so we don't re-mmap them.
339 d = dict((p.name, p) for p in self.packs
340 if not skip_midx or not isinstance(p, midx.PackMidx))
341 if os.path.exists(self.dir):
# Register each open midx under the .idx names it covers, so covered
# plain .idx files aren't opened separately below.
344 for ix in self.packs:
345 if isinstance(ix, midx.PackMidx):
346 for name in ix.idxnames:
347 d[os.path.join(self.dir, name)] = ix
348 for full in glob.glob(os.path.join(self.dir,'*.midx')):
350 mx = midx.PackMidx(full)
351 (mxd, mxf) = os.path.split(mx.name)
# A midx referencing a missing .idx is broken; warn the user.
353 for n in mx.idxnames:
354 if not os.path.exists(os.path.join(mxd, n)):
355 log(('warning: index %s missing\n' +
356 ' used by %s\n') % (n, mxf))
# Prefer bigger (then newer) midxes so smaller redundant ones get dropped.
363 midxl.sort(key=lambda ix:
364 (-len(ix), -os.stat(ix.name).st_mtime))
367 for sub in ix.idxnames:
368 found = d.get(os.path.join(self.dir, sub))
369 if not found or isinstance(found, PackIdx):
# doesn't exist, or exists but not in a midx
370 # doesn't exist, or exists but not in a midx
375 for name in ix.idxnames:
376 d[os.path.join(self.dir, name)] = ix
# Fully-covered midx: delete it unless it asked to be kept.
377 elif not ix.force_keep:
378 debug1('midx: removing redundant: %s\n'
379 % os.path.basename(ix.name))
# Open any remaining plain .idx files not covered by a midx.
381 for full in glob.glob(os.path.join(self.dir,'*.idx')):
389 bfull = os.path.join(self.dir, 'bup.bloom')
390 if self.bloom is None and os.path.exists(bfull):
391 self.bloom = bloom.ShaBloom(bfull)
392 self.packs = list(set(d.values()))
# Biggest packs first: most objects land there, so search them first.
393 self.packs.sort(lambda x,y: -cmp(len(x),len(y)))
# Only trust the bloom filter if it covers at least as many objects as
# all our packs combined.
394 if self.bloom and self.bloom.valid() and len(self.bloom) >= len(self):
398 debug1('PackIdxList: using %d index%s.\n'
399 % (len(self.packs), len(self.packs)!=1 and 'es' or ''))
402 """Insert an additional object in the list."""
406 def calc_hash(type, content):
407 """Calculate some content's hash in the Git fashion."""
# Git object ids are sha1("<type> <len>\0" + content); the Sha1 update
# and digest lines are missing from this excerpt.
408 header = '%s %d\0' % (type, len(content))
# Sort key for tree entries.  Git sorts trees as if directory names had a
# trailing '/'; the return lines implementing that are missing from this
# excerpt.
414 def _shalist_sort_key(ent):
415 (mode, name, id) = ent
416 if stat.S_ISDIR(int(mode, 8)):
# Open a pack index by filename and return the right reader: PackIdxV2,
# PackIdxV1, or midx.PackMidx.  Raises GitError for anything unrecognized.
422 def open_idx(filename):
423 if filename.endswith('.idx'):
424 f = open(filename, 'rb')
# '\377tOc' is the v2+ magic; v1 files have no magic and start with the
# fanout table, which always compares below the magic bytes.
426 if header[0:4] == '\377tOc':
427 version = struct.unpack('!I', header[4:8])[0]
429 return PackIdxV2(filename, f)
431 raise GitError('%s: expected idx file version 2, got %d'
432 % (filename, version))
433 elif len(header) == 8 and header[0:4] < '\377tOc':
434 return PackIdxV1(filename, f)
436 raise GitError('%s: unrecognized idx file header' % filename)
437 elif filename.endswith('.midx'):
438 return midx.PackMidx(filename)
440 raise GitError('idx filenames must end with .idx or .midx')
443 def idxmerge(idxlist, final_progress=True):
444 """Generate a list of all the objects reachable in a PackIdxList."""
# Progress callback used while merging (overwrites the same line).
445 def pfunc(count, total):
446 qprogress('Reading indexes: %.2f%% (%d/%d)\r'
447 % (count*100.0/total, count, total))
# Final callback printed once at the end (guarded by final_progress in the
# full file; the 'if' line is missing from this excerpt).
448 def pfinal(count, total):
450 progress('Reading indexes: %.2f%% (%d/%d), done.\n'
451 % (100, total, total))
452 return merge_iter(idxlist, 10024, pfunc, pfinal)
# Default objcache factory for PackWriter: index of all packs in the repo.
455 def _make_objcache():
456 return PackIdxList(repo('objects/pack'))
459 """Writes Git objects inside a pack file."""
# The objcache_maker callable lazily builds the existence-check index
# (see _require_objcache); the other attribute initializations are missing
# from this excerpt.
460 def __init__(self, objcache_maker=_make_objcache):
466 self.objcache_maker = objcache_maker
474 (fd,name) = tempfile.mkstemp(suffix='.pack', dir=repo('objects'))
475 self.file = os.fdopen(fd, 'w+b')
476 assert(name.endswith('.pack'))
477 self.filename = name[:-5]
478 self.file.write('PACK\0\0\0\2\0\0\0\0')
479 self.idx = list(list() for i in xrange(256))
# Write one pre-encoded object to the pack file and record its idx entry.
# NOTE(review): the write/flush try/except that defines 'nw' is missing
# from this excerpt.
481 def _raw_write(self, datalist, sha):
484 # in case we get interrupted (eg. KeyboardInterrupt), it's best if
485 # the file never has a *partial* blob. So let's make sure it's
486 # all-or-nothing. (The blob shouldn't be very big anyway, thanks
487 # to our hashsplit algorithm.) f.write() does its own buffering,
488 # but that's okay because we'll flush it in _end().
489 oneblob = ''.join(datalist)
# Re-raise the I/O error as a GitError, preserving the traceback
# (Python 2 three-argument raise).
493 raise GitError, e, sys.exc_info()[2]
# Mask to keep the crc32 non-negative across Python versions.
495 crc = zlib.crc32(oneblob) & 0xffffffff
496 self._update_idx(sha, crc, nw)
# Record (sha, crc, pack offset) under the sha's first byte; self.idx is a
# 256-bucket fanout list.  Offset = current file position minus the bytes
# just written.
501 def _update_idx(self, sha, crc, size):
504 self.idx[ord(sha[0])].append((sha, crc, self.file.tell() - size))
# Unconditionally write an object to the pack (sha recomputed when needed
# for verification; surrounding guard lines are missing from this excerpt).
506 def _write(self, sha, type, content):
510 sha = calc_hash(type, content)
511 size, crc = self._raw_write(_encode_packobj(type, content), sha=sha)
514 def breakpoint(self):
515 """Clear byte and object counts and return the last processed id."""
# The _end()/_open() calls that close the current pack and start a new one
# are missing from this excerpt.
517 self.outbytes = self.count = 0
# Lazily create the object-existence cache; raise if no maker is available
# (the GitError raise line is split across the missing line 524).
520 def _require_objcache(self):
521 if self.objcache is None and self.objcache_maker:
522 self.objcache = self.objcache_maker()
523 if self.objcache is None:
525 "PackWriter not opened or can't check exists w/o objcache")
527 def exists(self, id, want_source=False):
528 """Return non-empty if an object is found in the object cache."""
# Delegates to the PackIdxList built by _require_objcache().
529 self._require_objcache()
530 return self.objcache.exists(id, want_source=want_source)
532 def maybe_write(self, type, content):
533 """Write an object to the pack file if not present and return its id."""
534 self._require_objcache()
535 sha = calc_hash(type, content)
# Deduplicate against everything already in the repo's indexes, then
# remember the new sha so this pack doesn't write it twice either.
536 if not self.exists(sha):
537 self._write(sha, type, content)
538 self.objcache.add(sha)
541 def new_blob(self, blob):
542 """Create a blob object in the pack with the supplied content."""
543 return self.maybe_write('blob', blob)
545 def new_tree(self, shalist):
546 """Create a tree object in the pack."""
# Git requires tree entries in its specific sort order (dirs as 'name/').
547 shalist = sorted(shalist, key = _shalist_sort_key)
549 for (mode,name,bin) in shalist:
# Git never stores leading zeros in tree entry modes.
552 assert(mode[0] != '0')
554 assert(len(bin) == 20)
# Entry format: "<octal mode> <name>\0<20-byte binary sha>".
555 l.append('%s %s\0%s' % (mode,name,bin))
556 return self.maybe_write('tree', ''.join(l))
# Assemble a raw commit object from its header fields plus the message
# (blank-line/msg append lines are missing from this excerpt) and write it.
558 def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
560 if tree: l.append('tree %s' % tree.encode('hex'))
561 if parent: l.append('parent %s' % parent.encode('hex'))
562 if author: l.append('author %s %s' % (author, _git_date(adate)))
563 if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
566 return self.maybe_write('commit', '\n'.join(l))
568 def new_commit(self, parent, tree, date, msg):
569 """Create a commit object in the pack."""
# Author and committer are both the current user; same date for both.
570 userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
571 commit = self._new_commit(tree, parent,
572 userline, date, userline, date,
577 """Remove the pack file from disk."""
583 os.unlink(self.filename + '.pack')
# Finish the current pack: patch in the object count, append the pack
# sha1 trailer, write the .idx, and rename both to their final
# 'pack-<sha>' names.  Returns None when no pack is open.
585 def _end(self, run_midx=True):
587 if not f: return None
593 # update object count
595 cp = struct.pack('!i', self.count)
599 # calculate the pack sha1sum
602 for b in chunkyreader(f):
604 packbin = sum.digest()
608 obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin)
610 nameprefix = repo('objects/pack/pack-%s' % obj_list_sha)
# Drop any stale mmap helper file left from writing.
611 if os.path.exists(self.filename + '.map'):
612 os.unlink(self.filename + '.map')
613 os.rename(self.filename + '.pack', nameprefix + '.pack')
614 os.rename(self.filename + '.idx', nameprefix + '.idx')
# Optionally refresh .midx/bloom indexes now that a new pack exists.
617 auto_midx(repo('objects/pack'))
620 def close(self, run_midx=True):
621 """Close the pack file and move it to its definitive path."""
622 return self._end(run_midx=run_midx)
# Write a version-2 pack index for this pack via the C helper, then
# compute the hex sha1 of the object list (used to name the pack) and
# append the idx's own trailing checksum.
624 def _write_pack_idx_v2(self, filename, idx, packbin):
625 idx_f = open(filename, 'w+b')
# v2 idx magic + version.
626 idx_f.write('\377tOc\0\0\0\2')
# Reserve space up to where 64-bit offsets would start: header + fanout
# + (20-byte sha + 4-byte crc + 4-byte ofs) per object.
628 ofs64_ofs = 8 + 4*256 + 28*self.count
629 idx_f.truncate(ofs64_ofs)
631 idx_map = mmap_readwrite(idx_f, close=False)
632 idx_f.seek(0, SEEK_END)
# The C helper fills the mmapped region and appends any 64-bit offsets.
633 count = _helpers.write_idx(idx_f, idx_map, idx, self.count)
634 assert(count == self.count)
# Skip the header+fanout, then hash just the sorted sha table — this is
# how git derives the pack name.
640 b = idx_f.read(8 + 4*256)
643 obj_list_sum = Sha1()
644 for b in chunkyreader(idx_f, 20*self.count):
646 obj_list_sum.update(b)
647 namebase = obj_list_sum.hexdigest()
# Checksum the rest of the file and append the idx trailer.
649 for b in chunkyreader(idx_f):
651 idx_f.write(idx_sum.digest())
658 return '%d %s' % (date, time.strftime('%z', time.localtime(date)))
662 os.environ['GIT_DIR'] = os.path.abspath(repo())
665 def list_refs(refname = None):
666 """Generate a list of tuples in the form (refname,hash).
667 If a ref name is specified, list only this particular ref.
# 'git show-ref' prints "<sha> <refname>" per line; a nonzero exit just
# means no refs matched, so it is not fatal.
669 argv = ['git', 'show-ref', '--']
672 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
673 out = p.stdout.read().strip()
674 rv = p.wait() # not fatal
678 for d in out.split('\n'):
679 (sha, name) = d.split(' ', 1)
# Yield the sha as 20 binary bytes, not hex.
680 yield (name, sha.decode('hex'))
683 def read_ref(refname):
684 """Get the commit id of the most recent commit made on a given ref."""
# The lines selecting the single match (or returning None) are missing
# from this excerpt.
685 l = list(list_refs(refname))
693 def rev_list(ref, count=None):
694 """Generate a list of reachable commits in reverse chronological order.
696 This generator walks through commits, from child to parent, that are
697 reachable via the specified ref and yields a series of tuples of the form
700 If count is a non-zero integer, limit the number of commits to "count"
# Guard against a ref that would be parsed as a git option.
703 assert(not ref.startswith('-'))
706 opts += ['-n', str(atoi(count))]
# '--pretty=format:%ct' makes each commit print its committer timestamp on
# the line after the "commit <sha>" line.
707 argv = ['git', 'rev-list', '--pretty=format:%ct'] + opts + [ref, '--']
708 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
712 if s.startswith('commit '):
713 commit = s[7:].decode('hex')
717 rv = p.wait() # not fatal
# Python 2 statement-form raise.
719 raise GitError, 'git rev-list returned error %d' % rv
722 def rev_get_date(ref):
723 """Get the date of the latest commit on the specified ref."""
# count=1 limits the walk to the tip commit; its date is returned (the
# 'return date' line is missing from this excerpt).
724 for (date, commit) in rev_list(ref, count=1):
726 raise GitError, 'no such commit %r' % ref
729 def rev_parse(committish):
730 """Resolve the full hash for 'committish', if it exists.
732 Should be roughly equivalent to 'git rev-parse'.
734 Returns the hex value of the hash if it is found, None if 'committish' does
735 not correspond to anything.
# First try it as a ref name...
737 head = read_ref(committish)
739 debug2("resolved from ref: commit = %s\n" % head.encode('hex'))
# ...then, if it looks like a full 40-char hex id, check the pack indexes.
742 pL = PackIdxList(repo('objects/pack'))
744 if len(committish) == 40:
746 hash = committish.decode('hex')
756 def update_ref(refname, newval, oldval):
757 """Change the commit pointed to by a branch."""
# Only branch refs may be updated; newval/oldval are 20-byte binary shas,
# passed to git as hex.  Passing oldval makes the update atomic (git
# refuses if the ref moved in the meantime).
760 assert(refname.startswith('refs/heads/'))
761 p = subprocess.Popen(['git', 'update-ref', refname,
762 newval.encode('hex'), oldval.encode('hex')],
763 preexec_fn = _gitenv)
764 _git_wait('git update-ref', p)
767 def guess_repo(path=None):
768 """Set the path value in the global variable "repodir".
769 This makes bup look for an existing bup repository, but not fail if a
770 repository doesn't exist. Usually, if you are interacting with a bup
771 repository, you would not be calling this function but using
# Fall back from the explicit path to $BUP_DIR, then to ~/.bup.
778 repodir = os.environ.get('BUP_DIR')
780 repodir = os.path.expanduser('~/.bup')
783 def init_repo(path=None):
784 """Create the Git bare repository for bup in a given path."""
786 d = repo() # appends a / to the path
787 parent = os.path.dirname(os.path.dirname(d))
788 if parent and not os.path.exists(parent):
789 raise GitError('parent directory "%s" does not exist\n' % parent)
790 if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
791 raise GitError('"%d" exists but is not a directory\n' % d)
792 p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr,
793 preexec_fn = _gitenv)
794 _git_wait('git init', p)
795 # Force the index version configuration in order to ensure bup works
796 # regardless of the version of the installed Git binary.
797 p = subprocess.Popen(['git', 'config', 'pack.indexVersion', '2'],
798 stdout=sys.stderr, preexec_fn = _gitenv)
799 _git_wait('git config', p)
802 def check_repo_or_die(path=None):
803 """Make sure a bup repository exists, and abort if not.
804 If the path to a particular repository was not specified, this function
805 initializes the default repository automatically.
# A repo must at least contain objects/pack/; for the default ~/.bup repo
# it is created on demand, otherwise we log an error (and presumably
# exit — the exit line is missing from this excerpt).
808 if not os.path.isdir(repo('objects/pack/.')):
809 if repodir == home_repodir:
812 log('error: %r is not a bup/git repository\n' % repo())
817 """Generate a list of (mode, name, hash) tuples of objects from 'buf'."""
819 while ofs < len(buf):
820 z = buf[ofs:].find('\0')
822 spl = buf[ofs:ofs+z].split(' ', 1)
823 assert(len(spl) == 2)
824 sha = buf[ofs+z+1:ofs+z+1+20]
826 yield (spl[0], spl[1], sha)
831 """Get Git's version and ensure a usable version is installed.
833 The returned version is formatted as an ordered tuple with each position
834 representing a digit in the version tag. For example, the following tuple
835 would represent version 1.6.6.9:
841 p = subprocess.Popen(['git', '--version'],
842 stdout=subprocess.PIPE)
843 gvs = p.stdout.read()
844 _git_wait('git --version', p)
845 m = re.match(r'git version (\S+.\S+)', gvs)
847 raise GitError('git --version weird output: %r' % gvs)
848 _ver = tuple(m.group(1).split('.'))
849 needed = ('1','5', '3', '1')
851 raise GitError('git version %s or higher is required; you have %s'
852 % ('.'.join(needed), '.'.join(_ver)))
# Wait for subprocess 'p' and raise GitError (tagged with 'cmd') on a
# nonzero exit status; the p.wait() line is missing from this excerpt.
856 def _git_wait(cmd, p):
859 raise GitError('%s returned %d' % (cmd, rv))
# Run a git command (with the repo environment) and return its stdout,
# raising GitError on failure.
862 def _git_capture(argv):
863 p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv)
865 _git_wait(repr(argv), p)
# Iterator wrapper that can be cleanly cancelled mid-stream: abort() stops
# iteration and invokes the optional onabort callback (used by CatPipe to
# drain/kill its cat-file subprocess when a read is abandoned).
869 class _AbortableIter:
870 def __init__(self, it, onabort = None):
872 self.onabort = onabort
880 return self.it.next()
881 except StopIteration, e:
889 """Abort iteration and call the abort callback, if needed."""
901 """Link to 'git cat-file' that is used to retrieve blob data."""
904 wanted = ('1','5','6')
907 log('warning: git version < %s; bup will be slow.\n'
910 self.get = self._slow_get
912 self.p = self.inprogress = None
913 self.get = self._fast_get
917 self.p.stdout.close()
920 self.inprogress = None
924 self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
925 stdin=subprocess.PIPE,
926 stdout=subprocess.PIPE,
929 preexec_fn = _gitenv)
# Fetch one object through the persistent 'git cat-file --batch' child:
# write the id on its stdin, parse the "<hex> <type> <size>" header, then
# yield the type followed by the object body.  Only one request may be in
# flight at a time (self.inprogress guards this).
931 def _fast_get(self, id):
# (Re)start the child if it has never run or has exited.
932 if not self.p or self.p.poll() != None:
935 assert(self.p.poll() == None)
937 log('_fast_get: opening %r while %r is open'
938 % (id, self.inprogress))
939 assert(not self.inprogress)
# Reject ids that could corrupt the batch protocol or look like options.
940 assert(id.find('\n') < 0)
941 assert(id.find('\r') < 0)
942 assert(not id.startswith('-'))
944 self.p.stdin.write('%s\n' % id)
946 hdr = self.p.stdout.readline()
947 if hdr.endswith(' missing\n'):
948 self.inprogress = None
949 raise KeyError('blob %r is missing' % id)
951 if len(spl) != 3 or len(spl[0]) != 40:
952 raise GitError('expected blob, got %r' % spl)
953 (hex, type, size) = spl
# Abortable so an abandoned read can kill/drain the child safely.
955 it = _AbortableIter(chunkyreader(self.p.stdout, int(spl[2])),
956 onabort = self._abort)
# The batch protocol terminates each object with a newline.
961 assert(self.p.stdout.readline() == '\n')
962 self.inprogress = None
# Fallback for old git (no usable 'cat-file --batch'): run one
# 'git cat-file' process per request.  Yields the type, then the content
# in chunks.
967 def _slow_get(self, id):
968 assert(id.find('\n') < 0)
969 assert(id.find('\r') < 0)
971 type = _git_capture(['git', 'cat-file', '-t', id]).strip()
974 p = subprocess.Popen(['git', 'cat-file', type, id],
975 stdout=subprocess.PIPE,
976 preexec_fn = _gitenv)
977 for blob in chunkyreader(p.stdout):
979 _git_wait('git cat-file', p)
987 treefile = ''.join(it)
988 for (mode, name, sha) in treeparse(treefile):
989 for blob in self.join(sha.encode('hex')):
991 elif type == 'commit':
992 treeline = ''.join(it).split('\n')[0]
993 assert(treeline.startswith('tree '))
994 for blob in self.join(treeline[5:]):
997 raise GitError('invalid object type %r: expected blob/tree/commit'
1001 """Generate a list of the content of all blobs that can be reached
1002 from an object. The hash given in 'id' must point to a blob, a tree
1003 or a commit. The content of all blobs that can be seen from trees or
1004 commits will be added to the list.
1007 for d in self._join(self.get(id)):
1009 except StopIteration:
1013 """Return a dictionary of all tags in the form {hash: [tag_names, ...]}."""
1015 for (n,c) in list_refs():
1016 if n.startswith('refs/tags/'):
1021 tags[c].append(name) # more than one tag can point at 'c'