1 """Git interaction library.
2 bup repositories are in Git format. This library allows us to
3 interact with the Git data structures.
5 import os, sys, zlib, time, subprocess, struct, stat, re, tempfile, glob
6 from bup.helpers import *
7 from bup import _helpers, path, midx, bloom
9 SEEK_END=2 # os.SEEK_END is not defined in python 2.4
13 home_repodir = os.path.expanduser('~/.bup')
16 _typemap = { 'blob':3, 'tree':2, 'commit':1, 'tag':4 }
17 _typermap = { 3:'blob', 2:'tree', 1:'commit', 4:'tag' }
23 class GitError(Exception):
28 """Get the path to the git repository or one of its subdirectories."""
31 raise GitError('You should call check_repo_or_die()')
33 # If there's a .git subdirectory, then the actual repo is in there.
34 gd = os.path.join(repodir, '.git')
35 if os.path.exists(gd):
38 return os.path.join(repodir, sub)
42 return re.sub(r'([^0-9a-z]|\b)([0-9a-z]{7})[0-9a-z]{33}([^0-9a-z]|\b)',
47 full = os.path.abspath(path)
48 fullrepo = os.path.abspath(repo(''))
49 if not fullrepo.endswith('/'):
51 if full.startswith(fullrepo):
52 path = full[len(fullrepo):]
53 if path.startswith('index-cache/'):
54 path = path[len('index-cache/'):]
55 return shorten_hash(path)
59 paths = [repo('objects/pack')]
60 paths += glob.glob(repo('index-cache/*/.'))
def auto_midx(objdir):
    """Regenerate the .midx and bloom indexes for 'objdir' in the background.

    Runs 'bup midx --auto' and then 'bup bloom' as subprocesses.  Failures
    are reported via add_error() instead of raised, since index regeneration
    is an optimization, not a correctness requirement.
    """
    args = [path.exe(), 'midx', '--auto', '--dir', objdir]
    try:
        rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
    except OSError as e:
        # make sure 'args' gets printed to help with debugging
        add_error('%r: exception: %s' % (args, e))
        return
    if rv:
        add_error('%r: returned %d' % (args, rv))

    args = [path.exe(), 'bloom', '--dir', objdir]
    try:
        rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
    except OSError as e:
        # make sure 'args' gets printed to help with debugging
        add_error('%r: exception: %s' % (args, e))
        return
    if rv:
        add_error('%r: returned %d' % (args, rv))
def mangle_name(name, mode, gitmode):
    """Mangle a file name to present an abstract name for segmented files.
    Mangled file names will have the ".bup" extension added to them. If a
    file's name already ends with ".bup", a ".bupl" extension is added to
    disambiguate normal files from segmented ones.
    """
    if stat.S_ISREG(mode) and not stat.S_ISREG(gitmode):
        # Filesystem says regular file, but git stores it as something else
        # (a tree of chunks): mark it so demangle_name() sees BUP_CHUNKED.
        return name + '.bup'
    elif name.endswith('.bup') or name[:-1].endswith('.bup'):
        # The plain name would collide with a mangled one; disambiguate it.
        return name + '.bupl'
    else:
        return name
(BUP_NORMAL, BUP_CHUNKED) = (0,1)
def demangle_name(name):
    """Remove name mangling from a file name, if necessary.

    The return value is a tuple (demangled_filename,mode), where mode is one of

    * BUP_NORMAL : files that should be read as-is from the repository
    * BUP_CHUNKED : files that were chunked and need to be assembled

    For more information on the name mangling algorithm, see mangle_name()
    """
    # Check the longer suffix first: every '.bupl' name also ends in 'l'.
    if name.endswith('.bupl'):
        return (name[:-len('.bupl')], BUP_NORMAL)
    if name.endswith('.bup'):
        return (name[:-len('.bup')], BUP_CHUNKED)
    return (name, BUP_NORMAL)
def _encode_packobj(type, content):
    """Yield the pack-file encoding of one object: a variable-length
    size/type header followed by the zlib-deflated content.
    """
    szout = ''
    sz = len(content)
    # First byte: low nibble of the size plus the object type in the
    # high nibble; 0x80 marks that more size bytes follow.
    szbits = (sz & 0x0f) | (_typemap[type]<<4)
    sz >>= 4
    while 1:
        if sz: szbits |= 0x80
        szout += chr(szbits)
        if not sz:
            break
        szbits = sz & 0x7f
        sz >>= 7
    z = zlib.compressobj(1)
    yield szout
    yield z.compress(content)
    yield z.flush()
def _encode_looseobj(type, content):
    """Yield the git loose-object encoding of 'content': the header
    (type, space, decimal size, NUL) plus the content, zlib-deflated
    as a single stream.
    """
    z = zlib.compressobj(1)
    yield z.compress('%s %d\0' % (type, len(content)))
    yield z.compress(content)
    # Without flush(), buffered tail bytes are lost and the stream is
    # truncated/undecodable.
    yield z.flush()
def _decode_looseobj(buf):
    """Inverse of _encode_looseobj(): inflate 'buf' and split the
    '<type> <size>\\0<content>' layout; return (type, content).
    """
    assert(buf)
    s = zlib.decompress(buf)
    i = s.find('\0')
    assert(i > 0)
    l = s[:i].split(' ')
    type = l[0]
    sz = int(l[1])
    content = s[i+1:]
    assert(type in _typemap)
    assert(sz == len(content))  # the header's size field must match
    return (type, content)
def _decode_packobj(buf):
    """Inverse of _encode_packobj(): decode the variable-length size/type
    header, then inflate the remainder; return (type, content).
    """
    assert(buf)
    c = ord(buf[0])
    type = _typermap[(c & 0x70) >> 4]
    sz = c & 0x0f
    shift = 4
    i = 0
    while c & 0x80:  # continuation bit: another size byte follows
        i += 1
        c = ord(buf[i])
        sz |= (c & 0x7f) << shift
        shift += 7
    return (type, zlib.decompress(buf[i+1:]))
180 def find_offset(self, hash):
181 """Get the offset of an object inside the index file."""
182 idx = self._idx_from_hash(hash)
184 return self._ofs_from_idx(idx)
187 def exists(self, hash, want_source=False):
188 """Return nonempty if the object exists in this index."""
189 if hash and (self._idx_from_hash(hash) != None):
190 return want_source and os.path.basename(self.name) or True
194 return int(self.fanout[255])
196 def _idx_from_hash(self, hash):
197 global _total_searches, _total_steps
199 assert(len(hash) == 20)
201 start = self.fanout[b1-1] # range -1..254
202 end = self.fanout[b1] # range 0..255
204 _total_steps += 1 # lookup table is a step
207 mid = start + (end-start)/2
208 v = self._idx_to_hash(mid)
218 class PackIdxV1(PackIdx):
219 """Object representation of a Git pack index (version 1) file."""
220 def __init__(self, filename, f):
222 self.idxnames = [self.name]
223 self.map = mmap_read(f)
224 self.fanout = list(struct.unpack('!256I',
225 str(buffer(self.map, 0, 256*4))))
226 self.fanout.append(0) # entry "-1"
227 nsha = self.fanout[255]
229 self.shatable = buffer(self.map, self.sha_ofs, nsha*24)
231 def _ofs_from_idx(self, idx):
232 return struct.unpack('!I', str(self.shatable[idx*24 : idx*24+4]))[0]
234 def _idx_to_hash(self, idx):
235 return str(self.shatable[idx*24+4 : idx*24+24])
238 for i in xrange(self.fanout[255]):
239 yield buffer(self.map, 256*4 + 24*i + 4, 20)
242 class PackIdxV2(PackIdx):
243 """Object representation of a Git pack index (version 2) file."""
244 def __init__(self, filename, f):
246 self.idxnames = [self.name]
247 self.map = mmap_read(f)
248 assert(str(self.map[0:8]) == '\377tOc\0\0\0\2')
249 self.fanout = list(struct.unpack('!256I',
250 str(buffer(self.map, 8, 256*4))))
251 self.fanout.append(0) # entry "-1"
252 nsha = self.fanout[255]
253 self.sha_ofs = 8 + 256*4
254 self.shatable = buffer(self.map, self.sha_ofs, nsha*20)
255 self.ofstable = buffer(self.map,
256 self.sha_ofs + nsha*20 + nsha*4,
258 self.ofs64table = buffer(self.map,
259 8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
261 def _ofs_from_idx(self, idx):
262 ofs = struct.unpack('!I', str(buffer(self.ofstable, idx*4, 4)))[0]
264 idx64 = ofs & 0x7fffffff
265 ofs = struct.unpack('!Q',
266 str(buffer(self.ofs64table, idx64*8, 8)))[0]
269 def _idx_to_hash(self, idx):
270 return str(self.shatable[idx*20:(idx+1)*20])
273 for i in xrange(self.fanout[255]):
274 yield buffer(self.map, 8 + 256*4 + 20*i, 20)
279 def __init__(self, dir):
281 assert(_mpi_count == 0) # these things suck tons of VM; don't waste it
286 self.do_bloom = False
293 assert(_mpi_count == 0)
296 return iter(idxmerge(self.packs))
299 return sum(len(pack) for pack in self.packs)
301 def exists(self, hash, want_source=False):
302 """Return nonempty if the object exists in the index files."""
303 global _total_searches
305 if hash in self.also:
307 if self.do_bloom and self.bloom:
308 if self.bloom.exists(hash):
309 self.do_bloom = False
311 _total_searches -= 1 # was counted by bloom
313 for i in xrange(len(self.packs)):
315 _total_searches -= 1 # will be incremented by sub-pack
316 ix = p.exists(hash, want_source=want_source)
318 # reorder so most recently used packs are searched first
319 self.packs = [p] + self.packs[:i] + self.packs[i+1:]
324 def refresh(self, skip_midx = False):
325 """Refresh the index list.
326 This method verifies if .midx files were superseded (e.g. all of its
327 contents are in another, bigger .midx file) and removes the superseded
330 If skip_midx is True, all work on .midx files will be skipped and .midx
331 files will be removed from the list.
333 The module-global variable 'ignore_midx' can force this function to
334 always act as if skip_midx was True.
336 self.bloom = None # Always reopen the bloom as it may have been relaced
337 self.do_bloom = False
338 skip_midx = skip_midx or ignore_midx
339 d = dict((p.name, p) for p in self.packs
340 if not skip_midx or not isinstance(p, midx.PackMidx))
341 if os.path.exists(self.dir):
344 for ix in self.packs:
345 if isinstance(ix, midx.PackMidx):
346 for name in ix.idxnames:
347 d[os.path.join(self.dir, name)] = ix
348 for full in glob.glob(os.path.join(self.dir,'*.midx')):
350 mx = midx.PackMidx(full)
351 (mxd, mxf) = os.path.split(mx.name)
353 for n in mx.idxnames:
354 if not os.path.exists(os.path.join(mxd, n)):
355 log(('warning: index %s missing\n' +
356 ' used by %s\n') % (n, mxf))
363 midxl.sort(lambda x,y: -cmp(len(x),len(y)))
366 for sub in ix.idxnames:
367 found = d.get(os.path.join(self.dir, sub))
368 if not found or isinstance(found, PackIdx):
369 # doesn't exist, or exists but not in a midx
374 for name in ix.idxnames:
375 d[os.path.join(self.dir, name)] = ix
376 elif not ix.force_keep:
377 debug1('midx: removing redundant: %s\n'
378 % os.path.basename(ix.name))
380 for full in glob.glob(os.path.join(self.dir,'*.idx')):
388 bfull = os.path.join(self.dir, 'bup.bloom')
389 if self.bloom is None and os.path.exists(bfull):
390 self.bloom = bloom.ShaBloom(bfull)
391 self.packs = list(set(d.values()))
392 self.packs.sort(lambda x,y: -cmp(len(x),len(y)))
393 if self.bloom and self.bloom.valid() and len(self.bloom) >= len(self):
397 debug1('PackIdxList: using %d index%s.\n'
398 % (len(self.packs), len(self.packs)!=1 and 'es' or ''))
401 """Insert an additional object in the list."""
def calc_hash(type, content):
    """Calculate some content's hash in the Git fashion: sha1 over the
    '<type> <size>\\0' header followed by the content; returns the raw
    20-byte digest.
    """
    header = '%s %d\0' % (type, len(content))
    sum = Sha1()
    sum.update(header)
    sum.update(content)
    return sum.digest()
413 def _shalist_sort_key(ent):
414 (mode, name, id) = ent
415 if stat.S_ISDIR(int(mode, 8)):
def open_idx(filename):
    """Open a pack index file and return a PackIdxV1, PackIdxV2 or
    midx.PackMidx object for it, chosen by extension and header magic.

    Raises GitError for unknown extensions or unsupported idx versions.
    """
    if filename.endswith('.idx'):
        f = open(filename, 'rb')
        header = f.read(8)
        if header[0:4] == '\377tOc':
            version = struct.unpack('!I', header[4:8])[0]
            if version == 2:
                return PackIdxV2(filename, f)
            else:
                raise GitError('%s: expected idx file version 2, got %d'
                               % (filename, version))
        elif len(header) == 8 and header[0:4] < '\377tOc':
            # No magic at all: V1 files start directly with the fanout
            # table, whose first entry always sorts below the magic.
            return PackIdxV1(filename, f)
        else:
            raise GitError('%s: unrecognized idx file header' % filename)
    elif filename.endswith('.midx'):
        return midx.PackMidx(filename)
    else:
        raise GitError('idx filenames must end with .idx or .midx')
def idxmerge(idxlist, final_progress=True):
    """Generate a list of all the objects reachable in a PackIdxList.

    If final_progress is False, the closing 'done' message is suppressed;
    the incremental qprogress() updates are always emitted.
    """
    def pfunc(count, total):
        qprogress('Reading indexes: %.2f%% (%d/%d)\r'
                  % (count*100.0/total, count, total))
    def pfinal(count, total):
        # Honor the flag; previously it was accepted but ignored.
        if final_progress:
            progress('Reading indexes: %.2f%% (%d/%d), done.\n'
                     % (100, total, total))
    return merge_iter(idxlist, 10024, pfunc, pfinal)
def _make_objcache():
    """Default objcache factory for PackWriter: an index over every pack
    in the local repository.
    """
    packdir = repo('objects/pack')
    return PackIdxList(packdir)
458 """Writes Git objects inside a pack file."""
459 def __init__(self, objcache_maker=_make_objcache):
465 self.objcache_maker = objcache_maker
473 (fd,name) = tempfile.mkstemp(suffix='.pack', dir=repo('objects'))
474 self.file = os.fdopen(fd, 'w+b')
475 assert(name.endswith('.pack'))
476 self.filename = name[:-5]
477 self.file.write('PACK\0\0\0\2\0\0\0\0')
478 self.idx = list(list() for i in xrange(256))
480 def _raw_write(self, datalist, sha):
483 # in case we get interrupted (eg. KeyboardInterrupt), it's best if
484 # the file never has a *partial* blob. So let's make sure it's
485 # all-or-nothing. (The blob shouldn't be very big anyway, thanks
486 # to our hashsplit algorithm.) f.write() does its own buffering,
487 # but that's okay because we'll flush it in _end().
488 oneblob = ''.join(datalist)
492 raise GitError, e, sys.exc_info()[2]
494 crc = zlib.crc32(oneblob) & 0xffffffff
495 self._update_idx(sha, crc, nw)
500 def _update_idx(self, sha, crc, size):
503 self.idx[ord(sha[0])].append((sha, crc, self.file.tell() - size))
505 def _write(self, sha, type, content):
509 sha = calc_hash(type, content)
510 size, crc = self._raw_write(_encode_packobj(type, content), sha=sha)
513 def breakpoint(self):
514 """Clear byte and object counts and return the last processed id."""
516 self.outbytes = self.count = 0
519 def _require_objcache(self):
520 if self.objcache is None and self.objcache_maker:
521 self.objcache = self.objcache_maker()
522 if self.objcache is None:
524 "PackWriter not opened or can't check exists w/o objcache")
526 def exists(self, id, want_source=False):
527 """Return non-empty if an object is found in the object cache."""
528 self._require_objcache()
529 return self.objcache.exists(id, want_source=want_source)
531 def maybe_write(self, type, content):
532 """Write an object to the pack file if not present and return its id."""
533 self._require_objcache()
534 sha = calc_hash(type, content)
535 if not self.exists(sha):
536 self._write(sha, type, content)
537 self.objcache.add(sha)
540 def new_blob(self, blob):
541 """Create a blob object in the pack with the supplied content."""
542 return self.maybe_write('blob', blob)
544 def new_tree(self, shalist):
545 """Create a tree object in the pack."""
546 shalist = sorted(shalist, key = _shalist_sort_key)
548 for (mode,name,bin) in shalist:
551 assert(mode[0] != '0')
553 assert(len(bin) == 20)
554 l.append('%s %s\0%s' % (mode,name,bin))
555 return self.maybe_write('tree', ''.join(l))
557 def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
559 if tree: l.append('tree %s' % tree.encode('hex'))
560 if parent: l.append('parent %s' % parent.encode('hex'))
561 if author: l.append('author %s %s' % (author, _git_date(adate)))
562 if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
565 return self.maybe_write('commit', '\n'.join(l))
567 def new_commit(self, parent, tree, date, msg):
568 """Create a commit object in the pack."""
569 userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
570 commit = self._new_commit(tree, parent,
571 userline, date, userline, date,
576 """Remove the pack file from disk."""
582 os.unlink(self.filename + '.pack')
584 def _end(self, run_midx=True):
586 if not f: return None
592 # update object count
594 cp = struct.pack('!i', self.count)
598 # calculate the pack sha1sum
601 for b in chunkyreader(f):
603 packbin = sum.digest()
607 obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin)
609 nameprefix = repo('objects/pack/pack-%s' % obj_list_sha)
610 if os.path.exists(self.filename + '.map'):
611 os.unlink(self.filename + '.map')
612 os.rename(self.filename + '.pack', nameprefix + '.pack')
613 os.rename(self.filename + '.idx', nameprefix + '.idx')
616 auto_midx(repo('objects/pack'))
619 def close(self, run_midx=True):
620 """Close the pack file and move it to its definitive path."""
621 return self._end(run_midx=run_midx)
623 def _write_pack_idx_v2(self, filename, idx, packbin):
624 idx_f = open(filename, 'w+b')
625 idx_f.write('\377tOc\0\0\0\2')
627 ofs64_ofs = 8 + 4*256 + 28*self.count
628 idx_f.truncate(ofs64_ofs)
630 idx_map = mmap_readwrite(idx_f, close=False)
631 idx_f.seek(0, SEEK_END)
632 count = _helpers.write_idx(idx_f, idx_map, idx, self.count)
633 assert(count == self.count)
639 b = idx_f.read(8 + 4*256)
642 obj_list_sum = Sha1()
643 for b in chunkyreader(idx_f, 20*self.count):
645 obj_list_sum.update(b)
646 namebase = obj_list_sum.hexdigest()
648 for b in chunkyreader(idx_f):
650 idx_f.write(idx_sum.digest())
657 return '%d %s' % (date, time.strftime('%z', time.localtime(date)))
661 os.environ['GIT_DIR'] = os.path.abspath(repo())
664 def list_refs(refname = None):
665 """Generate a list of tuples in the form (refname,hash).
666 If a ref name is specified, list only this particular ref.
668 argv = ['git', 'show-ref', '--']
671 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
672 out = p.stdout.read().strip()
673 rv = p.wait() # not fatal
677 for d in out.split('\n'):
678 (sha, name) = d.split(' ', 1)
679 yield (name, sha.decode('hex'))
682 def read_ref(refname):
683 """Get the commit id of the most recent commit made on a given ref."""
684 l = list(list_refs(refname))
692 def rev_list(ref, count=None):
693 """Generate a list of reachable commits in reverse chronological order.
695 This generator walks through commits, from child to parent, that are
696 reachable via the specified ref and yields a series of tuples of the form
699 If count is a non-zero integer, limit the number of commits to "count"
702 assert(not ref.startswith('-'))
705 opts += ['-n', str(atoi(count))]
706 argv = ['git', 'rev-list', '--pretty=format:%ct'] + opts + [ref, '--']
707 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
711 if s.startswith('commit '):
712 commit = s[7:].decode('hex')
716 rv = p.wait() # not fatal
718 raise GitError, 'git rev-list returned error %d' % rv
def rev_get_date(ref):
    """Get the date of the latest commit on the specified ref.

    Raises GitError if the ref does not resolve to any commit.
    """
    for (date, commit) in rev_list(ref, count=1):
        return date  # only the single newest commit was requested
    raise GitError('no such commit %r' % ref)
728 def rev_parse(committish):
729 """Resolve the full hash for 'committish', if it exists.
731 Should be roughly equivalent to 'git rev-parse'.
733 Returns the hex value of the hash if it is found, None if 'committish' does
734 not correspond to anything.
736 head = read_ref(committish)
738 debug2("resolved from ref: commit = %s\n" % head.encode('hex'))
741 pL = PackIdxList(repo('objects/pack'))
743 if len(committish) == 40:
745 hash = committish.decode('hex')
755 def update_ref(refname, newval, oldval):
756 """Change the commit pointed to by a branch."""
759 assert(refname.startswith('refs/heads/'))
760 p = subprocess.Popen(['git', 'update-ref', refname,
761 newval.encode('hex'), oldval.encode('hex')],
762 preexec_fn = _gitenv)
763 _git_wait('git update-ref', p)
766 def guess_repo(path=None):
767 """Set the path value in the global variable "repodir".
768 This makes bup look for an existing bup repository, but not fail if a
769 repository doesn't exist. Usually, if you are interacting with a bup
770 repository, you would not be calling this function but using
777 repodir = os.environ.get('BUP_DIR')
779 repodir = os.path.expanduser('~/.bup')
def init_repo(path=None):
    """Create the Git bare repository for bup in a given path."""
    guess_repo(path)  # sets the global repodir consulted by repo()
    d = repo()  # appends a / to the path
    parent = os.path.dirname(os.path.dirname(d))
    if parent and not os.path.exists(parent):
        raise GitError('parent directory "%s" does not exist\n' % parent)
    if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
        # %s, not %d: 'd' is a path string, so %d raised TypeError instead
        # of producing the intended message.
        raise GitError('"%s" exists but is not a directory\n' % d)
    p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr,
                         preexec_fn = _gitenv)
    _git_wait('git init', p)
    # Force the index version configuration in order to ensure bup works
    # regardless of the version of the installed Git binary.
    p = subprocess.Popen(['git', 'config', 'pack.indexVersion', '2'],
                         stdout=sys.stderr, preexec_fn = _gitenv)
    _git_wait('git config', p)
801 def check_repo_or_die(path=None):
802 """Make sure a bup repository exists, and abort if not.
803 If the path to a particular repository was not specified, this function
804 initializes the default repository automatically.
807 if not os.path.isdir(repo('objects/pack/.')):
808 if repodir == home_repodir:
811 log('error: %r is not a bup/git repository\n' % repo())
816 """Generate a list of (mode, name, hash) tuples of objects from 'buf'."""
818 while ofs < len(buf):
819 z = buf[ofs:].find('\0')
821 spl = buf[ofs:ofs+z].split(' ', 1)
822 assert(len(spl) == 2)
823 sha = buf[ofs+z+1:ofs+z+1+20]
825 yield (spl[0], spl[1], sha)
830 """Get Git's version and ensure a usable version is installed.
832 The returned version is formatted as an ordered tuple with each position
833 representing a digit in the version tag. For example, the following tuple
834 would represent version 1.6.6.9:
840 p = subprocess.Popen(['git', '--version'],
841 stdout=subprocess.PIPE)
842 gvs = p.stdout.read()
843 _git_wait('git --version', p)
844 m = re.match(r'git version (\S+.\S+)', gvs)
846 raise GitError('git --version weird output: %r' % gvs)
847 _ver = tuple(m.group(1).split('.'))
848 needed = ('1','5', '3', '1')
850 raise GitError('git version %s or higher is required; you have %s'
851 % ('.'.join(needed), '.'.join(_ver)))
def _git_wait(cmd, p):
    """Wait for subprocess 'p' (launched as 'cmd') to finish; raise
    GitError if it exited with a non-zero status.
    """
    rv = p.wait()
    if rv != 0:
        raise GitError('%s returned %d' % (cmd, rv))
def _git_capture(argv):
    """Run 'argv' against the bup repository and return its entire stdout.

    Raises GitError (via _git_wait) if the command fails.
    """
    p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv)
    r = p.stdout.read()
    _git_wait(repr(argv), p)
    return r
868 class _AbortableIter:
869 def __init__(self, it, onabort = None):
871 self.onabort = onabort
879 return self.it.next()
880 except StopIteration, e:
888 """Abort iteration and call the abortion callback, if needed."""
900 """Link to 'git cat-file' that is used to retrieve blob data."""
903 wanted = ('1','5','6')
906 log('warning: git version < %s; bup will be slow.\n'
909 self.get = self._slow_get
911 self.p = self.inprogress = None
912 self.get = self._fast_get
916 self.p.stdout.close()
919 self.inprogress = None
923 self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
924 stdin=subprocess.PIPE,
925 stdout=subprocess.PIPE,
928 preexec_fn = _gitenv)
930 def _fast_get(self, id):
931 if not self.p or self.p.poll() != None:
934 assert(self.p.poll() == None)
936 log('_fast_get: opening %r while %r is open'
937 % (id, self.inprogress))
938 assert(not self.inprogress)
939 assert(id.find('\n') < 0)
940 assert(id.find('\r') < 0)
941 assert(not id.startswith('-'))
943 self.p.stdin.write('%s\n' % id)
945 hdr = self.p.stdout.readline()
946 if hdr.endswith(' missing\n'):
947 self.inprogress = None
948 raise KeyError('blob %r is missing' % id)
950 if len(spl) != 3 or len(spl[0]) != 40:
951 raise GitError('expected blob, got %r' % spl)
952 (hex, type, size) = spl
954 it = _AbortableIter(chunkyreader(self.p.stdout, int(spl[2])),
955 onabort = self._abort)
960 assert(self.p.stdout.readline() == '\n')
961 self.inprogress = None
966 def _slow_get(self, id):
967 assert(id.find('\n') < 0)
968 assert(id.find('\r') < 0)
970 type = _git_capture(['git', 'cat-file', '-t', id]).strip()
973 p = subprocess.Popen(['git', 'cat-file', type, id],
974 stdout=subprocess.PIPE,
975 preexec_fn = _gitenv)
976 for blob in chunkyreader(p.stdout):
978 _git_wait('git cat-file', p)
986 treefile = ''.join(it)
987 for (mode, name, sha) in treeparse(treefile):
988 for blob in self.join(sha.encode('hex')):
990 elif type == 'commit':
991 treeline = ''.join(it).split('\n')[0]
992 assert(treeline.startswith('tree '))
993 for blob in self.join(treeline[5:]):
996 raise GitError('invalid object type %r: expected blob/tree/commit'
1000 """Generate a list of the content of all blobs that can be reached
1001 from an object. The hash given in 'id' must point to a blob, a tree
1002 or a commit. The content of all blobs that can be seen from trees or
1003 commits will be added to the list.
1006 for d in self._join(self.get(id)):
1008 except StopIteration:
1012 """Return a dictionary of all tags in the form {hash: [tag_names, ...]}."""
1014 for (n,c) in list_refs():
1015 if n.startswith('refs/tags/'):
1020 tags[c].append(name) # more than one tag can point at 'c'