1 import os, errno, zlib, time, sha, subprocess, struct, mmap, stat, re
# Default bup repository location, used when no explicit path/BUP_DIR is given.
home_repodir = os.path.expanduser('~/.bup')
class GitError(Exception):
    # Module-wide error type raised for all git/bup repository failures.
    # (class body lines elided in this excerpt)

# Fragment of repo(sub): map *sub* to an absolute path inside the active
# repository.  The 'def' line and several body lines are elided here.
    raise GitError('You should call check_repo_or_die()')
    gd = os.path.join(repodir, '.git')
    if os.path.exists(gd):
        # (elided: presumably switches repodir to the .git subdir -- confirm)
    return os.path.join(repodir, sub)
    def __init__(self, filename):
        # Memory-map a git packfile index (.idx, version 2) for sha lookups.
        # (elided in this excerpt: opening *filename* as f)
        self.map = mmap.mmap(f.fileno(), 0,
                             mmap.MAP_SHARED, mmap.PROT_READ)
        f.close() # map will persist beyond file close
        # .idx v2 magic: '\377tOc' followed by big-endian version 2.
        assert(str(self.map[0:8]) == '\377tOc\0\0\0\2')
        # fanout[b] = number of shas whose first byte is <= b.
        self.fanout = list(struct.unpack('!256I',
                                         str(buffer(self.map, 8, 256*4))))
        self.fanout.append(0) # entry "-1"
        nsha = self.fanout[255]  # total number of objects in this index
        # 32-bit offset table: after header, fanout, sha table, CRC table.
        self.ofstable = buffer(self.map,
                               8 + 256*4 + nsha*20 + nsha*4,
        # (elided: length argument of the buffer() call above)
        # 64-bit offsets (for packs > 2GB) follow the 32-bit table.
        self.ofs64table = buffer(self.map,
                                 8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
41 def _ofs_from_idx(self, idx):
42 ofs = struct.unpack('!I', str(buffer(self.ofstable, idx*4, 4)))[0]
44 idx64 = ofs & 0x7fffffff
45 ofs = struct.unpack('!I',
46 str(buffer(self.ofs64table, idx64*8, 8)))[0]
    def _idx_from_hash(self, hash):
        # Binary-search the sorted sha table for *hash*; returns the entry
        # index (presumably None when absent -- the loop tail is elided).
        assert(len(hash) == 20)  # binary sha1, not hex
        # (elided: b1 = first byte of hash -- TODO confirm)
        start = self.fanout[b1-1] # range -1..254
        end = self.fanout[b1] # range 0..255
        buf = buffer(self.map, 8 + 256*4, end*20)  # view over the sha table
        # (elided: loop header)
        mid = start + (end-start)/2  # py2 integer division
        v = buffer(buf, mid*20, 20)  # candidate sha at mid
        # (comparison/narrowing lines elided in this excerpt)
    def find_offset(self, hash):
        # Return the pack byte offset for *hash*.  An intermediate line is
        # elided here (presumably a None check on idx -- confirm).
        idx = self._idx_from_hash(hash)
        # (elided line)
        return self._ofs_from_idx(idx)
73 def exists(self, hash):
74 return (self._idx_from_hash(hash) != None) and True or None
    def __init__(self, dir):
        # Load a PackIndex for every .idx file found in *dir*.
        # (elided: initialization of self.dir / self.packs)
        for f in os.listdir(self.dir):
            if f.endswith('.idx'):
                self.packs.append(PackIndex(os.path.join(self.dir, f)))
    def exists(self, hash):
        # Search all packs for *hash*; on a hit, move that pack to the front
        # so frequently-hit packs are consulted first.  (body partly elided)
        for i in range(len(self.packs)):
            # (elided: p = self.packs[i] and the p.exists(hash) test)
            # reorder so most recently used packs are searched first
            self.packs = [p] + self.packs[:i] + self.packs[i+1:]
def calc_hash(type, content):
    # Compute a git object id: sha1 over '<type> <len>\0' + content.
    # (the update(content) / digest return lines are elided in this excerpt)
    header = '%s %d\0' % (type, len(content))
    sum = sha.sha(header)
def _shalist_sort_key(ent):
    # Sort key for tree entries: git sorts directory names as if they
    # ended with '/'.  (the return lines are elided in this excerpt)
    (mode, name, id) = ent
    if stat.S_ISDIR(int(mode, 8)):  # mode is an octal string, e.g. '40000'
119 _typemap = dict(blob=3, tree=2, commit=1, tag=8)
    def __init__(self, objcache_maker=None):
        # (most of the constructor is elided in this excerpt)
        # Optional factory for the object-existence cache; when None,
        # _make_objcache falls back to a MultiPackIndex over the repo packs.
        self.objcache_maker = objcache_maker
    def _make_objcache(self):
        # Lazily create self.objcache, used by maybe_write to skip objects
        # that already exist in the repository.
        if not self.objcache:
            if self.objcache_maker:
                self.objcache = self.objcache_maker()
            # (elided: the 'else:' line)
                self.objcache = MultiPackIndex(repo('objects/pack'))
        # Fragment of _open(): start a fresh pack file (the 'def' line is
        # elided in this excerpt).
        self._make_objcache()
        self.filename = repo('objects/bup%d' % os.getpid())
        self.file = open(self.filename + '.pack', 'w+')
        # Pack header: 'PACK', version 2, object count 0 (patched on close).
        self.file.write('PACK\0\0\0\2\0\0\0\0')
    def _raw_write(self, datalist):
        # Append raw chunks to the pack file, tracking bytes written.
        # (loop header and write call elided in this excerpt)
            self.outbytes += len(d)
    def _write(self, bin, type, content):
        # Encode one object into the pack: git's variable-length size
        # header, then zlib-compressed content.  (several lines elided)
        szbits = (sz & 0x0f) | (_typemap[type]<<4)  # low size nibble + type
        # NOTE(review): continuation bit depends on remaining size; the
        # elided lines presumably shift sz -- confirm against the full file.
        if sz: szbits |= 0x80
        out.append(chr(szbits))
        # (elided lines)
        z = zlib.compressobj(1)  # level 1: fastest compression
        out.append(z.compress(content))
        out.append(z.flush())
    def breakpoint(self):
        # Finish the current pack and start a new one; resets the counters.
        # (surrounding lines elided in this excerpt)
        self.outbytes = self.count = 0
183 def write(self, type, content):
184 return self._write(calc_hash(type, content), type, content)
    def maybe_write(self, type, content):
        # Write the object only if the objcache doesn't already know it.
        # (the trailing 'return bin' is elided in this excerpt -- confirm)
        bin = calc_hash(type, content)
        if not self.objcache:
            self._make_objcache()
        if not self.objcache.exists(bin):
            self._write(bin, type, content)
            self.objcache.add(bin)
195 def new_blob(self, blob):
196 return self.maybe_write('blob', blob)
198 def new_tree(self, shalist):
199 shalist = sorted(shalist, key = _shalist_sort_key)
200 l = ['%s %s\0%s' % (mode,name,bin)
201 for (mode,name,bin) in shalist]
202 return self.maybe_write('tree', ''.join(l))
    def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
        # Assemble a git commit object line by line; every field is optional.
        # (list initialization and the message lines are elided here)
        if tree: l.append('tree %s' % tree.encode('hex'))
        if parent: l.append('parent %s' % parent.encode('hex'))
        if author: l.append('author %s %s' % (author, _git_date(adate)))
        if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
        # (elided: blank separator line and msg)
        return self.maybe_write('commit', '\n'.join(l))
    def new_commit(self, parent, tree, msg):
        # Convenience wrapper: commit *tree* as the current user, now.
        # (elided: presumably now = time.time() -- confirm)
        userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
        commit = self._new_commit(tree, parent,
                                  userline, now, userline, now,
        # (remaining arguments and return elided in this excerpt)
        # Fragment of abort(): discard the unfinished pack file.
        os.unlink(self.filename + '.pack')
        # Fragment of close(): finalize the pack file and hand it to git.
        # (many lines elided in this excerpt)
        if not f: return None
        # update object count
        # NOTE(review): '!i' is signed; the pack-header count field is an
        # unsigned 32-bit big-endian int ('!I') -- harmless below 2**31.
        cp = struct.pack('!i', self.count)
        # calculate the pack sha1sum
        f.write(sum.digest())  # trailing whole-pack checksum
        # Let git build the .idx; index-pack prints the pack name on stdout.
        p = subprocess.Popen(['git', 'index-pack', '-v',
        # (elided argument line)
                              self.filename + '.pack'],
                             preexec_fn = _gitenv,
                             stdout = subprocess.PIPE)
        out = p.stdout.read().strip()
        _git_wait('git index-pack', p)
        # (elided: the check guarding this raise)
        raise GitError('git index-pack produced no output')
        # Rename pack+idx to the final name git chose.
        nameprefix = repo('objects/pack/%s' % out)
        os.rename(self.filename + '.pack', nameprefix + '.pack')
        os.rename(self.filename + '.idx', nameprefix + '.idx')
    # Fragment of _git_date(): format a timestamp for commit objects.
    # NOTE(review): '%s' in strftime is a non-portable glibc extension.
    return time.strftime('%s %z', time.localtime(date))
    # Fragment of _gitenv(): point child git processes at our repository
    # (used as a subprocess preexec_fn throughout this module).
    os.environ['GIT_DIR'] = os.path.abspath(repo())
def read_ref(refname):
    # Resolve *refname* to a binary sha via 'git show-ref'; the elided
    # lines presumably return None when the ref is missing -- confirm.
    p = subprocess.Popen(['git', 'show-ref', '--', refname],
                         preexec_fn = _gitenv,
                         stdout = subprocess.PIPE)
    out = p.stdout.read().strip()
    rv = p.wait() # not fatal
    # (elided: rv/out validation lines)
    return out.split()[0].decode('hex')
def update_ref(refname, newval, oldval):
    # Atomically move *refname* from *oldval* to *newval* (binary shas)
    # via 'git update-ref'.  (validation lines elided in this excerpt)
    p = subprocess.Popen(['git', 'update-ref', '--', refname,
                          newval.encode('hex'), oldval.encode('hex')],
                         preexec_fn = _gitenv)
    _git_wait('git update-ref', p)
def guess_repo(path=None):
    # Choose the repository directory: explicit path (handled in elided
    # lines), then $BUP_DIR, then the ~/.bup default.
    repodir = os.environ.get('BUP_DIR')
    # (elided: fallback condition)
    repodir = os.path.expanduser('~/.bup')
def init_repo(path=None):
    """Create a new bup repository: a bare git repo with pack index v2."""
    # (elided in this excerpt: repodir selection and d = repo())
    if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
        # BUG FIX: the message used '%d' (integer conversion) with a string
        # path, raising TypeError instead of the intended GitError text.
        raise GitError('"%s" exists but is not a directory\n' % d)
    p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr,
                         preexec_fn = _gitenv)
    _git_wait('git init', p)
    # Force .idx version 2 so PackIndex (which asserts the v2 magic) can
    # read the index files git generates for our packs.
    p = subprocess.Popen(['git', 'config', 'pack.indexVersion', '2'],
                         stdout=sys.stderr, preexec_fn = _gitenv)
    _git_wait('git config', p)
def check_repo_or_die(path=None):
    # Verify the chosen directory looks like a bup/git repository; on
    # failure log and (in elided lines) exit.
    if not os.path.isdir(repo('objects/pack/.')):
        if repodir == home_repodir:
            # (elided: special handling for the default ~/.bup repo)
        log('error: %r is not a bup/git repository\n' % repo())
    # Fragment of _treeparse(buf): yield (mode, name, binsha) triples from
    # a raw git tree object.  (its 'def' line is elided in this excerpt)
    while ofs < len(buf):
        z = buf[ofs:].find('\0')
        # (elided: assert on z)
        spl = buf[ofs:ofs+z].split(' ', 1)  # header is '<mode> <name>'
        assert(len(spl) == 2)
        sha = buf[ofs+z+1:ofs+z+1+20]  # 20 raw sha bytes follow the NUL
        # (elided: ofs advance)
        yield (spl[0], spl[1], sha)
    # Fragment of the git version check (the enclosing 'def' is elided).
    p = subprocess.Popen(['git', '--version'],
                         stdout=subprocess.PIPE)
    gvs = p.stdout.read()
    _git_wait('git --version', p)
    # NOTE(review): the '.' in this pattern is an unescaped wildcard; it
    # still matches real version strings, but '\.' was probably intended.
    m = re.match(r'git version (\S+.\S+)', gvs)
    # (elided: the 'if not m:' guard for this raise)
    raise GitError('git --version weird output: %r' % gvs)
    # NOTE(review): tuples of *strings* compare lexicographically, so e.g.
    # a future '1.10' would sort below '1.5' -- confirm intent.
    _ver = tuple(m.group(1).split('.'))
    needed = ('1','5','4')
    # (elided: the comparison guarding this raise)
    raise GitError('git version %s or higher is required; you have %s'
                   % ('.'.join(needed), '.'.join(_ver)))
def _git_wait(cmd, p):
    # Wait for subprocess *p*; raise GitError naming *cmd* on nonzero exit.
    # (elided: rv = p.wait() and the nonzero test)
    raise GitError('%s returned %d' % (cmd, rv))
def _git_capture(argv):
    # Run a git command and return its stdout (the read/return lines are
    # elided in this excerpt).
    p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv)
    # (elided: capture of p.stdout)
    _git_wait(repr(argv), p)
        # Fragment of CatPipe.__init__ (class header and earlier lines
        # elided): with git >= 1.5.6 keep a persistent 'cat-file --batch'
        # child for fast object reads; otherwise fall back to one process
        # per object.
        wanted = ('1','5','6')
        # (elided: version comparison)
        log('warning: git version < %s; bup will be slow.\n'
        # (elided: format arguments)
        self.get = self._slow_get
        # (elided: else branch line)
        self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  preexec_fn = _gitenv)
        self.get = self._fast_get
    def _fast_get(self, id):
        # Fetch object *id* through the persistent cat-file --batch pipe;
        # a generator yielding header info then content chunks (the yield
        # lines are elided in this excerpt).
        assert(id.find('\n') < 0)  # ids are single-line protocol tokens
        assert(id.find('\r') < 0)
        # (elided lines)
        self.p.stdin.write('%s\n' % id)
        hdr = self.p.stdout.readline()
        # (elided: presumably spl = hdr.split() and a missing-object check)
        assert(len(spl) == 3)
        assert(len(spl[0]) == 40)  # 40-hex sha
        (hex, type, size) = spl
        # (elided: yield of the type)
        for blob in chunkyreader(self.p.stdout, int(spl[2])):
            # (elided: yield blob)
        assert(self.p.stdout.readline() == '\n')  # batch record terminator
    def _slow_get(self, id):
        # Fallback for old git: spawn one 'git cat-file' per request.
        assert(id.find('\n') < 0)
        assert(id.find('\r') < 0)
        # (elided lines)
        type = _git_capture(['git', 'cat-file', '-t', id]).strip()
        # (elided: yield of the type)
        p = subprocess.Popen(['git', 'cat-file', type, id],
                             stdout=subprocess.PIPE,
                             preexec_fn = _gitenv)
        for blob in chunkyreader(p.stdout):
            # (elided: yield blob)
        _git_wait('git cat-file', p)
        # Fragment of _join(it): recursively flatten a git object into the
        # blob contents it reaches (surrounding branch lines elided).
        treefile = ''.join(it)
        for (mode, name, sha) in _treeparse(treefile):
            for blob in self.join(sha.encode('hex')):
                # (elided: yield blob)
        elif type == 'commit':
            # A commit's first line is 'tree <hexsha>'; recurse into it.
            treeline = ''.join(it).split('\n')[0]
            assert(treeline.startswith('tree '))
            for blob in self.join(treeline[5:]):
                # (elided: yield blob)
        raise GitError('unknown object type %r' % type)

    # Fragment of join(id): public entry point delegating to _join.
    for d in self._join(self.get(id)):
        # (rest elided in this excerpt)