1 import os, errno, zlib, time, sha, subprocess, struct, mmap, stat
# Per-user default repository location; used as the fallback repo path
# when no explicit path / BUP_DIR override is given (see guess_repo below).
home_repodir = os.path.expanduser('~/.bup')
9 class GitError(Exception):
16 raise GitError('You should call check_repo_or_die()')
17 gd = os.path.join(repodir, '.git')
18 if os.path.exists(gd):
20 return os.path.join(repodir, sub)
24 def __init__(self, filename):
27 self.map = mmap.mmap(f.fileno(), 0,
28 mmap.MAP_SHARED, mmap.PROT_READ)
29 f.close() # map will persist beyond file close
30 assert(str(self.map[0:8]) == '\377tOc\0\0\0\2')
31 self.fanout = list(struct.unpack('!256I',
32 str(buffer(self.map, 8, 256*4))))
33 self.fanout.append(0) # entry "-1"
34 nsha = self.fanout[255]
35 self.ofstable = buffer(self.map,
36 8 + 256*4 + nsha*20 + nsha*4,
38 self.ofs64table = buffer(self.map,
39 8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
41 def _ofs_from_idx(self, idx):
42 ofs = struct.unpack('!I', str(buffer(self.ofstable, idx*4, 4)))[0]
44 idx64 = ofs & 0x7fffffff
45 ofs = struct.unpack('!I',
46 str(buffer(self.ofs64table, idx64*8, 8)))[0]
49 def _idx_from_hash(self, hash):
50 assert(len(hash) == 20)
52 start = self.fanout[b1-1] # range -1..254
53 end = self.fanout[b1] # range 0..255
54 buf = buffer(self.map, 8 + 256*4, end*20)
57 mid = start + (end-start)/2
58 v = buffer(buf, mid*20, 20)
67 def find_offset(self, hash):
68 idx = self._idx_from_hash(hash)
70 return self._ofs_from_idx(idx)
def exists(self, hash):
    """Return True if *hash* (a 20-byte binary sha1) is in this pack index.

    Returns None (not False) when absent, preserving the original
    True-or-None contract that callers may rely on.
    """
    # Replaces the fragile `cond and True or None` idiom and `!= None`
    # comparison with an explicit identity test and conditional.
    if self._idx_from_hash(hash) is not None:
        return True
    return None
78 def __init__(self, dir):
81 for f in os.listdir(dir):
82 if f.endswith('.idx'):
83 self.packs.append(PackIndex(os.path.join(dir, f)))
85 def exists(self, hash):
88 for i in range(len(self.packs)):
91 # reorder so most recently used packs are searched first
92 self.packs = [p] + self.packs[:i] + self.packs[i+1:]
103 def calc_hash(type, content):
104 header = '%s %d\0' % (type, len(content))
105 sum = sha.sha(header)
110 def _shalist_sort_key(ent):
111 (mode, name, id) = ent
112 if stat.S_ISDIR(int(mode, 8)):
118 _typemap = dict(blob=3, tree=2, commit=1, tag=8)
120 def __init__(self, objcache=None):
124 self.objcache = objcache or MultiPackIndex(repo('objects/pack'))
130 assert(not self.file)
131 self.objcache.zap_also()
132 self.filename = repo('objects/bup%d' % os.getpid())
133 self.file = open(self.filename + '.pack', 'w+')
134 self.file.write('PACK\0\0\0\2\0\0\0\0')
136 def _raw_write(self, datalist):
144 def _write(self, bin, type, content):
151 szbits = (sz & 0x0f) | (_typemap[type]<<4)
154 if sz: szbits |= 0x80
155 out.append(chr(szbits))
161 z = zlib.compressobj(1)
162 out.append(z.compress(content))
163 out.append(z.flush())
def write(self, type, content):
    """Hash *content* as a git object of *type* and write it unconditionally.

    Returns whatever _write returns for the computed hash.
    """
    sha_bin = calc_hash(type, content)
    return self._write(sha_bin, type, content)
171 def maybe_write(self, type, content):
172 bin = calc_hash(type, content)
173 if not self.objcache.exists(bin):
174 self._write(bin, type, content)
175 self.objcache.add(bin)
def new_blob(self, blob):
    """Store *blob* as a git blob object, skipping the write if the
    object cache already has it (delegates to maybe_write)."""
    return self.maybe_write('blob', blob)
def new_tree(self, shalist):
    """Store a git tree object built from *shalist*, a sequence of
    (mode, name, binsha) entries, and return the result of maybe_write.

    Entries are ordered with _shalist_sort_key so the tree body matches
    git's canonical sort order.
    """
    entries = list(shalist)
    entries.sort(key=_shalist_sort_key)
    body = ''.join('%s %s\0%s' % (mode, name, shabin)
                   for (mode, name, shabin) in entries)
    return self.maybe_write('tree', body)
187 def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
189 if tree: l.append('tree %s' % tree.encode('hex'))
190 if parent: l.append('parent %s' % parent)
191 if author: l.append('author %s %s' % (author, _git_date(adate)))
192 if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
195 return self.maybe_write('commit', '\n'.join(l))
197 def new_commit(self, ref, tree, msg):
199 userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
200 oldref = ref and _read_ref(ref) or None
201 commit = self._new_commit(tree, oldref,
202 userline, now, userline, now,
205 self.close() # UGLY: needed so _update_ref can see the new objects
206 _update_ref(ref, commit.encode('hex'), oldref)
214 os.unlink(self.filename + '.pack')
218 if not f: return None
221 # update object count
223 cp = struct.pack('!i', self.count)
227 # calculate the pack sha1sum
234 f.write(sum.digest())
238 p = subprocess.Popen(['git', 'index-pack', '-v',
239 self.filename + '.pack'],
240 preexec_fn = _gitenv,
241 stdout = subprocess.PIPE)
242 out = p.stdout.read().strip()
243 if p.wait() or not out:
244 raise GitError('git index-pack returned an error')
245 nameprefix = repo('objects/pack/%s' % out)
246 os.rename(self.filename + '.pack', nameprefix + '.pack')
247 os.rename(self.filename + '.idx', nameprefix + '.idx')
251 class PackWriter_Remote(PackWriter):
252 def __init__(self, conn, objcache=None, onclose=None):
253 PackWriter.__init__(self, objcache)
255 self.filename = 'remote socket'
256 self.onclose = onclose
259 assert(not "can't reopen a PackWriter_Remote")
263 self.file.write('\0\0\0\0')
269 raise GitError("don't know how to abort remote pack writing")
271 def _raw_write(self, datalist):
273 data = ''.join(datalist)
275 self.file.write(struct.pack('!I', len(data)) + data)
279 return time.strftime('%s %z', time.localtime(date))
283 os.environ['GIT_DIR'] = os.path.abspath(repo())
286 def _read_ref(refname):
287 p = subprocess.Popen(['git', 'show-ref', '--', refname],
288 preexec_fn = _gitenv,
289 stdout = subprocess.PIPE)
290 out = p.stdout.read().strip()
293 return out.split()[0]
298 def _update_ref(refname, newval, oldval):
301 p = subprocess.Popen(['git', 'update-ref', '--', refname, newval, oldval],
302 preexec_fn = _gitenv)
307 def guess_repo(path=None):
312 repodir = os.environ.get('BUP_DIR')
314 repodir = os.path.expanduser('~/.bup')
317 def init_repo(path=None):
320 if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
321 raise GitError('"%d" exists but is not a directory\n' % d)
322 p = subprocess.Popen(['git', 'init', '--bare'], stdout=sys.stderr,
323 preexec_fn = _gitenv)
327 def check_repo_or_die(path=None):
329 if not os.path.isdir(repo('objects/pack/.')):
330 if repodir == home_repodir:
333 log('error: %r is not a bup/git repository\n' % repo())