bup repositories are in Git format. This library allows us to
interact with the Git data structures.
"""
-import os, zlib, time, subprocess, struct, stat, re, tempfile, heapq
+import os, sys, zlib, time, subprocess, struct, stat, re, tempfile, glob
from bup.helpers import *
-from bup import _helpers
+from bup import _helpers, path, bloom
-MIDX_VERSION = 2
+MIDX_VERSION = 4
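+# v4 .midx files add, after the SHA table, a table recording which .idx each
+# object came from (read below as PackMidx.whichlist).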
+SEEK_END = 2  # os.SEEK_END is not defined in Python 2.4
verbose = 0
ignore_midx = 0
return os.path.join(repodir, sub)
+def repo_rel(path):
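+    """Return path relative to the repository root if it lies inside it,
+    with any leading 'index-cache/' stripped; otherwise return it as-is."""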
+ full = os.path.abspath(path)
+ fullrepo = os.path.abspath(repo(''))
+ if not fullrepo.endswith('/'):
+ fullrepo += '/'
+ if full.startswith(fullrepo):
+ path = full[len(fullrepo):]
+ if path.startswith('index-cache/'):
+ path = path[len('index-cache/'):]
+ return path
+
+
+def all_packdirs():
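+    """Return all directories that may contain pack/idx files: the local
+    repository's pack dir plus any index-cache directories."""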
+ paths = [repo('objects/pack')]
+ paths += glob.glob(repo('index-cache/*/.'))
+ return paths
+
+
def auto_midx(objdir):
- main_exe = os.environ.get('BUP_MAIN_EXE') or sys.argv[0]
- args = [main_exe, 'midx', '--auto', '--dir', objdir]
- rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
+ args = [path.exe(), 'midx', '--auto', '--dir', objdir]
+ try:
+ rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
+ except OSError, e:
+ # make sure 'args' gets printed to help with debugging
+ add_error('%r: exception: %s' % (args, e))
+ raise
+ if rv:
+ add_error('%r: returned %d' % (args, rv))
+
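+    # New .idx files can also make the bloom filter stale, so regenerate it
+    # by shelling out to bup's own 'bloom' command, mirroring the midx call.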
+ args = [path.exe(), 'bloom', '--dir', objdir]
+ try:
+ rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
+ except OSError, e:
+ # make sure 'args' gets printed to help with debugging
+ add_error('%r: exception: %s' % (args, e))
+ raise
if rv:
add_error('%r: returned %d' % (args, rv))
return self._ofs_from_idx(idx)
return None
- def exists(self, hash):
+ def exists(self, hash, want_source=False):
"""Return nonempty if the object exists in this index."""
- return hash and (self._idx_from_hash(hash) != None) and True or None
+ if hash and (self._idx_from_hash(hash) != None):
+ return want_source and os.path.basename(self.name) or True
+ return None
def __len__(self):
return int(self.fanout[255])
str(buffer(self.map, 0, 256*4))))
self.fanout.append(0) # entry "-1"
nsha = self.fanout[255]
- self.shatable = buffer(self.map, 256*4, nsha*24)
+ self.sha_ofs = 256*4
+ self.shatable = buffer(self.map, self.sha_ofs, nsha*24)
def _ofs_from_idx(self, idx):
return struct.unpack('!I', str(self.shatable[idx*24 : idx*24+4]))[0]
str(buffer(self.map, 8, 256*4))))
self.fanout.append(0) # entry "-1"
nsha = self.fanout[255]
- self.shatable = buffer(self.map, 8 + 256*4, nsha*20)
+ self.sha_ofs = 8 + 256*4
+ self.shatable = buffer(self.map, self.sha_ofs, nsha*20)
self.ofstable = buffer(self.map,
- 8 + 256*4 + nsha*20 + nsha*4,
+ self.sha_ofs + nsha*20 + nsha*4,
nsha*4)
self.ofs64table = buffer(self.map,
8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
extract_bits = _helpers.extract_bits
-
class PackMidx:
"""Wrapper which contains data from multiple index files.
Multiple index (.midx) files constitute a wrapper around index (.idx) files
self.bits = _helpers.firstword(self.map[8:12])
self.entries = 2**self.bits
self.fanout = buffer(self.map, 12, self.entries*4)
- shaofs = 12 + self.entries*4
- nsha = self._fanget(self.entries-1)
- self.shalist = buffer(self.map, shaofs, nsha*20)
- self.idxnames = str(self.map[shaofs + 20*nsha:]).split('\0')
+ self.sha_ofs = 12 + self.entries*4
+ self.nsha = nsha = self._fanget(self.entries-1)
+ self.shatable = buffer(self.map, self.sha_ofs, nsha*20)
+ self.which_ofs = self.sha_ofs + 20*nsha
+ self.whichlist = buffer(self.map, self.which_ofs, nsha*4)
+ self.idxnames = str(self.map[self.which_ofs + 4*nsha:]).split('\0')
def _init_failed(self):
self.bits = 0
self.entries = 1
self.fanout = buffer('\0\0\0\0')
- self.shalist = buffer('\0'*20)
+ self.shatable = buffer('\0'*20)
self.idxnames = []
def _fanget(self, i):
return _helpers.firstword(s)
def _get(self, i):
- return str(self.shalist[i*20:(i+1)*20])
+ return str(self.shatable[i*20:(i+1)*20])
+
+ def _get_idx_i(self, i):
+ return struct.unpack('!I', self.whichlist[i*4:(i+1)*4])[0]
+    def _get_idxname(self, i):
+        return self.idxnames[self._get_idx_i(i)]
+
-    def exists(self, hash):
+    def exists(self, hash, want_source=False):
"""Return nonempty if the object exists in the index files."""
global _total_searches, _total_steps
_total_searches += 1
end = mid
endv = _helpers.firstword(v)
else: # got it!
- return True
+ return want_source and self._get_idxname(mid) or True
return None
def __iter__(self):
for i in xrange(self._fanget(self.entries-1)):
- yield buffer(self.shalist, i*20, 20)
+ yield buffer(self.shatable, i*20, 20)
def __len__(self):
return int(self._fanget(self.entries-1))
assert(_mpi_count == 0) # these things suck tons of VM; don't waste it
_mpi_count += 1
self.dir = dir
- self.also = {}
+ self.also = set()
self.packs = []
+ self.do_bloom = False
+ self.bloom = None
self.refresh()
def __del__(self):
def __len__(self):
return sum(len(pack) for pack in self.packs)
- def exists(self, hash):
+ def exists(self, hash, want_source=False):
"""Return nonempty if the object exists in the index files."""
global _total_searches
_total_searches += 1
if hash in self.also:
return True
- for i in range(len(self.packs)):
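+        # A bloom filter can give false positives, never false negatives: a
+        # miss means the hash is definitely absent, so we can bail out early.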
+ if self.do_bloom and self.bloom is not None:
+ _total_searches -= 1 # will be incremented by bloom
+ if self.bloom.exists(hash):
+ self.do_bloom = False
+ else:
+ return None
+ for i in xrange(len(self.packs)):
p = self.packs[i]
_total_searches -= 1 # will be incremented by sub-pack
- if p.exists(hash):
+ ix = p.exists(hash, want_source=want_source)
+ if ix:
# reorder so most recently used packs are searched first
self.packs = [p] + self.packs[:i] + self.packs[i+1:]
- return p.name
+ return ix
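+        # Nothing matched anywhere; re-enable the bloom filter so the next
+        # lookup for a missing object can be rejected cheaply.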
+ self.do_bloom = True
return None
def refresh(self, skip_midx = False):
The module-global variable 'ignore_midx' can force this function to
always act as if skip_midx was True.
"""
+        self.bloom = None # Always reopen the bloom as it may have been replaced
+ self.do_bloom = False
skip_midx = skip_midx or ignore_midx
d = dict((p.name, p) for p in self.packs
if not skip_midx or not isinstance(p, PackMidx))
if isinstance(ix, PackMidx):
for name in ix.idxnames:
d[os.path.join(self.dir, name)] = ix
- for f in os.listdir(self.dir):
- full = os.path.join(self.dir, f)
- if f.endswith('.midx') and not d.get(full):
+ for full in glob.glob(os.path.join(self.dir,'*.midx')):
+ if not d.get(full):
mx = PackMidx(full)
(mxd, mxf) = os.path.split(mx.name)
- broken = 0
+ broken = False
for n in mx.idxnames:
if not os.path.exists(os.path.join(mxd, n)):
log(('warning: index %s missing\n' +
' used by %s\n') % (n, mxf))
- broken += 1
+ broken = True
if broken:
del mx
unlink(full)
midxl.append(mx)
midxl.sort(lambda x,y: -cmp(len(x),len(y)))
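+        # midxl is sorted largest-first, so a midx is kept only when it
+        # covers at least one .idx not already claimed by a larger midx.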
for ix in midxl:
- any = 0
+ any_needed = False
for sub in ix.idxnames:
found = d.get(os.path.join(self.dir, sub))
if not found or isinstance(found, PackIdx):
# doesn't exist, or exists but not in a midx
- d[ix.name] = ix
- for name in ix.idxnames:
- d[os.path.join(self.dir, name)] = ix
- any += 1
+ any_needed = True
break
- if not any and not ix.force_keep:
+ if any_needed:
+ d[ix.name] = ix
+ for name in ix.idxnames:
+ d[os.path.join(self.dir, name)] = ix
+ elif not ix.force_keep:
debug1('midx: removing redundant: %s\n'
% os.path.basename(ix.name))
unlink(ix.name)
- for f in os.listdir(self.dir):
- full = os.path.join(self.dir, f)
- if f.endswith('.idx') and not d.get(full):
+ for full in glob.glob(os.path.join(self.dir,'*.idx')):
+ if not d.get(full):
try:
ix = open_idx(full)
except GitError, e:
add_error(e)
continue
d[full] = ix
+ bfull = os.path.join(self.dir, 'bup.bloom')
+ if self.bloom is None and os.path.exists(bfull):
+ self.bloom = bloom.ShaBloom(bfull)
self.packs = list(set(d.values()))
+ self.packs.sort(lambda x,y: -cmp(len(x),len(y)))
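+        # Only trust the bloom filter when it covers at least as many
+        # objects as the packs themselves; otherwise it is stale.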
+ if self.bloom and self.bloom.valid() and len(self.bloom) >= len(self):
+ self.do_bloom = True
+ else:
+ self.bloom = None
debug1('PackIdxList: using %d index%s.\n'
% (len(self.packs), len(self.packs)!=1 and 'es' or ''))
- def packname_containing(self, hash):
- # figure out which pack contains a given hash.
- # FIXME: if the midx file format would just *store* this information,
- # we could calculate it a lot more efficiently. But it's not needed
- # often, so let's do it like this.
- for f in os.listdir(self.dir):
- if f.endswith('.idx'):
- full = os.path.join(self.dir, f)
- try:
- ix = open_idx(full)
- except GitError, e:
- add_error(e)
- continue
- if ix.exists(hash):
- return full
-
def add(self, hash):
"""Insert an additional object in the list."""
- self.also[hash] = 1
-
- def zap_also(self):
- """Remove all additional objects from the list."""
- self.also = {}
+ self.also.add(hash)
def calc_hash(type, content):
def idxmerge(idxlist, final_progress=True):
"""Generate a list of all the objects reachable in a PackIdxList."""
- total = sum(len(i) for i in idxlist)
- iters = (iter(i) for i in idxlist)
- heap = [(next(it), it) for it in iters]
- heapq.heapify(heap)
- count = 0
- last = None
- while heap:
- if (count % 10024) == 0:
- progress('Reading indexes: %.2f%% (%d/%d)\r'
- % (count*100.0/total, count, total))
- (e, it) = heap[0]
- if e != last:
- yield e
- last = e
- count += 1
- e = next(it)
- if e:
- heapq.heapreplace(heap, (e, it))
- else:
- heapq.heappop(heap)
- if final_progress:
- log('Reading indexes: %.2f%% (%d/%d), done.\n' % (100, total, total))
+ def pfunc(count, total):
+ qprogress('Reading indexes: %.2f%% (%d/%d)\r'
+ % (count*100.0/total, count, total))
+ def pfinal(count, total):
+ if final_progress:
+ progress('Reading indexes: %.2f%% (%d/%d), done.\n'
+ % (100, total, total))
+ return merge_iter(idxlist, 10024, pfunc, pfinal)
+
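+# Default objcache_maker for PackWriter; a module-level function so it can be
+# used as a default argument.  It is only invoked the first time an exists()
+# check actually needs the object cache.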
+def _make_objcache():
+ return PackIdxList(repo('objects/pack'))
class PackWriter:
"""Writes Git objects insid a pack file."""
- def __init__(self, objcache_maker=None):
+ def __init__(self, objcache_maker=_make_objcache):
self.count = 0
self.outbytes = 0
self.filename = None
def __del__(self):
self.close()
- def _make_objcache(self):
- if self.objcache == None:
- if self.objcache_maker:
- self.objcache = self.objcache_maker()
- else:
- self.objcache = PackIdxList(repo('objects/pack'))
-
def _open(self):
if not self.file:
- self._make_objcache()
(fd,name) = tempfile.mkstemp(suffix='.pack', dir=repo('objects'))
self.file = os.fdopen(fd, 'w+b')
assert(name.endswith('.pack'))
self.file.write('PACK\0\0\0\2\0\0\0\0')
self.idx = list(list() for i in xrange(256))
- # the 'sha' parameter is used in client.py's _raw_write(), but not needed
- # in this basic version.
def _raw_write(self, datalist, sha):
self._open()
f = self.file
# to our hashsplit algorithm.) f.write() does its own buffering,
# but that's okay because we'll flush it in _end().
oneblob = ''.join(datalist)
- f.write(oneblob)
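+        # Re-raise write failures as GitError while preserving the original
+        # traceback via Python 2's three-argument raise.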
+ try:
+ f.write(oneblob)
+ except IOError, e:
+ raise GitError, e, sys.exc_info()[2]
nw = len(oneblob)
crc = zlib.crc32(oneblob) & 0xffffffff
self._update_idx(sha, crc, nw)
self.outbytes = self.count = 0
return id
- def write(self, type, content):
- """Write an object in this pack file."""
- return self._write(calc_hash(type, content), type, content)
+ def _require_objcache(self):
+ if self.objcache is None and self.objcache_maker:
+ self.objcache = self.objcache_maker()
+ if self.objcache is None:
+ raise GitError(
+ "PackWriter not opened or can't check exists w/o objcache")
- def exists(self, id):
+ def exists(self, id, want_source=False):
"""Return non-empty if an object is found in the object cache."""
- if not self.objcache:
- self._make_objcache()
- return self.objcache.exists(id)
+ self._require_objcache()
+ return self.objcache.exists(id, want_source=want_source)
def maybe_write(self, type, content):
"""Write an object to the pack file if not present and return its id."""
+ self._require_objcache()
sha = calc_hash(type, content)
if not self.exists(sha):
self._write(sha, type, content)
f.close()
os.unlink(self.filename + '.pack')
- def _end(self):
+ def _end(self, run_midx=True):
f = self.file
if not f: return None
self.file = None
f.write(packbin)
f.close()
- idx_f = open(self.filename + '.idx', 'wb')
- obj_list_sha = self._write_pack_idx_v2(idx_f, idx, packbin)
- idx_f.close()
+ obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin)
nameprefix = repo('objects/pack/pack-%s' % obj_list_sha)
if os.path.exists(self.filename + '.map'):
os.rename(self.filename + '.pack', nameprefix + '.pack')
os.rename(self.filename + '.idx', nameprefix + '.idx')
- auto_midx(repo('objects/pack'))
+ if run_midx:
+ auto_midx(repo('objects/pack'))
return nameprefix
- def close(self):
+ def close(self, run_midx=True):
"""Close the pack file and move it to its definitive path."""
- return self._end()
-
- def _write_pack_idx_v2(self, file, idx, packbin):
- sum = Sha1()
-
- def write(data):
- file.write(data)
- sum.update(data)
-
- write('\377tOc\0\0\0\2')
-
- n = 0
- for part in idx:
- n += len(part)
- write(struct.pack('!i', n))
- part.sort(key=lambda x: x[0])
+ return self._end(run_midx=run_midx)
+
+ def _write_pack_idx_v2(self, filename, idx, packbin):
+ idx_f = open(filename, 'w+b')
+ idx_f.write('\377tOc\0\0\0\2')
+
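+        # idx v2 layout: 8-byte header, 256-entry fanout (4 bytes each),
+        # then per object a 20-byte SHA, 4-byte CRC and 4-byte offset
+        # (28 bytes per object), followed by 8-byte entries for offsets
+        # >= 2**31, the pack's SHA-1, and the SHA-1 of the index itself.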
+ ofs64_ofs = 8 + 4*256 + 28*self.count
+ idx_f.truncate(ofs64_ofs)
+ idx_f.seek(0)
+ idx_map = mmap_readwrite(idx_f, close=False)
+ idx_f.seek(0, SEEK_END)
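+        # The C helper fills the fanout, SHA, CRC and offset tables through
+        # the mmap, and writes any 64-bit offset entries through the file
+        # handle, which now points just past the fixed-size region.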
+ count = _helpers.write_idx(idx_f, idx_map, idx, self.count)
+ assert(count == self.count)
+ idx_map.close()
+ idx_f.write(packbin)
+
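+        # Re-read the finished file to compute two SHA-1s in one pass:
+        # idx_sum covers the whole index (the trailing checksum the format
+        # requires), while obj_list_sum covers only the sorted object SHAs
+        # and yields the pack's final name.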
+ idx_f.seek(0)
+ idx_sum = Sha1()
+ b = idx_f.read(8 + 4*256)
+ idx_sum.update(b)
obj_list_sum = Sha1()
- for part in idx:
- for entry in part:
- write(entry[0])
- obj_list_sum.update(entry[0])
- for part in idx:
- for entry in part:
- write(struct.pack('!I', entry[1]))
- ofs64_list = []
- for part in idx:
- for entry in part:
- if entry[2] & 0x80000000:
- write(struct.pack('!I', 0x80000000 | len(ofs64_list)))
- ofs64_list.append(struct.pack('!Q', entry[2]))
- else:
- write(struct.pack('!i', entry[2]))
- for ofs64 in ofs64_list:
- write(ofs64)
-
- write(packbin)
- file.write(sum.digest())
- return obj_list_sum.hexdigest()
+ for b in chunkyreader(idx_f, 20*self.count):
+ idx_sum.update(b)
+ obj_list_sum.update(b)
+ namebase = obj_list_sum.hexdigest()
+
+ for b in chunkyreader(idx_f):
+ idx_sum.update(b)
+ idx_f.write(idx_sum.digest())
+ idx_f.close()
+
+ return namebase
def _git_date(date):
def init_repo(path=None):
"""Create the Git bare repository for bup in a given path."""
guess_repo(path)
- d = repo()
+ d = repo() # appends a / to the path
+ parent = os.path.dirname(os.path.dirname(d))
+ if parent and not os.path.exists(parent):
+ raise GitError('parent directory "%s" does not exist\n' % parent)
if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
raise GitError('"%d" exists but is not a directory\n' % d)
p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr,