-#!/usr/bin/env python
-import sys, math, struct, glob, resource
-import tempfile
-from bup import options, git, midx, _helpers
-from bup.helpers import *
+#!/bin/sh
+"""": # -*-python-*-
+bup_python="$(dirname "$0")/bup-python" || exit $?
+exec "$bup_python" "$0" ${1+"$@"}
+"""
+# end of bup preamble
+
+from __future__ import absolute_import
+import glob, math, os, resource, struct, sys, tempfile
+
+from bup import options, git, midx, _helpers, xstat
+from bup.compat import range
+from bup.helpers import (Sha1, add_error, atomically_replaced_file, debug1, fdatasync,
+ handle_ctrl_c, log, mmap_readwrite, qprogress,
+ saved_errors, unlink)
+
PAGE_SIZE=4096
SHA_PER_PAGE=PAGE_SIZE/20.
def _group(l, count):
- for i in xrange(0, len(l), count):
+ for i in range(0, len(l), count):
yield l[i:i+count]
-
-
+
+
def max_files():
mf = min(resource.getrlimit(resource.RLIMIT_NOFILE))
if mf > 32:
log('Checking %s.\n' % nicename)
try:
ix = git.open_idx(name)
- except git.GitError, e:
+ except git.GitError as e:
add_error('%s: %s' % (name, e))
return
for count,subname in enumerate(ix.idxnames):
inp = []
total = 0
allfilenames = []
- for name in infilenames:
- ix = git.open_idx(name)
- inp.append((
- ix.map,
- len(ix),
- ix.sha_ofs,
- isinstance(ix, midx.PackMidx) and ix.which_ofs or 0,
- len(allfilenames),
- ))
- for n in ix.idxnames:
- allfilenames.append(os.path.basename(n))
- total += len(ix)
- inp.sort(lambda x,y: cmp(str(y[0][y[2]:y[2]+20]),str(x[0][x[2]:x[2]+20])))
-
- if not _first: _first = outdir
- dirprefix = (_first != outdir) and git.repo_rel(outdir)+': ' or ''
- log('midx: %s%screating from %d files (%d objects).\n'
- % (dirprefix, prefixstr, len(infilenames), total))
- if (opt.auto and (total < 1024 and len(infilenames) < 3)) \
- or ((opt.auto or opt.force) and len(infilenames) < 2) \
- or (opt.force and not total):
- debug1('midx: nothing to do.\n')
- return
-
- pages = int(total/SHA_PER_PAGE) or 1
- bits = int(math.ceil(math.log(pages, 2)))
- entries = 2**bits
- debug1('midx: table size: %d (%d bits)\n' % (entries*4, bits))
-
- unlink(outfilename)
- f = open(outfilename + '.tmp', 'w+b')
- f.write('MIDX')
- f.write(struct.pack('!II', midx.MIDX_VERSION, bits))
- assert(f.tell() == 12)
-
- f.truncate(12 + 4*entries + 20*total + 4*total)
-
- fmap = mmap_readwrite(f, close=False)
-
- count = merge_into(fmap, bits, total, inp)
- del fmap
-
- f.seek(0, git.SEEK_END)
- f.write('\0'.join(allfilenames))
- f.close()
- os.rename(outfilename + '.tmp', outfilename)
-
- # this is just for testing
+ midxs = []
+ try:
+ for name in infilenames:
+ ix = git.open_idx(name)
+ midxs.append(ix)
+ inp.append((
+ ix.map,
+ len(ix),
+ ix.sha_ofs,
+ isinstance(ix, midx.PackMidx) and ix.which_ofs or 0,
+ len(allfilenames),
+ ))
+ for n in ix.idxnames:
+ allfilenames.append(os.path.basename(n))
+ total += len(ix)
+    # NOTE(review): str() of an mmap slice is a stable sort key only on
+    # Python 2; under Python 3 it would sort on the bytes repr ("b'...'").
+    # If/when this file moves to py3, sort on the raw bytes slice instead.
+    inp.sort(reverse=True, key=lambda x: str(x[0][x[2]:x[2]+20]))
+
+ if not _first: _first = outdir
+ dirprefix = (_first != outdir) and git.repo_rel(outdir)+': ' or ''
+ debug1('midx: %s%screating from %d files (%d objects).\n'
+ % (dirprefix, prefixstr, len(infilenames), total))
+ if (opt.auto and (total < 1024 and len(infilenames) < 3)) \
+ or ((opt.auto or opt.force) and len(infilenames) < 2) \
+ or (opt.force and not total):
+ debug1('midx: nothing to do.\n')
+ return
+
+ pages = int(total/SHA_PER_PAGE) or 1
+ bits = int(math.ceil(math.log(pages, 2)))
+ entries = 2**bits
+ debug1('midx: table size: %d (%d bits)\n' % (entries*4, bits))
+
+ unlink(outfilename)
+ with atomically_replaced_file(outfilename, 'wb') as f:
+ f.write('MIDX')
+ f.write(struct.pack('!II', midx.MIDX_VERSION, bits))
+ assert(f.tell() == 12)
+
+ f.truncate(12 + 4*entries + 20*total + 4*total)
+ f.flush()
+ fdatasync(f.fileno())
+
+ fmap = mmap_readwrite(f, close=False)
+
+ count = merge_into(fmap, bits, total, inp)
+ del fmap # Assume this calls msync() now.
+ f.seek(0, os.SEEK_END)
+ f.write('\0'.join(allfilenames))
+ finally:
+ for ix in midxs:
+ if isinstance(ix, midx.PackMidx):
+ ix.close()
+ midxs = None
+ inp = None
+
+
+ # This is just for testing (if you enable this, don't clear inp above)
if 0:
p = midx.PackMidx(outfilename)
assert(len(p.idxnames) == len(infilenames))
print p.idxnames
assert(len(p) == total)
for pe, e in p, git.idxmerge(inp, final_progress=False):
- assert(i == pi.next())
+            # NOTE(review): 'pi' and 'i' are undefined here (dead code under
+            # 'if 0:', carried over from the removed line); if this test is
+            # ever enabled, iterate zip(p, git.idxmerge(...)) and compare the
+            # loop variables instead.
+            pin = next(pi)
+            assert(i == pin)
assert(p.exists(i))
return total, outfilename
print rv[1]
-def do_midx_dir(path):
+def do_midx_dir(path, outfilename):
already = {}
sizes = {}
if opt.force and not opt.auto:
# sort the biggest+newest midxes first, so that we can eliminate
# smaller (or older) redundant ones that come later in the list
- midxs.sort(key=lambda ix: (-sizes[ix], -os.stat(ix).st_mtime))
+ midxs.sort(key=lambda ix: (-sizes[ix], -xstat.stat(ix).st_mtime))
for mname in midxs:
any = 0
all.sort()
part1 = [name for sz,name in all[:len(all)-DESIRED_LWM+1]]
part2 = all[len(all)-DESIRED_LWM+1:]
- all = list(do_midx_group(path, part1)) + part2
+ all = list(do_midx_group(path, outfilename, part1)) + part2
if len(all) > DESIRED_HWM:
debug1('\nStill too many indexes (%d > %d). Merging again.\n'
% (len(all), DESIRED_HWM))
print name
-def do_midx_group(outdir, infiles):
+def do_midx_group(outdir, outfilename, infiles):
groups = list(_group(infiles, opt.max_files))
gprefix = ''
for n,sublist in enumerate(groups):
if len(groups) != 1:
gprefix = 'Group %d: ' % (n+1)
- rv = _do_midx(path, None, sublist, gprefix)
+ rv = _do_midx(outdir, outfilename, sublist, gprefix)
if rv:
yield rv
paths = opt.dir and [opt.dir] or git.all_packdirs()
for path in paths:
debug1('midx: scanning %s\n' % path)
- do_midx_dir(path)
+ do_midx_dir(path, opt.output)
else:
o.fatal("you must use -f or -a or provide input filenames")