from __future__ import absolute_import, print_function
import errno, os, stat, struct, tempfile
-from bup import compat, metadata, xstat
+from bup import metadata, xstat
from bup._helpers import UINT_MAX, bytescmp
-from bup.compat import range
+from bup.compat import pending_raise, range
from bup.helpers import (add_error, log, merge_iter, mmap_readwrite,
progress, qprogress, resolve_parent, slashappend)
class MetaStoreReader:
def __init__(self, filename):
+ self._closed = False
self._file = None
self._file = open(filename, 'rb')
def close(self):
+ self._closed = True
if self._file:
self._file.close()
self._file = None
def __del__(self):
- self.close()
+ assert self._closed
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+        with pending_raise(value, rethrow=False):
+ self.close()
def metadata_at(self, ofs):
self._file.seek(ofs)
# truncation or corruption somewhat sensibly.
def __init__(self, filename):
+ self._closed = False
# Map metadata hashes to bupindex.meta offsets.
self._offsets = {}
self._filename = filename
self._file = open(filename, 'ab')
def close(self):
+ self._closed = True
if self._file:
self._file.close()
self._file = None
def __del__(self):
- # Be optimistic.
- self.close()
+ assert self._closed
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ with pending_raise(value, rethrow=False):
+ self.close()
def store(self, metadata):
meta_encoded = metadata.encode(include_path=False)
(ofs,n) = (f.tell(), len(self.list))
if self.list:
count = len(self.list)
- #log('popping %r with %d entries\n'
+ #log('popping %r with %d entries\n'
# % (''.join(self.ename), count))
for e in self.list:
e.write(f)
class Entry:
def __init__(self, basename, name, meta_ofs, tmax):
- assert basename is None or type(basename) == bytes
- assert name is None or type(name) == bytes
+ assert basename is None or isinstance(basename, bytes)
+ assert name is None or isinstance(name, bytes)
self.basename = basename
self.name = name
self.meta_ofs = meta_ofs
def __iter__(self):
return self.iter()
-
+
class Reader:
def __init__(self, filename):
+ self.closed = False
self.filename = filename
self.m = b''
self.writable = False
self.m[st.st_size - FOOTLEN
: st.st_size])[0]
- def __del__(self):
- self.close()
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ with pending_raise(value, rethrow=False):
+ self.close()
def __len__(self):
return int(self.count)
self.m.flush()
def close(self):
+ self.closed = True
self.save()
if self.writable and self.m:
self.m.close()
self.m = None
self.writable = False
+ def __del__(self):
+ assert self.closed
+
def filter(self, prefixes, wantrecurse=None):
for (rp, path) in reduce_paths(prefixes):
any_entries = False
# Otherwise something like "save x/y" will produce
# nothing if x is up to date.
pe = self.find(rp)
- assert(pe)
+ if not pe:
+ raise Exception("cannot find %r" % rp)
name = path + pe.name[len(rp):]
yield (name, pe)
class Writer:
def __init__(self, filename, metastore, tmax):
+ self.closed = False
self.rootlevel = self.level = Level([], None)
self.f = None
self.count = 0
self.f = os.fdopen(ffd, 'wb', 65536)
self.f.write(INDEX_HDR)
- def __del__(self):
- self.abort()
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ with pending_raise(value, rethrow=False):
+ self.abort()
def abort(self):
+ self.closed = True
f = self.f
self.f = None
if f:
assert(self.level == None)
def close(self):
+ self.closed = True
self.flush()
f = self.f
self.f = None
f.close()
os.rename(self.tmpname, self.filename)
+ def __del__(self):
+ assert self.closed
+
def _add(self, ename, entry):
if self.lastfile and self.lastfile <= ename:
- raise Error('%r must come before %r'
+ raise Error('%r must come before %r'
% (''.join(ename), ''.join(self.lastfile)))
self.lastfile = ename
self.level = _golevel(self.level, self.f, ename, entry,
paths = []
prev = None
for (rp, p) in xpaths:
- if prev and (prev == rp
+ if prev and (prev == rp
or (prev.endswith(b'/') and rp.startswith(prev))):
continue # already superceded by previous path
paths.append((rp, p))