class MetaStoreReader:
    """Read Metadata records back out of an index metadata file by offset."""

    def __init__(self, filename):
        # Set _file to None first so close() stays safe even if open() raises.
        self._file = None
        self._file = open(filename, 'rb')

    def close(self):
        """Close the underlying file; calling this more than once is harmless."""
        f, self._file = self._file, None
        if f:
            f.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Close on exit, letting pending_raise keep any in-flight exception
        # from being masked by a failure inside close().
        with pending_raise(exc_value, rethrow=True):
            self.close()

    def metadata_at(self, ofs):
        """Return the Metadata record stored at file offset ofs."""
        self._file.seek(ofs)
        return metadata.Metadata.read(self._file)
+
+
class MetaStoreWriter:
    """Append-only writer for an index metadata file.

    For now, we just append to the file, and try to handle any
    truncation or corruption somewhat sensibly.
    """

    def __init__(self, filename):
        # Map encoded metadata to bupindex.meta offsets so identical records
        # are stored only once.
        self._offsets = {}
        self._filename = filename
        self._file = None
        # FIXME: see how slow this is; does it matter?
        m_file = open(filename, 'ab+')
        try:
            m_file.seek(0)
            try:
                # Rebuild the offset map from any records already on disk.
                m_off = m_file.tell()
                m = metadata.Metadata.read(m_file)
                while m:
                    m_encoded = m.encode()
                    self._offsets[m_encoded] = m_off
                    m_off = m_file.tell()
                    m = metadata.Metadata.read(m_file)
            except EOFError:
                # A clean end-of-file just means we've read everything.
                pass
            except:
                # Anything else suggests a damaged file; report it, then
                # re-raise so the caller sees the original error.
                log('index metadata in %r appears to be corrupt\n' % filename)
                raise
        finally:
            m_file.close()
        self._file = open(filename, 'ab')

    def close(self):
        """Close the underlying file; calling this more than once is harmless."""
        if self._file:
            self._file.close()
            self._file = None

    def __del__(self):
        # Be optimistic.
        self.close()

    def store(self, metadata):
        """Append metadata (path excluded) if new; return its file offset.

        NOTE: the parameter shadows the module-level metadata import, but
        nothing in this method needs the module.
        """
        meta_encoded = metadata.encode(include_path=False)
        ofs = self._offsets.get(meta_encoded)
        # The first record lives at offset 0, which is falsy -- compare
        # against None, or that record would be re-appended as a duplicate
        # on every store() call.
        if ofs is not None:
            return ofs
        ofs = self._file.tell()
        self._file.write(meta_encoded)
        self._offsets[meta_encoded] = ofs
        return ofs
+
+