1 import os, re, stat, time
class NodeError(Exception):
    """Base class for errors raised by vfs nodes."""
    pass

class NoSuchFile(NodeError):
    """A path component does not exist."""
    pass

class NotDir(NodeError):
    """A non-directory was used where a directory is required."""
    pass

class NotFile(NodeError):
    """A non-regular-file was used where a regular file is required."""
    pass

class TooManySymlinks(NodeError):
    """Symlink dereferencing exceeded the recursion limit."""
    pass
# NOTE(review): listing fragment — the enclosing 'def' line and line 28
# (which presumably binds 'type' from the iterator's first item) are elided.
# Reads a git object by hex sha via the cat pipe and parses it as a tree.
27 it = cp().get(hash.encode('hex'))
# Only tree objects are valid here; 'type' comes from an elided line.
29 assert(type == 'tree')
# Concatenate the streamed data and parse it into git tree entries.
30 return git._treeparse(''.join(it))
33 def _tree_decode(hash):
# Build (offset, isdir, sha) tuples from a git tree whose entry names are
# hex byte offsets; isdir is taken from each entry's mode bits.
# NOTE(review): the comprehension's 'for' clause (lines 35-36) is elided,
# so 'name', 'mode' and 'sha' are bound by missing lines — confirm upstream.
34 tree = [(int(name,16),stat.S_ISDIR(int(mode,8)),sha)
# Chunk entries are expected to already be sorted by offset.
37 assert(tree == list(sorted(tree)))
# Body of an elided helper (its 'def' line, 41, is missing): sums the byte
# lengths of the streamed blob pieces for the object named by 'hash'.
42 return sum(len(b) for b in cp().join(hash.encode('hex')))
45 def _last_chunk_info(hash):
# Return (offset, length) of the final data chunk under chunk tree 'hash'.
# NOTE(review): the 'if'/'else' lines choosing between the two returns
# (presumably testing 'isdir') are elided from this listing.
46 tree = _tree_decode(hash)
# Only the last entry matters: entries are offset-sorted (see _tree_decode).
48 (ofs,isdir,sha) = tree[-1]
# Recursive case: descend into a subtree and add its base offset.
50 (subofs, sublen) = _last_chunk_info(sha)
51 return (ofs+subofs, sublen)
# Leaf case: measure the chunk blob itself.
53 return (ofs, _chunk_len(sha))
def _total_size(hash):
    """Return the total content length in bytes of chunked file *hash*.

    The total is the byte offset of the file's final chunk plus that
    chunk's own length.
    """
    last_ofs, last_len = _last_chunk_info(hash)
    return last_ofs + last_len
61 def _chunkiter(hash, startofs):
# Yield the file's content strings starting at byte offset 'startofs'.
63 tree = _tree_decode(hash)
65 # skip elements before startofs
66 for i in xrange(len(tree)):
67 if i+1 >= len(tree) or tree[i+1][0] > startofs:
# NOTE(review): the loop body that records 'first' and breaks (lines 68-70)
# is elided; 'first' below is bound by those missing lines.
71 # iterate through what's left
72 for i in xrange(first, len(tree)):
73 (ofs,isdir,sha) = tree[i]
# Bytes to skip inside this entry so the first yield starts at startofs.
74 skipmore = startofs-ofs
# Recursive case (guard elided, presumably 'if isdir:'): descend subtree.
78 for b in _chunkiter(sha, skipmore):
# Leaf case: join the blob's streamed pieces and drop the skipped prefix.
81 yield ''.join(cp().join(sha.encode('hex')))[skipmore:]
# NOTE(review): fragment of a chunk-reader class (its 'class' line is
# elided).  __init__ sets up either a chunk-tree iterator or one blob.
85 def __init__(self, hash, isdir, startofs):
87 self.it = _chunkiter(hash, startofs)
91 self.blob = ''.join(cp().join(hash.encode('hex')))[startofs:]
# Fragment of next(size): accumulate up to 'size' bytes into 'out',
# refilling self.blob from the iterator until it is exhausted.
96 while len(out) < size:
97 if self.it and not self.blob:
# The enclosing 'try' line is elided; StopIteration ends the chunk stream.
99 self.blob = self.it.next()
100 except StopIteration:
103 want = size - len(out)
104 out += self.blob[:want]
105 self.blob = self.blob[want:]
108 log('next(%d) returned %d\n' % (size, len(out)))
# NOTE(review): fragment of a file-like reader class (class line elided).
114 def __init__(self, hash, size, isdir):
# read(count): count < 0 (the default) apparently means "read to EOF".
132 def read(self, count = -1):
134 count = self.size - self.ofs
# Reuse the cached chunk reader only if it is still at our offset;
# otherwise build a fresh one positioned at self.ofs.
135 if not self.reader or self.reader.ofs != self.ofs:
136 self.reader = _ChunkReader(self.hash, self.isdir, self.ofs)
138 buf = self.reader.next(count)
141 raise # our offsets will be all screwed up otherwise
# NOTE(review): fragments of the base node class (its 'class' line and
# many method 'def' lines are elided from this listing).
150 def __init__(self, parent, name, mode, hash):
# Timestamps default to the epoch until a caller fills them in.
155 self.ctime = self.mtime = self.atime = 0
# Comparison fragment: order nodes by name ('or None' guards missing names).
159 return cmp(a.name or None, b.name or None)
# Iteration fragment: iterating a node walks its children.
162 return iter(self.subs())
# fullname fragment: the path is built recursively from the parent chain.
166 return os.path.join(self.parent.fullname(), self.name)
# subs() fragment: children (self._subs) are built lazily on first access.
174 if self._subs == None:
176 return sorted(self._subs.values())
# sub(name) fragment: look up one child, raising NoSuchFile if absent.
179 if self._subs == None:
181 ret = self._subs.get(name)
183 raise NoSuchFile("no file %r in %r" % (name, self.name))
# top() fragment: the root is found by following parents upward.
188 return self.parent.top()
192 def _lresolve(self, parts):
# Resolve a list of path components without following a final symlink.
# NOTE(review): several guard lines (194-205) are elided, including the
# base case and the tests distinguishing '.', '..' and the last component.
193 #log('_lresolve %r in %r\n' % (parts, self.name))
196 (first, rest) = (parts[0], parts[1:])
# Current-directory-style component: stay at this node.
198 return self._lresolve(rest)
# Ascending past the root: there is no parent directory.
201 raise NoSuchFile("no parent dir for %r" % self.name)
202 return self.parent._lresolve(rest)
# Intermediate component: descend into the child and keep resolving.
204 return self.sub(first)._lresolve(rest)
# Final component: return the child itself (symlink not dereferenced).
206 return self.sub(first)
208 def lresolve(self, path):
# Resolve 'path' relative to this node; a leading '/' apparently restarts
# at the top.  NOTE(review): the lines binding 'start' (209-215) are
# elided from this listing.
210 if path.startswith('/'):
# Collapse repeated slashes; an empty path behaves like '.'.
213 parts = re.split(r'/+', path or '.')
216 #log('parts: %r %r\n' % (path, parts))
217 return start._lresolve(parts)
def resolve(self, path):
    """Resolve *path* to a node, following a trailing symlink.

    lresolve() stops at a final symlink, so a second lresolve('') is
    applied to the result to chase the link to its target.
    """
    node = self.lresolve(path)
    return node.lresolve('')
# NOTE(review): two single-line fragments from elided methods.
# Lazy-children guard (same pattern as subs()/sub() above).
223 if self._subs == None:
# Base node open() refuses non-regular files — presumably overridden by
# file nodes; confirm against the full source.
231 raise NotFile('%s is not a regular file' % self.name)
def __init__(self, parent, name, mode, hash, bupmode):
    """Initialize a regular-file node.

    bupmode distinguishes plain blob storage from chunked storage and
    is consulted by open()/size(); the reader and size caches start
    out empty and are filled lazily.
    """
    Node.__init__(self, parent, name, mode, hash)
    self._filereader = None   # cached reader object, built by open()
    self._cached_size = None  # memoized size, computed by size()
    self.bupmode = bupmode
# NOTE(review): body fragment of File.open() — its 'def' line is elided.
242 # You'd think FUSE might call this only once each time a file is
243 # opened, but no; it's really more of a refcount, and it's called
244 # once per read(). Thus, it's important to cache the filereader
245 # object here so we're not constantly re-seeking.
246 if not self._filereader:
247 self._filereader = _FileReader(self.hash, self.size(),
248 self.bupmode == git.BUP_CHUNKED)
# Rewind on every call so each logical open starts at offset 0.
249 self._filereader.seek(0)
250 return self._filereader
# NOTE(review): body fragment of File.size() — its 'def' line and the
# 'else' (line 257) are elided.  The size is computed once and memoized.
253 if self._cached_size == None:
254 log('<<<<File.size() is calculating...\n')
# Chunked files must walk the chunk tree; plain blobs are measured directly.
255 if self.bupmode == git.BUP_CHUNKED:
256 self._cached_size = _total_size(self.hash)
258 self._cached_size = _chunk_len(self.hash)
259 log('<<<<File.size() done.\n')
260 return self._cached_size
def __init__(self, parent, name, hash, bupmode):
    """Initialize a symlink node.

    The mode is fixed to the symlink file type (S_IFLNK == 0120000);
    everything else is delegated to the regular-file constructor.
    """
    File.__init__(self, parent, name, stat.S_IFLNK, hash, bupmode)
# NOTE(review): symlink method fragments; surrounding 'def' lines elided.
# size() fragment: a symlink's size is the length of its target string.
269 return len(self.readlink())
# readlink() fragment: the link target is the blob's entire content.
272 return ''.join(cp().join(self.hash.encode('hex')))
274 def dereference(self):
# Guards against symlink loops; the counting lines (275-280) are elided.
277 raise TooManySymlinks('too many levels of symlinks: %r'
# Resolve the target string relative to the containing directory.
281 return self.parent.lresolve(self.readlink())
def _lresolve(self, parts):
    """Resolve the remaining path components through the link target.

    Overrides the base resolution so that a symlink met mid-path is
    transparently dereferenced before descending further.
    """
    target = self.dereference()
    return target._lresolve(parts)
289 class FakeSymlink(Symlink):
# A synthetic symlink whose target is the given 'toname' rather than blob
# content; it is backed by EMPTY_SHA since there is no real git object.
# NOTE(review): the rest of __init__ and readlink() (line 292+) are elided
# — presumably 'toname' is stored and returned by readlink(); confirm.
290 def __init__(self, parent, name, toname):
291 Symlink.__init__(self, parent, name, EMPTY_SHA, git.BUP_NORMAL)
# NOTE(review): fragments of a directory node's child builder; its 'def'
# line and several guard lines (302-304, 306, 309-310, 313, 318) are
# elided from this listing.
301 it = cp().get(self.hash.encode('hex'))
# Re-request with ':' appended — presumably to fetch a commit's tree
# instead of the commit object itself; confirm against full source.
305 it = cp().get(self.hash.encode('hex') + ':')
307 assert(type == 'tree')
308 for (mode,mangled_name,sha) in git._treeparse(''.join(it)):
# Undo bup's name mangling to recover the real name and storage mode.
311 (name,bupmode) = git.demangle_name(mangled_name)
# Chunked files are stored as trees; an elided line presumably adjusts
# 'mode' so they present as regular files below — confirm.
312 if bupmode == git.BUP_CHUNKED:
314 if stat.S_ISDIR(mode):
315 self._subs[name] = Dir(self, name, mode, sha)
316 elif stat.S_ISLNK(mode):
317 self._subs[name] = Symlink(self, name, sha, bupmode)
# Elided 'else' branch: everything else becomes a regular file node.
319 self._subs[name] = File(self, name, mode, sha, bupmode)
322 class CommitList(Node):
# A branch presented as a directory of commits.  (The class body
# continues past this fragment; see the _mksubs fragment below it.)
323 def __init__(self, parent, name, hash):
# 040000 is the directory file-type mode (Python 2 octal literal).
324 Node.__init__(self, parent, name, 040000, hash)
# NOTE(review): child-builder fragment; its 'def' line and lines 337-339
# are elided (the latter presumably register the date-named symlink and
# select 'latest' from revs; confirm against the full source).
328 revs = list(git.rev_list(self.hash.encode('hex')))
329 for (date, commit) in revs:
# Human-friendly name: local commit time as YYYY-MM-DD-HHMMSS.
330 l = time.localtime(date)
331 ls = time.strftime('%Y-%m-%d-%H%M%S', l)
# Hidden dotted name carrying the raw commit hex.
332 commithex = '.' + commit.encode('hex')
333 n1 = Dir(self, commithex, 040000, commit)
334 n2 = FakeSymlink(self, ls, commithex)
# Both the commit dir and its symlink carry the commit's timestamp.
335 n1.ctime = n1.mtime = n2.ctime = n2.mtime = date
336 self._subs[commithex] = n1
# 'latest' points at the most recent commit via its dotted hex name.
340 (date, commit) = latest
341 commithex = '.' + commit.encode('hex')
342 n2 = FakeSymlink(self, 'latest', commithex)
343 n2.ctime = n2.mtime = date
344 self._subs['latest'] = n2
# NOTE(review): fragments of the root node class ('class' line elided).
348 def __init__(self, parent):
# The root is named '/' and has no backing git object (EMPTY_SHA).
349 Node.__init__(self, parent, '/', 040000, EMPTY_SHA)
# Child-builder fragment: one CommitList child per local branch head.
# Lines 350-352 and 355 are elided; line 355 presumably strips the
# 'refs/heads/' prefix from 'name' — confirm against the full source.
353 for (name,sha) in git.list_refs():
354 if name.startswith('refs/heads/'):
# The branch node's timestamps come from its tip commit's date.
356 date = git.rev_get_date(sha.encode('hex'))
357 n1 = CommitList(self, name, sha)
358 n1.ctime = n1.mtime = date
359 self._subs[name] = n1