1 import os, re, stat, time
14 class NodeError(Exception):
16 class NoSuchFile(NodeError):
18 class NotDir(NodeError):
20 class NotFile(NodeError):
22 class TooManySymlinks(NodeError):
27 it = cp().get(hash.encode('hex'))
29 assert(type == 'tree')
30 return git._treeparse(''.join(it))
33 def _tree_decode(hash):
34 tree = [(int(name,16),stat.S_ISDIR(int(mode,8)),sha)
37 assert(tree == list(sorted(tree)))
42 return sum(len(b) for b in cp().join(hash.encode('hex')))
45 def _last_chunk_info(hash):
46 tree = _tree_decode(hash)
48 (ofs,isdir,sha) = tree[-1]
50 (subofs, sublen) = _last_chunk_info(sha)
51 return (ofs+subofs, sublen)
53 return (ofs, _chunk_len(sha))
def _total_size(hash):
    """Return the total byte length of the chunked blob named by *hash*.

    The chunk tree is ordered by offset, so the file ends where its last
    chunk ends: last chunk offset + last chunk length.
    """
    lastofs, lastsize = _last_chunk_info(hash)
    return lastofs + lastsize
61 def _chunkiter(hash, startofs):
63 tree = _tree_decode(hash)
65 # skip elements before startofs
66 for i in xrange(len(tree)):
67 if i+1 >= len(tree) or tree[i+1][0] > startofs:
71 # iterate through what's left
72 for i in xrange(first, len(tree)):
73 (ofs,isdir,sha) = tree[i]
74 skipmore = startofs-ofs
78 for b in _chunkiter(sha, skipmore):
81 yield ''.join(cp().join(sha.encode('hex')))[skipmore:]
85 def __init__(self, hash, isdir, startofs):
87 self.it = _chunkiter(hash, startofs)
91 self.blob = ''.join(cp().join(hash.encode('hex')))[startofs:]
96 while len(out) < size:
97 if self.it and not self.blob:
99 self.blob = self.it.next()
100 except StopIteration:
103 want = size - len(out)
104 out += self.blob[:want]
105 self.blob = self.blob[want:]
108 log('next(%d) returned %d\n' % (size, len(out)))
114 def __init__(self, hash, size, isdir):
132 def read(self, count = -1):
134 count = self.size - self.ofs
135 if not self.reader or self.reader.ofs != self.ofs:
136 self.reader = _ChunkReader(self.hash, self.isdir, self.ofs)
138 buf = self.reader.next(count)
141 raise # our offsets will be all screwed up otherwise
150 def __init__(self, parent, name, mode, hash):
155 self.ctime = self.mtime = self.atime = 0
159 return cmp(a.name or None, b.name or None)
162 return iter(self.subs())
166 return os.path.join(self.parent.fullname(), self.name)
174 if self._subs == None:
176 return sorted(self._subs.values())
179 if self._subs == None:
181 ret = self._subs.get(name)
183 raise NoSuchFile("no file %r in %r" % (name, self.name))
188 return self.parent.top()
192 def _lresolve(self, parts):
193 #log('_lresolve %r in %r\n' % (parts, self.name))
196 (first, rest) = (parts[0], parts[1:])
198 return self._lresolve(rest)
201 raise NoSuchFile("no parent dir for %r" % self.name)
202 return self.parent._lresolve(rest)
204 return self.sub(first)._lresolve(rest)
206 return self.sub(first)
208 def lresolve(self, path):
210 if path.startswith('/'):
213 parts = re.split(r'/+', path or '.')
216 #log('parts: %r %r\n' % (path, parts))
217 return start._lresolve(parts)
219 def try_lresolve(self, path):
221 return self.lresolve(path)
223 # some symlinks don't actually point at a file that exists!
226 def resolve(self, path):
227 return self.lresolve(path).lresolve('')
230 if self._subs == None:
238 raise NotFile('%s is not a regular file' % self.name)
242 def __init__(self, parent, name, mode, hash, bupmode):
243 Node.__init__(self, parent, name, mode, hash)
244 self.bupmode = bupmode
245 self._cached_size = None
246 self._filereader = None
249 # You'd think FUSE might call this only once each time a file is
250 # opened, but no; it's really more of a refcount, and it's called
251 # once per read(). Thus, it's important to cache the filereader
252 # object here so we're not constantly re-seeking.
253 if not self._filereader:
254 self._filereader = _FileReader(self.hash, self.size(),
255 self.bupmode == git.BUP_CHUNKED)
256 self._filereader.seek(0)
257 return self._filereader
260 if self._cached_size == None:
261 log('<<<<File.size() is calculating...\n')
262 if self.bupmode == git.BUP_CHUNKED:
263 self._cached_size = _total_size(self.hash)
265 self._cached_size = _chunk_len(self.hash)
266 log('<<<<File.size() done.\n')
267 return self._cached_size
272 def __init__(self, parent, name, hash, bupmode):
273 File.__init__(self, parent, name, 0120000, hash, bupmode)
276 return len(self.readlink())
279 return ''.join(cp().join(self.hash.encode('hex')))
281 def dereference(self):
284 raise TooManySymlinks('too many levels of symlinks: %r'
288 return self.parent.lresolve(self.readlink())
292 def _lresolve(self, parts):
293 return self.dereference()._lresolve(parts)
296 class FakeSymlink(Symlink):
297 def __init__(self, parent, name, toname):
298 Symlink.__init__(self, parent, name, EMPTY_SHA, git.BUP_NORMAL)
308 it = cp().get(self.hash.encode('hex'))
312 it = cp().get(self.hash.encode('hex') + ':')
314 assert(type == 'tree')
315 for (mode,mangled_name,sha) in git._treeparse(''.join(it)):
318 (name,bupmode) = git.demangle_name(mangled_name)
319 if bupmode == git.BUP_CHUNKED:
321 if stat.S_ISDIR(mode):
322 self._subs[name] = Dir(self, name, mode, sha)
323 elif stat.S_ISLNK(mode):
324 self._subs[name] = Symlink(self, name, sha, bupmode)
326 self._subs[name] = File(self, name, mode, sha, bupmode)
329 class CommitList(Node):
330 def __init__(self, parent, name, hash):
331 Node.__init__(self, parent, name, 040000, hash)
335 revs = list(git.rev_list(self.hash.encode('hex')))
336 for (date, commit) in revs:
337 l = time.localtime(date)
338 ls = time.strftime('%Y-%m-%d-%H%M%S', l)
339 commithex = '.' + commit.encode('hex')
340 n1 = Dir(self, commithex, 040000, commit)
341 n2 = FakeSymlink(self, ls, commithex)
342 n1.ctime = n1.mtime = n2.ctime = n2.mtime = date
343 self._subs[commithex] = n1
347 (date, commit) = latest
348 commithex = '.' + commit.encode('hex')
349 n2 = FakeSymlink(self, 'latest', commithex)
350 n2.ctime = n2.mtime = date
351 self._subs['latest'] = n2
355 def __init__(self, parent):
356 Node.__init__(self, parent, '/', 040000, EMPTY_SHA)
360 for (name,sha) in git.list_refs():
361 if name.startswith('refs/heads/'):
363 date = git.rev_get_date(sha.encode('hex'))
364 n1 = CommitList(self, name, sha)
365 n1.ctime = n1.mtime = date
366 self._subs[name] = n1