We had a bug where deleted files in the index would always dirty all
their parent directories when refreshing, which was inefficient.
progress('Indexing: %d\r' % total)
total += 1
while rig.cur and rig.cur.name > path: # deleted paths
progress('Indexing: %d\r' % total)
total += 1
while rig.cur and rig.cur.name > path: # deleted paths
- rig.cur.set_deleted()
- rig.cur.repack()
+ if rig.cur.exists():
+ rig.cur.set_deleted()
+ rig.cur.repack()
rig.next()
if rig.cur and rig.cur.name == path: # paths that already existed
if pst:
rig.next()
if rig.cur and rig.cur.name == path: # paths that already existed
if pst:
if opt['print'] or opt.status or opt.modified:
for (name, ent) in index.Reader(indexfile).filter(extra or ['']):
if (opt.modified
if opt['print'] or opt.status or opt.modified:
for (name, ent) in index.Reader(indexfile).filter(extra or ['']):
if (opt.modified
- and (ent.flags & index.IX_HASHVALID
- or not ent.mode)):
+ and (ent.is_valid() or ent.is_deleted() or not ent.mode)):
continue
line = ''
if opt.status:
continue
line = ''
if opt.status:
def invalidate(self):
self.flags &= ~IX_HASHVALID
def invalidate(self):
self.flags &= ~IX_HASHVALID
def validate(self, gitmode, sha):
assert(sha)
def validate(self, gitmode, sha):
assert(sha)
self.sha = sha
self.flags |= IX_HASHVALID|IX_EXISTS
self.sha = sha
self.flags |= IX_HASHVALID|IX_EXISTS
+ def exists(self):
+ return not self.is_deleted()
+
def is_deleted(self):
return (self.flags & IX_EXISTS) == 0
def set_deleted(self):
if self.flags & IX_EXISTS:
self.flags &= ~(IX_EXISTS | IX_HASHVALID)
def is_deleted(self):
return (self.flags & IX_EXISTS) == 0
def set_deleted(self):
if self.flags & IX_EXISTS:
self.flags &= ~(IX_EXISTS | IX_HASHVALID)
- self.set_dirty()
-
- def set_dirty(self):
- pass # FIXME
def is_real(self):
return not self.is_fake()
def is_real(self):
return not self.is_fake()