1 """Virtual File System interface to bup repository content.
3 This module provides a path-based interface to the content of a bup
6 The VFS is structured like this:
9 /SAVE-NAME/SAVE-DATE/...
12 Each path is represented by an item that has at least an item.meta which
13 may be either a Metadata object, or an integer mode. Functions like
14 item_mode() and item_size() will return the mode and size in either
15 case. Any item.meta Metadata instances must not be modified directly.
16 Make a copy to modify via item.meta.copy() if needed.
18 The want_meta argument is advisory for calls that accept it, and it
19 may not be honored. Callers must be able to handle an item.meta value
20 that is either an instance of Metadata or an integer mode, perhaps
21 via item_mode() or augment_item_meta().
23 Setting want_meta=False is rarely desirable since it can limit the VFS
24 to only the metadata that git itself can represent, and so for
25 example, fifos and sockets will appear to be regular files
26 (e.g. S_ISREG(item_mode(item)) will be true). But the option is still
27 provided because it may be more efficient when just the path names or
28 the more limited metadata is sufficient.
30 Any given metadata object's size may be None, in which case the size
31 can be computed via item_size() or augment_item_meta(...,
34 When traversing a directory using functions like contents(), the meta
35 value for any directories other than '.' will be a default directory
36 mode, not a Metadata object. This is because the actual metadata for
37 a directory is stored inside the directory (see
38 fill_in_metadata_if_dir() or ensure_item_has_metadata()).
40 Commit items represent commits (e.g. /.tag/some-commit or
41 /foo/latest), and for most purposes, they appear as the underlying
42 tree. S_ISDIR(item_mode(item)) will return true for both tree Items
43 and Commits and the commit's oid is the tree hash. The commit hash
44 will be item.coid, and nominal_oid(item) will return coid for commits,
45 oid for everything else.
49 from __future__ import print_function
50 from collections import namedtuple
51 from errno import ELOOP, ENOENT, ENOTDIR
52 from itertools import chain, dropwhile, groupby, izip, tee
53 from stat import S_IFDIR, S_IFLNK, S_IFREG, S_ISDIR, S_ISLNK, S_ISREG
54 from time import localtime, strftime
55 import exceptions, re, sys
57 from bup import client, git, metadata
58 from bup.git import BUP_CHUNKED, cp, get_commit_items, parse_commit, tree_decode
59 from bup.helpers import debug2, last
60 from bup.metadata import Metadata
61 from bup.repo import LocalRepo, RemoteRepo
class IOError(exceptions.IOError):
    """VFS-specific IOError that always carries an explicit errno."""
    def __init__(self, errno, message):
        super(IOError, self).__init__(errno, message)
# Constructor of the (elided) Loop exception class: an ELOOP IOError raised
# when too many symlinks are encountered during path resolution.
# NOTE(review): the enclosing "class Loop(...)" header line (68) is missing
# from this excerpt.
69 def __init__(self, message, terminus=None):
70 IOError.__init__(self, ELOOP, message)
# terminus: per resolve()'s docstring, a tuple of (name, info) elements
# describing where resolution failed; None when unknown.
71 self.terminus = terminus
# Synthetic default st_mode values used when an item has no Metadata
# (cf. module docstring: without metadata the VFS falls back to what git
# itself can represent).
73 default_file_mode = S_IFREG | 0o644
74 default_dir_mode = S_IFDIR | 0o755
75 default_symlink_mode = S_IFLNK | 0o755
# Map a git tree-entry mode to one of the synthetic default st_mode values.
# NOTE(review): the excerpt numbering jumps 77->79->81->83, so the guard
# lines selecting among the three returns are missing here -- presumably
# S_ISREG/S_ISDIR/S_ISLNK tests on gitmode; confirm against the full source
# before editing.
77 def _default_mode_for_gitmode(gitmode):
79 return default_file_mode
81 return default_dir_mode
83 return default_symlink_mode
84 raise Exception('unexpected git mode ' + oct(gitmode))
86 def _normal_or_chunked_file_size(repo, oid):
87 """Return the size of the normal or chunked file indicated by oid."""
88 # FIXME: --batch-format CatPipe?
89 it = repo.cat(oid.encode('hex'))
90 _, obj_t, size = next(it)
# For chunked files, keep descending through the rightmost subtree entry;
# the final size is the accumulated offset plus the trailing blob's bytes.
# NOTE(review): lines 91 and 94 (apparently the initialization and update of
# `ofs`) are missing from this excerpt.
92 while obj_t == 'tree':
93 mode, name, last_oid = last(tree_decode(''.join(it)))
95 it = repo.cat(last_oid.encode('hex'))
96 _, obj_t, size = next(it)
97 return ofs + sum(len(b) for b in it)
# Generator: yield the blob data of a chunked file starting at startofs,
# recursing into subtrees as needed.
99 def _tree_chunks(repo, tree, startofs):
100 "Tree should be a sequence of (name, mode, hash) as per tree_decode()."
101 assert(startofs >= 0)
102 # name is the chunk's hex offset in the original file
# (Python 2 tuple-parameter lambda below -- not valid Python 3.)
103 tree = dropwhile(lambda (_1, name, _2): int(name, 16) < startofs, tree)
104 for mode, name, oid in tree:
# NOTE(review): several lines are elided here (105, 107-108, 111-112,
# 115-116, 119): the `ofs` bookkeeping, the `data` assembly, and the yield
# inside the subtree recursion.
106 skipmore = startofs - ofs
109 it = repo.cat(oid.encode('hex'))
110 _, obj_t, size = next(it)
113 assert obj_t == 'tree'
114 for b in _tree_chunks(repo, tree_decode(data), skipmore):
117 assert obj_t == 'blob'
118 yield data[skipmore:]
# _ChunkReader.__init__ (the "class _ChunkReader" header line 120 is missing
# from this excerpt): prime a sequential reader at startofs, either iterating
# chunk blobs via _tree_chunks (tree case) or slicing the single blob.
121 def __init__(self, repo, oid, startofs):
122 it = repo.cat(oid.encode('hex'))
123 _, obj_t, size = next(it)
124 isdir = obj_t == 'tree'
# NOTE(review): the lines assembling `data` and the non-tree branch
# (125-126, 128-130) are elided here.
127 self.it = _tree_chunks(repo, tree_decode(data), startofs)
131 self.blob = data[startofs:]
# _ChunkReader.next(size): accumulate up to `size` bytes, refilling
# self.blob from the chunk iterator as it empties.
134 def next(self, size):
136 while len(out) < size:
137 if self.it and not self.blob:
# NOTE(review): the try/else plumbing around this StopIteration handler and
# the initialization/return of `out` (lines 135, 138, 141-142, 146-147, 149)
# are elided in this excerpt.
139 self.blob = self.it.next()
140 except StopIteration:
143 want = size - len(out)
144 out += self.blob[:want]
145 self.blob = self.blob[want:]
148 debug2('next(%d) returned %d\n' % (size, len(out)))
# Seekable file-like reader over a (possibly chunked) repo file; usable as a
# context manager (see __exit__).  Several method bodies are elided in this
# excerpt -- confirm against the full source before editing.
152 class _FileReader(object):
153 def __init__(self, repo, oid, known_size=None):
# Cache the size so _compute_size() only walks the chunk tree once.
158 self._size = known_size
160 def _compute_size(self):
162 self._size = _normal_or_chunked_file_size(self._repo, self.oid)
# NOTE(review): `errno.EINVAL` -- the top of this excerpt only does
# "from errno import ELOOP, ENOENT, ENOTDIR", so the `errno` module itself
# appears unbound here; verify this is not a latent NameError in the full
# source.
167 raise IOError(errno.EINVAL, 'Invalid argument')
168 if ofs > self._compute_size():
169 raise IOError(errno.EINVAL, 'Invalid argument')
175 def read(self, count=-1):
# A negative count means "read to EOF".
177 count = self._compute_size() - self.ofs
# Reuse the existing chunk reader only if it is positioned at our offset.
178 if not self.reader or self.reader.ofs != self.ofs:
179 self.reader = _ChunkReader(self._repo, self.oid, self.ofs)
181 buf = self.reader.next(count)
184 raise # our offsets will be all screwed up otherwise
193 def __exit__(self, type, value, traceback):
# Used by _decompose_path to collapse runs of slashes into one.
197 _multiple_slashes_rx = re.compile(r'//+')
199 def _decompose_path(path):
200 """Return a reversed list of path elements, omitting any occurrences
201 of "." and ignoring any leading or trailing slash."""
202 path = re.sub(_multiple_slashes_rx, '/', path)
203 if path.startswith('/'):
205 if path.endswith('/'):
# NOTE(review): lines 204, 206, and 208+ are elided from this excerpt --
# presumably the slash stripping and the reversal of `result` before
# returning it.
207 result = [x for x in path.split('/') if x != '.']
# Lightweight VFS item records.  Per the module docstring, meta is either a
# Metadata instance or an integer st_mode; oid is a 20-byte git object id.
# Fix: Root and Tags previously used ('meta') -- a bare parenthesized string,
# not a tuple.  namedtuple happens to accept a string field spec, so behavior
# was identical, but it read as a bug; make the one-tuple explicit.
Item = namedtuple('Item', ('meta', 'oid'))      # plain file or tree
Chunky = namedtuple('Chunky', ('meta', 'oid'))  # bup chunked file
Root = namedtuple('Root', ('meta',))            # the VFS '/' directory
Tags = namedtuple('Tags', ('meta',))            # the '/.tag' directory
RevList = namedtuple('RevList', ('meta', 'oid'))
# For a Commit, oid is the commit's tree and coid the commit itself
# (cf. nominal_oid()).
Commit = namedtuple('Commit', ('meta', 'oid', 'coid'))

# All concrete VFS item types, and the subset backed by a real git tree.
item_types = frozenset((Item, Chunky, Root, Tags, RevList, Commit))
real_tree_types = frozenset((Item, Commit))
# Singleton items for '/' and '/.tag'; meta is a default directory mode
# (actual directory metadata lives inside directories -- see the module
# docstring and fill_in_metadata_if_dir()).
222 _root = Root(meta=default_dir_mode)
223 _tags = Tags(meta=default_dir_mode)
226 def nominal_oid(item):
227 """If the item is a Commit, return its commit oid, otherwise return
228 the item's oid, if it has one.
# NOTE(review): the docstring close and the Commit branch's return (lines
# 229-230, 232) are elided here; per the module docstring the Commit branch
# returns item.coid -- confirm against the full source.
231 if isinstance(item, Commit):
233 return getattr(item, 'oid', None)
236 """Return a completely independent copy of item, such that
237 modifications will not affect the original.
240 meta = getattr(item, 'meta', None)
243 return(item._replace(meta=meta.copy()))
246 """Return the integer mode (stat st_mode) for item."""
248 if isinstance(m, Metadata):
# Read one directory Metadata entry from its .bupm stream, falling back to
# the default directory mode for empty placeholder entries.
252 def _read_dir_meta(bupm):
253 # This is because save writes unmodified Metadata() entries for
254 # fake parents -- test-save-strip-graft.sh demonstrates.
255 m = Metadata.read(bupm)
# NOTE(review): the guard before this default return (line 256) and the
# final return of m (line 259) are elided in this excerpt.
257 return default_dir_mode
258 assert m.mode is not None
263 def tree_data_and_bupm(repo, oid):
264 """Return (tree_bytes, bupm_oid) where bupm_oid will be None if the
265 tree has no metadata (i.e. older bup save, or non-bup tree).
268 assert len(oid) == 20
269 it = repo.cat(oid.encode('hex'))
270 _, item_t, size = next(it)
# Commits are transparently dereferenced to their tree.
272 if item_t == 'commit':
273 commit = parse_commit(data)
274 it = repo.cat(commit.tree)
275 _, item_t, size = next(it)
277 assert item_t == 'tree'
278 elif item_t != 'tree':
279 raise Exception('%r is not a tree or commit' % oid.encode('hex'))
# Scan the tree entries for '.bupm'; once past where it would sort, it is
# absent.
280 for _, mangled_name, sub_oid in tree_decode(data):
281 if mangled_name == '.bupm':
# NOTE(review): the `data` assembly lines and the returns (271, 276, 282,
# 284-285) are elided in this excerpt.
283 if mangled_name > '.bupm':
287 def _find_dir_item_metadata(repo, item):
288 """Return the metadata for the tree or commit item, or None if the
289 tree has no metadata (i.e. older bup save, or non-bup tree).
# NOTE(review): the docstring close and the branch handling a missing
# bupm_oid (lines 290-291, 293, 296) are elided in this excerpt.
292 tree_data, bupm_oid = tree_data_and_bupm(repo, item.oid)
294 with _FileReader(repo, bupm_oid) as meta_stream:
295 return _read_dir_meta(meta_stream)
def _readlink(repo, oid):
    """Fetch and return the symlink target blob named by oid from repo."""
    oidx = oid.encode('hex')
    return ''.join(repo.join(oidx))
301 def readlink(repo, item):
302 """Return the link target of item, which must be a symlink. Reads the
303 target from the repository if necessary."""
305 assert S_ISLNK(item_mode(item))
# Prefer the target already recorded in the Metadata, if any.
306 if isinstance(item.meta, Metadata):
307 target = item.meta.symlink_target
# NOTE(review): lines 308-309 (presumably returning `target` when present)
# are elided in this excerpt; line 310 is the repository fallback.
310 return _readlink(repo, item.oid)
# Compute an item's size from the repository: chunk-walk for regular files,
# link-target length for symlinks.
312 def _compute_item_size(repo, item):
313 mode = item_mode(item)
# NOTE(review): the mode tests and the regular-file return (lines 314,
# 316-317, 319+) are elided in this excerpt.
315 size = _normal_or_chunked_file_size(repo, item.oid)
318 return len(_readlink(repo, item.oid))
321 def item_size(repo, item):
322 """Return the size of item, computing it if necessary."""
# NOTE(review): lines 323 and 325 are elided -- presumably binding item.meta
# to `m` and returning m.size when it is already known.
324 if isinstance(m, Metadata) and m.size is not None:
326 return _compute_item_size(repo, item)
def fopen(repo, item):
    """Open item, which must be a regular file, returning a _FileReader."""
    mode = item_mode(item)
    assert S_ISREG(mode)
    return _FileReader(repo, item.oid)
# Build a fake directory Metadata for a commit from its author timestamp.
334 def _commit_meta_from_auth_sec(author_sec):
# NOTE(review): lines 335 (creating the Metadata instance bound to `m`) and
# 339+ (returning m) are elided in this excerpt.
336 m.mode = default_dir_mode
337 m.uid = m.gid = m.size = 0
# Metadata timestamps are in nanoseconds.
338 m.atime = m.mtime = m.ctime = author_sec * 10**9
# Fetch the commit named by hex oidx and derive its fake directory Metadata.
341 def _commit_meta_from_oidx(repo, oidx):
# NOTE(review): line 342 is elided -- presumably binding `it` from a
# repo.cat(oidx) call, as in the neighboring functions.
343 _, typ, size = next(it)
344 assert typ == 'commit'
345 author_sec = parse_commit(''.join(it)).author_sec
346 return _commit_meta_from_auth_sec(author_sec)
def parse_rev_auth_secs(f):
    """Parse one rev-list line produced with format '%T %at'.

    Return (tree_oidx, author_sec) with author_sec converted to int.
    """
    line = f.readline()
    tree_oidx, secs = line.split(None, 2)
    return tree_oidx, int(secs)
352 def root_items(repo, names=None):
353 """Yield (name, item) for the items in '/' in the VFS. Return
354 everything if names is logically false, otherwise return only
355 items with a name in the collection.
358 # FIXME: what about non-leaf refs like 'refs/heads/foo/bar/baz?
# Enumerate-everything path: one RevList per branch head.
364 # FIXME: maybe eventually support repo.clone() or something
365 # and pass in two repos, so we can drop the tuple() and stream
366 # in parallel (i.e. meta vs refs).
367 for name, oid in tuple(repo.refs([], limit_to_heads=True)):
368 assert(name.startswith('refs/heads/'))
370 m = _commit_meta_from_oidx(repo, oid.encode('hex'))
371 yield name, RevList(meta=m, oid=oid)
# Requested-names path: look each ref up individually.
# NOTE(review): many intervening lines are elided in this excerpt (including
# the yields for '.'/'.tag' and the lookup binding `it`/`oidx`/`ref`);
# confirm against the full source before editing.
379 if ref in ('.', '.tag'):
382 oidx, typ, size = next(it)
386 assert typ == 'commit'
387 commit = parse_commit(''.join(it))
388 yield ref, RevList(meta=_commit_meta_from_auth_sec(commit.author_sec),
389 oid=oidx.decode('hex'))
391 def ordered_tree_entries(tree_data, bupm=None):
392 """Yields (name, mangled_name, kind, gitmode, oid) for each item in
393 tree, sorted by name.
396 # Sadly, the .bupm entries currently aren't in git tree order,
397 # i.e. they don't account for the fact that git sorts trees
398 # (including our chunked trees) as if their names ended with "/",
399 # so "fo" sorts after "fo." iff fo is a directory. This makes
400 # streaming impossible when we need the metadata.
# Demangle one raw tree entry into the 5-tuple described in the docstring.
401 def result_from_tree_entry(tree_entry):
402 gitmode, mangled_name, oid = tree_entry
403 name, kind = git.demangle_name(mangled_name, gitmode)
404 return name, mangled_name, kind, gitmode, oid
406 tree_ents = (result_from_tree_entry(x) for x in tree_decode(tree_data))
# Sort by demangled name to match .bupm order (cf. comment above).
# NOTE(review): lines 405, 407, and 410 (the bupm guard and the yield) are
# elided in this excerpt.
408 tree_ents = sorted(tree_ents, key=lambda x: x[0])
409 for ent in tree_ents:
# Yield (name, item) for the entries of the tree at oid, reading per-entry
# metadata from the .bupm stream when provided.  The two halves below handle
# the no-names case and the requested-names case; many connecting lines are
# elided in this excerpt (see the numbering gaps) -- confirm against the full
# source before editing.
412 def tree_items(oid, tree_data, names=frozenset(), bupm=None):
414 def tree_item(ent_oid, kind, gitmode):
415 if kind == BUP_CHUNKED:
416 meta = Metadata.read(bupm) if bupm else default_file_mode
417 return Chunky(oid=ent_oid, meta=meta)
420 # No metadata here (accessible via '.' inside ent_oid).
421 return Item(meta=default_dir_mode, oid=ent_oid)
423 return Item(oid=ent_oid,
424 meta=(Metadata.read(bupm) if bupm \
425 else _default_mode_for_gitmode(gitmode)))
427 assert len(oid) == 20
# No names requested: yield '.' first, then everything in order.
429 dot_meta = _read_dir_meta(bupm) if bupm else default_dir_mode
430 yield '.', Item(oid=oid, meta=dot_meta)
431 tree_entries = ordered_tree_entries(tree_data, bupm)
432 for name, mangled_name, kind, gitmode, ent_oid in tree_entries:
433 if mangled_name == '.bupm':
436 yield name, tree_item(ent_oid, kind, gitmode)
439 # Assumes the tree is properly formed, i.e. there are no
440 # duplicates, and entries will be in git tree order.
441 if type(names) not in (frozenset, set):
442 names = frozenset(names)
443 remaining = len(names)
445 # Account for the bupm sort order issue (cf. ordered_tree_entries above)
446 last_name = max(names) if bupm else max(names) + '/'
449 dot_meta = _read_dir_meta(bupm) if bupm else default_dir_mode
450 yield '.', Item(oid=oid, meta=dot_meta)
455 tree_entries = ordered_tree_entries(tree_data, bupm)
456 for name, mangled_name, kind, gitmode, ent_oid in tree_entries:
457 if mangled_name == '.bupm':
460 if name not in names:
462 break # given bupm sort order, we're finished
# NOTE(review): lines 464-465 are elided; presumably they skip this entry's
# Metadata record so the bupm stream stays in sync with the tree walk.
463 if (kind == BUP_CHUNKED or not S_ISDIR(gitmode)) and bupm:
466 yield name, tree_item(ent_oid, kind, gitmode)
# Find the tree's .bupm entry (if any), open it as the metadata stream, and
# delegate to tree_items.
471 def tree_items_with_meta(repo, oid, tree_data, names):
472 # For now, the .bupm order doesn't quite match git's, and we don't
473 # load the tree data incrementally anyway, so we just work in RAM
475 assert len(oid) == 20
477 for _, mangled_name, sub_oid in tree_decode(tree_data):
478 if mangled_name == '.bupm':
479 bupm = _FileReader(repo, sub_oid)
# Entries are sorted, so anything past '.bupm' means it is absent.
# NOTE(review): lines 474, 476, 480, 482, and 484 are elided in this excerpt
# (the bupm default, the loop breaks, and the final yield).
481 if mangled_name > '.bupm':
483 for item in tree_items(oid, tree_data, names, bupm):
# Matches save names as produced by _name_for_rev (%Y-%m-%d-%H%M%S), with
# the optional -N suffix added by _reverse_suffix_duplicates.
486 _save_name_rx = re.compile(r'^\d\d\d\d-\d\d-\d\d-\d{6}(-\d+)?$')
488 def _reverse_suffix_duplicates(strs):
489 """Yields the elements of strs, with any runs of duplicate values
490 suffixed with -N suffixes, where the zero padded integer N
491 decreases to 0 by 1 (e.g. 10, 09, ..., 00).
494 for name, duplicates in groupby(strs):
495 ndup = len(tuple(duplicates))
# NOTE(review): lines 496-498 are elided -- presumably the branch yielding
# singleton names unchanged.
# The suffix's zero-padded width is fixed by the run length.
499 ndig = len(str(ndup - 1))
500 fmt = '%s-' + '%0' + str(ndig) + 'd'
501 for i in xrange(ndup - 1, -1, -1):
502 yield fmt % (name, i)
504 def _name_for_rev(rev):
505 commit, (tree_oidx, utc) = rev
506 assert len(commit) == 40
507 return strftime('%Y-%m-%d-%H%M%S', localtime(utc))
def _item_for_rev(rev):
    """Build the Commit VFS item for a rev entry.

    rev is (commit_oidx, (tree_oidx, author_utc)); the resulting Commit's
    oid is the tree and coid the commit (cf. the module docstring).
    """
    commit_oidx, (tree_oidx, _) = rev
    assert len(commit_oidx) == 40
    assert len(tree_oidx) == 40
    return Commit(meta=default_dir_mode,
                  oid=tree_oidx.decode('hex'),
                  coid=commit_oidx.decode('hex'))
# Yield (name, item) for a branch directory: '.' first, then one Commit per
# save (named by _name_for_rev, deduplicated), plus 'latest'.  Several
# control lines are elided in this excerpt (numbering gaps) -- confirm
# against the full source before editing.
517 def revlist_items(repo, oid, names):
518 assert len(oid) == 20
519 oidx = oid.encode('hex')
# Only save-shaped names, '.', and 'latest' can ever match.
520 names = frozenset(name for name in (names or tuple()) \
521 if _save_name_rx.match(name) or name in ('.', 'latest'))
523 # Do this before we open the rev_list iterator so we're not nesting
524 if (not names) or ('.' in names):
525 yield '.', RevList(oid=oid, meta=_commit_meta_from_oidx(repo, oidx))
527 revs = repo.rev_list((oidx,), format='%T %at', parse=parse_rev_auth_secs)
528 rev_items, rev_names = tee(revs)
529 revs = None # Don't disturb the tees
530 rev_names = _reverse_suffix_duplicates(_name_for_rev(x) for x in rev_names)
531 rev_items = (_item_for_rev(x) for x in rev_items)
# Enumerate-everything path (lines 532-534 elided; presumably branching on
# names and initializing first_commit).
535 for item in rev_items:
536 first_commit = first_commit or item
537 yield next(rev_names), item
538 yield 'latest', first_commit
541 # Revs are in reverse chronological order by default
542 last_name = min(names)
543 for item in rev_items:
544 first_commit = first_commit or item
545 name = next(rev_names) # Might have -N dup suffix
548 if not name in names:
# Drain both tees so neither buffers the remainder.
552 # FIXME: need real short circuit...
553 for _ in rev_items: pass
554 for _ in rev_names: pass
556 if 'latest' in names:
557 yield 'latest', first_commit
# Yield (name, item) for '/.tag': each tag ref becomes a RevList (commit
# tag), a file Item (blob tag), or a dir Item (tree tag) via the nested
# tag_item helper, whose def line and several guards are elided in this
# excerpt -- confirm against the full source before editing.
559 def tags_items(repo, names):
563 assert len(oid) == 20
564 oidx = oid.encode('hex')
566 _, typ, size = next(it)
568 tree_oid = parse_commit(''.join(it)).tree.decode('hex')
569 assert len(tree_oid) == 20
570 # FIXME: more efficient/bulk?
571 return RevList(meta=_commit_meta_from_oidx(repo, oidx), oid=oid)
574 return Item(meta=default_file_mode, oid=oid)
576 return Item(meta=default_dir_mode, oid=oid)
577 raise Exception('unexpected tag type ' + typ + ' for tag ' + name)
# Enumerate-everything path:
581 # We have to pull these all into ram because tag_item calls cat()
582 for name, oid in tuple(repo.refs(names, limit_to_tags=True)):
583 assert(name.startswith('refs/tags/'))
585 yield name, tag_item(oid)
# Requested-names path; the early-exit bookkeeping around `remaining` and
# `last_name` is partly elided (lines 593-598, 601-603, 605).
588 # Assumes no duplicate refs
589 if type(names) not in (frozenset, set):
590 names = frozenset(names)
591 remaining = len(names)
592 last_name = max(names)
599 for name, oid in repo.refs(names, limit_to_tags=True):
600 assert(name.startswith('refs/tags/'))
604 if name not in names:
606 yield name, tag_item(oid)
611 def contents(repo, item, names=None, want_meta=True):
612 """Yields information about the items contained in item. Yields
613 (name, item) for each name in names, if the name exists, in an
614 unspecified order. If there are no names, then yields (name,
615 item) for all items, including a first item named '.'
616 representing the container itself.
618 The meta value for any directories other than '.' will be a
619 default directory mode, not a Metadata object. This is because
620 the actual metadata for a directory is stored inside the directory
621 (see fill_in_metadata_if_dir() or ensure_item_has_metadata()).
623 Note that want_meta is advisory. For any given item, item.meta
624 might be a Metadata instance or a mode, and if the former,
625 meta.size might be None. Missing sizes can be computed via
626 item_size() or augment_item_meta(..., include_size=True).
628 Do not modify any item.meta Metadata instances directly. If
629 needed, make a copy via item.meta.copy() and modify that instead.
632 # Q: are we comfortable promising '.' first when no names?
635 assert S_ISDIR(item_mode(item))
# Dispatch on the concrete VFS item type: real trees/commits read the git
# object; RevList/Root/Tags use their dedicated generators.
638 if item_t in real_tree_types:
639 it = repo.cat(item.oid.encode('hex'))
640 _, obj_type, size = next(it)
642 if obj_type == 'tree':
644 item_gen = tree_items_with_meta(repo, item.oid, data, names)
646 item_gen = tree_items(item.oid, data, names)
647 elif obj_type == 'commit':
649 item_gen = tree_items_with_meta(repo, item.oid, tree_data, names)
651 item_gen = tree_items(item.oid, tree_data, names)
654 raise Exception('unexpected git ' + obj_type)
655 elif item_t == RevList:
656 item_gen = revlist_items(repo, item.oid, names)
658 item_gen = root_items(repo, names)
660 item_gen = tags_items(repo, names)
# NOTE(review): numerous lines are elided in this excerpt (the want_meta
# branches, `data`/`tree_data` assembly, the `item_t` binding, and the final
# loop yielding from item_gen) -- confirm against the full source.
662 raise Exception('unexpected VFS item ' + str(item))
# Core iterative path resolver shared by resolve() and lresolve(); walks the
# decomposed path segment by segment, maintaining `past` (the (name, item)
# trail) and `future` (remaining reversed segments), expanding symlinks
# unless deref is false for the final element.
# NOTE(review): this function is HEAVILY elided in this excerpt (docstring,
# loop headers, '..' handling, symlink counting, and several returns are
# missing, and line 741's expression is visibly truncated mid-format).  Do
# not edit without the full source.
666 def _resolve_path(repo, path, parent=None, want_meta=True, deref=False):
673 assert type(x[0]) in (bytes, str)
674 assert type(x[1]) in item_types
675 assert parent[0][1] == _root
676 future = _decompose_path(path)
677 if path.startswith('/'):
678 if future == ['']: # path was effectively '/'
679 return (('', _root),)
686 if not future: # e.g. if path was effectively '.'
690 segment = future.pop()
692 if len(past) > 1: # .. from / is /
695 parent_name, parent_item = past[-1]
# Ask the parent directory for this segment (and '.' too when metadata is
# wanted, since '.' carries the directory's own metadata).
696 wanted = (segment,) if not want_meta else ('.', segment)
697 items = tuple(contents(repo, parent_item, names=wanted,
698 want_meta=want_meta))
700 item = items[0][1] if items else None
701 else: # First item will be '.' and have the metadata
702 item = items[1][1] if len(items) == 2 else None
703 dot, dot_item = items[0]
705 past[-1] = parent_name, parent_item
707 past.append((segment, None),)
709 mode = item_mode(item)
710 if not S_ISLNK(mode):
711 if not S_ISDIR(mode):
713 past.append((segment, item),)
716 if want_meta and type(item) in real_tree_types:
717 dir_meta = _find_dir_item_metadata(repo, item)
719 item = item._replace(meta=dir_meta)
721 past.append((segment, item),)
723 past.append((segment, item))
725 if not future and not deref:
726 past.append((segment, item),)
# Symlink expansion: splice the target's segments into `future`.
728 target = readlink(repo, item)
729 target_future = _decompose_path(target)
730 if target.startswith('/'):
731 future = target_future
733 if target_future == ['']: # path was effectively '/'
736 future.extend(target_future)
739 raise Loop('too many symlinks encountered while resolving %r%s'
741 'relative to %r' % parent if parent else ''))
743 def lresolve(repo, path, parent=None, want_meta=True):
744 """Perform exactly the same function as resolve(), except if the
745 final path element is a symbolic link, don't follow it, just
746 return it in the result."""
747 return _resolve_path(repo, path, parent=parent, want_meta=want_meta,
# NOTE(review): the continuation line (748, presumably `deref=False)`) is
# elided from this excerpt.
750 def resolve(repo, path, parent=None, want_meta=True):
751 """Follow the path in the virtual filesystem and return a tuple
752 representing the location, if any, denoted by the path. Each
753 element in the result tuple will be (name, info), where info will
754 be a VFS item that can be passed to functions like item_mode().
756 If a path segment that does not exist is encountered during
757 resolution, the result will represent the location of the missing
758 item, and that item in the result will be None.
760 Any symlinks along the path, including at the end, will be
761 resolved. A Loop exception will be raised if too many symlinks
762 are traversed while following the path. That exception
764 is effectively like a normal ELOOP IOError exception, but will
765 include a terminus element describing the location of the failure,
766 which will be a tuple of (name, info) elements.
768 Currently, a path ending in '/' will still resolve if it exists,
769 even if not a directory. The parent, if specified, must be a
770 sequence of (name, item) tuples, and will provide the starting
771 point for the resolution of the path. The result may include
772 elements of parent directly, so they must not be modified later.
773 If this is a concern, pass in "name, copy_item(item) for
774 name, item in parent" instead.
776 When want_meta is true, detailed metadata will be included in each
777 result item if it's available, otherwise item.meta will be an
778 integer mode. The metadata size may or may not be provided, but
779 can be computed by item_size() or augment_item_meta(...,
780 include_size=True). Setting want_meta=False is rarely desirable
781 since it can limit the VFS to just the metadata git itself can
782 represent, and so, as an example, fifos and sockets will appear to
783 be regular files (e.g. S_ISREG(item_mode(item)) will be true).
784 But the option is provided because it may be more efficient when
785 only the path names or the more limited metadata is sufficient.
787 Do not modify any item.meta Metadata instances directly. If
788 needed, make a copy via item.meta.copy() and modify that instead.
# NOTE(review): the closing lines are elided in this excerpt -- the call
# below presumably passes deref=True (cf. lresolve), and the context around
# the final sanity assert and return is missing.
791 result = _resolve_path(repo, path, parent=parent, want_meta=want_meta,
793 _, leaf_item = result[-1]
795 assert not S_ISLNK(item_mode(leaf_item))
798 def augment_item_meta(repo, item, include_size=False):
799 """Ensure item has a Metadata instance for item.meta. If item.meta is
800 currently a mode, replace it with a compatible "fake" Metadata
801 instance. If include_size is true, ensure item.meta.size is
802 correct, computing it if needed. If item.meta is a Metadata
803 instance, this call may modify it in place or replace it.
806 # If we actually had parallelism, we'd need locking...
# Already Metadata: at most fill in the size.
809 if isinstance(m, Metadata):
810 if include_size and m.size is None:
811 m.size = _compute_item_size(repo, item)
812 return item._replace(meta=m)
# Below: build a fake Metadata from the integer mode.
# NOTE(review): the lines binding `m`, creating `meta`, and the symlink /
# include_size guards (807-808, 813-816, 818, 822) are elided in this
# excerpt.
817 meta.uid = meta.gid = meta.atime = meta.mtime = meta.ctime = 0
819 target = _readlink(repo, item.oid)
820 meta.symlink_target = target
# A symlink's size is the length of its target (matches _compute_item_size).
821 meta.size = len(target)
823 meta.size = _compute_item_size(repo, item)
824 return item._replace(meta=meta)
826 def fill_in_metadata_if_dir(repo, item):
827 """If item is a directory and item.meta is not a Metadata instance,
828 attempt to find the metadata for the directory. If found, return
829 a new item augmented to include that metadata. Otherwise, return
830 item. May be useful for the output of contents().
833 if S_ISDIR(item_mode(item)) and not isinstance(item.meta, Metadata):
# Ask the directory for its own '.' entry, which carries the metadata.
834 items = tuple(contents(repo, item, ('.',), want_meta=True))
835 assert len(items) == 1
836 assert items[0][0] == '.'
# NOTE(review): the lines extracting the '.' item and both returns
# (837-839) are elided in this excerpt.
840 def ensure_item_has_metadata(repo, item, include_size=False):
841 """If item is a directory, attempt to find and add its metadata. If
842 the item still doesn't have a Metadata instance for item.meta,
843 give it one via augment_item_meta(). May be useful for the output
# NOTE(review): the docstring's closing lines (844-846) are elided in this
# excerpt.
847 return augment_item_meta(repo,
848 fill_in_metadata_if_dir(repo, item),
849 include_size=include_size)