1 """Helper functions and classes for bup."""
3 from __future__ import absolute_import, division
4 from collections import namedtuple
5 from contextlib import contextmanager
6 from ctypes import sizeof, c_void_p
9 from pipes import quote
10 from subprocess import PIPE, Popen
11 import sys, os, pwd, subprocess, errno, socket, select, mmap, stat, re, struct
12 import hashlib, heapq, math, operator, time, grp, tempfile
14 from bup import _helpers
15 from bup import compat
16 from bup.compat import byte_int
17 from bup.io import path_msg
18 # This function should really be in helpers, not in bup.options. But we
19 # want options.py to be standalone so people can include it in other projects.
20 from bup.options import _tty_width as tty_width
24 """Helper to deal with Python scoping issues"""
# System page size; used below for fmincore()/mmap chunk sizing.
sc_page_size = os.sysconf('SC_PAGE_SIZE')
assert(sc_page_size > 0)

# Upper bound on the combined size of exec() arguments; batchpipe() uses
# this to decide how many args fit into a single subprocess invocation.
sc_arg_max = os.sysconf('SC_ARG_MAX')
if sc_arg_max == -1: # "no definite limit" - let's choose 2M
    sc_arg_max = 2 * 1024 * 1024
37 for result in iterable:
43 """Convert s (ascii bytes) to an integer. Return 0 if s is not a number."""
51 """Convert s (ascii bytes) to a float. Return 0 if s is not a number."""
53 return float(s or b'0')
58 buglvl = atoi(os.environ.get('BUP_DEBUG', 0))
62 _fdatasync = os.fdatasync
63 except AttributeError:
66 if sys.platform.startswith('darwin'):
67 # Apparently os.fsync on OS X doesn't guarantee to sync all the way down
71 return fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
73 # Fallback for file systems (SMB) that do not support F_FULLFSYNC
74 if e.errno == errno.ENOTSUP:
79 fdatasync = _fdatasync
def partition(predicate, stream):
    """Return (leading_matches_it, rest_it) for stream.

    leading_matches_it yields the leading run of items from stream that
    satisfy predicate; rest_it yields everything after that run, starting
    with the first non-matching item (if any).  leading_matches_it must be
    completely exhausted before traversing rest_it.
    """
    stream = iter(stream)
    # A one-element list acts as a mutable cell shared by both generators,
    # so no separate scoping-helper class is needed.
    first_nonmatch = []
    def leading_matches():
        for x in stream:
            if predicate(x):
                yield x
            else:
                first_nonmatch.append(x)
                break
    def rest():
        # Valid only after leading_matches() is exhausted (see docstring).
        if first_nonmatch:
            yield first_nonmatch[0]
        for x in stream:
            yield x
    return (leading_matches(), rest())
def lines_until_sentinel(f, sentinel, ex_type):
    """Yield lines read from f until a line equal to sentinel is seen.

    The sentinel line itself is not yielded.  Raise ex_type if EOF (or a
    final line without a terminating newline) is reached before the
    sentinel appears.
    """
    # sentinel must end with \n and must contain only one \n
    while True:
        line = f.readline()
        if not (line and line.endswith('\n')):
            raise ex_type('Hit EOF while reading line')
        if line == sentinel:
            return
        yield line
123 def stat_if_exists(path):
127 if e.errno != errno.ENOENT:
132 # Write (blockingly) to sockets that may or may not be in blocking mode.
133 # We need this because our stderr is sometimes eaten by subprocesses
134 # (probably ssh) that sometimes make it nonblocking, if only temporarily,
135 # leading to race conditions. Ick. We'll do it the hard way.
136 def _hard_write(fd, buf):
138 (r,w,x) = select.select([], [fd], [], None)
140 raise IOError('select(fd) returned without being writable')
142 sz = os.write(fd, buf)
144 if e.errno != errno.EAGAIN:
152 """Print a log message to stderr."""
155 _hard_write(sys.stderr.fileno(), s if isinstance(s, bytes) else s.encode())
169 istty1 = os.isatty(1) or (atoi(os.environ.get('BUP_FORCE_TTY')) & 1)
170 istty2 = os.isatty(2) or (atoi(os.environ.get('BUP_FORCE_TTY')) & 2)
173 """Calls log() if stderr is a TTY. Does nothing otherwise."""
174 global _last_progress
181 """Calls progress() only if we haven't printed progress in a while.
183 This avoids overloading the stderr buffer with excess junk.
187 if now - _last_prog > 0.1:
193 """Calls progress() to redisplay the most recent progress message.
195 Useful after you've printed some other message that wipes out the
198 if _last_progress and _last_progress.endswith('\r'):
199 progress(_last_progress)
def mkdirp(d, mode=None):
    """Recursively create directories on path 'd'.

    Unlike os.makedirs(), it doesn't raise an exception if the last element of
    the path already exists.
    """
    try:
        if mode:
            os.makedirs(d, mode)
        else:
            os.makedirs(d)
    except OSError as e:
        # Ignore only "already exists"; permission errors etc. still propagate.
        if e.errno == errno.EEXIST:
            pass
        else:
            raise
221 def __init__(self, entry, read_it):
223 self.read_it = read_it
225 return self.entry < x.entry
227 def merge_iter(iters, pfreq, pfunc, pfinal, key=None):
229 samekey = lambda e, pe: getattr(e, key) == getattr(pe, key, None)
231 samekey = operator.eq
233 total = sum(len(it) for it in iters)
234 iters = (iter(it) for it in iters)
235 heap = ((next(it, None),it) for it in iters)
236 heap = [MergeIterItem(e, it) for e, it in heap if e]
241 if not count % pfreq:
243 e, it = heap[0].entry, heap[0].read_it
244 if not samekey(e, pe):
250 except StopIteration:
251 heapq.heappop(heap) # remove current
253 # shift current to new location
254 heapq.heapreplace(heap, MergeIterItem(e, it))
259 """Delete a file at path 'f' if it currently exists.
261 Unlike os.unlink(), does not throw an exception if the file didn't already
267 if e.errno != errno.ENOENT:
272 if isinstance(cmd, compat.str_type):
275 return ' '.join(map(quote, cmd))
277 exc = subprocess.check_call
287 assert stdin in (None, PIPE)
290 stdin=stdin, stdout=PIPE, stderr=stderr,
292 preexec_fn=preexec_fn)
293 out, err = p.communicate(input)
294 if check and p.returncode != 0:
295 raise Exception('subprocess %r failed with status %d, stderr: %r'
296 % (b' '.join(map(quote, cmd)), p.returncode, err))
def readpipe(argv, preexec_fn=None, shell=False):
    """Run a subprocess and return its output.

    Raise an Exception (including the exit status) if the command exits
    nonzero.  Note: the error message joins argv with b' ', so argv
    elements are expected to be bytes on that path.
    """
    p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn=preexec_fn,
                         shell=shell)
    out, err = p.communicate()
    if p.returncode != 0:
        raise Exception('subprocess %r failed with status %d'
                        % (b' '.join(argv), p.returncode))
    return out
310 def _argmax_base(command):
313 base_size += len(command) + 1
314 for k, v in compat.items(environ):
315 base_size += len(k) + len(v) + 2 + sizeof(c_void_p)
319 def _argmax_args_size(args):
320 return sum(len(x) + 1 + sizeof(c_void_p) for x in args)
323 def batchpipe(command, args, preexec_fn=None, arg_max=sc_arg_max):
324 """If args is not empty, yield the output produced by calling the
325 command list with args as a sequence of strings (It may be necessary
326 to return multiple strings in order to respect ARG_MAX)."""
327 # The optional arg_max arg is a workaround for an issue with the
328 # current wvtest behavior.
329 base_size = _argmax_base(command)
331 room = arg_max - base_size
334 next_size = _argmax_args_size(args[i:i+1])
335 if room - next_size < 0:
341 assert(len(sub_args))
342 yield readpipe(command + sub_args, preexec_fn=preexec_fn)
345 def resolve_parent(p):
346 """Return the absolute path of a file without following any final symlink.
348 Behaves like os.path.realpath, but doesn't follow a symlink for the last
349 element. (ie. if 'p' itself is a symlink, this one won't follow it, but it
350 will follow symlinks in p's directory)
356 if st and stat.S_ISLNK(st.st_mode):
357 (dir, name) = os.path.split(p)
358 dir = os.path.realpath(dir)
359 out = os.path.join(dir, name)
361 out = os.path.realpath(p)
362 #log('realpathing:%r,%r\n' % (p, out))
def detect_fakeroot():
    """Return True if we appear to be running under fakeroot."""
    # fakeroot exports FAKEROOTKEY to its children; use an identity test
    # instead of the non-idiomatic '!= None'.
    return os.getenv("FAKEROOTKEY") is not None
371 if sys.platform.startswith('cygwin'):
373 # https://cygwin.com/ml/cygwin/2015-02/msg00057.html
374 groups = os.getgroups()
375 return 544 in groups or 0 in groups
378 return os.geteuid() == 0
def cache_key_value(get_value, key, cache):
    """Return (value, was_cached).  If there is a value in the cache
    for key, use that, otherwise, call get_value(key) which should
    throw a KeyError if there is no value -- in which case the cached
    and returned value will be None.
    """
    try: # Do we already have it (or know there wasn't one)?
        value = cache[key]
        return value, True
    except KeyError:
        pass
    try:
        cache[key] = value = get_value(key)
        return value, False
    except KeyError:
        # Remember the absence so later calls don't retry get_value().
        cache[key] = None
        return None, False
402 """Get the FQDN of this machine."""
405 _hostname = socket.getfqdn().encode('iso-8859-1')
def format_filesize(size):
    """Return size (in bytes) as a compact human-readable string.

    Sizes below 1024 are returned as a plain integer; larger sizes get
    one decimal place and a K/M/G/T/P/E suffix (powers of 1024).
    """
    unit = 1024.0
    size = float(size)
    if size < unit:
        return "%d" % (size)
    exponent = int(math.log(size) // math.log(unit))
    size_prefix = "KMGTPE"[exponent - 1]
    # True division here: floor division ('//') always produced a
    # trailing ".0", discarding the fractional part despite the "%.1f".
    return "%.1f%s" % (size / math.pow(unit, exponent), size_prefix)
419 class NotOk(Exception):
424 def __init__(self, outp):
428 while self._read(65536): pass
430 def read(self, size):
431 """Read 'size' bytes from input stream."""
433 return self._read(size)
436 """Read from input stream until a newline is found."""
438 return self._readline()
440 def write(self, data):
441 """Write 'data' to output stream."""
442 #log('%d writing: %d bytes\n' % (os.getpid(), len(data)))
443 self.outp.write(data)
446 """Return true if input stream is readable."""
447 raise NotImplemented("Subclasses must implement has_input")
450 """Indicate end of output from last sent command."""
454 """Indicate server error to the client."""
455 s = re.sub(r'\s+', ' ', str(s))
456 self.write('\nerror %s\n' % s)
458 def _check_ok(self, onempty):
461 for rl in linereader(self):
462 #log('%d got line: %r\n' % (os.getpid(), rl))
463 if not rl: # empty line
467 elif rl.startswith('error '):
468 #log('client: error: %s\n' % rl[6:])
472 raise Exception('server exited unexpectedly; see errors above')
474 def drain_and_check_ok(self):
475 """Remove all data for the current command from input stream."""
478 return self._check_ok(onempty)
481 """Verify that server action completed successfully."""
483 raise Exception('expected "ok", got %r' % rl)
484 return self._check_ok(onempty)
487 class Conn(BaseConn):
488 def __init__(self, inp, outp):
489 BaseConn.__init__(self, outp)
492 def _read(self, size):
493 return self.inp.read(size)
496 return self.inp.readline()
499 [rl, wl, xl] = select.select([self.inp.fileno()], [], [], 0)
501 assert(rl[0] == self.inp.fileno())
507 def checked_reader(fd, n):
509 rl, _, _ = select.select([fd], [], [])
512 if not buf: raise Exception("Unexpected EOF reading %d more bytes" % n)
517 MAX_PACKET = 128 * 1024
518 def mux(p, outfd, outr, errr):
521 while p.poll() is None:
522 rl, _, _ = select.select(fds, [], [])
525 buf = os.read(outr, MAX_PACKET)
527 os.write(outfd, struct.pack('!IB', len(buf), 1) + buf)
529 buf = os.read(errr, 1024)
531 os.write(outfd, struct.pack('!IB', len(buf), 2) + buf)
533 os.write(outfd, struct.pack('!IB', 0, 3))
536 class DemuxConn(BaseConn):
537 """A helper class for bup's client-server protocol."""
538 def __init__(self, infd, outp):
539 BaseConn.__init__(self, outp)
540 # Anything that comes through before the sync string was not
541 # multiplexed and can be assumed to be debug/log before mux init.
543 while tail != 'BUPMUX':
544 b = os.read(infd, (len(tail) < 6) and (6-len(tail)) or 1)
546 raise IOError('demux: unexpected EOF during initialization')
548 sys.stderr.write(tail[:-6]) # pre-mux log messages
555 def write(self, data):
557 BaseConn.write(self, data)
559 def _next_packet(self, timeout):
560 if self.closed: return False
561 rl, wl, xl = select.select([self.infd], [], [], timeout)
562 if not rl: return False
563 assert(rl[0] == self.infd)
564 ns = ''.join(checked_reader(self.infd, 5))
565 n, fdw = struct.unpack('!IB', ns)
566 assert(n <= MAX_PACKET)
568 self.reader = checked_reader(self.infd, n)
570 for buf in checked_reader(self.infd, n):
571 sys.stderr.write(buf)
574 debug2("DemuxConn: marked closed\n")
577 def _load_buf(self, timeout):
578 if self.buf is not None:
580 while not self.closed:
581 while not self.reader:
582 if not self._next_packet(timeout):
585 self.buf = next(self.reader)
587 except StopIteration:
591 def _read_parts(self, ix_fn):
592 while self._load_buf(None):
593 assert(self.buf is not None)
595 if i is None or i == len(self.buf):
600 self.buf = self.buf[i:]
608 return buf.index('\n')+1
611 return ''.join(self._read_parts(find_eol))
613 def _read(self, size):
615 def until_size(buf): # Closes on csize
616 if len(buf) < csize[0]:
621 return ''.join(self._read_parts(until_size))
624 return self._load_buf(0)
628 """Generate a list of input lines from 'f' without terminating newlines."""
def chunkyreader(f, count = None):
    """Generate a list of chunks of data read from 'f'.

    If count is None, read until EOF is reached.

    If count is a positive integer, read 'count' bytes from 'f'.  If EOF is
    reached while reading, raise IOError.
    """
    if count is None:
        while True:
            b = f.read(65536)
            if not b:
                break
            yield b
    else:
        while count > 0:
            b = f.read(min(count, 65536))
            if not b:
                raise IOError('EOF with %d bytes remaining' % count)
            yield b
            count -= len(b)
659 def atomically_replaced_file(name, mode='w', buffering=-1):
660 """Yield a file that will be atomically renamed name when leaving the block.
662 This contextmanager yields an open file object that is backed by a
663 temporary file which will be renamed (atomically) to the target
664 name if everything succeeds.
666 The mode and buffering arguments are handled exactly as with open,
667 and the yielded file will have very restrictive permissions, as
672 with atomically_replaced_file('foo.txt', 'w') as f:
673 f.write('hello jack.')
677 (ffd, tempname) = tempfile.mkstemp(dir=os.path.dirname(name),
678 text=('b' not in mode))
681 f = os.fdopen(ffd, mode, buffering)
689 os.rename(tempname, name)
691 unlink(tempname) # nonexistant file is ignored
695 """Append "/" to 's' if it doesn't aleady end in "/"."""
696 assert isinstance(s, bytes)
697 if s and not s.endswith(b'/'):
703 def _mmap_do(f, sz, flags, prot, close):
705 st = os.fstat(f.fileno())
708 # trying to open a zero-length map gives an error, but an empty
709 # string has all the same behaviour of a zero-length map, ie. it has
712 map = mmap.mmap(f.fileno(), sz, flags, prot)
714 f.close() # map will persist beyond file close
def mmap_read(f, sz = 0, close=True):
    """Create a read-only memory mapped region on file 'f'.
    If sz is 0, the region will cover the entire file.
    If close is true, f is closed once the map exists (the mapping
    remains usable after the close -- see _mmap_do)."""
    return _mmap_do(f, sz, mmap.MAP_PRIVATE, mmap.PROT_READ, close)
def mmap_readwrite(f, sz = 0, close=True):
    """Create a read-write memory mapped region on file 'f'.
    If sz is 0, the region will cover the entire file.
    If close is true, f is closed once the map exists."""
    return _mmap_do(f, sz, mmap.MAP_SHARED, mmap.PROT_READ|mmap.PROT_WRITE,
                    close)
def mmap_readwrite_private(f, sz = 0, close=True):
    """Create a read-write memory mapped region on file 'f'.
    If sz is 0, the region will cover the entire file.
    The map is private, which means the changes are never flushed back to the
    file.
    If close is true, f is closed once the map exists."""
    return _mmap_do(f, sz, mmap.MAP_PRIVATE, mmap.PROT_READ|mmap.PROT_WRITE,
                    close)
743 _mincore = getattr(_helpers, 'mincore', None)
745 # ./configure ensures that we're on Linux if MINCORE_INCORE isn't defined.
746 MINCORE_INCORE = getattr(_helpers, 'MINCORE_INCORE', 1)
748 _fmincore_chunk_size = None
def _set_fmincore_chunk_size():
    """Pick the fmincore scan chunk size: the largest multiple of
    sc_page_size not exceeding 64MB, or sc_page_size itself when a
    single page is already at least that large."""
    global _fmincore_chunk_size
    preferred = 64 * 1024 * 1024
    if sc_page_size < preferred:
        _fmincore_chunk_size = sc_page_size * (preferred // sc_page_size)
    else:
        _fmincore_chunk_size = sc_page_size
758 """Return the mincore() data for fd as a bytearray whose values can be
759 tested via MINCORE_INCORE, or None if fd does not fully
760 support the operation."""
762 if (st.st_size == 0):
764 if not _fmincore_chunk_size:
765 _set_fmincore_chunk_size()
766 pages_per_chunk = _fmincore_chunk_size // sc_page_size;
767 page_count = (st.st_size + sc_page_size - 1) // sc_page_size;
768 chunk_count = page_count // _fmincore_chunk_size
771 result = bytearray(page_count)
772 for ci in compat.range(chunk_count):
773 pos = _fmincore_chunk_size * ci;
774 msize = min(_fmincore_chunk_size, st.st_size - pos)
776 m = mmap.mmap(fd, msize, mmap.MAP_PRIVATE, 0, 0, pos)
777 except mmap.error as ex:
778 if ex.errno == errno.EINVAL or ex.errno == errno.ENODEV:
779 # Perhaps the file was a pipe, i.e. "... | bup split ..."
783 _mincore(m, msize, 0, result, ci * pages_per_chunk)
784 except OSError as ex:
785 if ex.errno == errno.ENOSYS:
def parse_timestamp(epoch_str):
    """Return the number of nanoseconds since the epoch that are described
by epoch_str (100ms, 100ns, ...); when epoch_str cannot be parsed,
throw a ValueError that may contain additional information."""
    ns_per = {'s' : 1000000000,
              'ms' : 1000000,
              'us' : 1000,
              'ns' : 1}
    match = re.match(r'^((?:[-+]?[0-9]+)?)(s|ms|us|ns)$', epoch_str)
    if not match:
        # A bare integer is the likely mistake; give a pointed hint.
        if re.match(r'^([-+]?[0-9]+)$', epoch_str):
            raise ValueError('must include units, i.e. 100ns, 100ms, ...')
        raise ValueError('unrecognized timestamp %r' % epoch_str)
    (n, units) = match.group(1, 2)
    if not n:  # a bare unit like 's' means 1 of that unit
        n = 1
    n = int(n)
    return n * ns_per[units]
812 """Parse string or bytes as a possibly unit suffixed number.
815 199.2k means 203981 bytes
816 1GB means 1073741824 bytes
817 2.1 tb means 2199023255552 bytes
819 if isinstance(s, bytes):
820 # FIXME: should this raise a ValueError for UnicodeDecodeError
821 # (perhaps with the latter as the context).
822 s = s.decode('ascii')
823 g = re.match(r'([-+\d.e]+)\s*(\w*)', str(s))
825 raise ValueError("can't parse %r as a number" % s)
826 (val, unit) = g.groups()
829 if unit in ['t', 'tb']:
830 mult = 1024*1024*1024*1024
831 elif unit in ['g', 'gb']:
832 mult = 1024*1024*1024
833 elif unit in ['m', 'mb']:
835 elif unit in ['k', 'kb']:
837 elif unit in ['', 'b']:
840 raise ValueError("invalid unit %r in number %r" % (unit, s))
845 """Count the number of elements in an iterator. (consumes the iterator)"""
846 return reduce(lambda x,y: x+1, l)
851 """Append an error message to the list of saved errors.
853 Once processing is able to stop and output the errors, the saved errors are
854 accessible in the module variable helpers.saved_errors.
856 saved_errors.append(e)
865 def die_if_errors(msg=None, status=1):
869 msg = 'warning: %d errors encountered\n' % len(saved_errors)
875 """Replace the default exception handler for KeyboardInterrupt (Ctrl-C).
877 The new exception handler will make sure that bup will exit without an ugly
878 stacktrace when Ctrl-C is hit.
880 oldhook = sys.excepthook
881 def newhook(exctype, value, traceback):
882 if exctype == KeyboardInterrupt:
883 log('\nInterrupted.\n')
885 return oldhook(exctype, value, traceback)
886 sys.excepthook = newhook
889 def columnate(l, prefix):
890 """Format elements of 'l' in columns with 'prefix' leading each line.
892 The number of columns is determined automatically based on the string
898 clen = max(len(s) for s in l)
899 ncols = (tty_width() - len(prefix)) // (clen + 2)
904 while len(l) % ncols:
906 rows = len(l) // ncols
907 for s in compat.range(0, len(l), rows):
908 cols.append(l[s:s+rows])
910 for row in zip(*cols):
911 out += prefix + ''.join(('%-*s' % (clen+2, s)) for s in row) + '\n'
def parse_date_or_fatal(str, fatal):
    """Parses the given date or calls Option.fatal().
    For now we expect a string that contains a float."""
    try:
        date = float(str)
    except ValueError as e:
        raise fatal('invalid date format (should be a float): %r' % e)
    else:
        return date
def parse_excludes(options, fatal):
    """Traverse the options and extract all excludes, or call Option.fatal().

    options is a sequence of (option, parameter) pairs; --exclude adds a
    single path, --exclude-from adds each (stripped) line of the named
    file.  Returns the de-duplicated, sorted list of excluded paths.
    """
    excluded_paths = []
    for flag in options:
        (option, parameter) = flag
        if option == '--exclude':
            excluded_paths.append(resolve_parent(parameter))
        elif option == '--exclude-from':
            try:
                # Use a context manager so the handle isn't leaked.
                with open(resolve_parent(parameter)) as f:
                    lines = f.readlines()
            except IOError:
                raise fatal("couldn't read %s" % parameter)
            for exclude_path in lines:
                # FIXME: perhaps this should be rstrip('\n')
                exclude_path = resolve_parent(exclude_path.strip())
                if exclude_path:
                    excluded_paths.append(exclude_path)
    return sorted(frozenset(excluded_paths))
def parse_rx_excludes(options, fatal):
    """Traverse the options and extract all rx excludes, or call
    Option.fatal().

    options is a sequence of (option, parameter) pairs; --exclude-rx adds
    a single pattern, --exclude-rx-from adds one pattern per line of the
    named file.  Returns the list of compiled regular expressions.
    """
    excluded_patterns = []
    for flag in options:
        (option, parameter) = flag
        if option == '--exclude-rx':
            try:
                excluded_patterns.append(re.compile(parameter))
            except re.error as ex:
                fatal('invalid --exclude-rx pattern (%s): %s' % (parameter, ex))
        elif option == '--exclude-rx-from':
            try:
                # Use a context manager so the handle isn't leaked.
                with open(resolve_parent(parameter)) as f:
                    lines = f.readlines()
            except IOError:
                raise fatal("couldn't read %s" % parameter)
            for pattern in lines:
                spattern = pattern.rstrip('\n')
                if not spattern:
                    continue
                try:
                    excluded_patterns.append(re.compile(spattern))
                except re.error as ex:
                    fatal('invalid --exclude-rx pattern (%s): %s'
                          % (spattern, ex))
    return excluded_patterns
def should_rx_exclude_path(path, exclude_rxs):
    """Return True if path matches a regular expression in exclude_rxs."""
    for rx in exclude_rxs:
        if rx.search(path):
            debug1('Skipping %r: excluded by rx pattern %r.\n'
                   % (path, rx.pattern))
            return True
    return False
985 # FIXME: Carefully consider the use of functions (os.path.*, etc.)
986 # that resolve against the current filesystem in the strip/graft
987 # functions for example, but elsewhere as well. I suspect bup's not
988 # always being careful about that. For some cases, the contents of
989 # the current filesystem should be irrelevant, and consulting it might
990 # produce the wrong result, perhaps via unintended symlink resolution,
def path_components(path):
    """Break path into a list of pairs of the form (name,
    full_path_to_name).  Path must start with '/'.
    Example:
      '/home/foo' -> [('', '/'), ('home', '/home'), ('foo', '/home/foo')]"""
    if not path.startswith(b'/'):
        raise Exception('path must start with "/": %s' % path_msg(path))
    # Since we assume path startswith('/'), we can skip the first element.
    result = [(b'', b'/')]
    norm_path = os.path.abspath(path)
    if norm_path == b'/':
        return result
    full_path = b''
    for p in norm_path.split(b'/')[1:]:
        full_path += b'/' + p
        result.append((p, full_path))
    return result
1012 def stripped_path_components(path, strip_prefixes):
1013 """Strip any prefix in strip_prefixes from path and return a list
1014 of path components where each component is (name,
1015 none_or_full_fs_path_to_name). Assume path startswith('/').
1016 See thelpers.py for examples."""
1017 normalized_path = os.path.abspath(path)
1018 sorted_strip_prefixes = sorted(strip_prefixes, key=len, reverse=True)
1019 for bp in sorted_strip_prefixes:
1020 normalized_bp = os.path.abspath(bp)
1021 if normalized_bp == b'/':
1023 if normalized_path.startswith(normalized_bp):
1024 prefix = normalized_path[:len(normalized_bp)]
1026 for p in normalized_path[len(normalized_bp):].split(b'/'):
1030 result.append((p, prefix))
1033 return path_components(path)
1036 def grafted_path_components(graft_points, path):
1037 # Create a result that consists of some number of faked graft
1038 # directories before the graft point, followed by all of the real
1039 # directories from path that are after the graft point. Arrange
1040 # for the directory at the graft point in the result to correspond
1041 # to the "orig" directory in --graft orig=new. See t/thelpers.py
1042 # for some examples.
1044 # Note that given --graft orig=new, orig and new have *nothing* to
1045 # do with each other, even if some of their component names
1046 # match. i.e. --graft /foo/bar/baz=/foo/bar/bax is semantically
1047 # equivalent to --graft /foo/bar/baz=/x/y/z, or even
1050 # FIXME: This can't be the best solution...
1051 clean_path = os.path.abspath(path)
1052 for graft_point in graft_points:
1053 old_prefix, new_prefix = graft_point
1054 # Expand prefixes iff not absolute paths.
1055 old_prefix = os.path.normpath(old_prefix)
1056 new_prefix = os.path.normpath(new_prefix)
1057 if clean_path.startswith(old_prefix):
1058 escaped_prefix = re.escape(old_prefix)
1059 grafted_path = re.sub(br'^' + escaped_prefix, new_prefix, clean_path)
1060 # Handle /foo=/ (at least) -- which produces //whatever.
1061 grafted_path = b'/' + grafted_path.lstrip(b'/')
1062 clean_path_components = path_components(clean_path)
1063 # Count the components that were stripped.
1064 strip_count = 0 if old_prefix == b'/' else old_prefix.count(b'/')
1065 new_prefix_parts = new_prefix.split(b'/')
1066 result_prefix = grafted_path.split(b'/')[:new_prefix.count(b'/')]
1067 result = [(p, None) for p in result_prefix] \
1068 + clean_path_components[strip_count:]
1069 # Now set the graft point name to match the end of new_prefix.
1070 graft_point = len(result_prefix)
1071 result[graft_point] = \
1072 (new_prefix_parts[-1], clean_path_components[strip_count][1])
1073 if new_prefix == b'/': # --graft ...=/ is a special case.
1076 return path_components(clean_path)
1082 _localtime = getattr(_helpers, 'localtime', None)
1085 bup_time = namedtuple('bup_time', ['tm_year', 'tm_mon', 'tm_mday',
1086 'tm_hour', 'tm_min', 'tm_sec',
1087 'tm_wday', 'tm_yday',
1088 'tm_isdst', 'tm_gmtoff', 'tm_zone'])
1090 # Define a localtime() that returns bup_time when possible. Note:
1091 # this means that any helpers.localtime() results may need to be
1092 # passed through to_py_time() before being passed to python's time
1093 # module, which doesn't appear willing to ignore the extra items.
1095 def localtime(time):
1096 return bup_time(*_helpers.localtime(floor(time)))
1097 def utc_offset_str(t):
1098 """Return the local offset from UTC as "+hhmm" or "-hhmm" for time t.
1099 If the current UTC offset does not represent an integer number
1100 of minutes, the fractional component will be truncated."""
1101 off = localtime(t).tm_gmtoff
1102 # Note: // doesn't truncate like C for negative values, it rounds down.
1103 offmin = abs(off) // 60
1105 h = (offmin - m) // 60
1106 return b'%+03d%02d' % (-h if off < 0 else h, m)
1108 if isinstance(x, time.struct_time):
1110 return time.struct_time(x[:9])
1112 localtime = time.localtime
1113 def utc_offset_str(t):
1114 return time.strftime(b'%z', localtime(t))
1119 _some_invalid_save_parts_rx = re.compile(br'[\[ ~^:?*\\]|\.\.|//|@{')
1121 def valid_save_name(name):
1122 # Enforce a superset of the restrictions in git-check-ref-format(1)
1124 or name.startswith(b'/') or name.endswith(b'/') \
1125 or name.endswith(b'.'):
1127 if _some_invalid_save_parts_rx.search(name):
1130 if byte_int(c) < 0x20 or byte_int(c) == 0x7f:
1132 for part in name.split(b'/'):
1133 if part.startswith(b'.') or part.endswith(b'.lock'):
_period_rx = re.compile(r'^([0-9]+)(s|min|h|d|w|m|y)$')

def period_as_secs(s):
    """Return the number of seconds in the period described by s
    ('10s', '5min', '2h', '1d', '3w', '1m', '1y'), float('inf') for
    'forever', or None if s cannot be parsed.  Months are 31 days and
    years 366 days (deliberately generous bounds)."""
    if s == 'forever':
        return float('inf')
    match = _period_rx.match(s)
    if not match:
        return None
    mag = int(match.group(1))
    scale = match.group(2)
    return mag * {'s': 1,
                  'min': 60,
                  'h': 60 * 60,
                  'd': 60 * 60 * 24,
                  'w': 60 * 60 * 24 * 7,
                  'm': 60 * 60 * 24 * 31,
                  'y': 60 * 60 * 24 * 366}[scale]