1 """Git interaction library.
2 bup repositories are in Git format. This library allows us to
3 interact with the Git data structures.
5 import os, sys, zlib, time, subprocess, struct, stat, re, tempfile, glob
6 from bup.helpers import *
7 from bup import _helpers, path, midx, bloom
9 SEEK_END=2 # os.SEEK_END is not defined in python 2.4
13 home_repodir = os.path.expanduser('~/.bup')
16 _typemap = { 'blob':3, 'tree':2, 'commit':1, 'tag':4 }
17 _typermap = { 3:'blob', 2:'tree', 1:'commit', 4:'tag' }
23 class GitError(Exception):
28 """Get the path to the git repository or one of its subdirectories."""
31 raise GitError('You should call check_repo_or_die()')
33 # If there's a .git subdirectory, then the actual repo is in there.
34 gd = os.path.join(repodir, '.git')
35 if os.path.exists(gd):
38 return os.path.join(repodir, sub)
42 full = os.path.abspath(path)
43 fullrepo = os.path.abspath(repo(''))
44 if not fullrepo.endswith('/'):
46 if full.startswith(fullrepo):
47 path = full[len(fullrepo):]
48 if path.startswith('index-cache/'):
49 path = path[len('index-cache/'):]
54 paths = [repo('objects/pack')]
55 paths += glob.glob(repo('index-cache/*/.'))
59 def auto_midx(objdir):
60 args = [path.exe(), 'midx', '--auto', '--dir', objdir]
62 rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
64 # make sure 'args' gets printed to help with debugging
65 add_error('%r: exception: %s' % (args, e))
68 add_error('%r: returned %d' % (args, rv))
70 args = [path.exe(), 'bloom', '--dir', objdir]
72 rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
74 # make sure 'args' gets printed to help with debugging
75 add_error('%r: exception: %s' % (args, e))
78 add_error('%r: returned %d' % (args, rv))
81 def mangle_name(name, mode, gitmode):
82 """Mangle a file name to present an abstract name for segmented files.
83 Mangled file names will have the ".bup" extension added to them. If a
84 file's name already ends with ".bup", a ".bupl" extension is added to
85 disambiguate normal files from segmented ones.
87 if stat.S_ISREG(mode) and not stat.S_ISREG(gitmode):
89 elif name.endswith('.bup') or name[:-1].endswith('.bup'):
95 (BUP_NORMAL, BUP_CHUNKED) = (0,1)
96 def demangle_name(name):
97 """Remove name mangling from a file name, if necessary.
99 The return value is a tuple (demangled_filename, mode), where mode is one of
102 * BUP_NORMAL : files that should be read as-is from the repository
103 * BUP_CHUNKED : files that were chunked and need to be assembled
105 For more information on the name mangling algorithm, see mangle_name()
107 if name.endswith('.bupl'):
108 return (name[:-5], BUP_NORMAL)
109 elif name.endswith('.bup'):
110 return (name[:-4], BUP_CHUNKED)
112 return (name, BUP_NORMAL)
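# Editor's sketch (not part of the original module): assuming the behaviour
# described in the docstrings above, mangle_name() and demangle_name() are
# meant to round-trip, e.g.:
#
#   >>> mangle_name('foo', 0100644, 0100644)   # stored as a plain blob
#   'foo'
#   >>> mangle_name('foo', 0100644, 040000)    # regular file stored chunked
#   'foo.bup'
#   >>> demangle_name('foo.bup')
#   ('foo', BUP_CHUNKED)
#   >>> demangle_name('foo.bupl')
#   ('foo', BUP_NORMAL)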
115 def _encode_packobj(type, content):
118 szbits = (sz & 0x0f) | (_typemap[type]<<4)
121 if sz: szbits |= 0x80
127 z = zlib.compressobj(1)
129 yield z.compress(content)
133 def _encode_looseobj(type, content):
134 z = zlib.compressobj(1)
135 yield z.compress('%s %d\0' % (type, len(content)))
136 yield z.compress(content)
140 def _decode_looseobj(buf):
142 s = zlib.decompress(buf)
149 assert(type in _typemap)
150 assert(sz == len(content))
151 return (type, content)
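# Editor's note: a git "loose" object is simply '<type> <size>\0<content>'
# run through zlib, so the two helpers above should round-trip once the
# encoder's output is fully consumed (illustrative only):
#
#   >>> _decode_looseobj(''.join(_encode_looseobj('blob', 'hi')))
#   ('blob', 'hi')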
154 def _decode_packobj(buf):
157 type = _typermap[(c & 0x70) >> 4]
164 sz |= (c & 0x7f) << shift
168 return (type, zlib.decompress(buf[i+1:]))
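# Editor's note: the header handled by _encode_packobj/_decode_packobj is the
# standard git packfile object header: the low 4 bits of the first byte carry
# the low bits of the size, bits 4-6 carry the type (see _typemap), and the
# high bit of each byte means "another size byte follows", 7 bits at a time.
# For example, a 10-byte blob gets the single header byte 0x3a
# (type 3 == blob, size 10) followed by the zlib-compressed content.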
175 def find_offset(self, hash):
176 """Get the offset of an object inside the index file."""
177 idx = self._idx_from_hash(hash)
179 return self._ofs_from_idx(idx)
182 def exists(self, hash, want_source=False):
183 """Return nonempty if the object exists in this index."""
184 if hash and (self._idx_from_hash(hash) != None):
185 return want_source and os.path.basename(self.name) or True
189 return int(self.fanout[255])
191 def _idx_from_hash(self, hash):
192 global _total_searches, _total_steps
194 assert(len(hash) == 20)
196 start = self.fanout[b1-1] # range -1..254
197 end = self.fanout[b1] # range 0..255
199 _total_steps += 1 # lookup table is a step
202 mid = start + (end-start)/2
203 v = self._idx_to_hash(mid)
213 class PackIdxV1(PackIdx):
214 """Object representation of a Git pack index (version 1) file."""
215 def __init__(self, filename, f):
217 self.idxnames = [self.name]
218 self.map = mmap_read(f)
219 self.fanout = list(struct.unpack('!256I',
220 str(buffer(self.map, 0, 256*4))))
221 self.fanout.append(0) # entry "-1"
222 nsha = self.fanout[255]
224 self.shatable = buffer(self.map, self.sha_ofs, nsha*24)
226 def _ofs_from_idx(self, idx):
227 return struct.unpack('!I', str(self.shatable[idx*24 : idx*24+4]))[0]
229 def _idx_to_hash(self, idx):
230 return str(self.shatable[idx*24+4 : idx*24+24])
233 for i in xrange(self.fanout[255]):
234 yield buffer(self.map, 256*4 + 24*i + 4, 20)
237 class PackIdxV2(PackIdx):
238 """Object representation of a Git pack index (version 2) file."""
239 def __init__(self, filename, f):
241 self.idxnames = [self.name]
242 self.map = mmap_read(f)
243 assert(str(self.map[0:8]) == '\377tOc\0\0\0\2')
244 self.fanout = list(struct.unpack('!256I',
245 str(buffer(self.map, 8, 256*4))))
246 self.fanout.append(0) # entry "-1"
247 nsha = self.fanout[255]
248 self.sha_ofs = 8 + 256*4
249 self.shatable = buffer(self.map, self.sha_ofs, nsha*20)
250 self.ofstable = buffer(self.map,
251 self.sha_ofs + nsha*20 + nsha*4,
253 self.ofs64table = buffer(self.map,
254 8 + 256*4 + nsha*20 + nsha*4 + nsha*4)
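# Editor's note on the v2 .idx layout mapped above (the standard git
# pack-index format): an 8-byte header ('\377tOc' plus version 2), a
# 256-entry fanout table of 4-byte counts, then nsha 20-byte sha1s, nsha
# 4-byte CRCs, nsha 4-byte offsets, an optional table of 8-byte offsets for
# entries whose offset doesn't fit in 31 bits, and finally the pack sha1 and
# the index's own sha1.  The buffer() offsets computed above follow directly
# from those fixed-size regions.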
256 def _ofs_from_idx(self, idx):
257 ofs = struct.unpack('!I', str(buffer(self.ofstable, idx*4, 4)))[0]
259 idx64 = ofs & 0x7fffffff
260 ofs = struct.unpack('!Q',
261 str(buffer(self.ofs64table, idx64*8, 8)))[0]
264 def _idx_to_hash(self, idx):
265 return str(self.shatable[idx*20:(idx+1)*20])
268 for i in xrange(self.fanout[255]):
269 yield buffer(self.map, 8 + 256*4 + 20*i, 20)
274 def __init__(self, dir):
276 assert(_mpi_count == 0) # these things suck tons of VM; don't waste it
281 self.do_bloom = False
288 assert(_mpi_count == 0)
291 return iter(idxmerge(self.packs))
294 return sum(len(pack) for pack in self.packs)
296 def exists(self, hash, want_source=False):
297 """Return nonempty if the object exists in the index files."""
298 global _total_searches
300 if hash in self.also:
302 if self.do_bloom and self.bloom:
303 if self.bloom.exists(hash):
304 self.do_bloom = False
306 _total_searches -= 1 # was counted by bloom
308 for i in xrange(len(self.packs)):
310 _total_searches -= 1 # will be incremented by sub-pack
311 ix = p.exists(hash, want_source=want_source)
313 # reorder so most recently used packs are searched first
314 self.packs = [p] + self.packs[:i] + self.packs[i+1:]
319 def refresh(self, skip_midx = False):
320 """Refresh the index list.
321 This method checks whether any .midx files have been superseded (e.g. all of
322 their contents are in another, bigger .midx file) and removes the superseded files.
325 If skip_midx is True, all work on .midx files will be skipped and .midx
326 files will be removed from the list.
328 The module-global variable 'ignore_midx' can force this function to
329 always act as if skip_midx was True.
331 self.bloom = None # Always reopen the bloom as it may have been replaced
332 self.do_bloom = False
333 skip_midx = skip_midx or ignore_midx
334 d = dict((p.name, p) for p in self.packs
335 if not skip_midx or not isinstance(p, midx.PackMidx))
336 if os.path.exists(self.dir):
339 for ix in self.packs:
340 if isinstance(ix, midx.PackMidx):
341 for name in ix.idxnames:
342 d[os.path.join(self.dir, name)] = ix
343 for full in glob.glob(os.path.join(self.dir,'*.midx')):
345 mx = midx.PackMidx(full)
346 (mxd, mxf) = os.path.split(mx.name)
348 for n in mx.idxnames:
349 if not os.path.exists(os.path.join(mxd, n)):
350 log(('warning: index %s missing\n' +
351 ' used by %s\n') % (n, mxf))
358 midxl.sort(lambda x,y: -cmp(len(x),len(y)))
361 for sub in ix.idxnames:
362 found = d.get(os.path.join(self.dir, sub))
363 if not found or isinstance(found, PackIdx):
364 # doesn't exist, or exists but not in a midx
369 for name in ix.idxnames:
370 d[os.path.join(self.dir, name)] = ix
371 elif not ix.force_keep:
372 debug1('midx: removing redundant: %s\n'
373 % os.path.basename(ix.name))
375 for full in glob.glob(os.path.join(self.dir,'*.idx')):
383 bfull = os.path.join(self.dir, 'bup.bloom')
384 if self.bloom is None and os.path.exists(bfull):
385 self.bloom = bloom.ShaBloom(bfull)
386 self.packs = list(set(d.values()))
387 self.packs.sort(lambda x,y: -cmp(len(x),len(y)))
388 if self.bloom and self.bloom.valid() and len(self.bloom) >= len(self):
392 debug1('PackIdxList: using %d index%s.\n'
393 % (len(self.packs), len(self.packs)!=1 and 'es' or ''))
396 """Insert an additional object in the list."""
400 def calc_hash(type, content):
401 """Calculate some content's hash in the Git fashion."""
402 header = '%s %d\0' % (type, len(content))
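# Editor's note: this is the standard git object id, i.e.
# sha1('<type> <size>\0' + content).  Illustrative sanity check against git:
#
#   $ echo hello | git hash-object --stdin
#   ce013625030ba8dba906f756967f9e9ca394464a
#   >>> calc_hash('blob', 'hello\n').encode('hex')
#   'ce013625030ba8dba906f756967f9e9ca394464a'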
408 def _shalist_sort_key(ent):
409 (mode, name, id) = ent
410 if stat.S_ISDIR(int(mode, 8)):
416 def open_idx(filename):
417 if filename.endswith('.idx'):
418 f = open(filename, 'rb')
420 if header[0:4] == '\377tOc':
421 version = struct.unpack('!I', header[4:8])[0]
423 return PackIdxV2(filename, f)
425 raise GitError('%s: expected idx file version 2, got %d'
426 % (filename, version))
427 elif len(header) == 8 and header[0:4] < '\377tOc':
428 return PackIdxV1(filename, f)
430 raise GitError('%s: unrecognized idx file header' % filename)
431 elif filename.endswith('.midx'):
432 return midx.PackMidx(filename)
434 raise GitError('idx filenames must end with .idx or .midx')
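# Illustrative usage (editor's sketch; the index file name is hypothetical):
#
#   >>> ix = open_idx(repo('objects/pack/pack-<40-hex-sha>.idx'))
#   >>> ix.exists('\xab' * 20)    # hashes are 20-byte binary strings
#   >>> len(ix)                   # number of objects in the index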
437 def idxmerge(idxlist, final_progress=True):
438 """Generate a list of all the objects reachable in a PackIdxList."""
439 def pfunc(count, total):
440 qprogress('Reading indexes: %.2f%% (%d/%d)\r'
441 % (count*100.0/total, count, total))
442 def pfinal(count, total):
444 progress('Reading indexes: %.2f%% (%d/%d), done.\n'
445 % (100, total, total))
446 return merge_iter(idxlist, 10024, pfunc, pfinal)
449 def _make_objcache():
450 return PackIdxList(repo('objects/pack'))
453 """Writes Git objects insid a pack file."""
454 def __init__(self, objcache_maker=_make_objcache):
460 self.objcache_maker = objcache_maker
468 (fd,name) = tempfile.mkstemp(suffix='.pack', dir=repo('objects'))
469 self.file = os.fdopen(fd, 'w+b')
470 assert(name.endswith('.pack'))
471 self.filename = name[:-5]
472 self.file.write('PACK\0\0\0\2\0\0\0\0')
473 self.idx = list(list() for i in xrange(256))
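# Editor's note: 'PACK', version 2, object count 0 is the standard packfile
# header; the count is patched to the real number of objects in _end(), and
# the pack's trailing sha1 checksum is appended there as well.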
475 def _raw_write(self, datalist, sha):
478 # in case we get interrupted (eg. KeyboardInterrupt), it's best if
479 # the file never has a *partial* blob. So let's make sure it's
480 # all-or-nothing. (The blob shouldn't be very big anyway, thanks
481 # to our hashsplit algorithm.) f.write() does its own buffering,
482 # but that's okay because we'll flush it in _end().
483 oneblob = ''.join(datalist)
487 raise GitError, e, sys.exc_info()[2]
489 crc = zlib.crc32(oneblob) & 0xffffffff
490 self._update_idx(sha, crc, nw)
495 def _update_idx(self, sha, crc, size):
498 self.idx[ord(sha[0])].append((sha, crc, self.file.tell() - size))
500 def _write(self, sha, type, content):
504 sha = calc_hash(type, content)
505 size, crc = self._raw_write(_encode_packobj(type, content), sha=sha)
508 def breakpoint(self):
509 """Clear byte and object counts and return the last processed id."""
511 self.outbytes = self.count = 0
514 def _require_objcache(self):
515 if self.objcache is None and self.objcache_maker:
516 self.objcache = self.objcache_maker()
517 if self.objcache is None:
519 "PackWriter not opened or can't check exists w/o objcache")
521 def exists(self, id, want_source=False):
522 """Return non-empty if an object is found in the object cache."""
523 self._require_objcache()
524 return self.objcache.exists(id, want_source=want_source)
526 def maybe_write(self, type, content):
527 """Write an object to the pack file if not present and return its id."""
528 self._require_objcache()
529 sha = calc_hash(type, content)
530 if not self.exists(sha):
531 self._write(sha, type, content)
532 self.objcache.add(sha)
535 def new_blob(self, blob):
536 """Create a blob object in the pack with the supplied content."""
537 return self.maybe_write('blob', blob)
539 def new_tree(self, shalist):
540 """Create a tree object in the pack."""
541 shalist = sorted(shalist, key = _shalist_sort_key)
543 for (mode,name,bin) in shalist:
546 assert(mode[0] != '0')
548 assert(len(bin) == 20)
549 l.append('%s %s\0%s' % (mode,name,bin))
550 return self.maybe_write('tree', ''.join(l))
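# Editor's note: each entry above is serialized as '<mode> <name>\0<20-byte
# binary sha>', the same on-disk format git uses for tree objects (hence the
# assert that the mode has no leading zero).  _shalist_sort_key orders
# directories as if their names ended in '/', which is intended to match
# git's tree-sorting rule so the resulting tree hashes agree with git's.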
552 def _new_commit(self, tree, parent, author, adate, committer, cdate, msg):
554 if tree: l.append('tree %s' % tree.encode('hex'))
555 if parent: l.append('parent %s' % parent.encode('hex'))
556 if author: l.append('author %s %s' % (author, _git_date(adate)))
557 if committer: l.append('committer %s %s' % (committer, _git_date(cdate)))
560 return self.maybe_write('commit', '\n'.join(l))
562 def new_commit(self, parent, tree, date, msg):
563 """Create a commit object in the pack."""
564 userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
565 commit = self._new_commit(tree, parent,
566 userline, date, userline, date,
571 """Remove the pack file from disk."""
577 os.unlink(self.filename + '.pack')
579 def _end(self, run_midx=True):
581 if not f: return None
587 # update object count
589 cp = struct.pack('!i', self.count)
593 # calculate the pack sha1sum
596 for b in chunkyreader(f):
598 packbin = sum.digest()
602 obj_list_sha = self._write_pack_idx_v2(self.filename + '.idx', idx, packbin)
604 nameprefix = repo('objects/pack/pack-%s' % obj_list_sha)
605 if os.path.exists(self.filename + '.map'):
606 os.unlink(self.filename + '.map')
607 os.rename(self.filename + '.pack', nameprefix + '.pack')
608 os.rename(self.filename + '.idx', nameprefix + '.idx')
611 auto_midx(repo('objects/pack'))
614 def close(self, run_midx=True):
615 """Close the pack file and move it to its definitive path."""
616 return self._end(run_midx=run_midx)
618 def _write_pack_idx_v2(self, filename, idx, packbin):
619 idx_f = open(filename, 'w+b')
620 idx_f.write('\377tOc\0\0\0\2')
622 ofs64_ofs = 8 + 4*256 + 28*self.count
623 idx_f.truncate(ofs64_ofs)
625 idx_map = mmap_readwrite(idx_f, close=False)
626 idx_f.seek(0, SEEK_END)
627 count = _helpers.write_idx(idx_f, idx_map, idx, self.count)
628 assert(count == self.count)
634 b = idx_f.read(8 + 4*256)
637 obj_list_sum = Sha1()
638 for b in chunkyreader(idx_f, 20*self.count):
640 obj_list_sum.update(b)
641 namebase = obj_list_sum.hexdigest()
643 for b in chunkyreader(idx_f):
645 idx_f.write(idx_sum.digest())
652 return '%d %s' % (date, time.strftime('%z', time.localtime(date)))
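# Editor's note: this is the raw date format git expects in author/committer
# lines, e.g. '1257894000 -0800' -- seconds since the epoch plus the local
# UTC offset at that time.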
656 os.environ['GIT_DIR'] = os.path.abspath(repo())
659 def list_refs(refname = None):
660 """Generate a list of tuples in the form (refname,hash).
661 If a ref name is specified, list only this particular ref.
663 argv = ['git', 'show-ref', '--']
666 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
667 out = p.stdout.read().strip()
668 rv = p.wait() # not fatal
672 for d in out.split('\n'):
673 (sha, name) = d.split(' ', 1)
674 yield (name, sha.decode('hex'))
677 def read_ref(refname):
678 """Get the commit id of the most recent commit made on a given ref."""
679 l = list(list_refs(refname))
687 def rev_list(ref, count=None):
688 """Generate a list of reachable commits in reverse chronological order.
690 This generator walks through commits, from child to parent, that are
691 reachable via the specified ref and yields a series of tuples of the form
694 If count is a non-zero integer, limit the number of commits to "count"
697 assert(not ref.startswith('-'))
700 opts += ['-n', str(atoi(count))]
701 argv = ['git', 'rev-list', '--pretty=format:%ct'] + opts + [ref, '--']
702 p = subprocess.Popen(argv, preexec_fn = _gitenv, stdout = subprocess.PIPE)
706 if s.startswith('commit '):
707 commit = s[7:].decode('hex')
711 rv = p.wait() # not fatal
713 raise GitError, 'git rev-list returned error %d' % rv
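# Illustrative usage (editor's sketch, using only names from this module):
#
#   >>> for date, sha in rev_list('refs/heads/master', count=10):
#   ...     print date, sha.encode('hex')
#
# yields up to 10 (commit_time, binary_sha) pairs, newest first.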
716 def rev_get_date(ref):
717 """Get the date of the latest commit on the specified ref."""
718 for (date, commit) in rev_list(ref, count=1):
720 raise GitError, 'no such commit %r' % ref
723 def rev_parse(committish):
724 """Resolve the full hash for 'committish', if it exists.
726 Should be roughly equivalent to 'git rev-parse'.
728 Returns the hex value of the hash if it is found, None if 'committish' does
729 not correspond to anything.
731 head = read_ref(committish)
733 debug2("resolved from ref: commit = %s\n" % head.encode('hex'))
736 pL = PackIdxList(repo('objects/pack'))
738 if len(committish) == 40:
740 hash = committish.decode('hex')
750 def update_ref(refname, newval, oldval):
751 """Change the commit pointed to by a branch."""
754 assert(refname.startswith('refs/heads/'))
755 p = subprocess.Popen(['git', 'update-ref', refname,
756 newval.encode('hex'), oldval.encode('hex')],
757 preexec_fn = _gitenv)
758 _git_wait('git update-ref', p)
761 def guess_repo(path=None):
762 """Set the path value in the global variable "repodir".
763 This makes bup look for an existing bup repository, but does not fail if one
764 doesn't exist. Usually, if you are interacting with a bup repository, you
765 would not call this function directly; use check_repo_or_die() instead.
772 repodir = os.environ.get('BUP_DIR')
774 repodir = os.path.expanduser('~/.bup')
777 def init_repo(path=None):
778 """Create the Git bare repository for bup in a given path."""
780 d = repo() # appends a / to the path
781 parent = os.path.dirname(os.path.dirname(d))
782 if parent and not os.path.exists(parent):
783 raise GitError('parent directory "%s" does not exist\n' % parent)
784 if os.path.exists(d) and not os.path.isdir(os.path.join(d, '.')):
785 raise GitError('"%s" exists but is not a directory\n' % d)
786 p = subprocess.Popen(['git', '--bare', 'init'], stdout=sys.stderr,
787 preexec_fn = _gitenv)
788 _git_wait('git init', p)
789 # Force the index version configuration in order to ensure bup works
790 # regardless of the version of the installed Git binary.
791 p = subprocess.Popen(['git', 'config', 'pack.indexVersion', '2'],
792 stdout=sys.stderr, preexec_fn = _gitenv)
793 _git_wait('git config', p)
796 def check_repo_or_die(path=None):
797 """Make sure a bup repository exists, and abort if not.
798 If the path to a particular repository was not specified, this function
799 initializes the default repository automatically.
802 if not os.path.isdir(repo('objects/pack/.')):
803 if repodir == home_repodir:
806 log('error: %r is not a bup/git repository\n' % repo())
811 """Generate a list of (mode, name, hash) tuples of objects from 'buf'."""
813 while ofs < len(buf):
814 z = buf[ofs:].find('\0')
816 spl = buf[ofs:ofs+z].split(' ', 1)
817 assert(len(spl) == 2)
818 sha = buf[ofs+z+1:ofs+z+1+20]
820 yield (spl[0], spl[1], sha)
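# Editor's note: 'buf' is a raw git tree object, i.e. a concatenation of
# '<mode> <name>\0<20-byte binary sha>' records with no separator, so the
# loop above finds each NUL and then skips the fixed-width sha that follows.
# A one-entry tree for file 'a' would look like '100644 a\0' + 20 raw bytes.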
825 """Get Git's version and ensure a usable version is installed.
827 The returned version is formatted as an ordered tuple with each position
828 representing a digit in the version tag. For example, the following tuple
829 would represent version 1.6.6.9: ('1', '6', '6', '9')
835 p = subprocess.Popen(['git', '--version'],
836 stdout=subprocess.PIPE)
837 gvs = p.stdout.read()
838 _git_wait('git --version', p)
839 m = re.match(r'git version (\S+.\S+)', gvs)
841 raise GitError('git --version weird output: %r' % gvs)
842 _ver = tuple(m.group(1).split('.'))
843 needed = ('1','5', '3', '1')
845 raise GitError('git version %s or higher is required; you have %s'
846 % ('.'.join(needed), '.'.join(_ver)))
850 def _git_wait(cmd, p):
853 raise GitError('%s returned %d' % (cmd, rv))
856 def _git_capture(argv):
857 p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn = _gitenv)
859 _git_wait(repr(argv), p)
863 class _AbortableIter:
864 def __init__(self, it, onabort = None):
866 self.onabort = onabort
874 return self.it.next()
875 except StopIteration, e:
883 """Abort iteration and call the abortion callback, if needed."""
895 """Link to 'git cat-file' that is used to retrieve blob data."""
898 wanted = ('1','5','6')
901 log('warning: git version < %s; bup will be slow.\n'
904 self.get = self._slow_get
906 self.p = self.inprogress = None
907 self.get = self._fast_get
911 self.p.stdout.close()
914 self.inprogress = None
918 self.p = subprocess.Popen(['git', 'cat-file', '--batch'],
919 stdin=subprocess.PIPE,
920 stdout=subprocess.PIPE,
923 preexec_fn = _gitenv)
925 def _fast_get(self, id):
926 if not self.p or self.p.poll() != None:
929 assert(self.p.poll() == None)
931 log('_fast_get: opening %r while %r is open'
932 % (id, self.inprogress))
933 assert(not self.inprogress)
934 assert(id.find('\n') < 0)
935 assert(id.find('\r') < 0)
936 assert(not id.startswith('-'))
938 self.p.stdin.write('%s\n' % id)
940 hdr = self.p.stdout.readline()
941 if hdr.endswith(' missing\n'):
942 self.inprogress = None
943 raise KeyError('blob %r is missing' % id)
945 if len(spl) != 3 or len(spl[0]) != 40:
946 raise GitError('expected blob, got %r' % spl)
947 (hex, type, size) = spl
949 it = _AbortableIter(chunkyreader(self.p.stdout, int(spl[2])),
950 onabort = self._abort)
955 assert(self.p.stdout.readline() == '\n')
956 self.inprogress = None
961 def _slow_get(self, id):
962 assert(id.find('\n') < 0)
963 assert(id.find('\r') < 0)
965 type = _git_capture(['git', 'cat-file', '-t', id]).strip()
968 p = subprocess.Popen(['git', 'cat-file', type, id],
969 stdout=subprocess.PIPE,
970 preexec_fn = _gitenv)
971 for blob in chunkyreader(p.stdout):
973 _git_wait('git cat-file', p)
981 treefile = ''.join(it)
982 for (mode, name, sha) in treeparse(treefile):
983 for blob in self.join(sha.encode('hex')):
985 elif type == 'commit':
986 treeline = ''.join(it).split('\n')[0]
987 assert(treeline.startswith('tree '))
988 for blob in self.join(treeline[5:]):
991 raise GitError('invalid object type %r: expected blob/tree/commit'
995 """Generate a list of the content of all blobs that can be reached
996 from an object. The hash given in 'id' must point to a blob, a tree
997 or a commit. The content of all blobs that can be seen from trees or
998 commits will be added to the list.
1001 for d in self._join(self.get(id)):
1003 except StopIteration:
1007 """Return a dictionary of all tags in the form {hash: [tag_names, ...]}."""
1009 for (n,c) in list_refs():
1010 if n.startswith('refs/tags/'):
1015 tags[c].append(name) # more than one tag can point at 'c'