mercurial/changegroup.py
changeset 43076:2372284d9457
parent    43059:4bbc9569e722
child     43077:687b865b95ad
comparison of 43075:57875cf423c9 with 43076:2372284d9457
    26     phases,
    26     phases,
    27     pycompat,
    27     pycompat,
    28     util,
    28     util,
    29 )
    29 )
    30 
    30 
    31 from .interfaces import (
    31 from .interfaces import repository
    32     repository,
       
    33 )
       
    34 
    32 
    35 _CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
    33 _CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
    36 _CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
    34 _CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
    37 _CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")
    35 _CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")
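# Illustrative sketch of these fixed-width headers (placeholder values; the
# local structs mirror the constants above): cg1 packs four 20-byte nodes back
# to back, cg2 adds a fifth 20-byte delta-base node, and cg3 appends a
# big-endian unsigned short of revlog flags.
import struct

v1 = struct.Struct("20s20s20s20s")
v2 = struct.Struct("20s20s20s20s20s")
v3 = struct.Struct(">20s20s20s20s20sH")
node = p1 = p2 = cs = b'\x00' * 20          # placeholder 20-byte nodes
assert v1.pack(node, p1, p2, cs) == b'\x00' * 80
assert v2.size == v1.size + 20              # extra delta-base node
assert v3.size == v2.size + 2               # trailing revlog flags (">H")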
    38 
    36 
    39 LFS_REQUIREMENT = 'lfs'
    37 LFS_REQUIREMENT = 'lfs'
    40 
    38 
    41 readexactly = util.readexactly
    39 readexactly = util.readexactly
       
    40 
    42 
    41 
    43 def getchunk(stream):
    42 def getchunk(stream):
    44     """return the next chunk from stream as a string"""
    43     """return the next chunk from stream as a string"""
    45     d = readexactly(stream, 4)
    44     d = readexactly(stream, 4)
    46     l = struct.unpack(">l", d)[0]
    45     l = struct.unpack(">l", d)[0]
    48         if l:
    47         if l:
    49             raise error.Abort(_("invalid chunk length %d") % l)
    48             raise error.Abort(_("invalid chunk length %d") % l)
    50         return ""
    49         return ""
    51     return readexactly(stream, l - 4)
    50     return readexactly(stream, l - 4)
    52 
    51 
       
    52 
    53 def chunkheader(length):
    53 def chunkheader(length):
    54     """return a changegroup chunk header (string)"""
    54     """return a changegroup chunk header (string)"""
    55     return struct.pack(">l", length + 4)
    55     return struct.pack(">l", length + 4)
    56 
    56 
       
    57 
    57 def closechunk():
    58 def closechunk():
    58     """return a changegroup chunk header (string) for a zero-length chunk"""
    59     """return a changegroup chunk header (string) for a zero-length chunk"""
    59     return struct.pack(">l", 0)
    60     return struct.pack(">l", 0)
    60 
    61 
       
    62 
    61 def _fileheader(path):
    63 def _fileheader(path):
    62     """Obtain a changegroup chunk header for a named path."""
    64     """Obtain a changegroup chunk header for a named path."""
    63     return chunkheader(len(path)) + path
    65     return chunkheader(len(path)) + path
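# Round-trip sketch of the chunk framing handled by getchunk(), chunkheader()
# and closechunk() above (io.BytesIO stands in for the stream): the 4-byte
# big-endian length includes the header itself, and a zero length ends a group.
import io
import struct

payload = b'hello'
framed = struct.pack(">l", len(payload) + 4) + payload + struct.pack(">l", 0)
stream = io.BytesIO(framed)
length = struct.unpack(">l", stream.read(4))[0]
assert stream.read(length - 4) == payload               # the chunk body
assert struct.unpack(">l", stream.read(4))[0] == 0      # the close chunk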
       
    66 
    64 
    67 
    65 def writechunks(ui, chunks, filename, vfs=None):
    68 def writechunks(ui, chunks, filename, vfs=None):
    66     """Write chunks to a file and return its filename.
    69     """Write chunks to a file and return its filename.
    67 
    70 
    68     The stream is assumed to be a bundle file.
    71     The stream is assumed to be a bundle file.
    94             if filename and vfs:
    97             if filename and vfs:
    95                 vfs.unlink(cleanup)
    98                 vfs.unlink(cleanup)
    96             else:
    99             else:
    97                 os.unlink(cleanup)
   100                 os.unlink(cleanup)
    98 
   101 
       
   102 
    99 class cg1unpacker(object):
   103 class cg1unpacker(object):
   100     """Unpacker for cg1 changegroup streams.
   104     """Unpacker for cg1 changegroup streams.
   101 
   105 
   102     A changegroup unpacker handles the framing of the revision data in
   106     A changegroup unpacker handles the framing of the revision data in
   103     the wire format. Most consumers will want to use the apply()
   107     the wire format. Most consumers will want to use the apply()
   112     consumers should prefer apply() instead.
   116     consumers should prefer apply() instead.
   113 
   117 
   114     A few other public methods exist. Those are used only for
   118     A few other public methods exist. Those are used only for
   115     bundlerepo and some debug commands - their use is discouraged.
   119     bundlerepo and some debug commands - their use is discouraged.
   116     """
   120     """
       
   121 
   117     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
   122     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
   118     deltaheadersize = deltaheader.size
   123     deltaheadersize = deltaheader.size
   119     version = '01'
   124     version = '01'
   120     _grouplistcount = 1 # One list of files after the manifests
   125     _grouplistcount = 1  # One list of files after the manifests
   121 
   126 
   122     def __init__(self, fh, alg, extras=None):
   127     def __init__(self, fh, alg, extras=None):
   123         if alg is None:
   128         if alg is None:
   124             alg = 'UN'
   129             alg = 'UN'
   125         if alg not in util.compengines.supportedbundletypes:
   130         if alg not in util.compengines.supportedbundletypes:
   126             raise error.Abort(_('unknown stream compression type: %s')
   131             raise error.Abort(_('unknown stream compression type: %s') % alg)
   127                              % alg)
       
   128         if alg == 'BZ':
   132         if alg == 'BZ':
   129             alg = '_truncatedBZ'
   133             alg = '_truncatedBZ'
   130 
   134 
   131         compengine = util.compengines.forbundletype(alg)
   135         compengine = util.compengines.forbundletype(alg)
   132         self._stream = compengine.decompressorreader(fh)
   136         self._stream = compengine.decompressorreader(fh)
   136 
   140 
   137     # These methods (compressed, read, seek, tell) all appear to only
   141     # These methods (compressed, read, seek, tell) all appear to only
   138     # be used by bundlerepo, but it's a little hard to tell.
   142     # be used by bundlerepo, but it's a little hard to tell.
   139     def compressed(self):
   143     def compressed(self):
   140         return self._type is not None and self._type != 'UN'
   144         return self._type is not None and self._type != 'UN'
       
   145 
   141     def read(self, l):
   146     def read(self, l):
   142         return self._stream.read(l)
   147         return self._stream.read(l)
       
   148 
   143     def seek(self, pos):
   149     def seek(self, pos):
   144         return self._stream.seek(pos)
   150         return self._stream.seek(pos)
       
   151 
   145     def tell(self):
   152     def tell(self):
   146         return self._stream.tell()
   153         return self._stream.tell()
       
   154 
   147     def close(self):
   155     def close(self):
   148         return self._stream.close()
   156         return self._stream.close()
   149 
   157 
   150     def _chunklength(self):
   158     def _chunklength(self):
   151         d = readexactly(self._stream, 4)
   159         d = readexactly(self._stream, 4)
   231                     break
   239                     break
   232                 noentries = False
   240                 noentries = False
   233                 yield chunkheader(len(chunk))
   241                 yield chunkheader(len(chunk))
   234                 pos = 0
   242                 pos = 0
   235                 while pos < len(chunk):
   243                 while pos < len(chunk):
   236                     next = pos + 2**20
   244                     next = pos + 2 ** 20
   237                     yield chunk[pos:next]
   245                     yield chunk[pos:next]
   238                     pos = next
   246                     pos = next
   239             yield closechunk()
   247             yield closechunk()
   240 
   248 
   241     def _unpackmanifests(self, repo, revmap, trp, prog):
   249     def _unpackmanifests(self, repo, revmap, trp, prog):
   248         deltas = self.deltaiter()
   256         deltas = self.deltaiter()
   249         repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
   257         repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
   250         prog.complete()
   258         prog.complete()
   251         self.callback = None
   259         self.callback = None
   252 
   260 
   253     def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
   261     def apply(
   254               expectedtotal=None):
   262         self,
       
   263         repo,
       
   264         tr,
       
   265         srctype,
       
   266         url,
       
   267         targetphase=phases.draft,
       
   268         expectedtotal=None,
       
   269     ):
   255         """Add the changegroup returned by source.read() to this repo.
   270         """Add the changegroup returned by source.read() to this repo.
   256         srctype is a string like 'push', 'pull', or 'unbundle'.  url is
   271         srctype is a string like 'push', 'pull', or 'unbundle'.  url is
   257         the URL of the repo where this changegroup is coming from.
   272         the URL of the repo where this changegroup is coming from.
   258 
   273 
   259         Return an integer summarizing the change to this repo:
   274         Return an integer summarizing the change to this repo:
   261         - more heads than before: 1+added heads (2..n)
   276         - more heads than before: 1+added heads (2..n)
   262         - fewer heads than before: -1-removed heads (-2..-n)
   277         - fewer heads than before: -1-removed heads (-2..-n)
   263         - number of heads stays the same: 1
   278         - number of heads stays the same: 1
   264         """
   279         """
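        # Worked reading of the convention above: going from 2 heads to 3
        # returns 1 + 1 = 2, going from 3 heads to 2 returns -1 - 1 = -2,
        # and an unchanged head count returns 1, so 0 is never returned.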
   265         repo = repo.unfiltered()
   280         repo = repo.unfiltered()
       
   281 
   266         def csmap(x):
   282         def csmap(x):
   267             repo.ui.debug("add changeset %s\n" % short(x))
   283             repo.ui.debug("add changeset %s\n" % short(x))
   268             return len(cl)
   284             return len(cl)
   269 
   285 
   270         def revmap(x):
   286         def revmap(x):
   277             # case we use the top level data. We overwrite the argument
   293             # case we use the top level data. We overwrite the argument
   278             # because we need to use the top level value (if they exist)
   294             # because we need to use the top level value (if they exist)
   279             # in this function.
   295             # in this function.
   280             srctype = tr.hookargs.setdefault('source', srctype)
   296             srctype = tr.hookargs.setdefault('source', srctype)
   281             tr.hookargs.setdefault('url', url)
   297             tr.hookargs.setdefault('url', url)
   282             repo.hook('prechangegroup',
   298             repo.hook(
   283                       throw=True, **pycompat.strkwargs(tr.hookargs))
   299                 'prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)
       
   300             )
   284 
   301 
   285             # write changelog data to temp files so concurrent readers
   302             # write changelog data to temp files so concurrent readers
   286             # will not see an inconsistent view
   303             # will not see an inconsistent view
   287             cl = repo.changelog
   304             cl = repo.changelog
   288             cl.delayupdate(tr)
   305             cl.delayupdate(tr)
   290 
   307 
   291             trp = weakref.proxy(tr)
   308             trp = weakref.proxy(tr)
   292             # pull off the changeset group
   309             # pull off the changeset group
   293             repo.ui.status(_("adding changesets\n"))
   310             repo.ui.status(_("adding changesets\n"))
   294             clstart = len(cl)
   311             clstart = len(cl)
   295             progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
   312             progress = repo.ui.makeprogress(
   296                                             total=expectedtotal)
   313                 _('changesets'), unit=_('chunks'), total=expectedtotal
       
   314             )
   297             self.callback = progress.increment
   315             self.callback = progress.increment
   298 
   316 
   299             efiles = set()
   317             efiles = set()
       
   318 
   300             def onchangelog(cl, node):
   319             def onchangelog(cl, node):
   301                 efiles.update(cl.readfiles(node))
   320                 efiles.update(cl.readfiles(node))
   302 
   321 
   303             self.changelogheader()
   322             self.changelogheader()
   304             deltas = self.deltaiter()
   323             deltas = self.deltaiter()
   305             cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
   324             cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
   306             efiles = len(efiles)
   325             efiles = len(efiles)
   307 
   326 
   308             if not cgnodes:
   327             if not cgnodes:
   309                 repo.ui.develwarn('applied empty changelog from changegroup',
   328                 repo.ui.develwarn(
   310                                   config='warn-empty-changegroup')
   329                     'applied empty changelog from changegroup',
       
   330                     config='warn-empty-changegroup',
       
   331                 )
   311             clend = len(cl)
   332             clend = len(cl)
   312             changesets = clend - clstart
   333             changesets = clend - clstart
   313             progress.complete()
   334             progress.complete()
   314             self.callback = None
   335             self.callback = None
   315 
   336 
   316             # pull off the manifest group
   337             # pull off the manifest group
   317             repo.ui.status(_("adding manifests\n"))
   338             repo.ui.status(_("adding manifests\n"))
   318             # We know that we'll never have more manifests than we had
   339             # We know that we'll never have more manifests than we had
   319             # changesets.
   340             # changesets.
   320             progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
   341             progress = repo.ui.makeprogress(
   321                                             total=changesets)
   342                 _('manifests'), unit=_('chunks'), total=changesets
       
   343             )
   322             self._unpackmanifests(repo, revmap, trp, progress)
   344             self._unpackmanifests(repo, revmap, trp, progress)
   323 
   345 
   324             needfiles = {}
   346             needfiles = {}
   325             if repo.ui.configbool('server', 'validate'):
   347             if repo.ui.configbool('server', 'validate'):
   326                 cl = repo.changelog
   348                 cl = repo.changelog
   334                         needfiles.setdefault(f, set()).add(n)
   356                         needfiles.setdefault(f, set()).add(n)
   335 
   357 
   336             # process the files
   358             # process the files
   337             repo.ui.status(_("adding file changes\n"))
   359             repo.ui.status(_("adding file changes\n"))
   338             newrevs, newfiles = _addchangegroupfiles(
   360             newrevs, newfiles = _addchangegroupfiles(
   339                 repo, self, revmap, trp, efiles, needfiles)
   361                 repo, self, revmap, trp, efiles, needfiles
       
   362             )
   340 
   363 
   341             # making sure the value exists
   364             # making sure the value exists
   342             tr.changes.setdefault('changegroup-count-changesets', 0)
   365             tr.changes.setdefault('changegroup-count-changesets', 0)
   343             tr.changes.setdefault('changegroup-count-revisions', 0)
   366             tr.changes.setdefault('changegroup-count-revisions', 0)
   344             tr.changes.setdefault('changegroup-count-files', 0)
   367             tr.changes.setdefault('changegroup-count-files', 0)
   378                     hookargs = dict(tr.hookargs)
   401                     hookargs = dict(tr.hookargs)
   379                 else:
   402                 else:
   380                     hookargs = dict(tr.hookargs)
   403                     hookargs = dict(tr.hookargs)
   381                     hookargs['node'] = hex(cl.node(clstart))
   404                     hookargs['node'] = hex(cl.node(clstart))
   382                     hookargs['node_last'] = hex(cl.node(clend - 1))
   405                     hookargs['node_last'] = hex(cl.node(clend - 1))
   383                 repo.hook('pretxnchangegroup',
   406                 repo.hook(
   384                           throw=True, **pycompat.strkwargs(hookargs))
   407                     'pretxnchangegroup',
       
   408                     throw=True,
       
   409                     **pycompat.strkwargs(hookargs)
       
   410                 )
   385 
   411 
   386             added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
   412             added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
   387             phaseall = None
   413             phaseall = None
   388             if srctype in ('push', 'serve'):
   414             if srctype in ('push', 'serve'):
   389                 # Old servers can not push the boundary themselves.
   415                 # Old servers can not push the boundary themselves.
   422                         args = hookargs.copy()
   448                         args = hookargs.copy()
   423                         args['node'] = hex(n)
   449                         args['node'] = hex(n)
   424                         del args['node_last']
   450                         del args['node_last']
   425                         repo.hook("incoming", **pycompat.strkwargs(args))
   451                         repo.hook("incoming", **pycompat.strkwargs(args))
   426 
   452 
   427                     newheads = [h for h in repo.heads()
   453                     newheads = [h for h in repo.heads() if h not in oldheads]
   428                                 if h not in oldheads]
   454                     repo.ui.log(
   429                     repo.ui.log("incoming",
   455                         "incoming",
   430                                 "%d incoming changes - new heads: %s\n",
   456                         "%d incoming changes - new heads: %s\n",
   431                                 len(added),
   457                         len(added),
   432                                 ', '.join([hex(c[:6]) for c in newheads]))
   458                         ', '.join([hex(c[:6]) for c in newheads]),
   433 
   459                     )
   434                 tr.addpostclose('changegroup-runhooks-%020i' % clstart,
   460 
   435                                 lambda tr: repo._afterlock(runhooks))
   461                 tr.addpostclose(
       
   462                     'changegroup-runhooks-%020i' % clstart,
       
   463                     lambda tr: repo._afterlock(runhooks),
       
   464                 )
   436         finally:
   465         finally:
   437             repo.ui.flush()
   466             repo.ui.flush()
   438         # never return 0 here:
   467         # never return 0 here:
   439         if deltaheads < 0:
   468         if deltaheads < 0:
   440             ret = deltaheads - 1
   469             ret = deltaheads - 1
   452         for chunkdata in iter(lambda: self.deltachunk(chain), {}):
   481         for chunkdata in iter(lambda: self.deltachunk(chain), {}):
   453             # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
   482             # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
   454             yield chunkdata
   483             yield chunkdata
   455             chain = chunkdata[0]
   484             chain = chunkdata[0]
   456 
   485 
       
   486 
   457 class cg2unpacker(cg1unpacker):
   487 class cg2unpacker(cg1unpacker):
   458     """Unpacker for cg2 streams.
   488     """Unpacker for cg2 streams.
   459 
   489 
   460     cg2 streams add support for generaldelta, so the delta header
   490     cg2 streams add support for generaldelta, so the delta header
   461     format is slightly different. All other features about the data
   491     format is slightly different. All other features about the data
   462     remain the same.
   492     remain the same.
   463     """
   493     """
       
   494 
   464     deltaheader = _CHANGEGROUPV2_DELTA_HEADER
   495     deltaheader = _CHANGEGROUPV2_DELTA_HEADER
   465     deltaheadersize = deltaheader.size
   496     deltaheadersize = deltaheader.size
   466     version = '02'
   497     version = '02'
   467 
   498 
   468     def _deltaheader(self, headertuple, prevnode):
   499     def _deltaheader(self, headertuple, prevnode):
   469         node, p1, p2, deltabase, cs = headertuple
   500         node, p1, p2, deltabase, cs = headertuple
   470         flags = 0
   501         flags = 0
   471         return node, p1, p2, deltabase, cs, flags
   502         return node, p1, p2, deltabase, cs, flags
   472 
   503 
       
   504 
   473 class cg3unpacker(cg2unpacker):
   505 class cg3unpacker(cg2unpacker):
   474     """Unpacker for cg3 streams.
   506     """Unpacker for cg3 streams.
   475 
   507 
   476     cg3 streams add support for exchanging treemanifests and revlog
   508     cg3 streams add support for exchanging treemanifests and revlog
   477     flags. It adds the revlog flags to the delta header and an empty chunk
   509     flags. It adds the revlog flags to the delta header and an empty chunk
   478     separating manifests and files.
   510     separating manifests and files.
   479     """
   511     """
       
   512 
   480     deltaheader = _CHANGEGROUPV3_DELTA_HEADER
   513     deltaheader = _CHANGEGROUPV3_DELTA_HEADER
   481     deltaheadersize = deltaheader.size
   514     deltaheadersize = deltaheader.size
   482     version = '03'
   515     version = '03'
   483     _grouplistcount = 2 # One list of manifests and one list of files
   516     _grouplistcount = 2  # One list of manifests and one list of files
   484 
   517 
   485     def _deltaheader(self, headertuple, prevnode):
   518     def _deltaheader(self, headertuple, prevnode):
   486         node, p1, p2, deltabase, cs, flags = headertuple
   519         node, p1, p2, deltabase, cs, flags = headertuple
   487         return node, p1, p2, deltabase, cs, flags
   520         return node, p1, p2, deltabase, cs, flags
   488 
   521 
   494             repo.ui.debug("adding %s revisions\n" % d)
   527             repo.ui.debug("adding %s revisions\n" % d)
   495             deltas = self.deltaiter()
   528             deltas = self.deltaiter()
   496             if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
   529             if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
   497                 raise error.Abort(_("received dir revlog group is empty"))
   530                 raise error.Abort(_("received dir revlog group is empty"))
   498 
   531 
       
   532 
   499 class headerlessfixup(object):
   533 class headerlessfixup(object):
   500     def __init__(self, fh, h):
   534     def __init__(self, fh, h):
   501         self._h = h
   535         self._h = h
   502         self._fh = fh
   536         self._fh = fh
       
   537 
   503     def read(self, n):
   538     def read(self, n):
   504         if self._h:
   539         if self._h:
   505             d, self._h = self._h[:n], self._h[n:]
   540             d, self._h = self._h[:n], self._h[n:]
   506             if len(d) < n:
   541             if len(d) < n:
   507                 d += readexactly(self._fh, n - len(d))
   542                 d += readexactly(self._fh, n - len(d))
   508             return d
   543             return d
   509         return readexactly(self._fh, n)
   544         return readexactly(self._fh, n)
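# Minimal usage sketch (io.BytesIO stands in for the real file handle and
# b'HG10UN' is only an illustrative already-consumed header): the buffered
# header bytes are replayed before the wrapped stream's own data.
import io

fh = headerlessfixup(io.BytesIO(b'rest-of-bundle'), b'HG10UN')
assert fh.read(6) == b'HG10UN'            # served from the buffered header
assert fh.read(14) == b'rest-of-bundle'   # falls through to the stream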
   510 
   545 
       
   546 
   511 def _revisiondeltatochunks(delta, headerfn):
   547 def _revisiondeltatochunks(delta, headerfn):
   512     """Serialize a revisiondelta to changegroup chunks."""
   548     """Serialize a revisiondelta to changegroup chunks."""
   513 
   549 
   514     # The captured revision delta may be encoded as a delta against
   550     # The captured revision delta may be encoded as a delta against
   515     # a base revision or as a full revision. The changegroup format
   551     # a base revision or as a full revision. The changegroup format
   522     elif delta.basenode == nullid:
   558     elif delta.basenode == nullid:
   523         data = delta.revision
   559         data = delta.revision
   524         prefix = mdiff.trivialdiffheader(len(data))
   560         prefix = mdiff.trivialdiffheader(len(data))
   525     else:
   561     else:
   526         data = delta.revision
   562         data = delta.revision
   527         prefix = mdiff.replacediffheader(delta.baserevisionsize,
   563         prefix = mdiff.replacediffheader(delta.baserevisionsize, len(data))
   528                                          len(data))
       
   529 
   564 
   530     meta = headerfn(delta)
   565     meta = headerfn(delta)
   531 
   566 
   532     yield chunkheader(len(meta) + len(prefix) + len(data))
   567     yield chunkheader(len(meta) + len(prefix) + len(data))
   533     yield meta
   568     yield meta
   534     if prefix:
   569     if prefix:
   535         yield prefix
   570         yield prefix
   536     yield data
   571     yield data
       
   572 
   537 
   573 
   538 def _sortnodesellipsis(store, nodes, cl, lookup):
   574 def _sortnodesellipsis(store, nodes, cl, lookup):
   539     """Sort nodes for changegroup generation."""
   575     """Sort nodes for changegroup generation."""
   540     # Ellipses serving mode.
   576     # Ellipses serving mode.
   541     #
   577     #
   554     # changelog, so what we do is we sort the non-changelog histories
   590     # changelog, so what we do is we sort the non-changelog histories
   555     # by the order in which they are used by the changelog.
   591     # by the order in which they are used by the changelog.
   556     key = lambda n: cl.rev(lookup(n))
   592     key = lambda n: cl.rev(lookup(n))
   557     return sorted(nodes, key=key)
   593     return sorted(nodes, key=key)
   558 
   594 
   559 def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
   595 
   560                                linknode, clrevtolocalrev, fullclnodes,
   596 def _resolvenarrowrevisioninfo(
   561                                precomputedellipsis):
   597     cl,
       
   598     store,
       
   599     ischangelog,
       
   600     rev,
       
   601     linkrev,
       
   602     linknode,
       
   603     clrevtolocalrev,
       
   604     fullclnodes,
       
   605     precomputedellipsis,
       
   606 ):
   562     linkparents = precomputedellipsis[linkrev]
   607     linkparents = precomputedellipsis[linkrev]
       
   608 
   563     def local(clrev):
   609     def local(clrev):
   564         """Turn a changelog revnum into a local revnum.
   610         """Turn a changelog revnum into a local revnum.
   565 
   611 
   566         The ellipsis dag is stored as revnums on the changelog,
   612         The ellipsis dag is stored as revnums on the changelog,
   567         but when we're producing ellipsis entries for
   613         but when we're producing ellipsis entries for
   591             p = walk[0]
   637             p = walk[0]
   592             walk = walk[1:]
   638             walk = walk[1:]
   593             if p in clrevtolocalrev:
   639             if p in clrevtolocalrev:
   594                 return clrevtolocalrev[p]
   640                 return clrevtolocalrev[p]
   595             elif p in fullclnodes:
   641             elif p in fullclnodes:
   596                 walk.extend([pp for pp in cl.parentrevs(p)
   642                 walk.extend([pp for pp in cl.parentrevs(p) if pp != nullrev])
   597                                 if pp != nullrev])
       
   598             elif p in precomputedellipsis:
   643             elif p in precomputedellipsis:
   599                 walk.extend([pp for pp in precomputedellipsis[p]
   644                 walk.extend(
   600                                 if pp != nullrev])
   645                     [pp for pp in precomputedellipsis[p] if pp != nullrev]
       
   646                 )
   601             else:
   647             else:
   602                 # In this case, we've got an ellipsis with parents
   648                 # In this case, we've got an ellipsis with parents
   603                 # outside the current bundle (likely an
   649                 # outside the current bundle (likely an
   604                 # incremental pull). We "know" that we can use the
   650                 # incremental pull). We "know" that we can use the
   605                 # value of this same revlog at whatever revision
   651                 # value of this same revlog at whatever revision
   616                         return i
   662                         return i
   617                 # We failed to resolve a parent for this node, so
   663                 # We failed to resolve a parent for this node, so
   618                 # we crash the changegroup construction.
   664                 # we crash the changegroup construction.
   619                 raise error.Abort(
   665                 raise error.Abort(
   620                     'unable to resolve parent while packing %r %r'
   666                     'unable to resolve parent while packing %r %r'
   621                     ' for changeset %r' % (store.indexfile, rev, clrev))
   667                     ' for changeset %r' % (store.indexfile, rev, clrev)
       
   668                 )
   622 
   669 
   623         return nullrev
   670         return nullrev
   624 
   671 
   625     if not linkparents or (
   672     if not linkparents or (store.parentrevs(rev) == (nullrev, nullrev)):
   626         store.parentrevs(rev) == (nullrev, nullrev)):
       
   627         p1, p2 = nullrev, nullrev
   673         p1, p2 = nullrev, nullrev
   628     elif len(linkparents) == 1:
   674     elif len(linkparents) == 1:
   629         p1, = sorted(local(p) for p in linkparents)
   675         (p1,) = sorted(local(p) for p in linkparents)
   630         p2 = nullrev
   676         p2 = nullrev
   631     else:
   677     else:
   632         p1, p2 = sorted(local(p) for p in linkparents)
   678         p1, p2 = sorted(local(p) for p in linkparents)
   633 
   679 
   634     p1node, p2node = store.node(p1), store.node(p2)
   680     p1node, p2node = store.node(p1), store.node(p2)
   635 
   681 
   636     return p1node, p2node, linknode
   682     return p1node, p2node, linknode
   637 
   683 
   638 def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
   684 
   639                topic=None,
   685 def deltagroup(
   640                ellipses=False, clrevtolocalrev=None, fullclnodes=None,
   686     repo,
   641                precomputedellipsis=None):
   687     store,
       
   688     nodes,
       
   689     ischangelog,
       
   690     lookup,
       
   691     forcedeltaparentprev,
       
   692     topic=None,
       
   693     ellipses=False,
       
   694     clrevtolocalrev=None,
       
   695     fullclnodes=None,
       
   696     precomputedellipsis=None,
       
   697 ):
   642     """Calculate deltas for a set of revisions.
   698     """Calculate deltas for a set of revisions.
   643 
   699 
   644     Is a generator of ``revisiondelta`` instances.
   700     Is a generator of ``revisiondelta`` instances.
   645 
   701 
   646     If topic is not None, progress detail will be generated using this
   702     If topic is not None, progress detail will be generated using this
   696 
   752 
   697             else:
   753             else:
   698                 # We could probably do this later and avoid the dict
   754                 # We could probably do this later and avoid the dict
   699                 # holding state. But it likely doesn't matter.
   755                 # holding state. But it likely doesn't matter.
   700                 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
   756                 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
   701                     cl, store, ischangelog, rev, linkrev, linknode,
   757                     cl,
   702                     clrevtolocalrev, fullclnodes, precomputedellipsis)
   758                     store,
       
   759                     ischangelog,
       
   760                     rev,
       
   761                     linkrev,
       
   762                     linknode,
       
   763                     clrevtolocalrev,
       
   764                     fullclnodes,
       
   765                     precomputedellipsis,
       
   766                 )
   703 
   767 
   704                 adjustedparents[node] = (p1node, p2node)
   768                 adjustedparents[node] = (p1node, p2node)
   705                 linknodes[node] = linknode
   769                 linknodes[node] = linknode
   706 
   770 
   707             filtered.append(node)
   771             filtered.append(node)
   710 
   774 
   711     # We expect the first pass to be fast, so we only engage the progress
   775     # We expect the first pass to be fast, so we only engage the progress
   712     # meter for constructing the revision deltas.
   776     # meter for constructing the revision deltas.
   713     progress = None
   777     progress = None
   714     if topic is not None:
   778     if topic is not None:
   715         progress = repo.ui.makeprogress(topic, unit=_('chunks'),
   779         progress = repo.ui.makeprogress(
   716                                         total=len(nodes))
   780             topic, unit=_('chunks'), total=len(nodes)
       
   781         )
   717 
   782 
   718     configtarget = repo.ui.config('devel', 'bundle.delta')
   783     configtarget = repo.ui.config('devel', 'bundle.delta')
   719     if configtarget not in ('', 'p1', 'full'):
   784     if configtarget not in ('', 'p1', 'full'):
   720         msg = _("""config "devel.bundle.delta" as unknown value: %s""")
   785         msg = _("""config "devel.bundle.delta" as unknown value: %s""")
   721         repo.ui.warn(msg % configtarget)
   786         repo.ui.warn(msg % configtarget)
   731     revisions = store.emitrevisions(
   796     revisions = store.emitrevisions(
   732         nodes,
   797         nodes,
   733         nodesorder=nodesorder,
   798         nodesorder=nodesorder,
   734         revisiondata=True,
   799         revisiondata=True,
   735         assumehaveparentrevisions=not ellipses,
   800         assumehaveparentrevisions=not ellipses,
   736         deltamode=deltamode)
   801         deltamode=deltamode,
       
   802     )
   737 
   803 
   738     for i, revision in enumerate(revisions):
   804     for i, revision in enumerate(revisions):
   739         if progress:
   805         if progress:
   740             progress.update(i + 1)
   806             progress.update(i + 1)
   741 
   807 
   755         yield revision
   821         yield revision
   756 
   822 
   757     if progress:
   823     if progress:
   758         progress.complete()
   824         progress.complete()
   759 
   825 
       
   826 
   760 class cgpacker(object):
   827 class cgpacker(object):
   761     def __init__(self, repo, oldmatcher, matcher, version,
   828     def __init__(
   762                  builddeltaheader, manifestsend,
   829         self,
   763                  forcedeltaparentprev=False,
   830         repo,
   764                  bundlecaps=None, ellipses=False,
   831         oldmatcher,
   765                  shallow=False, ellipsisroots=None, fullnodes=None):
   832         matcher,
       
   833         version,
       
   834         builddeltaheader,
       
   835         manifestsend,
       
   836         forcedeltaparentprev=False,
       
   837         bundlecaps=None,
       
   838         ellipses=False,
       
   839         shallow=False,
       
   840         ellipsisroots=None,
       
   841         fullnodes=None,
       
   842     ):
   766         """Given a source repo, construct a bundler.
   843         """Given a source repo, construct a bundler.
   767 
   844 
   768         oldmatcher is a matcher that matches on files the client already has.
   845         oldmatcher is a matcher that matches on files the client already has.
   769         These will not be included in the changegroup.
   846         These will not be included in the changegroup.
   770 
   847 
   821         if self._repo.ui.verbose and not self._repo.ui.debugflag:
   898         if self._repo.ui.verbose and not self._repo.ui.debugflag:
   822             self._verbosenote = self._repo.ui.note
   899             self._verbosenote = self._repo.ui.note
   823         else:
   900         else:
   824             self._verbosenote = lambda s: None
   901             self._verbosenote = lambda s: None
   825 
   902 
   826     def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
   903     def generate(
   827                  changelog=True):
   904         self, commonrevs, clnodes, fastpathlinkrev, source, changelog=True
       
   905     ):
   828         """Yield a sequence of changegroup byte chunks.
   906         """Yield a sequence of changegroup byte chunks.
   829         If changelog is False, changelog data won't be added to changegroup
   907         If changelog is False, changelog data won't be added to changegroup
   830         """
   908         """
   831 
   909 
   832         repo = self._repo
   910         repo = self._repo
   833         cl = repo.changelog
   911         cl = repo.changelog
   834 
   912 
   835         self._verbosenote(_('uncompressed size of bundle content:\n'))
   913         self._verbosenote(_('uncompressed size of bundle content:\n'))
   836         size = 0
   914         size = 0
   837 
   915 
   838         clstate, deltas = self._generatechangelog(cl, clnodes,
   916         clstate, deltas = self._generatechangelog(
   839                                                   generate=changelog)
   917             cl, clnodes, generate=changelog
       
   918         )
   840         for delta in deltas:
   919         for delta in deltas:
   841             for chunk in _revisiondeltatochunks(delta,
   920             for chunk in _revisiondeltatochunks(delta, self._builddeltaheader):
   842                                                 self._builddeltaheader):
       
   843                 size += len(chunk)
   921                 size += len(chunk)
   844                 yield chunk
   922                 yield chunk
   845 
   923 
   846         close = closechunk()
   924         close = closechunk()
   847         size += len(close)
   925         size += len(close)
   870 
   948 
   871         # Treemanifests don't work correctly with fastpathlinkrev
   949         # Treemanifests don't work correctly with fastpathlinkrev
   872         # either, because we don't discover which directory nodes to
   950         # either, because we don't discover which directory nodes to
   873         # send along with files. This could probably be fixed.
   951         # send along with files. This could probably be fixed.
   874         fastpathlinkrev = fastpathlinkrev and (
   952         fastpathlinkrev = fastpathlinkrev and (
   875             'treemanifest' not in repo.requirements)
   953             'treemanifest' not in repo.requirements
       
   954         )
   876 
   955 
   877         fnodes = {}  # needed file nodes
   956         fnodes = {}  # needed file nodes
   878 
   957 
   879         size = 0
   958         size = 0
   880         it = self.generatemanifests(
   959         it = self.generatemanifests(
   881             commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
   960             commonrevs,
   882             clstate['clrevtomanifestrev'])
   961             clrevorder,
       
   962             fastpathlinkrev,
       
   963             manifests,
       
   964             fnodes,
       
   965             source,
       
   966             clstate['clrevtomanifestrev'],
       
   967         )
   883 
   968 
   884         for tree, deltas in it:
   969         for tree, deltas in it:
   885             if tree:
   970             if tree:
   886                 assert self.version == b'03'
   971                 assert self.version == b'03'
   887                 chunk = _fileheader(tree)
   972                 chunk = _fileheader(tree)
   901         self._verbosenote(_('%8.i (manifests)\n') % size)
   986         self._verbosenote(_('%8.i (manifests)\n') % size)
   902         yield self._manifestsend
   987         yield self._manifestsend
   903 
   988 
   904         mfdicts = None
   989         mfdicts = None
   905         if self._ellipses and self._isshallow:
   990         if self._ellipses and self._isshallow:
   906             mfdicts = [(self._repo.manifestlog[n].read(), lr)
   991             mfdicts = [
   907                        for (n, lr) in manifests.iteritems()]
   992                 (self._repo.manifestlog[n].read(), lr)
       
   993                 for (n, lr) in manifests.iteritems()
       
   994             ]
   908 
   995 
   909         manifests.clear()
   996         manifests.clear()
   910         clrevs = set(cl.rev(x) for x in clnodes)
   997         clrevs = set(cl.rev(x) for x in clnodes)
   911 
   998 
   912         it = self.generatefiles(changedfiles, commonrevs,
   999         it = self.generatefiles(
   913                                 source, mfdicts, fastpathlinkrev,
  1000             changedfiles,
   914                                 fnodes, clrevs)
  1001             commonrevs,
       
  1002             source,
       
  1003             mfdicts,
       
  1004             fastpathlinkrev,
       
  1005             fnodes,
       
  1006             clrevs,
       
  1007         )
   915 
  1008 
   916         for path, deltas in it:
  1009         for path, deltas in it:
   917             h = _fileheader(path)
  1010             h = _fileheader(path)
   918             size = len(h)
  1011             size = len(h)
   919             yield h
  1012             yield h
   982             if self._ellipses:
  1075             if self._ellipses:
   983                 # Only update manifests if x is going to be sent. Otherwise we
  1076                 # Only update manifests if x is going to be sent. Otherwise we
   984                 # end up with bogus linkrevs specified for manifests and
  1077                 # end up with bogus linkrevs specified for manifests and
   985                 # we skip some manifest nodes that we should otherwise
  1078                 # we skip some manifest nodes that we should otherwise
   986                 # have sent.
  1079                 # have sent.
   987                 if (x in self._fullclnodes
  1080                 if (
   988                     or cl.rev(x) in self._precomputedellipsis):
  1081                     x in self._fullclnodes
       
  1082                     or cl.rev(x) in self._precomputedellipsis
       
  1083                 ):
   989 
  1084 
   990                     manifestnode = c.manifest
  1085                     manifestnode = c.manifest
   991                     # Record the first changeset introducing this manifest
  1086                     # Record the first changeset introducing this manifest
   992                     # version.
  1087                     # version.
   993                     manifests.setdefault(manifestnode, x)
  1088                     manifests.setdefault(manifestnode, x)
   994                     # Set this narrow-specific dict so we have the lowest
  1089                     # Set this narrow-specific dict so we have the lowest
   995                     # manifest revnum to look up for this cl revnum. (Part of
  1090                     # manifest revnum to look up for this cl revnum. (Part of
   996                     # mapping changelog ellipsis parents to manifest ellipsis
  1091                     # mapping changelog ellipsis parents to manifest ellipsis
   997                     # parents)
  1092                     # parents)
   998                     clrevtomanifestrev.setdefault(
  1093                     clrevtomanifestrev.setdefault(
   999                         cl.rev(x), mfl.rev(manifestnode))
  1094                         cl.rev(x), mfl.rev(manifestnode)
       
  1095                     )
  1000                 # We can't trust the changed files list in the changeset if the
  1096                 # We can't trust the changed files list in the changeset if the
  1001                 # client requested a shallow clone.
  1097                 # client requested a shallow clone.
  1002                 if self._isshallow:
  1098                 if self._isshallow:
  1003                     changedfiles.update(mfl[c.manifest].read().keys())
  1099                     changedfiles.update(mfl[c.manifest].read().keys())
  1004                 else:
  1100                 else:
  1011                 changedfiles.update(c.files)
  1107                 changedfiles.update(c.files)
  1012 
  1108 
  1013             return x
  1109             return x
  1014 
  1110 
  1015         gen = deltagroup(
  1111         gen = deltagroup(
  1016             self._repo, cl, nodes, True, lookupcl,
  1112             self._repo,
       
  1113             cl,
       
  1114             nodes,
       
  1115             True,
       
  1116             lookupcl,
  1017             self._forcedeltaparentprev,
  1117             self._forcedeltaparentprev,
  1018             ellipses=self._ellipses,
  1118             ellipses=self._ellipses,
  1019             topic=_('changesets'),
  1119             topic=_('changesets'),
  1020             clrevtolocalrev={},
  1120             clrevtolocalrev={},
  1021             fullclnodes=self._fullclnodes,
  1121             fullclnodes=self._fullclnodes,
  1022             precomputedellipsis=self._precomputedellipsis)
  1122             precomputedellipsis=self._precomputedellipsis,
       
  1123         )
  1023 
  1124 
  1024         return state, gen
  1125         return state, gen
  1025 
  1126 
  1026     def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
  1127     def generatemanifests(
  1027                           manifests, fnodes, source, clrevtolocalrev):
  1128         self,
       
  1129         commonrevs,
       
  1130         clrevorder,
       
  1131         fastpathlinkrev,
       
  1132         manifests,
       
  1133         fnodes,
       
  1134         source,
       
  1135         clrevtolocalrev,
       
  1136     ):
  1028         """Returns an iterator of changegroup chunks containing manifests.
  1137         """Returns an iterator of changegroup chunks containing manifests.
  1029 
  1138 
  1030         `source` is unused here, but is used by extensions like remotefilelog to
  1139         `source` is unused here, but is used by extensions like remotefilelog to
  1031         change what is sent based in pulls vs pushes, etc.
  1140         change what is sent based in pulls vs pushes, etc.
  1032         """
  1141         """
  1059                 treemanifests to send.
  1168                 treemanifests to send.
  1060                 """
  1169                 """
  1061                 clnode = nodes[x]
  1170                 clnode = nodes[x]
  1062                 mdata = mfl.get(tree, x).readfast(shallow=True)
  1171                 mdata = mfl.get(tree, x).readfast(shallow=True)
  1063                 for p, n, fl in mdata.iterentries():
  1172                 for p, n, fl in mdata.iterentries():
  1064                     if fl == 't': # subdirectory manifest
  1173                     if fl == 't':  # subdirectory manifest
  1065                         subtree = tree + p + '/'
  1174                         subtree = tree + p + '/'
  1066                         tmfclnodes = tmfnodes.setdefault(subtree, {})
  1175                         tmfclnodes = tmfnodes.setdefault(subtree, {})
  1067                         tmfclnode = tmfclnodes.setdefault(n, clnode)
  1176                         tmfclnode = tmfclnodes.setdefault(n, clnode)
  1068                         if clrevorder[clnode] < clrevorder[tmfclnode]:
  1177                         if clrevorder[clnode] < clrevorder[tmfclnode]:
  1069                             tmfclnodes[n] = clnode
  1178                             tmfclnodes[n] = clnode
  1072                         fclnodes = fnodes.setdefault(f, {})
  1181                         fclnodes = fnodes.setdefault(f, {})
  1073                         fclnode = fclnodes.setdefault(n, clnode)
  1182                         fclnode = fclnodes.setdefault(n, clnode)
  1074                         if clrevorder[clnode] < clrevorder[fclnode]:
  1183                         if clrevorder[clnode] < clrevorder[fclnode]:
  1075                             fclnodes[n] = clnode
  1184                             fclnodes[n] = clnode
  1076                 return clnode
  1185                 return clnode
       
  1186 
  1077             return lookupmflinknode
  1187             return lookupmflinknode
  1078 
  1188 
  1079         while tmfnodes:
  1189         while tmfnodes:
  1080             tree, nodes = tmfnodes.popitem()
  1190             tree, nodes = tmfnodes.popitem()
  1081 
  1191 
  1101                 continue
  1211                 continue
  1102 
  1212 
  1103             lookupfn = makelookupmflinknode(tree, nodes)
  1213             lookupfn = makelookupmflinknode(tree, nodes)
  1104 
  1214 
  1105             deltas = deltagroup(
  1215             deltas = deltagroup(
  1106                 self._repo, store, prunednodes, False, lookupfn,
  1216                 self._repo,
       
  1217                 store,
       
  1218                 prunednodes,
       
  1219                 False,
       
  1220                 lookupfn,
  1107                 self._forcedeltaparentprev,
  1221                 self._forcedeltaparentprev,
  1108                 ellipses=self._ellipses,
  1222                 ellipses=self._ellipses,
  1109                 topic=_('manifests'),
  1223                 topic=_('manifests'),
  1110                 clrevtolocalrev=clrevtolocalrev,
  1224                 clrevtolocalrev=clrevtolocalrev,
  1111                 fullclnodes=self._fullclnodes,
  1225                 fullclnodes=self._fullclnodes,
  1112                 precomputedellipsis=self._precomputedellipsis)
  1226                 precomputedellipsis=self._precomputedellipsis,
       
  1227             )
  1113 
  1228 
  1114             if not self._oldmatcher.visitdir(store.tree[:-1]):
  1229             if not self._oldmatcher.visitdir(store.tree[:-1]):
  1115                 yield tree, deltas
  1230                 yield tree, deltas
  1116             else:
  1231             else:
  1117                 # 'deltas' is a generator and we need to consume it even if
  1232                 # 'deltas' is a generator and we need to consume it even if
  1136         # layer.
  1251         # layer.
  1137         frev, flr = store.rev, store.linkrev
  1252         frev, flr = store.rev, store.linkrev
  1138         return [n for n in nodes if flr(frev(n)) not in commonrevs]
  1253         return [n for n in nodes if flr(frev(n)) not in commonrevs]
  1139 
  1254 
  1140     # The 'source' parameter is useful for extensions
  1255     # The 'source' parameter is useful for extensions
  1141     def generatefiles(self, changedfiles, commonrevs, source,
  1256     def generatefiles(
  1142                       mfdicts, fastpathlinkrev, fnodes, clrevs):
  1257         self,
  1143         changedfiles = [f for f in changedfiles
  1258         changedfiles,
  1144                         if self._matcher(f) and not self._oldmatcher(f)]
  1259         commonrevs,
       
  1260         source,
       
  1261         mfdicts,
       
  1262         fastpathlinkrev,
       
  1263         fnodes,
       
  1264         clrevs,
       
  1265     ):
       
  1266         changedfiles = [
       
  1267             f
       
  1268             for f in changedfiles
       
  1269             if self._matcher(f) and not self._oldmatcher(f)
       
  1270         ]
  1145 
  1271 
  1146         if not fastpathlinkrev:
  1272         if not fastpathlinkrev:
       
  1273 
  1147             def normallinknodes(unused, fname):
  1274             def normallinknodes(unused, fname):
  1148                 return fnodes.get(fname, {})
  1275                 return fnodes.get(fname, {})
       
  1276 
  1149         else:
  1277         else:
  1150             cln = self._repo.changelog.node
  1278             cln = self._repo.changelog.node
  1151 
  1279 
  1152             def normallinknodes(store, fname):
  1280             def normallinknodes(store, fname):
  1153                 flinkrev = store.linkrev
  1281                 flinkrev = store.linkrev
  1154                 fnode = store.node
  1282                 fnode = store.node
  1155                 revs = ((r, flinkrev(r)) for r in store)
  1283                 revs = ((r, flinkrev(r)) for r in store)
  1156                 return dict((fnode(r), cln(lr))
  1284                 return dict(
  1157                             for r, lr in revs if lr in clrevs)
  1285                     (fnode(r), cln(lr)) for r, lr in revs if lr in clrevs
       
  1286                 )
  1158 
  1287 
  1159         clrevtolocalrev = {}
  1288         clrevtolocalrev = {}
  1160 
  1289 
  1161         if self._isshallow:
  1290         if self._isshallow:
  1162             # In a shallow clone, the linknodes callback needs to also include
  1291             # In a shallow clone, the linknodes callback needs to also include
  1179                         if fnode in links:
  1308                         if fnode in links:
  1180                             links[fnode] = min(links[fnode], lr, key=clrev)
  1309                             links[fnode] = min(links[fnode], lr, key=clrev)
  1181                         elif fnode:
  1310                         elif fnode:
  1182                             links[fnode] = lr
  1311                             links[fnode] = lr
  1183                 return links
  1312                 return links
       
  1313 
  1184         else:
  1314         else:
  1185             linknodes = normallinknodes
  1315             linknodes = normallinknodes
  1186 
  1316 
  1187         repo = self._repo
  1317         repo = self._repo
  1188         progress = repo.ui.makeprogress(_('files'), unit=_('files'),
  1318         progress = repo.ui.makeprogress(
  1189                                         total=len(changedfiles))
  1319             _('files'), unit=_('files'), total=len(changedfiles)
       
  1320         )
  1190         for i, fname in enumerate(sorted(changedfiles)):
  1321         for i, fname in enumerate(sorted(changedfiles)):
  1191             filerevlog = repo.file(fname)
  1322             filerevlog = repo.file(fname)
  1192             if not filerevlog:
  1323             if not filerevlog:
  1193                 raise error.Abort(_("empty or missing file data for %s") %
  1324                 raise error.Abort(
  1194                                   fname)
  1325                     _("empty or missing file data for %s") % fname
       
  1326                 )
  1195 
  1327 
  1196             clrevtolocalrev.clear()
  1328             clrevtolocalrev.clear()
  1197 
  1329 
  1198             linkrevnodes = linknodes(filerevlog, fname)
  1330             linkrevnodes = linknodes(filerevlog, fname)
  1199             # Lookup for filenodes, we collected the linkrev nodes above in the
  1331             # Lookup for filenodes, we collected the linkrev nodes above in the
  1204             frev, flr = filerevlog.rev, filerevlog.linkrev
  1336             frev, flr = filerevlog.rev, filerevlog.linkrev
  1205             # Skip sending any filenode we know the client already
  1337             # Skip sending any filenode we know the client already
  1206             # has. This avoids over-sending files relatively
  1338             # has. This avoids over-sending files relatively
  1207             # inexpensively, so it's not a problem if we under-filter
  1339             # inexpensively, so it's not a problem if we under-filter
  1208             # here.
  1340             # here.
  1209             filenodes = [n for n in linkrevnodes
  1341             filenodes = [
  1210                          if flr(frev(n)) not in commonrevs]
  1342                 n for n in linkrevnodes if flr(frev(n)) not in commonrevs
       
  1343             ]
  1211 
  1344 
  1212             if not filenodes:
  1345             if not filenodes:
  1213                 continue
  1346                 continue
  1214 
  1347 
  1215             progress.update(i + 1, item=fname)
  1348             progress.update(i + 1, item=fname)
  1216 
  1349 
  1217             deltas = deltagroup(
  1350             deltas = deltagroup(
  1218                 self._repo, filerevlog, filenodes, False, lookupfilelog,
  1351                 self._repo,
       
  1352                 filerevlog,
       
  1353                 filenodes,
       
  1354                 False,
       
  1355                 lookupfilelog,
  1219                 self._forcedeltaparentprev,
  1356                 self._forcedeltaparentprev,
  1220                 ellipses=self._ellipses,
  1357                 ellipses=self._ellipses,
  1221                 clrevtolocalrev=clrevtolocalrev,
  1358                 clrevtolocalrev=clrevtolocalrev,
  1222                 fullclnodes=self._fullclnodes,
  1359                 fullclnodes=self._fullclnodes,
  1223                 precomputedellipsis=self._precomputedellipsis)
  1360                 precomputedellipsis=self._precomputedellipsis,
       
  1361             )
  1224 
  1362 
  1225             yield fname, deltas
  1363             yield fname, deltas
  1226 
  1364 
  1227         progress.complete()
  1365         progress.complete()
  1228 
  1366 
  1229 def _makecg1packer(repo, oldmatcher, matcher, bundlecaps,
  1367 
  1230                    ellipses=False, shallow=False, ellipsisroots=None,
  1368 def _makecg1packer(
  1231                    fullnodes=None):
  1369     repo,
       
  1370     oldmatcher,
       
  1371     matcher,
       
  1372     bundlecaps,
       
  1373     ellipses=False,
       
  1374     shallow=False,
       
  1375     ellipsisroots=None,
       
  1376     fullnodes=None,
       
  1377 ):
  1232     builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
  1378     builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
  1233         d.node, d.p1node, d.p2node, d.linknode)
  1379         d.node, d.p1node, d.p2node, d.linknode
  1234 
  1380     )
  1235     return cgpacker(repo, oldmatcher, matcher, b'01',
  1381 
  1236                     builddeltaheader=builddeltaheader,
  1382     return cgpacker(
  1237                     manifestsend=b'',
  1383         repo,
  1238                     forcedeltaparentprev=True,
  1384         oldmatcher,
  1239                     bundlecaps=bundlecaps,
  1385         matcher,
  1240                     ellipses=ellipses,
  1386         b'01',
  1241                     shallow=shallow,
  1387         builddeltaheader=builddeltaheader,
  1242                     ellipsisroots=ellipsisroots,
  1388         manifestsend=b'',
  1243                     fullnodes=fullnodes)
  1389         forcedeltaparentprev=True,
  1244 
  1390         bundlecaps=bundlecaps,
  1245 def _makecg2packer(repo, oldmatcher, matcher, bundlecaps,
  1391         ellipses=ellipses,
  1246                    ellipses=False, shallow=False, ellipsisroots=None,
  1392         shallow=shallow,
  1247                    fullnodes=None):
  1393         ellipsisroots=ellipsisroots,
       
  1394         fullnodes=fullnodes,
       
  1395     )
       
  1396 
       
  1397 
       
  1398 def _makecg2packer(
       
  1399     repo,
       
  1400     oldmatcher,
       
  1401     matcher,
       
  1402     bundlecaps,
       
  1403     ellipses=False,
       
  1404     shallow=False,
       
  1405     ellipsisroots=None,
       
  1406     fullnodes=None,
       
  1407 ):
  1248     builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
  1408     builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
  1249         d.node, d.p1node, d.p2node, d.basenode, d.linknode)
  1409         d.node, d.p1node, d.p2node, d.basenode, d.linknode
  1250 
  1410     )
  1251     return cgpacker(repo, oldmatcher, matcher, b'02',
  1411 
  1252                     builddeltaheader=builddeltaheader,
  1412     return cgpacker(
  1253                     manifestsend=b'',
  1413         repo,
  1254                     bundlecaps=bundlecaps,
  1414         oldmatcher,
  1255                     ellipses=ellipses,
  1415         matcher,
  1256                     shallow=shallow,
  1416         b'02',
  1257                     ellipsisroots=ellipsisroots,
  1417         builddeltaheader=builddeltaheader,
  1258                     fullnodes=fullnodes)
  1418         manifestsend=b'',
  1259 
  1419         bundlecaps=bundlecaps,
  1260 def _makecg3packer(repo, oldmatcher, matcher, bundlecaps,
  1420         ellipses=ellipses,
  1261                    ellipses=False, shallow=False, ellipsisroots=None,
  1421         shallow=shallow,
  1262                    fullnodes=None):
  1422         ellipsisroots=ellipsisroots,
       
  1423         fullnodes=fullnodes,
       
  1424     )
       
  1425 
       
  1426 
       
  1427 def _makecg3packer(
       
  1428     repo,
       
  1429     oldmatcher,
       
  1430     matcher,
       
  1431     bundlecaps,
       
  1432     ellipses=False,
       
  1433     shallow=False,
       
  1434     ellipsisroots=None,
       
  1435     fullnodes=None,
       
  1436 ):
  1263     builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
  1437     builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
  1264         d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)
  1438         d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags
  1265 
  1439     )
  1266     return cgpacker(repo, oldmatcher, matcher, b'03',
  1440 
  1267                     builddeltaheader=builddeltaheader,
  1441     return cgpacker(
  1268                     manifestsend=closechunk(),
  1442         repo,
  1269                     bundlecaps=bundlecaps,
  1443         oldmatcher,
  1270                     ellipses=ellipses,
  1444         matcher,
  1271                     shallow=shallow,
  1445         b'03',
  1272                     ellipsisroots=ellipsisroots,
  1446         builddeltaheader=builddeltaheader,
  1273                     fullnodes=fullnodes)
  1447         manifestsend=closechunk(),
  1274 
  1448         bundlecaps=bundlecaps,
  1275 _packermap = {'01': (_makecg1packer, cg1unpacker),
  1449         ellipses=ellipses,
  1276              # cg2 adds support for exchanging generaldelta
  1450         shallow=shallow,
  1277              '02': (_makecg2packer, cg2unpacker),
  1451         ellipsisroots=ellipsisroots,
  1278              # cg3 adds support for exchanging revlog flags and treemanifests
  1452         fullnodes=fullnodes,
  1279              '03': (_makecg3packer, cg3unpacker),
  1453     )
       
  1454 
       
  1455 
       
  1456 _packermap = {
       
  1457     '01': (_makecg1packer, cg1unpacker),
       
  1458     # cg2 adds support for exchanging generaldelta
       
  1459     '02': (_makecg2packer, cg2unpacker),
       
  1460     # cg3 adds support for exchanging revlog flags and treemanifests
       
  1461     '03': (_makecg3packer, cg3unpacker),
  1280 }
  1462 }
       
  1463 
  1281 
  1464 
  1282 def allsupportedversions(repo):
  1465 def allsupportedversions(repo):
  1283     versions = set(_packermap.keys())
  1466     versions = set(_packermap.keys())
  1284     needv03 = False
  1467     needv03 = False
  1285     if (repo.ui.configbool('experimental', 'changegroup3') or
  1468     if (
  1286         repo.ui.configbool('experimental', 'treemanifest') or
  1469         repo.ui.configbool('experimental', 'changegroup3')
  1287         'treemanifest' in repo.requirements):
  1470         or repo.ui.configbool('experimental', 'treemanifest')
       
  1471         or 'treemanifest' in repo.requirements
       
  1472     ):
  1288         # we keep version 03 because we need to exchange treemanifest data
  1473         # we keep version 03 because we need to exchange treemanifest data
  1289         #
  1474         #
  1290         # we also keep versions 01 and 02, because it is possible for a repo to
  1475         # we also keep versions 01 and 02, because it is possible for a repo to
  1291         # contain both normal and tree manifests at the same time, so using an
  1476         # contain both normal and tree manifests at the same time, so using an
  1292         # older version to pull data is viable
  1477         # older version to pull data is viable
  1295         needv03 = True
  1480         needv03 = True
  1296     if not needv03:
  1481     if not needv03:
  1297         versions.discard('03')
  1482         versions.discard('03')
  1298     return versions
  1483     return versions
  1299 
  1484 
       
  1485 
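       # A minimal sketch of what the gating above amounts to (illustrative
       # only; it assumes `repo` is an already-opened localrepository):
       #
       #     versions = allsupportedversions(repo)
       #     assert versions <= {'01', '02', '03'}
       #     # '03' survives only when one of the conditions above set needv03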
  1300 # Changegroup versions that can be applied to the repo
  1486 # Changegroup versions that can be applied to the repo
  1301 def supportedincomingversions(repo):
  1487 def supportedincomingversions(repo):
  1302     return allsupportedversions(repo)
  1488     return allsupportedversions(repo)
       
  1489 
  1303 
  1490 
  1304 # Changegroup versions that can be created from the repo
  1491 # Changegroup versions that can be created from the repo
  1305 def supportedoutgoingversions(repo):
  1492 def supportedoutgoingversions(repo):
  1306     versions = allsupportedversions(repo)
  1493     versions = allsupportedversions(repo)
  1307     if 'treemanifest' in repo.requirements:
  1494     if 'treemanifest' in repo.requirements:
  1323         versions.discard('01')
  1510         versions.discard('01')
  1324         versions.discard('02')
  1511         versions.discard('02')
  1325 
  1512 
  1326     return versions
  1513     return versions
  1327 
  1514 
       
  1515 
  1328 def localversion(repo):
  1516 def localversion(repo):
  1329     # Finds the best version to use for bundles that are meant to be used
  1517     # Finds the best version to use for bundles that are meant to be used
  1330     # locally, such as those from strip and shelve, and temporary bundles.
  1518     # locally, such as those from strip and shelve, and temporary bundles.
  1331     return max(supportedoutgoingversions(repo))
  1519     return max(supportedoutgoingversions(repo))
       
  1520 
  1332 
  1521 
  1333 def safeversion(repo):
  1522 def safeversion(repo):
  1334     # Finds the smallest version that it's safe to assume clients of the repo
  1523     # Finds the smallest version that it's safe to assume clients of the repo
  1335     # will support. For example, all hg versions that support generaldelta also
  1524     # will support. For example, all hg versions that support generaldelta also
  1336     # support changegroup 02.
  1525     # support changegroup 02.
  1338     if 'generaldelta' in repo.requirements:
  1527     if 'generaldelta' in repo.requirements:
  1339         versions.discard('01')
  1528         versions.discard('01')
  1340     assert versions
  1529     assert versions
  1341     return min(versions)
  1530     return min(versions)
  1342 
  1531 
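       # A minimal sketch of how these version helpers relate (illustrative
       # only; `repo` is assumed to be an already-opened localrepository):
       #
       #     assert safeversion(repo) <= localversion(repo)
       #     assert localversion(repo) in supportedoutgoingversions(repo)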
  1343 def getbundler(version, repo, bundlecaps=None, oldmatcher=None,
  1532 
  1344                matcher=None, ellipses=False, shallow=False,
  1533 def getbundler(
  1345                ellipsisroots=None, fullnodes=None):
  1534     version,
       
  1535     repo,
       
  1536     bundlecaps=None,
       
  1537     oldmatcher=None,
       
  1538     matcher=None,
       
  1539     ellipses=False,
       
  1540     shallow=False,
       
  1541     ellipsisroots=None,
       
  1542     fullnodes=None,
       
  1543 ):
  1346     assert version in supportedoutgoingversions(repo)
  1544     assert version in supportedoutgoingversions(repo)
  1347 
  1545 
  1348     if matcher is None:
  1546     if matcher is None:
  1349         matcher = matchmod.always()
  1547         matcher = matchmod.always()
  1350     if oldmatcher is None:
  1548     if oldmatcher is None:
  1351         oldmatcher = matchmod.never()
  1549         oldmatcher = matchmod.never()
  1352 
  1550 
  1353     if version == '01' and not matcher.always():
  1551     if version == '01' and not matcher.always():
  1354         raise error.ProgrammingError('version 01 changegroups do not support '
  1552         raise error.ProgrammingError(
  1355                                      'sparse file matchers')
  1553             'version 01 changegroups do not support ' 'sparse file matchers'
       
  1554         )
  1356 
  1555 
  1357     if ellipses and version in (b'01', b'02'):
  1556     if ellipses and version in (b'01', b'02'):
  1358         raise error.Abort(
  1557         raise error.Abort(
  1359             _('ellipsis nodes require at least cg3 on client and server, '
  1558             _(
  1360               'but negotiated version %s') % version)
  1559                 'ellipsis nodes require at least cg3 on client and server, '
       
  1560                 'but negotiated version %s'
       
  1561             )
       
  1562             % version
       
  1563         )
  1361 
  1564 
  1362     # Requested files could include files not in the local store. So
  1565     # Requested files could include files not in the local store. So
  1363     # filter those out.
  1566     # filter those out.
  1364     matcher = repo.narrowmatch(matcher)
  1567     matcher = repo.narrowmatch(matcher)
  1365 
  1568 
  1366     fn = _packermap[version][0]
  1569     fn = _packermap[version][0]
  1367     return fn(repo, oldmatcher, matcher, bundlecaps, ellipses=ellipses,
  1570     return fn(
  1368               shallow=shallow, ellipsisroots=ellipsisroots,
  1571         repo,
  1369               fullnodes=fullnodes)
  1572         oldmatcher,
       
  1573         matcher,
       
  1574         bundlecaps,
       
  1575         ellipses=ellipses,
       
  1576         shallow=shallow,
       
  1577         ellipsisroots=ellipsisroots,
       
  1578         fullnodes=fullnodes,
       
  1579     )
       
  1580 
  1370 
  1581 
  1371 def getunbundler(version, fh, alg, extras=None):
  1582 def getunbundler(version, fh, alg, extras=None):
  1372     return _packermap[version][1](fh, alg, extras=extras)
  1583     return _packermap[version][1](fh, alg, extras=extras)
       
  1584 
  1373 
  1585 
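       # A minimal sketch of the packer/unpacker pairing driven by _packermap
       # (illustrative only; `repo` is assumed to be a localrepository and
       # `fh` a binary file object positioned at a changegroup stream of the
       # same version):
       #
       #     version = localversion(repo)
       #     packer = getbundler(version, repo)
       #     unbundler = getunbundler(version, fh, None)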
  1374 def _changegroupinfo(repo, nodes, source):
  1586 def _changegroupinfo(repo, nodes, source):
  1375     if repo.ui.verbose or source == 'bundle':
  1587     if repo.ui.verbose or source == 'bundle':
  1376         repo.ui.status(_("%d changesets found\n") % len(nodes))
  1588         repo.ui.status(_("%d changesets found\n") % len(nodes))
  1377     if repo.ui.debugflag:
  1589     if repo.ui.debugflag:
  1378         repo.ui.debug("list of changesets:\n")
  1590         repo.ui.debug("list of changesets:\n")
  1379         for node in nodes:
  1591         for node in nodes:
  1380             repo.ui.debug("%s\n" % hex(node))
  1592             repo.ui.debug("%s\n" % hex(node))
  1381 
  1593 
  1382 def makechangegroup(repo, outgoing, version, source, fastpath=False,
  1594 
  1383                     bundlecaps=None):
  1595 def makechangegroup(
  1384     cgstream = makestream(repo, outgoing, version, source,
  1596     repo, outgoing, version, source, fastpath=False, bundlecaps=None
  1385                           fastpath=fastpath, bundlecaps=bundlecaps)
  1597 ):
  1386     return getunbundler(version, util.chunkbuffer(cgstream), None,
  1598     cgstream = makestream(
  1387                         {'clcount': len(outgoing.missing) })
  1599         repo,
  1388 
  1600         outgoing,
  1389 def makestream(repo, outgoing, version, source, fastpath=False,
  1601         version,
  1390                bundlecaps=None, matcher=None):
  1602         source,
  1391     bundler = getbundler(version, repo, bundlecaps=bundlecaps,
  1603         fastpath=fastpath,
  1392                          matcher=matcher)
  1604         bundlecaps=bundlecaps,
       
  1605     )
       
  1606     return getunbundler(
       
  1607         version,
       
  1608         util.chunkbuffer(cgstream),
       
  1609         None,
       
  1610         {'clcount': len(outgoing.missing)},
       
  1611     )
       
  1612 
       
  1613 
       
  1614 def makestream(
       
  1615     repo,
       
  1616     outgoing,
       
  1617     version,
       
  1618     source,
       
  1619     fastpath=False,
       
  1620     bundlecaps=None,
       
  1621     matcher=None,
       
  1622 ):
       
  1623     bundler = getbundler(version, repo, bundlecaps=bundlecaps, matcher=matcher)
  1393 
  1624 
  1394     repo = repo.unfiltered()
  1625     repo = repo.unfiltered()
  1395     commonrevs = outgoing.common
  1626     commonrevs = outgoing.common
  1396     csets = outgoing.missing
  1627     csets = outgoing.missing
  1397     heads = outgoing.missingheads
  1628     heads = outgoing.missingheads
  1398     # We go through the fast path if we get told to, or if all (unfiltered)
  1629     # We go through the fast path if we get told to, or if all (unfiltered)
  1399     # heads have been requested (since we then know all linkrevs will
  1630     # heads have been requested (since we then know all linkrevs will
  1400     # be pulled by the client).
  1631     # be pulled by the client).
  1401     heads.sort()
  1632     heads.sort()
  1402     fastpathlinkrev = fastpath or (
  1633     fastpathlinkrev = fastpath or (
  1403             repo.filtername is None and heads == sorted(repo.heads()))
  1634         repo.filtername is None and heads == sorted(repo.heads())
       
  1635     )
  1404 
  1636 
  1405     repo.hook('preoutgoing', throw=True, source=source)
  1637     repo.hook('preoutgoing', throw=True, source=source)
  1406     _changegroupinfo(repo, csets, source)
  1638     _changegroupinfo(repo, csets, source)
  1407     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
  1639     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
  1408 
  1640 
       
  1641 
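       # A minimal end-to-end sketch (illustrative only; `repo` is assumed to
       # be a localrepository and `outgoing` a discovery.outgoing instance
       # describing the changesets to send):
       #
       #     cg = makechangegroup(repo, outgoing, localversion(repo), 'bundle')
       #     # `cg` is the matching cg*unpacker wrapping the generated stream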
  1409 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
  1642 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
  1410     revisions = 0
  1643     revisions = 0
  1411     files = 0
  1644     files = 0
  1412     progress = repo.ui.makeprogress(_('files'), unit=_('files'),
  1645     progress = repo.ui.makeprogress(
  1413                                     total=expectedfiles)
  1646         _('files'), unit=_('files'), total=expectedfiles
       
  1647     )
  1414     for chunkdata in iter(source.filelogheader, {}):
  1648     for chunkdata in iter(source.filelogheader, {}):
  1415         files += 1
  1649         files += 1
  1416         f = chunkdata["filename"]
  1650         f = chunkdata["filename"]
  1417         repo.ui.debug("adding %s revisions\n" % f)
  1651         repo.ui.debug("adding %s revisions\n" % f)
  1418         progress.increment()
  1652         progress.increment()
  1430             for new in pycompat.xrange(o, len(fl)):
  1664             for new in pycompat.xrange(o, len(fl)):
  1431                 n = fl.node(new)
  1665                 n = fl.node(new)
  1432                 if n in needs:
  1666                 if n in needs:
  1433                     needs.remove(n)
  1667                     needs.remove(n)
  1434                 else:
  1668                 else:
  1435                     raise error.Abort(
  1669                     raise error.Abort(_("received spurious file revlog entry"))
  1436                         _("received spurious file revlog entry"))
       
  1437             if not needs:
  1670             if not needs:
  1438                 del needfiles[f]
  1671                 del needfiles[f]
  1439     progress.complete()
  1672     progress.complete()
  1440 
  1673 
  1441     for f, needs in needfiles.iteritems():
  1674     for f, needs in needfiles.iteritems():
  1443         for n in needs:
  1676         for n in needs:
  1444             try:
  1677             try:
  1445                 fl.rev(n)
  1678                 fl.rev(n)
  1446             except error.LookupError:
  1679             except error.LookupError:
  1447                 raise error.Abort(
  1680                 raise error.Abort(
  1448                     _('missing file data for %s:%s - run hg verify') %
  1681                     _('missing file data for %s:%s - run hg verify')
  1449                     (f, hex(n)))
  1682                     % (f, hex(n))
       
  1683                 )
  1450 
  1684 
  1451     return revisions, files
  1685     return revisions, files