setdiscovery: back out changeset 5cfdf6137af8 (issue5809)
author: Martin von Zweigbergk <!-- email address stripped in extraction -->
Sun, 04 Mar 2018 07:37:08 -0800
changeset 43466 613954a17a2558700bb1f60afd1e6a6ec9921a8a
parent 43465 250f3168d907d6e0a99976d72ce53af5ec8f6803
child 43467 bf485b70d0aef13b1b6cccb4770fea664af12661
push id: 729
push date: Mon, 05 Mar 2018 00:43:25 +0000
setdiscovery: back out changeset 5cfdf6137af8 (issue5809)

As explained in the bug report, this commit caused a performance regression. The problem occurs when the local repo has very many heads. Before 5cfdf6137af8, we used to get the remote's list of heads, and if these heads mostly overlapped with the local repo's heads, we would mark those common heads as common, which would greatly reduce the size of the set of undecided nodes.

Note that a similar problem existed before 5cfdf6137af8: if the local repo had very many heads and the server had just a few (or many heads from a disjoint set), we would do the same kind of slow discovery as we would with 5cfdf6137af8 in the case where the local and remote repos share a large set of common nodes.

For now, we just back out 5cfdf6137af8. We should improve the discovery in the "local has many heads, remote has few heads" case, but let's do that after backing this out.

Differential Revision: (link stripped in extraction)
--- a/mercurial/discovery.py
+++ b/mercurial/discovery.py
@@ -52,17 +52,17 @@ def findcommonincoming(repo, remote, hea
     if not remote.capable('getbundle'):
         return treediscovery.findcommonincoming(repo, remote, heads, force)
     if heads:
         knownnode = repo.changelog.hasnode # no nodemap until it is filtered
         if all(knownnode(h) for h in heads):
             return (heads, False, heads)
-    res = setdiscovery.findcommonheads(repo.ui, repo, remote, heads,
+    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                        abortwhenunrelated=not force,
     common, anyinc, srvheads = res
     return (list(common), anyinc, heads or list(srvheads))
 class outgoing(object):
     '''Represents the set of nodes present in a local repo but not in a
     (possibly) remote one.
--- a/mercurial/setdiscovery.py
+++ b/mercurial/setdiscovery.py
@@ -125,17 +125,17 @@ def _takefullsample(dag, nodes, size):
     return sample
 def _limitsample(sample, desiredlen):
     """return a random subset of sample of at most desiredlen item"""
     if len(sample) > desiredlen:
         sample = set(random.sample(sample, desiredlen))
     return sample
-def findcommonheads(ui, local, remote, heads=None,
+def findcommonheads(ui, local, remote,
     '''Return a tuple (common, anyincoming, remoteheads) used to identify
     missing nodes from or in remote.
     start = util.timer()
@@ -150,25 +150,21 @@ def findcommonheads(ui, local, remote, h
     # early exit if we know all the specified remote heads already
     ui.debug("query 1; heads\n")
     roundtrips += 1
     ownheads = dag.heads()
     sample = _limitsample(ownheads, initialsamplesize)
     # indices between sample and externalized version must match
     sample = list(sample)
-    if heads:
-        srvheadhashes = heads
-        yesno = remote.known(dag.externalizeall(sample))
-    else:
-        batch = remote.iterbatch()
-        batch.heads()
-        batch.known(dag.externalizeall(sample))
-        batch.submit()
-        srvheadhashes, yesno = batch.results()
+    batch = remote.iterbatch()
+    batch.heads()
+    batch.known(dag.externalizeall(sample))
+    batch.submit()
+    srvheadhashes, yesno = batch.results()
     if cl.tip() == nullid:
         if srvheadhashes != [nullid]:
             return [nullid], True, srvheadhashes
         return [nullid], False, []
     # start actual discovery (we note this before the next "if" for
     # compatibility reasons)