remotefilelog: use progress helper in repack
author: Martin von Zweigbergk <martinvonz@google.com>
Tue, 04 Dec 2018 16:29:05 -0800
changeset 53608 b80915b524768563caac2516a5576723208c4c6e
parent 53607 fbd053af2eda078a0f53aea5af86db1b438da375
child 53609 4e08bbfc6d51bc51e1faa99f3675a42a4aeba501
push id: 1079
push user: gszorc@mozilla.com
push date: Mon, 10 Dec 2018 19:44:59 +0000
remotefilelog: use progress helper in repack Differential Revision: https://phab.mercurial-scm.org/D5378
hgext/remotefilelog/repack.py
--- a/hgext/remotefilelog/repack.py
+++ b/hgext/remotefilelog/repack.py
@@ -490,37 +490,39 @@ class repacker(object):
         maxchainlen = ui.configint('packs', 'maxchainlen', 1000)
 
         byfile = {}
         for entry in ledger.entries.itervalues():
             if entry.datasource:
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
 
         count = 0
+        repackprogress = ui.makeprogress(_("repacking data"), unit=self.unit,
+                                            total=len(byfile))
         for filename, entries in sorted(byfile.iteritems()):
-            ui.progress(_("repacking data"), count, unit=self.unit,
-                        total=len(byfile))
+            repackprogress.update(count)
 
             ancestors = {}
             nodes = list(node for node in entries)
             nohistory = []
+            buildprogress = ui.makeprogress(_("building history"), unit='nodes',
+                                            total=len(nodes))
             for i, node in enumerate(nodes):
                 if node in ancestors:
                     continue
-                ui.progress(_("building history"), i, unit='nodes',
-                            total=len(nodes))
+                buildprogress.update(i)
                 try:
                     ancestors.update(self.fullhistory.getancestors(filename,
                         node, known=ancestors))
                 except KeyError:
                     # Since we're packing data entries, we may not have the
                     # corresponding history entries for them. It's not a big
                     # deal, but the entries won't be delta'd perfectly.
                     nohistory.append(node)
-            ui.progress(_("building history"), None)
+            buildprogress.complete()
 
             # Order the nodes children first, so we can produce reverse deltas
             orderednodes = list(reversed(self._toposort(ancestors)))
             if len(nohistory) > 0:
                 ui.debug('repackdata: %d nodes without history\n' %
                          len(nohistory))
             orderednodes.extend(sorted(nohistory))
 
@@ -542,19 +544,21 @@ class repacker(object):
                     neworderednodes.append(node)
                 orderednodes = neworderednodes
 
             # Compute delta bases for nodes:
             deltabases = {}
             nobase = set()
             referenced = set()
             nodes = set(nodes)
+            processprogress = ui.makeprogress(_("processing nodes"),
+                                              unit='nodes',
+                                              total=len(orderednodes))
             for i, node in enumerate(orderednodes):
-                ui.progress(_("processing nodes"), i, unit='nodes',
-                            total=len(orderednodes))
+                processprogress.update(i)
                 # Find delta base
                 # TODO: allow delta'ing against most recent descendant instead
                 # of immediate child
                 deltatuple = deltabases.get(node, None)
                 if deltatuple is None:
                     deltabase, chainlen = nullid, 0
                     deltabases[node] = (nullid, 0)
                     nobase.add(node)
@@ -615,31 +619,32 @@ class repacker(object):
 
                 # TODO: don't use the delta if it's larger than the fulltext
                 if constants.METAKEYSIZE not in meta:
                     meta[constants.METAKEYSIZE] = size
                 target.add(filename, node, deltabase, delta, meta)
 
                 entries[node].datarepacked = True
 
-            ui.progress(_("processing nodes"), None)
+            processprogress.complete()
             count += 1
 
-        ui.progress(_("repacking data"), None)
+        repackprogress.complete()
         target.close(ledger=ledger)
 
     def repackhistory(self, ledger, target):
         ui = self.repo.ui
 
         byfile = {}
         for entry in ledger.entries.itervalues():
             if entry.historysource:
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
 
-        count = 0
+        progress = ui.makeprogress(_("repacking history"), unit=self.unit,
+                                   total=len(byfile))
         for filename, entries in sorted(byfile.iteritems()):
             ancestors = {}
             nodes = list(node for node in entries)
 
             for node in nodes:
                 if node in ancestors:
                     continue
                 ancestors.update(self.history.getancestors(filename, node,
@@ -673,21 +678,19 @@ class repacker(object):
                 if copyfrom:
                     dontprocess.add(p1)
 
                 target.add(filename, node, p1, p2, linknode, copyfrom)
 
                 if node in entries:
                     entries[node].historyrepacked = True
 
-            count += 1
-            ui.progress(_("repacking history"), count, unit=self.unit,
-                        total=len(byfile))
+            progress.increment()
 
-        ui.progress(_("repacking history"), None)
+        progress.complete()
         target.close(ledger=ledger)
 
     def _toposort(self, ancestors):
         def parentfunc(node):
             p1, p2, linknode, copyfrom = ancestors[node]
             parents = []
             if p1 != nullid:
                 parents.append(p1)