Bug 1234913 - Clarify things that are hg-specific; make iteration lazy. r=chmanchester
author: Nick Alexander <nalexander@mozilla.com>
Wed, 24 Feb 2016 21:58:22 -0800
changeset 324767 205f1459dcb80cefc0241adc4ea6466581122701
parent 324766 1b7bcd5eefb3980e4dea5f75e32f614df80b76c1
child 324768 ac1fff91cf4d5a07e5defaa85010c709b487ca8a
push id: 1128
push user: jlund@mozilla.com
push date: Wed, 01 Jun 2016 01:31:59 +0000
treeherdermozilla-release@fe0d30de989d [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1234913 - Clarify things that are hg-specific; make iteration lazy. r=chmanchester MozReview-Commit-ID: LL6kO8QS5p9
--- a/python/mozbuild/mozbuild/artifacts.py
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -725,20 +725,24 @@ class Artifacts(object):
         if buildconfig.defines.get('XP_WIN', False):
             return 'win64' if target_64bit else 'win32'
         if buildconfig.defines.get('XP_MACOSX', False):
             # We only produce unified builds in automation, so the target_cpu
             # check is not relevant.
             return 'macosx64'
         raise Exception('Cannot determine default job for |mach artifact|!')
-    def _find_pushheads(self):
-        # Return an ordered dict associating revisions that are pushheads with
-        # trees they are known to be in (starting with the first tree they're
-        # known to be in).
+    def _find_hg_pushheads(self):
+        """Return an iterator of (hg_hash, {tree-set}) associating hg revision
+        hashes that might be pushheads with the trees they are known
+        to be in.
+        More recent hashes should come earlier in the list.  We always
+        have the pushlog locally, so we'll never yield an empty tree-set.
+        """
             output = subprocess.check_output([
                 self._hg, 'log',
                 '--template', '{node},{join(trees, ",")}\n',
                 '-r', 'last(pushhead({tree}) and ::., {num})'.format(
                     tree=self._tree or '', num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
@@ -746,35 +750,34 @@ class Artifacts(object):
             # We probably don't have the mozext extension installed.
             ret = subprocess.call([self._hg, 'showconfig', 'extensions.mozext'])
             if ret:
                 raise Exception('Could not find pushheads for recent revisions.\n\n'
                                 'You need to enable the "mozext" hg extension: '
                                 'see https://developer.mozilla.org/en-US/docs/Artifact_builds')
-        rev_trees = collections.OrderedDict()
+        count = 0
         for line in output.splitlines():
             if not line:
             rev_info = line.split(',')
             if len(rev_info) == 1:
                 # If pushhead() is true, it would seem "trees" should be
                 # non-empty, but this is defensive.
-            rev_trees[rev_info[0]] = tuple(rev_info[1:])
+            count += 1
+            yield rev_info[0], tuple(rev_info[1:])
-        if not rev_trees:
+        if not count:
             raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n\n'
                             'Try running |hg pushlogsync|;\n'
                             'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
-        return rev_trees
     def find_pushhead_artifacts(self, task_cache, tree_cache, job, pushhead, trees):
         known_trees = set(tree_cache.artifact_trees(pushhead, trees))
         if not known_trees:
             return None
         if not trees:
             # Accept artifacts from any tree where they are available.
             trees = list(known_trees)
@@ -849,68 +852,76 @@ class Artifacts(object):
     def install_from_url(self, url, distdir):
         self.log(logging.INFO, 'artifact',
             {'url': url},
             'Installing from {url}')
         with self._artifact_cache as artifact_cache:  # The with block handles persistence.
             filename = artifact_cache.fetch(url)
         return self.install_from_file(filename, distdir)
-    def _install_from_pushheads(self, rev_pushheads, distdir):
+    def _install_from_hg_pushheads(self, hg_pushheads, distdir):
+        """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
+        and tree-sets they are known to be in, trying to download and
+        install from each.
+        """
         urls = None
+        count = 0
        # with blocks handle persistence.
         with self._task_cache as task_cache, self._tree_cache as tree_cache:
-            for rev, trees in rev_pushheads.items():
+            for hg_hash, trees in hg_pushheads:
+                count += 1
                 self.log(logging.DEBUG, 'artifact',
-                         {'rev': rev},
-                         'Trying to find artifacts for pushhead {rev}.')
+                         {'hg_hash': hg_hash},
+                         'Trying to find artifacts for hg revision {hg_hash}.')
                 urls = self.find_pushhead_artifacts(task_cache, tree_cache,
-                                                    self._job, rev, trees)
+                                                    self._job, hg_hash, trees)
                 if urls:
                     for url in urls:
                         if self.install_from_url(url, distdir):
                             return 1
                     return 0
         self.log(logging.ERROR, 'artifact',
-                 {'count': len(rev_pushheads)},
+                 {'count': count},
                  'Tried {count} pushheads, no built artifacts found.')
         return 1
-    def install_from_recent(self, distdir):
-        rev_pushheads = self._find_pushheads()
-        return self._install_from_pushheads(rev_pushheads, distdir)
+    def install_from_hg_recent(self, distdir):
+        hg_pushheads = self._find_hg_pushheads()
+        return self._install_from_hg_pushheads(hg_pushheads, distdir)
-    def install_from_revset(self, revset, distdir):
+    def install_from_hg_revset(self, revset, distdir):
         revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
                                             '-r', revset]).strip()
         if len(revision.split('\n')) != 1:
             raise ValueError('hg revision specification must resolve to exactly one commit')
-        rev_pushheads = {revision: None}
+        hg_pushheads = [(revision, tuple())]
         self.log(logging.INFO, 'artifact',
                  {'revset': revset,
                   'revision': revision},
                  'Will only accept artifacts from a pushhead at {revision} '
                  '(matched revset "{revset}").')
-        return self._install_from_pushheads(rev_pushheads, distdir)
+        return self._install_from_hg_pushheads(hg_pushheads, distdir)
     def install_from(self, source, distdir):
         """Install artifacts from a ``source`` into the given ``distdir``.
         if source and os.path.isfile(source):
             return self.install_from_file(source, distdir)
         elif source and urlparse.urlparse(source).scheme:
             return self.install_from_url(source, distdir)
             if source is None and 'MOZ_ARTIFACT_REVISION' in os.environ:
                 source = os.environ['MOZ_ARTIFACT_REVISION']
             if source:
-                return self.install_from_revset(source, distdir)
+                return self.install_from_hg_revset(source, distdir)
-            return self.install_from_recent(distdir)
+            return self.install_from_hg_recent(distdir)
     def print_last(self):
         self.log(logging.INFO, 'artifact',
             'Printing last used artifact details.')