Bug 1617043 - Track the time spent in fetch-content and mach artifact toolchain. r=rstewart
author: Mike Hommey <mh+mozilla@glandium.org>
Sat, 07 Mar 2020 10:46:14 +0000
changeset 517470 e6ab708ef6f34365162e87851b1bb3f146200a2b
parent 517469 9ac9c80edf1f96c470d5f425885f9ad72909b41b
child 517471 0db5d88e7a66a713408c3cb29613ea28eb99b341
push id: 109413
push user: mh@glandium.org
push date: Sat, 07 Mar 2020 10:59:44 +0000
treeherder: autoland@e6ab708ef6f3 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: rstewart
bugs: 1617043
milestone: 75.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1617043 - Track the time spent in fetch-content and mach artifact toolchain. r=rstewart

Note: while we can use time.monotonic in fetch-content, we can't in mach artifact toolchain yet because it's still Python 2.

Differential Revision: https://phabricator.services.mozilla.com/D65690
python/mozbuild/mozbuild/artifact_commands.py
taskcluster/scripts/misc/fetch-content
--- a/python/mozbuild/mozbuild/artifact_commands.py
+++ b/python/mozbuild/mozbuild/artifact_commands.py
@@ -185,21 +185,23 @@ class PackageFrontend(MachCommandBase):
         from mozbuild.artifacts import ArtifactCache
         from mozbuild.action.tooltool import (
             FileRecord,
             open_manifest,
             unpack_file,
         )
         import redo
         import requests
+        import time
 
         from taskgraph.util.taskcluster import (
             get_artifact_url,
         )
 
+        start = time.time()
         self._set_log_level(verbose)
         # Normally, we'd use self.log_manager.enable_unstructured(),
         # but that enables all logging, while we only really want tooltool's
         # and it also makes structured log output twice.
         # So we manually do what it does, and limit that to the tooltool
         # logger.
         if self.log_manager.terminal_handler:
             logging.getLogger('mozbuild.action.tooltool').addHandler(
@@ -425,9 +427,25 @@ class PackageFrontend(MachCommandBase):
             if files:
                 return 1
 
         if artifacts:
             ensureParentDir(artifact_manifest)
             with open(artifact_manifest, 'w') as fh:
                 json.dump(artifacts, fh, indent=4, sort_keys=True)
 
+        if 'MOZ_AUTOMATION' in os.environ:
+            end = time.time()
+
+            perfherder_data = {
+                'framework': {'name': 'build_metrics'},
+                'suites': [{
+                    'name': 'mach_artifact_toolchain',
+                    'value': end - start,
+                    'lowerIsBetter': True,
+                    'shouldAlert': False,
+                    'subtests': [],
+                }],
+            }
+            self.log(logging.INFO, 'perfherder', {'data': json.dumps(perfherder_data)},
+                     'PERFHERDER_DATA: {data}')
+
         return 0
--- a/taskcluster/scripts/misc/fetch-content
+++ b/taskcluster/scripts/misc/fetch-content
@@ -579,16 +579,17 @@ def command_static_url(args):
 def api(root_url, service, version, path):
     # taskcluster-lib-urls is not available when this script runs, so
     # simulate its behavior:
     return '{root_url}/api/{service}/{version}/{path}'.format(
             root_url=root_url, service=service, version=version, path=path)
 
 
 def command_task_artifacts(args):
+    start = time.monotonic()
     fetches = json.loads(os.environ['MOZ_FETCHES'])
     downloads = []
     for fetch in fetches:
         extdir = pathlib.Path(args.dest)
         if 'dest' in fetch:
             extdir = extdir.joinpath(fetch['dest'])
         extdir.mkdir(parents=True, exist_ok=True)
         root_url = os.environ['TASKCLUSTER_ROOT_URL']
@@ -599,16 +600,29 @@ def command_task_artifacts(args):
         else:
             url = ('{proxy_url}/api/queue/v1/task/{task}/artifacts/{artifact}').format(
                     proxy_url=os.environ['TASKCLUSTER_PROXY_URL'],
                     task=fetch['task'],
                     artifact=fetch['artifact'])
         downloads.append((url, extdir, fetch['extract']))
 
     fetch_urls(downloads)
+    end = time.monotonic()
+
+    perfherder_data = {
+        'framework': {'name': 'build_metrics'},
+        'suites': [{
+            'name': 'fetch_content',
+            'value': end - start,
+            'lowerIsBetter': True,
+            'shouldAlert': False,
+            'subtests': [],
+        }],
+    }
+    print('PERFHERDER_DATA: {}'.format(json.dumps(perfherder_data)), file=sys.stderr)
 
 
 def main():
     parser = argparse.ArgumentParser()
     subparsers = parser.add_subparsers(title='sub commands')
 
     git_checkout = subparsers.add_parser(
         'git-checkout-archive',