Bug 1532236 Improve logging and timeouts in partials generation r=mtabara
author Simon Fraser <sfraser@mozilla.com>
Mon, 04 Mar 2019 11:56:47 +0000
changeset 462221 817014bcd372fd598b5f44c62070069076544c42
parent 462220 4565c0c6aea8295b8f5df972fb12098752d43e19
child 462222 0ece01da444e26fd8ccb16028e95b4ac6ff03d94
push id 35644
push user aciure@mozilla.com
push date Mon, 04 Mar 2019 21:48:23 +0000
treeherder mozilla-central@a9bb4a23d407
reviewers mtabara
bugs 1532236
milestone 67.0a1
Bug 1532236 Improve logging and timeouts in partials generation r=mtabara Differential Revision: https://phabricator.services.mozilla.com/D21909
taskcluster/docker/funsize-update-generator/scripts/funsize.py
taskcluster/taskgraph/transforms/partials.py
--- a/taskcluster/docker/funsize-update-generator/scripts/funsize.py
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -151,36 +151,38 @@ async def download(url, dest, mode=None)
                 os.chmod(dest, mode)
 
 
 async def run_command(cmd, cwd='/', env=None, label=None, silent=False):
     if not env:
         env = dict()
     process = await asyncio.create_subprocess_shell(cmd,
                                                     stdout=asyncio.subprocess.PIPE,
-                                                    stderr=asyncio.subprocess.STDOUT,
+                                                    stderr=asyncio.subprocess.PIPE,
                                                     cwd=cwd, env=env)
-    stdout, stderr = await process.communicate()
+    if label:
+        label = "{}: ".format(label)
+    else:
+        label = ""
 
-    await process.wait()
+    async def read_output(stream, label, printcmd):
+        while True:
+            line = await stream.readline()
+            if line == b'':
+                break
+            printcmd("%s%s", label, line.decode('utf-8'))
 
     if silent:
-        return
-
-    if not stderr:
-        stderr = ""
-    if not stdout:
-        stdout = ""
-
-    label = "{}: ".format(label)
-
-    for line in stdout.splitlines():
-        log.debug("%s%s", label, line.decode('utf-8'))
-    for line in stderr.splitlines():
-        log.warn("%s%s", label, line.decode('utf-8'))
+        await process.wait()
+    else:
+        await asyncio.gather(
+            read_output(process.stdout, label, log.info),
+            read_output(process.stderr, label, log.warn)
+            )
+        await process.wait()
 
 
 async def unpack(work_env, mar, dest_dir):
     os.mkdir(dest_dir)
     log.debug("Unwrapping %s", mar)
     env = work_env.env
     if not is_lzma_compressed_mar(mar):
         env['MAR_OLD_FORMAT'] = '1'
--- a/taskcluster/taskgraph/transforms/partials.py
+++ b/taskcluster/taskgraph/transforms/partials.py
@@ -117,17 +117,17 @@ def make_task_description(config, jobs):
 
         level = config.params['level']
 
         worker = {
             'artifacts': _generate_task_output_files(dep_job, builds.keys(), locale),
             'implementation': 'docker-worker',
             'docker-image': {'in-tree': 'funsize-update-generator'},
             'os': 'linux',
-            'max-run-time': 3600,
+            'max-run-time': 600,
             'chain-of-trust': True,
             'taskcluster-proxy': True,
             'env': {
                 'SHA1_SIGNING_CERT': 'nightly_sha1',
                 'SHA384_SIGNING_CERT': 'nightly_sha384',
                 'DATADOG_API_SECRET':
                     'project/releng/gecko/build/level-{}/datadog-api-key'.format(level),
                 'EXTRA_PARAMS': '--arch={}'.format(architecture(attributes['build_platform'])),
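
The partials.py hunk lowers the docker-worker max-run-time for partial-update tasks from 3600 to 600 seconds, so a stuck generation is killed after ten minutes rather than an hour. If a finer-grained per-command timeout inside the script were also wanted, it could be layered over an async helper such as run_command with asyncio.wait_for; this is only an illustrative sketch, not part of the hunk shown here, and run_with_timeout is a hypothetical name:

    import asyncio


    async def run_with_timeout(coro, seconds):
        # Cancel the awaited helper if it does not finish within `seconds`.
        # Note: cancelling the coroutine does not kill the child process;
        # a real implementation would also need to terminate it.
        try:
            return await asyncio.wait_for(coro, timeout=seconds)
        except asyncio.TimeoutError:
            raise RuntimeError("command exceeded {}s timeout".format(seconds))

    # Hypothetical usage: cap one long-running command well under the
    # 600s task-level limit.
    # await run_with_timeout(run_command("some-long-command", label="unpack"), 120)
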