Backed out 5 changesets (bug 1498640, bug 1436037, bug 1497660, bug 1505579) for mass failures on a CLOSED TREE
author Andreea Pavel <apavel@mozilla.com>
Thu, 15 Nov 2018 07:53:41 +0200
changeset 446473 09be3daa07878cd46f6b30281bf159038ca0c2fe
parent 446472 3a0b29de3cfb7c1abff8406f53aefb052c41b72c
child 446474 6eeaeba79a5281d47a71ce26dd1f3540104cd974
push id 35041
push user aiakab@mozilla.com
push date Thu, 15 Nov 2018 09:52:43 +0000
treeherder mozilla-central@48720735b142
bugs 1498640, 1436037, 1497660, 1505579
milestone 65.0a1
backs out 914a7a899dd1418ad957e8162eb93fa63df37507
e072757bf6918ce50287d6556bbdc57663c88181
b0805a8cf6eb120525408dd486ca2c362afb2db3
aa32ba9e6df3e20aab896a63538c8e8a432b4988
ce441b8a784c3c08fc575f5ff1632a303a244fa3
Backed out 5 changesets (bug 1498640, bug 1436037, bug 1497660, bug 1505579) for mass failures on a CLOSED TREE

Backed out changeset 914a7a899dd1 (bug 1436037)
Backed out changeset e072757bf691 (bug 1436037)
Backed out changeset b0805a8cf6eb (bug 1505579)
Backed out changeset aa32ba9e6df3 (bug 1498640)
Backed out changeset ce441b8a784c (bug 1497660)
python/mozlint/test/python.ini
taskcluster/ci/source-test/python.yml
taskcluster/docker/image_builder/VERSION
taskcluster/docs/docker-images.rst
taskcluster/scripts/run-task
taskcluster/taskgraph/transforms/docker_image.py
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/mach.py
taskcluster/taskgraph/transforms/job/python_test.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/util/docker.py
testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_runner.py
testing/mozbase/manifestparser/tests/manifest.ini
testing/mozbase/mozcrash/tests/manifest.ini
testing/mozbase/mozdebug/tests/manifest.ini
testing/mozbase/mozdevice/tests/manifest.ini
testing/mozbase/mozfile/tests/manifest.ini
testing/mozbase/mozhttpd/tests/manifest.ini
testing/mozbase/mozinfo/tests/manifest.ini
testing/mozbase/mozinstall/tests/manifest.ini
testing/mozbase/mozlog/tests/manifest.ini
testing/mozbase/moznetwork/tests/manifest.ini
testing/mozbase/mozprocess/tests/manifest.ini
testing/mozbase/mozprofile/tests/manifest.ini
testing/mozbase/mozrunner/tests/manifest.ini
testing/mozbase/mozsystemmonitor/tests/manifest.ini
testing/mozbase/moztest/tests/manifest.ini
testing/mozbase/mozversion/tests/manifest.ini
testing/mozharness/external_tools/robustcheckout.py
tools/lint/test/python.ini
--- a/python/mozlint/test/python.ini
+++ b/python/mozlint/test/python.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozlint
+subsuite = mozlint, os == "linux"
 skip-if = python == 3
 
 [test_cli.py]
 [test_editor.py]
 [test_formatters.py]
 [test_parser.py]
 [test_pathutils.py]
 [test_result.py]
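
The subsuite = mozlint, os == "linux" value being restored in this manifest (and in the mozbase manifests further down) uses manifestparser's conditional syntax: the text after the first comma is a filter expression evaluated against the run-time info, and the subsuite applies only where it matches. A rough sketch of that split-and-evaluate behaviour (illustrative only, not the actual manifestparser code; the names here are made up):

# Illustrative sketch only -- not manifestparser's implementation.
def split_conditional_value(value):
    """Split 'mozlint, os == "linux"' into (name, condition)."""
    if ',' in value:
        name, condition = value.split(',', 1)
        return name.strip(), condition.strip()
    return value.strip(), None

def subsuite_applies(value, mozinfo):
    name, condition = split_conditional_value(value)
    if condition is None:
        return name, True
    # A real harness parses the expression properly; eval against a bare
    # namespace is only a stand-in for demonstration.
    return name, bool(eval(condition, {'__builtins__': {}}, dict(mozinfo)))

print(subsuite_applies('mozlint, os == "linux"', {'os': 'linux'}))  # ('mozlint', True)
print(subsuite_applies('mozlint, os == "linux"', {'os': 'win'}))    # ('mozlint', False)
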
--- a/taskcluster/ci/source-test/python.yml
+++ b/taskcluster/ci/source-test/python.yml
@@ -1,22 +1,19 @@
 job-defaults:
     platform: linux64/opt
     always-target: true
     worker-type:
         by-platform:
             linux64.*: aws-provisioner-v1/gecko-t-linux-xlarge
-            windows10-64.*: aws-provisioner-v1/gecko-t-win10-64
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "lint"}
                 max-run-time: 3600
-            default:
-                max-run-time: 3600
     treeherder:
         kind: test
         tier: 2
     run:
         using: mach
     when:
         files-changed:
             - 'config/mozunit/**'
@@ -32,19 +29,16 @@ taskgraph-tests:
         subsuite: taskgraph
     when:
         files-changed:
             - 'taskcluster/**/*.py'
             - 'python/mach/**/*.py'
 
 marionette-harness:
     description: testing/marionette/harness unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2]
     treeherder:
         symbol: mnh
     run:
         using: python-test
         subsuite: marionette-harness
     when:
         files-changed:
@@ -90,19 +84,16 @@ mochitest-harness:
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
             - 'testing/profiles/**'
 
 mozbase:
     description: testing/mozbase unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2, 3]
     treeherder:
         symbol: mb
     run:
         using: python-test
         subsuite: mozbase
     when:
         files-changed:
@@ -119,19 +110,16 @@ mozharness:
             cd /builds/worker/checkouts/gecko/testing/mozharness &&
             /usr/local/bin/tox -e py27-hg4.3
     when:
         files-changed:
             - 'testing/mozharness/**'
 
 mozlint:
     description: python/mozlint unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2]
     treeherder:
         symbol: ml
     run:
         using: python-test
         subsuite: mozlint
     when:
         files-changed:
@@ -147,19 +135,16 @@ mozrelease:
         using: python-test
         subsuite: mozrelease
     when:
         files-changed:
             - 'python/mozrelease/**'
 
 mozterm:
     description: python/mozterm unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2, 3]
     treeherder:
         symbol: term
     run:
         using: python-test
         subsuite: mozterm
     when:
         files-changed:
@@ -174,19 +159,16 @@ mozversioncontrol:
         using: python-test
         subsuite: mozversioncontrol
     when:
         files-changed:
             - 'python/mozversioncontrol/**'
 
 raptor:
     description: testing/raptor unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2]
     treeherder:
         symbol: rap
     run:
         using: python-test
         subsuite: raptor
     when:
         files-changed:
@@ -228,19 +210,16 @@ reftest-harness:
             - 'layout/tools/reftest/**'
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
 
 tryselect:
     description: tools/tryselect unit tests
-    platform:
-        - linux64/opt
-        - windows10-64/opt
     python-version: [2]
     treeherder:
         symbol: try
     run:
         using: python-test
         subsuite: try
     when:
         files-changed:
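
The by-platform blocks removed from python.yml are taskgraph's keyed-by mechanism: each key is treated as a regular expression matched against the platform string, with a default entry as the fallback. A minimal sketch of that resolution (a hypothetical helper, not the real taskgraph implementation):

import re

# Hypothetical stand-in for taskgraph's keyed-by resolution.
def resolve_by_platform(keyed, platform):
    """Return the value whose regex key matches platform, else the default."""
    for key, value in keyed.items():
        if key != 'default' and re.match(key, platform):
            return value
    if 'default' in keyed:
        return keyed['default']
    raise KeyError('no entry matches platform %r' % platform)

worker_type = {'linux64.*': 'aws-provisioner-v1/gecko-t-linux-xlarge'}
print(resolve_by_platform(worker_type, 'linux64/opt'))
# aws-provisioner-v1/gecko-t-linux-xlarge
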
deleted file mode 100644
--- a/taskcluster/docker/image_builder/VERSION
+++ /dev/null
@@ -1,1 +0,0 @@
-3.0.0
--- a/taskcluster/docs/docker-images.rst
+++ b/taskcluster/docs/docker-images.rst
@@ -89,39 +89,40 @@ following namespaces.
 
 Not only can images be browsed by the pushdate and context hash, but the 'latest' namespace
 is meant to view the latest built image.  This functions similarly to the 'latest' tag
 for docker images that are pushed to a registry.
 
 Docker Registry Images (prebuilt)
 :::::::::::::::::::::::::::::::::
 
-***Warning: Registry images are only used for ``decision`` and
-``image_builder`` images.***
+***Warning: Use of prebuilt images should only be used for base images (those that other images
+will inherit from), or private images that must be stored in a private docker registry account.***
 
 These are images that are intended to be pushed to a docker registry and used
 by specifying the docker image name in task definitions.  They are generally
 referred to by a ``<repo>@<repodigest>`` string:
 
 Example:
 
 .. code-block:: none
 
     image: taskcluster/decision:0.1.10@sha256:c5451ee6c655b3d97d4baa3b0e29a5115f23e0991d4f7f36d2a8f793076d6854
 
-Such images must always be referred to with both a version and a repo digest.
-For the decision image, the repo digest is stored in the ``HASH`` file in the
-image directory and used to refer to the image as above.  The version for both
-images is in ``VERSION``.
+Each image has a repo digest and a version. The repo digest is stored in the
+``HASH`` file in the image directory and used to refer to the image as above.
+The version is in ``VERSION``.
 
-The version file serves to help users identify which image is being used, and makes old
-versions easy to discover in the registry.
+The version file only serves to provide convenient names, such that old
+versions are easy to discover in the registry (and ensuring old versions aren't
+deleted by garbage-collection).
 
-The file ``taskcluster/docker/REGISTRY`` specifies the image registry to which
-the completed image should be uploaded.
+Each image directory also has a ``REGISTRY``, defaulting to the ``REGISTRY`` in
+the ``taskcluster/docker`` directory, and specifying the image registry to
+which the completed image should be uploaded.
 
 Docker Hashes and Digests
 .........................
 
 There are several hashes involved in this process:
 
  * Image Hash -- the long version of the image ID; can be seen with
    ``docker images --no-trunc`` or in the ``Id`` field in ``docker inspect``.
@@ -158,29 +159,23 @@ It's a good idea to bump the ``VERSION``
 For task images, test your image locally or push to try. This is all that is
 required.
 
 Docker Registry Images
 ::::::::::::::::::::::
 
 Landing docker registry images takes a little more care.
 
-Begin by bumping the ``VERSION``.  Once the new version of the image has been
-built and tested locally, push it to the docker registry and make note of the
-resulting repo digest.  Put this value in the ``HASH`` file for the
-``decision`` image and in ``taskcluster/taskgraph/transforms/docker_image.py``
-for the ``image_builder`` image.
+Once a new version of the image has been built and tested locally, push it to
+the docker registry and make note of the resulting repo digest.  Put this value
+in the ``HASH`` file, and update any references to the image in the code or
+task definitions.
 
 The change is now safe to use in Try pushes.
 
-Note that ``image_builder`` change can be tested directly in try pushes without
-using a registry, as the in-registry ``image_builder`` image is used to build a
-task image which is then used to build other images.  It is referenced by hash
-in ``taskcluster/taskgraph/transforms/docker_image.py``.
-
 Special Dockerfile Syntax
 -------------------------
 
 Dockerfile syntax has been extended to allow *any* file from the
 source checkout to be added to the image build *context*. (Traditionally
 you can only ``ADD`` files from the same directory as the Dockerfile.)
 
 Simply add the following syntax as a comment in a Dockerfile::
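
The registry-image documentation above refers to prebuilt images as <repo>@<repodigest>, with the digest kept in a HASH file and a human-friendly tag in VERSION. A small sketch of assembling such a reference from those two files (illustrative only; the directory layout and repo name are assumptions):

import os

# Illustrative: build a reference like taskcluster/decision:0.1.10@sha256:...
# from an image directory's VERSION and HASH files (assumed layout).
def registry_reference(image_dir, repo='taskcluster/decision'):
    with open(os.path.join(image_dir, 'VERSION')) as f:
        version = f.read().strip()
    with open(os.path.join(image_dir, 'HASH')) as f:
        digest = f.read().strip()  # e.g. "sha256:c5451ee6c655..."
    return '{}:{}@{}'.format(repo, version, digest)
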
--- a/taskcluster/scripts/run-task
+++ b/taskcluster/scripts/run-task
@@ -35,18 +35,18 @@ import subprocess
 
 import urllib.error
 import urllib.request
 
 
 FINGERPRINT_URL = 'http://taskcluster/secrets/v1/secret/project/taskcluster/gecko/hgfingerprint'
 FALLBACK_FINGERPRINT = {
     'fingerprints':
-        "sha256:17:38:aa:92:0b:84:3e:aa:8e:52:52:e9:4c:2f:98:a9:0e:bf:6c:3e:e9"
-        ":15:ff:0a:29:80:f7:06:02:5b:e8:48"}
+        "sha256:8e:ad:f7:6a:eb:44:06:15:ed:f3:e4:69:a6:64:60:37:2d:ff:98:88:37"
+        ":bf:d7:b8:40:84:01:48:9c:26:ce:d9"}
 
 
 CACHE_UID_GID_MISMATCH = '''
 There is a UID/GID mismatch on the cache. This likely means:
 
 a) different tasks are running as a different user/group
 b) different Docker images have different UID/GID for the same user/group
 
@@ -443,24 +443,23 @@ def vcs_checkout(source_repo, dest, stor
             '--config', 'hostsecurity.hg.mozilla.org:fingerprints=%s' % hgmo_fingerprint,
         ])
 
     if base_repo:
         args.extend(['--upstream', base_repo])
     if sparse_profile:
         args.extend(['--sparseprofile', sparse_profile])
 
-    dest = os.path.abspath(dest)
     args.extend([
         revision_flag, revision_value,
         source_repo, dest,
     ])
 
     res = run_and_prefix_output(b'vcs', args,
-                                extra_env={'PYTHONUNBUFFERED': '1'})
+                                extra_env={b'PYTHONUNBUFFERED': b'1'})
     if res:
         sys.exit(res)
 
     # Update the current revision hash and ensure that it is well formed.
     revision = subprocess.check_output(
         [hg_bin, 'log',
          '--rev', '.',
          '--template', '{node}'],
@@ -734,19 +733,16 @@ def main(args):
             branch=os.environ.get('COMM_HEAD_REF'))
 
     elif not os.environ.get('COMM_HEAD_REV') and \
             os.environ.get('COMM_HEAD_REF'):
         print('task should be defined in terms of non-symbolic revision')
         return 1
 
     try:
-        if 'GECKO_PATH' in os.environ:
-            os.environ['GECKO_PATH'] = os.path.abspath(os.environ['GECKO_PATH'])
-
         if 'MOZ_FETCHES' in os.environ:
             fetch_artifacts()
 
         return run_and_prefix_output(b'task', task_args)
     finally:
         fetches_dir = os.environ.get('MOZ_FETCHES_DIR')
         if fetches_dir and os.path.isdir(fetches_dir):
             print_line(b'fetches', b'removing %s\n' % fetches_dir.encode('utf-8'))
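
run-task tags every line of child-process output with its source (vcs, task, fetches) via run_and_prefix_output, passing extra environment variables such as PYTHONUNBUFFERED through to the child. A stripped-down sketch of that pattern (names and details are assumptions, not the script's actual implementation):

import os
import subprocess
import sys

# Simplified sketch of prefixing a child process's output, in the spirit of
# run-task's run_and_prefix_output; details here are assumptions.
def run_and_prefix(prefix, args, extra_env=None):
    env = dict(os.environ)
    env.update(extra_env or {})
    proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    for line in proc.stdout:
        sys.stdout.write('[%s] %s' % (prefix, line))
    return proc.wait()

# run_and_prefix('vcs', ['hg', 'version'], extra_env={'PYTHONUNBUFFERED': '1'})
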
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -184,22 +184,21 @@ def fill_template(config, tasks):
         # Retry for 'funsize-update-generator' if exit status code is -1
         if image_name in ['funsize-update-generator']:
             taskdesc['worker']['retry-exit-status'] = [-1]
 
         worker = taskdesc['worker']
 
         # We use the in-tree image_builder image to build docker images, but
         # that can't be used to build the image_builder image itself,
-        # obviously. So we fall back to an image on docker hub, identified
-        # by hash.  After the image-builder image is updated, it's best to push
-        # and update this hash as well, to keep image-builder builds up to date.
+        # obviously. So we fall back to the last snapshot of the image that
+        # was uploaded to docker hub.
         if image_name == 'image_builder':
-            hash = 'sha256:c6622fd3e5794842ad83d129850330b26e6ba671e39c58ee288a616a3a1c4c73'
-            worker['docker-image'] = 'taskcluster/image_builder@' + hash
+            worker['docker-image'] = 'taskcluster/image_builder@sha256:' + \
+                '24ce54a1602453bc93515aecd9d4ad25a22115fbc4b209ddb5541377e9a37315'
             # Keep in sync with the Dockerfile used to generate the
             # docker image whose digest is referenced above.
             worker['volumes'] = [
                 '/builds/worker/checkouts',
                 '/builds/worker/workspace',
             ]
             cache_name = 'imagebuilder-v1'
         else:
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -59,55 +59,45 @@ def generic_worker_add_artifacts(config,
 
 
 def support_vcs_checkout(config, job, taskdesc, sparse=False):
     """Update a job/task with parameters to enable a VCS checkout.
 
     This can only be used with ``run-task`` tasks, as the cache name is
     reserved for ``run-task`` tasks.
     """
-    is_win = job['worker']['os'] == 'windows'
+    level = config.params['level']
 
-    if is_win:
-        checkoutdir = './build'
-        geckodir = '{}/src'.format(checkoutdir)
-        hgstore = 'y:/hg-shared'
-    else:
-        checkoutdir = '{workdir}/checkouts'.format(**job['run'])
-        geckodir = '{}/gecko'.format(checkoutdir)
-        hgstore = '{}/hg-store'.format(checkoutdir)
-
-    level = config.params['level']
-    # native-engine and generic-worker do not support caches (yet), so we just
-    # do a full clone every time :(
+    # native-engine does not support caches (yet), so we just do a full clone
+    # every time :(
     if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
         name = 'level-%s-checkouts' % level
 
         # comm-central checkouts need their own cache, because clobber won't
         # remove the comm-central checkout
         if job['run'].get('comm-checkout', False):
             name += '-comm'
 
         # Sparse checkouts need their own cache because they can interfere
         # with clients that aren't sparse aware.
         if sparse:
             name += '-sparse'
 
         taskdesc['worker'].setdefault('caches', []).append({
             'type': 'persistent',
             'name': name,
-            'mount-point': checkoutdir,
+            'mount-point': '{workdir}/checkouts'.format(**job['run']),
         })
 
     taskdesc['worker'].setdefault('env', {}).update({
         'GECKO_BASE_REPOSITORY': config.params['base_repository'],
         'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
         'GECKO_HEAD_REV': config.params['head_rev'],
-        'GECKO_PATH': geckodir,
-        'HG_STORE_PATH': hgstore,
+        'GECKO_PATH': '{workdir}/checkouts/gecko'.format(**job['run']),
+        'HG_STORE_PATH': '{workdir}/checkouts/hg-store'.format(**job['run']),
     })
 
     if 'comm_base_repository' in config.params:
         taskdesc['worker']['env'].update({
             'COMM_BASE_REPOSITORY': config.params['comm_base_repository'],
             'COMM_HEAD_REPOSITORY': config.params['comm_head_repository'],
             'COMM_HEAD_REV': config.params['comm_head_rev'],
         })
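
support_vcs_checkout, as restored here, derives the checkout cache name from the SCM level and then appends suffixes so comm-central and sparse checkouts never share a cache with plain ones. Schematically (a sketch of that naming logic, not the function itself):

# Sketch of the cache-name logic in support_vcs_checkout (illustrative).
def checkout_cache_name(level, comm_checkout=False, sparse=False):
    name = 'level-%s-checkouts' % level
    if comm_checkout:
        name += '-comm'    # comm-central checkouts get their own cache
    if sparse:
        name += '-sparse'  # sparse checkouts can confuse non-sparse clients
    return name

print(checkout_cache_name(3, sparse=True))  # level-3-checkouts-sparse
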
--- a/taskcluster/taskgraph/transforms/job/mach.py
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -25,24 +25,18 @@ mach_schema = Schema({
     # gecko checkout
     Required('comm-checkout'): bool,
 
     # Base work directory used to set up the task.
     Required('workdir'): basestring,
 })
 
 
-defaults = {
-    'comm-checkout': False,
-}
-
-
-@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
-@run_job_using("native-engine", "mach", schema=mach_schema, defaults=defaults)
-@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
-def configure_mach(config, job, taskdesc):
+@run_job_using("docker-worker", "mach", schema=mach_schema, defaults={'comm-checkout': False})
+@run_job_using("native-engine", "mach", schema=mach_schema, defaults={'comm-checkout': False})
+def docker_worker_mach(config, job, taskdesc):
     run = job['run']
 
     # defer to the run_task implementation
-    run['command'] = 'cd $GECKO_PATH && ./mach {mach}'.format(**run)
+    run['command'] = 'cd {workdir}/checkouts/gecko && ./mach {mach}'.format(**run)
     run['using'] = 'run-task'
     del run['mach']
     configure_taskdesc_for_run(config, job, taskdesc, job['worker']['implementation'])
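
run_job_using registers one handler per (worker implementation, run-using) pair, and the mach handler above simply rewrites the job into a run-task job before re-dispatching it. A bare-bones sketch of that registry pattern (loosely modelled on the decorator, not the real taskgraph code):

# Bare-bones sketch of a (worker implementation, run-using) handler registry;
# not the real taskgraph decorator, which also handles schemas and defaults.
_handlers = {}

def run_job_using(worker_implementation, run_using):
    def wrap(func):
        _handlers[(worker_implementation, run_using)] = func
        return func
    return wrap

@run_job_using('docker-worker', 'mach')
def configure_mach(config, job, taskdesc):
    job['run']['using'] = 'run-task'  # defer to the run-task handler

def configure_taskdesc(config, job, taskdesc):
    key = (job['worker']['implementation'], job['run']['using'])
    _handlers[key](config, job, taskdesc)
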
--- a/taskcluster/taskgraph/transforms/job/python_test.py
+++ b/taskcluster/taskgraph/transforms/job/python_test.py
@@ -20,26 +20,23 @@ python_test_schema = Schema({
     # The subsuite to run
     Required('subsuite'): basestring,
 
     # Base work directory used to set up the task.
     Required('workdir'): basestring,
 })
 
 
-defaults = {
-    'python-version': 2,
-    'subsuite': 'default',
-}
-
-
-@run_job_using('docker-worker', 'python-test', schema=python_test_schema, defaults=defaults)
-@run_job_using('generic-worker', 'python-test', schema=python_test_schema, defaults=defaults)
-def configure_python_test(config, job, taskdesc):
+@run_job_using(
+    'docker-worker',
+    'python-test',
+    schema=python_test_schema,
+    defaults={'python-version': 2, 'subsuite': 'default'})
+def docker_worker_python_test(config, job, taskdesc):
     run = job['run']
 
     # defer to the run_task implementation
-    run['command'] = 'cd $GECKO_PATH && ' \
+    run['command'] = 'cd {workdir}/checkouts/gecko && ' \
         './mach python-test --python {python-version} --subsuite {subsuite}'.format(**run)
     run['using'] = 'run-task'
     del run['python-version']
     del run['subsuite']
     configure_taskdesc_for_run(config, job, taskdesc, job['worker']['implementation'])
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -35,22 +35,22 @@ run_task_schema = Schema({
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 
     # Base work directory used to set up the task.
     Required('workdir'): basestring,
 })
 
 
-def common_setup(config, job, taskdesc, command, geckodir):
+def common_setup(config, job, taskdesc, command, checkoutdir):
     run = job['run']
     if run['checkout']:
         support_vcs_checkout(config, job, taskdesc,
                              sparse=bool(run['sparse-profile']))
-        command.append('--vcs-checkout={}'.format(geckodir))
+        command.append('--vcs-checkout={}/gecko'.format(checkoutdir))
 
     if run['sparse-profile']:
         command.append('--sparse-profile=build/sparse-profiles/%s' %
                        run['sparse-profile'])
 
     taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
 
 
@@ -67,18 +67,17 @@ def run_task_url(config):
                 config.params['head_repository'], config.params['head_rev'])
 
 
 @run_job_using("docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults)
 def docker_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     command = ['/builds/worker/bin/run-task']
-    common_setup(config, job, taskdesc, command,
-                 geckodir='{workdir}/checkouts/gecko'.format(**run))
+    common_setup(config, job, taskdesc, command, checkoutdir='{workdir}/checkouts'.format(**run))
 
     if run.get('cache-dotcache'):
         worker['caches'].append({
             'type': 'persistent',
             'name': 'level-{level}-{project}-dotcache'.format(**config.params),
             'mount-point': '{workdir}/.cache'.format(**run),
             'skip-untrusted': True,
         })
@@ -94,18 +93,17 @@ def docker_worker_run_task(config, job, 
     worker['command'] = command
 
 
 @run_job_using("native-engine", "run-task", schema=run_task_schema, defaults=worker_defaults)
 def native_engine_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     command = ['./run-task']
-    common_setup(config, job, taskdesc, command,
-                 geckodir='{workdir}/checkouts/gecko'.format(**run))
+    common_setup(config, job, taskdesc, command, checkoutdir='{workdir}/checkouts'.format(**run))
 
     worker['context'] = run_task_url(config)
 
     if run.get('cache-dotcache'):
         raise Exception("No cache support on native-worker; can't use cache-dotcache")
 
     run_command = run['command']
     if isinstance(run_command, basestring):
@@ -114,46 +112,30 @@ def native_engine_run_task(config, job, 
     command.extend(run_command)
     worker['command'] = command
 
 
 @run_job_using("generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults)
 def generic_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
-    is_win = worker['os'] == 'windows'
-
-    if is_win:
-        command = ['C:/mozilla-build/python3/python3.exe', 'run-task']
-        geckodir = './build/src'
-    else:
-        command = ['./run-task']
-        geckodir = '{workdir}/checkouts/gecko'.format(**run)
-
-    common_setup(config, job, taskdesc, command, geckodir=geckodir)
+    command = ['./run-task']
+    common_setup(config, job, taskdesc, command, checkoutdir='{workdir}/checkouts'.format(**run))
 
     worker.setdefault('mounts', [])
     if run.get('cache-dotcache'):
         worker['mounts'].append({
             'cache-name': 'level-{level}-{project}-dotcache'.format(**config.params),
             'directory': '{workdir}/.cache'.format(**run),
         })
     worker['mounts'].append({
         'content': {
             'url': run_task_url(config),
         },
         'file': './run-task',
     })
 
     run_command = run['command']
     if isinstance(run_command, basestring):
-        run_command = ['bash', '-cx', '"{}"'.format(run_command)]
-
+        run_command = ['bash', '-cx', run_command]
     command.append('--')
     command.extend(run_command)
-
-    if is_win:
-        worker['command'] = [' '.join(command)]
-    else:
-        worker['command'] = [
-            ['chmod', '+x', 'run-task'],
-            command,
-        ]
+    worker['command'] = [['chmod', '+x', 'run-task'], command]
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -1864,18 +1864,18 @@ def check_caches_are_volumes(task):
     volumes = set(task['worker']['volumes'])
     paths = set(c['mount-point'] for c in task['worker'].get('caches', []))
     missing = paths - volumes
 
     if not missing:
         return
 
     raise Exception('task %s (image %s) has caches that are not declared as '
-                    'Docker volumes: %s '
-                    '(have you added them as VOLUMEs in the Dockerfile?)'
+                    'Docker volumes: %s'
+                    'Have you added them as VOLUMEs in the Dockerfile?'
                     % (task['label'], task['worker']['docker-image'],
                        ', '.join(sorted(missing))))
 
 
 @transforms.add
 def check_run_task_caches(config, tasks):
     """Audit for caches requiring run-task.
 
--- a/taskcluster/taskgraph/util/docker.py
+++ b/taskcluster/taskgraph/util/docker.py
@@ -98,18 +98,16 @@ def post_to_docker(tar, api_path, **kwar
                     if status != data['status']:
                         sys.stderr.write('{}: {}\n'.format(data['id'], data['status']))
                         status_line[data['id']] = data['status']
             else:
                 status_line = {}
                 sys.stderr.write('{}\n'.format(data['status']))
         elif 'stream' in data:
             sys.stderr.write(data['stream'])
-        elif 'aux' in data:
-            sys.stderr.write(repr(data['aux']))
         elif 'error' in data:
             sys.stderr.write('{}\n'.format(data['error']))
             # Sadly, docker doesn't give more than a plain string for errors,
             # so the best we can do to propagate the error code from the command
             # that failed is to parse the error message...
             errcode = 1
             m = re.search(r'returned a non-zero code: (\d+)', data['error'])
             if m:
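
Because the docker daemon reports failures only as a plain error string, post_to_docker recovers the failing command's exit code by parsing that string; in isolation, the parsing visible in the hunk above amounts to:

import re

# Mirrors the error-string parsing shown in the diff.
def errcode_from_docker_error(message):
    m = re.search(r'returned a non-zero code: (\d+)', message)
    return int(m.group(1)) if m else 1

print(errcode_from_docker_error('The command ... returned a non-zero code: 2'))  # 2
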
--- a/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_runner.py
+++ b/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_runner.py
@@ -1,16 +1,14 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
-import os
-
 import manifestparser
 import mozunit
 import pytest
 
 from mock import Mock, patch, mock_open, sentinel, DEFAULT
 
 from marionette_harness.runtests import MarionetteTestRunner
 
@@ -293,17 +291,17 @@ def test_add_test_directory(runner):
     tests = list(dir_contents[0][2] + dir_contents[1][2])
     assert len(runner.tests) == 0
     # Need to use side effect to make isdir return True for test_dir and False for tests
     with patch('os.path.isdir', side_effect=[True] + [False for t in tests]) as isdir:
         with patch('os.walk', return_value=dir_contents) as walk:
             runner.add_test(test_dir)
     assert isdir.called and walk.called
     for test in runner.tests:
-        assert os.path.normpath(test_dir) in test['filepath']
+        assert test_dir in test['filepath']
     assert len(runner.tests) == 2
 
 
 @pytest.mark.parametrize("test_files_exist", [True, False])
 def test_add_test_manifest(mock_runner, manifest_with_tests, monkeypatch, test_files_exist):
     monkeypatch.setattr('marionette_harness.runner.base.TestManifest',
                         manifest_with_tests.manifest_class)
     mock_runner.marionette = mock_runner.driverclass()
--- a/testing/mozbase/manifestparser/tests/manifest.ini
+++ b/testing/mozbase/manifestparser/tests/manifest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test_expressionparser.py]
 [test_manifestparser.py]
 [test_testmanifest.py]
 [test_read_ini.py]
 [test_convert_directory.py]
 [test_filters.py]
 [test_chunking.py]
--- a/testing/mozbase/mozcrash/tests/manifest.ini
+++ b/testing/mozbase/mozcrash/tests/manifest.ini
@@ -1,8 +1,8 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test_basic.py]
 [test_java_exception.py]
 [test_save_path.py]
 [test_stackwalk.py]
 [test_symbols_path.py]
--- a/testing/mozbase/mozdebug/tests/manifest.ini
+++ b/testing/mozbase/mozdebug/tests/manifest.ini
@@ -1,3 +1,3 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test.py]
--- a/testing/mozbase/mozdevice/tests/manifest.ini
+++ b/testing/mozbase/mozdevice/tests/manifest.ini
@@ -1,7 +1,7 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test_socket_connection.py]
 [test_is_app_installed.py]
 [test_chown.py]
 [test_escape_command_line.py]
--- a/testing/mozbase/mozfile/tests/manifest.ini
+++ b/testing/mozbase/mozfile/tests/manifest.ini
@@ -1,9 +1,9 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test_extract.py]
 [test_load.py]
 [test_move_remove.py]
 [test_tempdir.py]
 [test_tempfile.py]
 [test_tree.py]
 [test_url.py]
--- a/testing/mozbase/mozhttpd/tests/manifest.ini
+++ b/testing/mozbase/mozhttpd/tests/manifest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [api.py]
 skip-if = python == 3
 [baseurl.py]
 [basic.py]
 [filelisting.py]
 skip-if = python == 3
 [paths.py]
 [requestlog.py]
--- a/testing/mozbase/mozinfo/tests/manifest.ini
+++ b/testing/mozbase/mozinfo/tests/manifest.ini
@@ -1,3 +1,3 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test.py]
--- a/testing/mozbase/mozinstall/tests/manifest.ini
+++ b/testing/mozbase/mozinstall/tests/manifest.ini
@@ -1,7 +1,7 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test_binary.py]
 [test_install.py]
 [test_is_installer.py]
 [test_uninstall.py]
--- a/testing/mozbase/mozlog/tests/manifest.ini
+++ b/testing/mozbase/mozlog/tests/manifest.ini
@@ -1,9 +1,9 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test_logger.py]
 skip-if = python == 3
 [test_logtypes.py]
 [test_formatters.py]
 skip-if = python == 3
 [test_structured.py]
 skip-if = python == 3
--- a/testing/mozbase/moznetwork/tests/manifest.ini
+++ b/testing/mozbase/moznetwork/tests/manifest.ini
@@ -1,4 +1,4 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test.py]
--- a/testing/mozbase/mozprocess/tests/manifest.ini
+++ b/testing/mozbase/mozprocess/tests/manifest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test_detached.py]
 skip-if = os == "win"  # Bug 1493796
 [test_kill.py]
 [test_misc.py]
 [test_pid.py]
 [test_poll.py]
 [test_wait.py]
--- a/testing/mozbase/mozprofile/tests/manifest.ini
+++ b/testing/mozbase/mozprofile/tests/manifest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test_addonid.py]
 [test_server_locations.py]
 [test_preferences.py]
 [test_permissions.py]
 [test_bug758250.py]
 [test_nonce.py]
 [test_clone_cleanup.py]
 [test_profile.py]
--- a/testing/mozbase/mozrunner/tests/manifest.ini
+++ b/testing/mozbase/mozrunner/tests/manifest.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 # We skip these tests in automated Windows builds because they trigger crashes
 # in sh.exe; see bug 1489277.
 skip-if = python == 3 || (automation && os == "win")
 [test_crash.py]
 [test_interactive.py]
 [test_start.py]
 [test_states.py]
 [test_stop.py]
--- a/testing/mozbase/mozsystemmonitor/tests/manifest.ini
+++ b/testing/mozbase/mozsystemmonitor/tests/manifest.ini
@@ -1,3 +1,3 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 [test_resource_monitor.py]
--- a/testing/mozbase/moztest/tests/manifest.ini
+++ b/testing/mozbase/moztest/tests/manifest.ini
@@ -1,5 +1,5 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 skip-if = python == 3
 [test.py]
 [test_resolve.py]
--- a/testing/mozbase/mozversion/tests/manifest.ini
+++ b/testing/mozbase/mozversion/tests/manifest.ini
@@ -1,6 +1,6 @@
 [DEFAULT]
-subsuite = mozbase
+subsuite = mozbase, os == "linux"
 
 [test_binary.py]
 [test_apk.py]
 skip-if = python == 3
--- a/testing/mozharness/external_tools/robustcheckout.py
+++ b/testing/mozharness/external_tools/robustcheckout.py
@@ -44,18 +44,18 @@ try:
     from mercurial import configitems
     configitems.dynamicdefault
 except ImportError:
     configitems = None
 
 # Causes worker to purge caches on process exit and for task to retry.
 EXIT_PURGE_CACHE = 72
 
-testedwith = '4.3 4.4 4.5 4.6 4.7 4.8'
-minimumhgversion = '4.3'
+testedwith = '3.7 3.8 3.9 4.0 4.1 4.2 4.3 4.4 4.5 4.6 4.7'
+minimumhgversion = '3.7'
 
 cmdtable = {}
 
 # TRACKING hg43 Mercurial 4.3 introduced registrar.command as a replacement for
 # cmdutil.command.
 if util.safehasattr(registrar, 'command'):
     command = registrar.command(cmdtable)
 else:
@@ -83,19 +83,23 @@ def getvfs():
 
 def getsparse():
     from mercurial import sparse
     return sparse
 
 
 def supported_hg():
     '''Returns True if the Mercurial version is supported for robustcheckout'''
-    return '.'.join(
-        str(v) for v in util.versiontuple(n=2)
-    ) in testedwith.split()
+    return util.versiontuple(n=2) in (
+        (4, 3),
+        (4, 4),
+        (4, 5),
+        (4, 6),
+        (4, 7),
+    )
 
 
 if os.name == 'nt':
     import ctypes
 
     # Get a reference to the DeleteFileW function
     # DeleteFileW accepts filenames encoded as a null terminated sequence of
     # wide chars (UTF-16). Python's ctypes.c_wchar_p correctly encodes unicode
@@ -671,22 +675,17 @@ def _docheckout(ui, url, dest, upstream,
         # By default, Mercurial will ignore unknown sparse profiles. This could
         # lead to a full checkout. Be more strict.
         try:
             repo.filectx(sparse_profile, changeid=checkoutrevision).data()
         except error.ManifestLookupError:
             raise error.Abort('sparse profile %s does not exist at revision '
                               '%s' % (sparse_profile, checkoutrevision))
 
-        # TRACKING hg48 - parseconfig takes `action` param
-        if util.versiontuple(n=2) >= (4, 8):
-            old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread('sparse'), 'sparse')
-        else:
-            old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread('sparse'))
-
+        old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread('sparse'))
         old_includes, old_excludes, old_profiles = old_config
 
         if old_profiles == {sparse_profile} and not old_includes and not \
                 old_excludes:
             ui.write('(sparse profile %s already set; no need to update '
                      'sparse config)\n' % sparse_profile)
         else:
             if old_includes or old_excludes or old_profiles:
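
The backed-out hunk in robustcheckout.py swaps a membership test against hard-coded version tuples for one derived from the testedwith string; either way, supported_hg asks whether the running Mercurial's (major, minor) is in the supported list. A standalone sketch of the string-based variant (with the version passed in, since util.versiontuple is only available inside Mercurial):

testedwith = '4.3 4.4 4.5 4.6 4.7 4.8'

# Sketch: is a (major, minor) Mercurial version listed in testedwith?
def supported_hg(version_tuple):
    return '.'.join(str(v) for v in version_tuple[:2]) in testedwith.split()

print(supported_hg((4, 8)))  # True
print(supported_hg((3, 7)))  # False
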
--- a/tools/lint/test/python.ini
+++ b/tools/lint/test/python.ini
@@ -1,5 +1,5 @@
 [DEFAULT]
-subsuite = mozlint
+subsuite=mozlint, os == "linux"
 skip-if = python == 3
 
 [test_flake8.py]