Backed out changeset 61f33f8c8750 (bug 1468812) for Linux ccov mass failures (bug 1478211). a=backout
author: Sebastian Hengst <archaeopteryx@coole-files.de>
Wed, 25 Jul 2018 18:05:09 +0300
changeset 822648 4c4abe35d80851590954b52eabf828c763104696
parent 822631 befd4afd29bf9c145701853d308a59b7fc37e6a4
child 822649 13c6faaf92d341470e11fbdf8a46c2831c035a96
child 822667 1c7b512ac526c3af59c94c340b50e593845c04c3
child 822672 c769229d19661370d5448f0b5b236ce8b2eb91c4
child 822716 45d84b1c8ef5a6fc2a9c060dba44b2f56147740b
child 822717 12f42d129762967c3f912085787ad489ec97feaa
child 822792 9b59ea6c2845ba5501ddc45064bd965ab2afe38d
child 822804 b89b6c6da94b95abf74a79797f58542acb3e55c1
child 822878 f6a2924ee429bde8789367df4fb1c3a5d2295d06
child 823590 1cafb45ab1b24d965814ddeffbc7a7f42589c3e7
child 824675 f7fdae2f41a4eef239cb50d4ac0fc5015926cfe9
push id: 117424
push user: rwood@mozilla.com
push date: Wed, 25 Jul 2018 15:49:12 +0000
reviewers: backout
bugs: 1468812, 1478211
milestone: 63.0a1
backs out: 61f33f8c87508dae19c577c74ec71eae459c6aaf
Backed out changeset 61f33f8c8750 (bug 1468812) for Linux ccov mass failures (bug 1478211). a=backout
layout/tools/reftest/selftest/conftest.py
taskcluster/ci/source-test/jsshell.yml
taskcluster/ci/source-test/python.yml
taskcluster/scripts/misc/build-gcc-4.9-linux.sh
taskcluster/scripts/misc/build-gcc-6-linux.sh
taskcluster/scripts/misc/build-gcc-7-linux.sh
taskcluster/scripts/misc/build-gcc-mingw32.sh
taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
taskcluster/scripts/run-task
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/job/toolchain.py
--- a/layout/tools/reftest/selftest/conftest.py
+++ b/layout/tools/reftest/selftest/conftest.py
@@ -44,18 +44,18 @@ def runtests(setup_test_harness, binary,
         package_root = os.path.dirname(harness_root)
         options.update({
             'extraProfileFiles': [os.path.join(package_root, 'bin', 'plugins')],
             'reftestExtensionPath': os.path.join(harness_root, 'reftest'),
             'sandboxReadWhitelist': [here, os.environ['PYTHON_TEST_TMP']],
             'utilityPath': os.path.join(package_root, 'bin'),
         })
 
-        if 'MOZ_FETCHES_DIR' in os.environ:
-            options['sandboxReadWhitelist'].append(os.environ['MOZ_FETCHES_DIR'])
+        if 'USE_ARTIFACT_PATH' in os.environ:
+            options['sandboxReadWhitelist'].append(os.environ['USE_ARTIFACT_PATH'])
     else:
         options.update({
             'extraProfileFiles': [os.path.join(build.topobjdir, 'dist', 'plugins')],
             'sandboxReadWhitelist': [build.topobjdir, build.topsrcdir],
         })
 
     def normalize(test):
         if os.path.isabs(test):
--- a/taskcluster/ci/source-test/jsshell.yml
+++ b/taskcluster/ci/source-test/jsshell.yml
@@ -4,41 +4,55 @@ job-defaults:
     worker-type:
         by-platform:
             linux64.*: releng-hardware/gecko-t-linux-talos
     worker:
         by-platform:
             linux64.*:
                 env:
                     SHELL: /bin/bash
-                    JSSHELL: /home/cltbld/fetches/js
                 max-run-time: 1800
     treeherder:
         kind: test
         tier: 2
     run:
-        using: mach
+        using: run-task
+        use-artifacts:
+            build:
+                - target.jsshell.zip
         workdir: /home/cltbld
     run-on-projects: ['mozilla-central', 'try']
-    fetches:
-        build:
-            - target.jsshell.zip
 
 bench-ares6:
     description: Ares6 JavaScript shell benchmark suite
     treeherder:
         symbol: js-bench(ares6)
     run:
-        mach: jsshell-bench --binary $JSSHELL --perfherder ares6
+        command: >
+            cd $USE_ARTIFACT_PATH/build &&
+            unzip -qo -d jsshell target.jsshell.zip &&
+            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
+            cd $GECKO_PATH &&
+            ./mach jsshell-bench --binary $JSSHELL --perfherder ares6
 
 bench-sixspeed:
     description: Six-Speed JavaScript shell benchmark suite
     treeherder:
         symbol: js-bench(6speed)
     run:
-        mach: jsshell-bench --binary $JSSHELL --perfherder six-speed
+        command: >
+            cd $USE_ARTIFACT_PATH/build &&
+            unzip -qo -d jsshell target.jsshell.zip &&
+            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
+            cd $GECKO_PATH &&
+            ./mach jsshell-bench --binary $JSSHELL --perfherder six-speed
 
 bench-asmjsapps:
     description: asm.js Apps shell benchmark suite
     treeherder:
         symbol: js-bench(asm.js-apps)
     run:
-        mach: jsshell-bench --binary $JSSHELL --perfherder asmjs-apps
+        command: >
+            cd $USE_ARTIFACT_PATH/build &&
+            unzip -qo -d jsshell target.jsshell.zip &&
+            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
+            cd $GECKO_PATH &&
+            ./mach jsshell-bench --binary $JSSHELL --perfherder asmjs-apps
--- a/taskcluster/ci/source-test/python.yml
+++ b/taskcluster/ci/source-test/python.yml
@@ -59,28 +59,32 @@ mochitest-harness:
         symbol: py2(mch)
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "desktop1604-test"}
                 max-run-time: 3600
     run:
         using: run-task
+        use-artifacts:
+            build:
+                - target.tar.bz2
+                - target.common.tests.zip
+                - target.mochitest.tests.zip
         command: >
             source /builds/worker/scripts/xvfb.sh &&
             start_xvfb '1600x1200x24' 0 &&
-            export GECKO_BINARY_PATH=$MOZ_FETCHES_DIR/firefox/firefox &&
-            export TEST_HARNESS_ROOT=$MOZ_FETCHES_DIR/tests &&
+            cd $USE_ARTIFACT_PATH/build &&
+            tar -xf target.tar.bz2 &&
+            unzip -q -d tests target.common.tests.zip &&
+            unzip -q -d tests target.mochitest.tests.zip &&
+            export GECKO_BINARY_PATH=$USE_ARTIFACT_PATH/build/firefox/firefox &&
+            export TEST_HARNESS_ROOT=$USE_ARTIFACT_PATH/build/tests &&
             cd /builds/worker/checkouts/gecko &&
             ./mach python-test --python 2 --subsuite mochitest
-    fetches:
-        build:
-            - target.tar.bz2
-            - target.common.tests.zip>tests
-            - target.mochitest.tests.zip>tests
     when:
         files-changed:
             - 'testing/mochitest/**'
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
             - 'testing/profiles/**'
@@ -172,28 +176,32 @@ reftest-harness:
         symbol: py2(ref)
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "desktop1604-test"}
                 max-run-time: 3600
     run:
         using: run-task
+        use-artifacts:
+            build:
+                - target.tar.bz2
+                - target.common.tests.zip
+                - target.reftest.tests.zip
         command: >
             source /builds/worker/scripts/xvfb.sh &&
             start_xvfb '1600x1200x24' 0 &&
-            export GECKO_BINARY_PATH=$MOZ_FETCHES_DIR/firefox/firefox &&
-            export TEST_HARNESS_ROOT=$MOZ_FETCHES_DIR/tests &&
+            cd $USE_ARTIFACT_PATH/build &&
+            tar -xf target.tar.bz2 &&
+            unzip -q -d tests target.common.tests.zip &&
+            unzip -q -d tests target.reftest.tests.zip &&
+            export GECKO_BINARY_PATH=$USE_ARTIFACT_PATH/build/firefox/firefox &&
+            export TEST_HARNESS_ROOT=$USE_ARTIFACT_PATH/build/tests &&
             cd /builds/worker/checkouts/gecko &&
             ./mach python-test --python 2 --subsuite reftest
-    fetches:
-        build:
-            - target.tar.bz2
-            - target.common.tests.zip>tests
-            - target.reftest.tests.zip>tests
     when:
         files-changed:
             - 'layout/tools/reftest/**'
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
 
--- a/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
@@ -12,16 +12,18 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=4.9.4
 gcc_ext=bz2
 binutils_version=2.25.1
 binutils_ext=bz2
 
+$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
+
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../cloog-0.18.1 cloog
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../isl-0.12.2 isl
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
--- a/taskcluster/scripts/misc/build-gcc-6-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-6-linux.sh
@@ -12,16 +12,18 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 
+$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
+
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
 build_binutils
--- a/taskcluster/scripts/misc/build-gcc-7-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-7-linux.sh
@@ -12,16 +12,18 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=7.3.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 
+$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
+
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-6.1.0 gmp
 ln -sf ../isl-0.16.1 isl
 ln -sf ../mpc-1.0.3 mpc
 ln -sf ../mpfr-3.1.4 mpfr
 popd
 
 build_binutils
--- a/taskcluster/scripts/misc/build-gcc-mingw32.sh
+++ b/taskcluster/scripts/misc/build-gcc-mingw32.sh
@@ -15,16 +15,18 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.27
 binutils_ext=bz2
 binutils_configure_flags="--target=i686-w64-mingw32"
 mingw_version=bcf1f29d6dc80b6025b416bef104d2314fa9be57
 
+$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
+
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
 prepare_mingw
--- a/taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
@@ -22,16 +22,18 @@ gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 sixgill_rev=ab06fc42cf0f
 sixgill_repo=https://hg.mozilla.org/users/sfink_mozilla.com/sixgill
 
 . $data_dir/build-gcc.sh
 
+$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
+
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../binutils-2.28.1 binutils
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
--- a/taskcluster/scripts/run-task
+++ b/taskcluster/scripts/run-task
@@ -68,28 +68,16 @@ Our Docker image policy requires volumes
 The volume was likely populated as part of building the Docker image.
 Change the Dockerfile and anything run from it to not create files in
 any VOLUME.
 
 A lesser possibility is that you stumbled upon a TaskCluster platform bug
 where it fails to use new volumes for tasks.
 '''
 
-
-FETCH_CONTENT_NOT_FOUND = '''
-error: fetch-content script not found
-
-The script at `taskcluster/scripts/misc/fetch-content` could not be
-detected in the current environment.
-
-If this task clones gecko, make sure the GECKO_PATH environment variable
-is set to proper location. Otherwise, the script may need to be mounted
-or added to the task's docker image then added to the PATH.
-'''
-
 # The exit code to use when caches should be purged and the task retried.
 # This is EX_OSFILE (from sysexits.h):
 #     Some system file  does not exist, cannot be opened, or has some
 #     sort of error (e.g., syntax error).
 EXIT_PURGE_CACHE = 72
 
 
 IS_POSIX = os.name == 'posix'
@@ -475,33 +463,16 @@ def vcs_checkout(source_repo, dest, stor
                                      source_repo=source_repo,
                                      repo_name=source_repo.split('/')[-1]))
 
     print_line(b'vcs', msg.encode('utf-8'))
 
     return revision
 
 
-def fetch_artifacts(fetches):
-    print_line(b'fetches', b'fetching artifacts\n')
-
-    fetch_content = shutil.which('fetch-content')
-    if not fetch_content and os.environ.get('GECKO_PATH'):
-        fetch_content = os.path.join(os.environ['GECKO_PATH'], 'taskcluster',
-                                     'scripts', 'misc', 'fetch-content')
-
-    if not fetch_content or not os.path.isfile(fetch_content):
-        print(FETCH_CONTENT_NOT_FOUND)
-        sys.exit(1)
-
-    cmd = [fetch_content, 'task-artifacts'] + fetches.split()
-    subprocess.run(cmd, check=True, env=os.environ)
-    print_line(b'fetches', b'finished fetching artifacts\n')
-
-
 def main(args):
     print_line(b'setup', b'run-task started in %s\n' % os.getcwd().encode('utf-8'))
     running_as_root = IS_POSIX and os.getuid() == 0
 
     # Arguments up to '--' are ours. After are for the main task
     # to be executed.
     try:
         i = args.index('--')
@@ -732,24 +703,38 @@ def main(args):
             revision=os.environ.get('COMM_HEAD_REV'),
             branch=os.environ.get('COMM_HEAD_REF'))
 
     elif not os.environ.get('COMM_HEAD_REV') and \
             os.environ.get('COMM_HEAD_REF'):
         print('task should be defined in terms of non-symbolic revision')
         return 1
 
-    fetches = os.environ.get('MOZ_FETCHES')
-    if fetches:
-        fetch_artifacts(fetches)
+    use_artifact_path = os.environ.get('USE_ARTIFACT_PATH')
+
+    def prepare_use_artifact(key, url):
+        print_line(b'setup', b'fetching artifact from %s\n' % url.encode('utf-8'))
+        path = os.path.join(use_artifact_path, key)
+        if not os.path.isdir(path):
+            os.makedirs(path)
+
+        url = url.rstrip('/')
+        path = os.path.join(path, os.path.basename(url))
+        response = urllib.request.urlopen(url)
+        with open(path, 'wb') as fh:
+            fh.write(response.read())
+
+    use_artifacts = os.environ.get('USE_ARTIFACT_URLS')
+    if use_artifacts:
+        use_artifacts = json.loads(use_artifacts)
+        for key, urls in use_artifacts.items():
+            for url in urls:
+                prepare_use_artifact(key, url)
 
     try:
         return run_and_prefix_output(b'task', task_args)
     finally:
-        fetches_dir = os.environ.get('MOZ_FETCHES_DIR')
-        if fetches_dir and os.path.isdir(fetches_dir):
-            print_line(b'fetches', b'removing %s\n' % fetches_dir.encode('utf-8'))
-            shutil.rmtree(fetches_dir)
-            print_line(b'fetches', b'finished\n')
+        if use_artifact_path and os.path.isdir(use_artifact_path):
+            shutil.rmtree(use_artifact_path)
 
 
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -192,17 +192,17 @@ def use_fetches(config, jobs):
                 for path in artifacts:
                     job_fetches.append('{prefix}/{path}@<{dep}>'.format(
                         prefix=prefix, path=path, dep=kind))
 
         env = job.setdefault('worker', {}).setdefault('env', {})
         env['MOZ_FETCHES'] = {'task-reference': ' '.join(job_fetches)}
 
         workdir = job['run'].get('workdir', '/builds/worker')
-        env.setdefault('MOZ_FETCHES_DIR', '{}/fetches'.format(workdir))
+        env['MOZ_FETCHES_DIR'] = '{}/fetches'.format(workdir)
         yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     """Given a build description, create a task description"""
     # import plugin modules first, before iterating over jobs
     import_all()
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -4,17 +4,18 @@
 """
 Common support for various job types.  These functions are all named after the
 worker implementation they operate on, and take the same three parameters, for
 consistency.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from taskgraph.util.taskcluster import get_artifact_prefix
+import json
+from taskgraph.util.taskcluster import get_artifact_url, get_artifact_prefix
 
 SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'
 
 
 def docker_worker_add_workspace_cache(config, job, taskdesc, extra=None):
     """Add the workspace cache.
 
     ``extra`` is an optional kwarg passed in that supports extending the cache
@@ -187,8 +188,31 @@ def docker_worker_add_tooltool(config, j
     taskdesc['scopes'].extend([
         'docker-worker:relengapi-proxy:tooltool.download.public',
     ])
 
     if internal:
         taskdesc['scopes'].extend([
             'docker-worker:relengapi-proxy:tooltool.download.internal',
         ])
+
+
+def support_use_artifacts(config, job, taskdesc, use_artifacts):
+    """Set a JSON object of artifact URLs in an environment variable.
+
+    This will tell the run-task script to download the artifacts.
+    """
+    urls = {}
+    prefix = get_artifact_prefix(taskdesc)
+    for kind, artifacts in use_artifacts.items():
+        if kind not in taskdesc['dependencies']:
+            raise Exception("{label} can't use '{kind}' artifacts because it has no '{kind}' "
+                            "dependency!".format(label=job['label'], kind=kind))
+        task_id = '<{}>'.format(kind)
+        urls[kind] = []
+
+        for artifact in artifacts:
+            path = '/'.join([prefix, artifact])
+            urls[kind].append(get_artifact_url(task_id, path))
+
+    env = taskdesc['worker'].setdefault('env', {})
+    env['USE_ARTIFACT_URLS'] = {'task-reference': json.dumps(urls)}
+    env['USE_ARTIFACT_PATH'] = '{workdir}/use-artifacts'.format(**job['run'])
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -4,17 +4,17 @@
 """
 Support for running jobs that are invoked via the `run-task` script.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.util.schema import Schema
-from taskgraph.transforms.job.common import support_vcs_checkout
+from taskgraph.transforms.job.common import support_use_artifacts, support_vcs_checkout
 from voluptuous import Required, Any
 
 run_task_schema = Schema({
     Required('using'): 'run-task',
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
     Required('cache-dotcache'): bool,
@@ -25,32 +25,43 @@ run_task_schema = Schema({
     # The sparse checkout profile to use. Value is the filename relative to the
     # directory where sparse profiles are defined (build/sparse-profiles/).
     Required('sparse-profile'): Any(basestring, None),
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
     Required('comm-checkout'): bool,
 
+    # maps a dependency to a list of artifact names to use from that dependency.
+    # E.g: {"build": ["target.tar.bz2"]}
+    # In the above example, the artifact would be downloaded to:
+    # $USE_ARTIFACT_PATH/build/target.tar.bz2
+    Required('use-artifacts'): Any(None, {
+        basestring: [basestring],
+    }),
+
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 
     # Base work directory used to set up the task.
     Required('workdir'): basestring,
 })
 
 
 def common_setup(config, job, taskdesc):
     run = job['run']
     if run['checkout']:
         support_vcs_checkout(config, job, taskdesc,
                              sparse=bool(run['sparse-profile']))
 
+    if run['use-artifacts']:
+        support_use_artifacts(config, job, taskdesc, run['use-artifacts'])
+
     taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
 
 
 def add_checkout_to_command(run, command):
     if not run['checkout']:
         return
 
     command.append('--vcs-checkout={workdir}/checkouts/gecko'.format(**run))
@@ -60,16 +71,17 @@ def add_checkout_to_command(run, command
                        run['sparse-profile'])
 
 
 defaults = {
     'cache-dotcache': False,
     'checkout': True,
     'comm-checkout': False,
     'sparse-profile': None,
+    'use-artifacts': None,
 }
 
 
 @run_job_using("docker-worker", "run-task", schema=run_task_schema, defaults=defaults)
 def docker_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     common_setup(config, job, taskdesc)
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -126,52 +126,46 @@ def docker_worker_toolchain(config, job,
     # Allow the job to specify where artifacts come from, but add
     # public/build if it's not there already.
     artifacts = worker.setdefault('artifacts', [])
     if not any(artifact.get('name') == 'public/build' for artifact in artifacts):
         docker_worker_add_artifacts(config, job, taskdesc)
 
     support_vcs_checkout(config, job, taskdesc, sparse=True)
 
-    # Toolchain checkouts don't live under {workdir}/checkouts
-    workspace = '{workdir}/workspace/build'.format(**run)
-    gecko_path = '{}/src'.format(workspace)
-
     env = worker['env']
     env.update({
         'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
         'TOOLS_DISABLE': 'true',
         'MOZ_AUTOMATION': '1',
-        'MOZ_FETCHES_DIR': workspace,
-        'GECKO_PATH': gecko_path,
     })
 
     if run['tooltool-downloads']:
         internal = run['tooltool-downloads'] == 'internal'
         docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
 
     # Use `mach` to invoke python scripts so in-tree libraries are available.
     if run['script'].endswith('.py'):
-        wrapper = '{}/mach python '.format(gecko_path)
+        wrapper = 'workspace/build/src/mach python '
     else:
         wrapper = ''
 
     args = run.get('arguments', '')
     if args:
         args = ' ' + shell_quote(*args)
 
     sparse_profile = []
     if run.get('sparse-profile'):
         sparse_profile = ['--sparse-profile',
                           'build/sparse-profiles/{}'.format(run['sparse-profile'])]
 
     worker['command'] = [
         '{workdir}/bin/run-task'.format(**run),
-        '--vcs-checkout={}'.format(gecko_path),
+        '--vcs-checkout={workdir}/workspace/build/src'.format(**run),
     ] + sparse_profile + [
         '--',
         'bash',
         '-c',
         'cd {} && '
         '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
             run['workdir'], wrapper, run['script'], args)
     ]