Bug 1468812 - [ci] Support MOZ_FETCHES and fetch-content in run-task r=gps
author Andrew Halberstadt <ahalberstadt@mozilla.com>
Tue, 24 Jul 2018 13:11:25 +0000
changeset 822201 61f33f8c87508dae19c577c74ec71eae459c6aaf
parent 822200 79426f83650f78d8d4b6f8ad8832112e2637ac18
child 822202 cf969e57bc7b713077f2cafe40e3c4349de3c1ff
push id 117316
push user bgrinstead@mozilla.com
push date Tue, 24 Jul 2018 22:25:31 +0000
reviewers gps
bugs 1468812
milestone 63.0a1
Bug 1468812 - [ci] Support MOZ_FETCHES and fetch-content in run-task r=gps

This removes the 'use-artifacts' mechanism in favour of fetches. There are a few pieces here that need to land atomically:

1. Remove use-artifact related code
2. Call 'fetch-content' from the run-task script
3. Convert existing tasks on top of fetches (jsshell, python unittest)
4. Stop calling 'fetch-content' from toolchain setup tasks (as this now gets handled in run-task)

Depends on D2166.

Differential Revision: https://phabricator.services.mozilla.com/D2167
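For reference, a minimal sketch (not part of this patch) of the environment that the use_fetches transform below produces for a converted task. The 'public/build' artifact prefix and the concrete paths are assumptions for illustration; the real values are filled in from the task graph:

    # Illustrative only: what use_fetches() in
    # taskcluster/taskgraph/transforms/job/__init__.py would add to a jsshell
    # benchmark task's worker environment, assuming a 'public/build' prefix.
    env = {
        # run-task forwards this, split on whitespace, to
        # `fetch-content task-artifacts`; the <build> task reference is
        # resolved to a concrete task id when the task is created.
        'MOZ_FETCHES': {'task-reference': 'public/build/target.jsshell.zip@<build>'},
        # The jsshell tasks use the /home/cltbld workdir, so artifacts land here.
        'MOZ_FETCHES_DIR': '/home/cltbld/fetches',
    }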
layout/tools/reftest/selftest/conftest.py
taskcluster/ci/source-test/jsshell.yml
taskcluster/ci/source-test/python.yml
taskcluster/scripts/misc/build-gcc-4.9-linux.sh
taskcluster/scripts/misc/build-gcc-6-linux.sh
taskcluster/scripts/misc/build-gcc-7-linux.sh
taskcluster/scripts/misc/build-gcc-mingw32.sh
taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
taskcluster/scripts/run-task
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/job/toolchain.py
--- a/layout/tools/reftest/selftest/conftest.py
+++ b/layout/tools/reftest/selftest/conftest.py
@@ -44,18 +44,18 @@ def runtests(setup_test_harness, binary,
         package_root = os.path.dirname(harness_root)
         options.update({
             'extraProfileFiles': [os.path.join(package_root, 'bin', 'plugins')],
             'reftestExtensionPath': os.path.join(harness_root, 'reftest'),
             'sandboxReadWhitelist': [here, os.environ['PYTHON_TEST_TMP']],
             'utilityPath': os.path.join(package_root, 'bin'),
         })
 
-        if 'USE_ARTIFACT_PATH' in os.environ:
-            options['sandboxReadWhitelist'].append(os.environ['USE_ARTIFACT_PATH'])
+        if 'MOZ_FETCHES_DIR' in os.environ:
+            options['sandboxReadWhitelist'].append(os.environ['MOZ_FETCHES_DIR'])
     else:
         options.update({
             'extraProfileFiles': [os.path.join(build.topobjdir, 'dist', 'plugins')],
             'sandboxReadWhitelist': [build.topobjdir, build.topsrcdir],
         })
 
     def normalize(test):
         if os.path.isabs(test):
--- a/taskcluster/ci/source-test/jsshell.yml
+++ b/taskcluster/ci/source-test/jsshell.yml
@@ -4,55 +4,41 @@ job-defaults:
     worker-type:
         by-platform:
             linux64.*: releng-hardware/gecko-t-linux-talos
     worker:
         by-platform:
             linux64.*:
                 env:
                     SHELL: /bin/bash
+                    JSSHELL: /home/cltbld/fetches/js
                 max-run-time: 1800
     treeherder:
         kind: test
         tier: 2
     run:
-        using: run-task
-        use-artifacts:
-            build:
-                - target.jsshell.zip
+        using: mach
         workdir: /home/cltbld
     run-on-projects: ['mozilla-central', 'try']
+    fetches:
+        build:
+            - target.jsshell.zip
 
 bench-ares6:
     description: Ares6 JavaScript shell benchmark suite
     treeherder:
         symbol: js-bench(ares6)
     run:
-        command: >
-            cd $USE_ARTIFACT_PATH/build &&
-            unzip -qo -d jsshell target.jsshell.zip &&
-            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
-            cd $GECKO_PATH &&
-            ./mach jsshell-bench --binary $JSSHELL --perfherder ares6
+        mach: jsshell-bench --binary $JSSHELL --perfherder ares6
 
 bench-sixspeed:
     description: Six-Speed JavaScript shell benchmark suite
     treeherder:
         symbol: js-bench(6speed)
     run:
-        command: >
-            cd $USE_ARTIFACT_PATH/build &&
-            unzip -qo -d jsshell target.jsshell.zip &&
-            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
-            cd $GECKO_PATH &&
-            ./mach jsshell-bench --binary $JSSHELL --perfherder six-speed
+        mach: jsshell-bench --binary $JSSHELL --perfherder six-speed
 
 bench-asmjsapps:
     description: asm.js Apps shell benchmark suite
     treeherder:
         symbol: js-bench(asm.js-apps)
     run:
-        command: >
-            cd $USE_ARTIFACT_PATH/build &&
-            unzip -qo -d jsshell target.jsshell.zip &&
-            export JSSHELL=$USE_ARTIFACT_PATH/build/jsshell/js &&
-            cd $GECKO_PATH &&
-            ./mach jsshell-bench --binary $JSSHELL --perfherder asmjs-apps
+        mach: jsshell-bench --binary $JSSHELL --perfherder asmjs-apps
--- a/taskcluster/ci/source-test/python.yml
+++ b/taskcluster/ci/source-test/python.yml
@@ -59,32 +59,28 @@ mochitest-harness:
         symbol: py2(mch)
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "desktop1604-test"}
                 max-run-time: 3600
     run:
         using: run-task
-        use-artifacts:
-            build:
-                - target.tar.bz2
-                - target.common.tests.zip
-                - target.mochitest.tests.zip
         command: >
             source /builds/worker/scripts/xvfb.sh &&
             start_xvfb '1600x1200x24' 0 &&
-            cd $USE_ARTIFACT_PATH/build &&
-            tar -xf target.tar.bz2 &&
-            unzip -q -d tests target.common.tests.zip &&
-            unzip -q -d tests target.mochitest.tests.zip &&
-            export GECKO_BINARY_PATH=$USE_ARTIFACT_PATH/build/firefox/firefox &&
-            export TEST_HARNESS_ROOT=$USE_ARTIFACT_PATH/build/tests &&
+            export GECKO_BINARY_PATH=$MOZ_FETCHES_DIR/firefox/firefox &&
+            export TEST_HARNESS_ROOT=$MOZ_FETCHES_DIR/tests &&
             cd /builds/worker/checkouts/gecko &&
             ./mach python-test --python 2 --subsuite mochitest
+    fetches:
+        build:
+            - target.tar.bz2
+            - target.common.tests.zip>tests
+            - target.mochitest.tests.zip>tests
     when:
         files-changed:
             - 'testing/mochitest/**'
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
             - 'testing/profiles/**'
@@ -176,32 +172,28 @@ reftest-harness:
         symbol: py2(ref)
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "desktop1604-test"}
                 max-run-time: 3600
     run:
         using: run-task
-        use-artifacts:
-            build:
-                - target.tar.bz2
-                - target.common.tests.zip
-                - target.reftest.tests.zip
         command: >
             source /builds/worker/scripts/xvfb.sh &&
             start_xvfb '1600x1200x24' 0 &&
-            cd $USE_ARTIFACT_PATH/build &&
-            tar -xf target.tar.bz2 &&
-            unzip -q -d tests target.common.tests.zip &&
-            unzip -q -d tests target.reftest.tests.zip &&
-            export GECKO_BINARY_PATH=$USE_ARTIFACT_PATH/build/firefox/firefox &&
-            export TEST_HARNESS_ROOT=$USE_ARTIFACT_PATH/build/tests &&
+            export GECKO_BINARY_PATH=$MOZ_FETCHES_DIR/firefox/firefox &&
+            export TEST_HARNESS_ROOT=$MOZ_FETCHES_DIR/tests &&
             cd /builds/worker/checkouts/gecko &&
             ./mach python-test --python 2 --subsuite reftest
+    fetches:
+        build:
+            - target.tar.bz2
+            - target.common.tests.zip>tests
+            - target.reftest.tests.zip>tests
     when:
         files-changed:
             - 'layout/tools/reftest/**'
             - 'testing/mozbase/moztest/moztest/selftest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
             - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
 
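The `artifact>directory` notation above (e.g. `target.common.tests.zip>tests`) asks fetch-content to place that artifact under a subdirectory of $MOZ_FETCHES_DIR, which is why the command can point TEST_HARNESS_ROOT at $MOZ_FETCHES_DIR/tests without unpacking anything itself. A rough sketch of how such an entry splits into its parts, purely as an illustration of the convention, not fetch-content's actual parser:

    # Hypothetical helper, not taken from fetch-content: split one fetches
    # entry of the form '<artifact>' or '<artifact>><subdir>'.
    def split_fetch_entry(entry):
        artifact, sep, subdir = entry.partition('>')
        return artifact, (subdir if sep else None)

    assert split_fetch_entry('target.common.tests.zip>tests') == \
        ('target.common.tests.zip', 'tests')
    assert split_fetch_entry('target.tar.bz2') == ('target.tar.bz2', None)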
--- a/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
@@ -12,18 +12,16 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=4.9.4
 gcc_ext=bz2
 binutils_version=2.25.1
 binutils_ext=bz2
 
-$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
-
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../cloog-0.18.1 cloog
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../isl-0.12.2 isl
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
--- a/taskcluster/scripts/misc/build-gcc-6-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-6-linux.sh
@@ -12,18 +12,16 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 
-$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
-
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
 build_binutils
--- a/taskcluster/scripts/misc/build-gcc-7-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-7-linux.sh
@@ -12,18 +12,16 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 . $data_dir/build-gcc.sh
 
 gcc_version=7.3.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 
-$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
-
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-6.1.0 gmp
 ln -sf ../isl-0.16.1 isl
 ln -sf ../mpc-1.0.3 mpc
 ln -sf ../mpfr-3.1.4 mpfr
 popd
 
 build_binutils
--- a/taskcluster/scripts/misc/build-gcc-mingw32.sh
+++ b/taskcluster/scripts/misc/build-gcc-mingw32.sh
@@ -15,18 +15,16 @@ data_dir=$HOME_DIR/src/build/unix/build-
 
 gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.27
 binutils_ext=bz2
 binutils_configure_flags="--target=i686-w64-mingw32"
 mingw_version=bcf1f29d6dc80b6025b416bef104d2314fa9be57
 
-$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
-
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
 prepare_mingw
--- a/taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
+++ b/taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh
@@ -22,18 +22,16 @@ gcc_version=6.4.0
 gcc_ext=xz
 binutils_version=2.28.1
 binutils_ext=xz
 sixgill_rev=ab06fc42cf0f
 sixgill_repo=https://hg.mozilla.org/users/sfink_mozilla.com/sixgill
 
 . $data_dir/build-gcc.sh
 
-$HOME_DIR/src/taskcluster/scripts/misc/fetch-content task-artifacts -d $root_dir $MOZ_FETCHES
-
 pushd $root_dir/gcc-$gcc_version
 ln -sf ../binutils-2.28.1 binutils
 ln -sf ../gmp-5.1.3 gmp
 ln -sf ../isl-0.15 isl
 ln -sf ../mpc-0.8.2 mpc
 ln -sf ../mpfr-3.1.5 mpfr
 popd
 
--- a/taskcluster/scripts/run-task
+++ b/taskcluster/scripts/run-task
@@ -68,16 +68,28 @@ Our Docker image policy requires volumes
 The volume was likely populated as part of building the Docker image.
 Change the Dockerfile and anything run from it to not create files in
 any VOLUME.
 
 A lesser possibility is that you stumbled upon a TaskCluster platform bug
 where it fails to use new volumes for tasks.
 '''
 
+
+FETCH_CONTENT_NOT_FOUND = '''
+error: fetch-content script not found
+
+The script at `taskcluster/scripts/misc/fetch-content` could not be
+detected in the current environment.
+
+If this task clones gecko, make sure the GECKO_PATH environment variable
+is set to proper location. Otherwise, the script may need to be mounted
+or added to the task's docker image then added to the PATH.
+'''
+
 # The exit code to use when caches should be purged and the task retried.
 # This is EX_OSFILE (from sysexits.h):
 #     Some system file  does not exist, cannot be opened, or has some
 #     sort of error (e.g., syntax error).
 EXIT_PURGE_CACHE = 72
 
 
 IS_POSIX = os.name == 'posix'
@@ -463,16 +475,33 @@ def vcs_checkout(source_repo, dest, stor
                                      source_repo=source_repo,
                                      repo_name=source_repo.split('/')[-1]))
 
     print_line(b'vcs', msg.encode('utf-8'))
 
     return revision
 
 
+def fetch_artifacts(fetches):
+    print_line(b'fetches', b'fetching artifacts\n')
+
+    fetch_content = shutil.which('fetch-content')
+    if not fetch_content and os.environ.get('GECKO_PATH'):
+        fetch_content = os.path.join(os.environ['GECKO_PATH'], 'taskcluster',
+                                     'scripts', 'misc', 'fetch-content')
+
+    if not fetch_content or not os.path.isfile(fetch_content):
+        print(FETCH_CONTENT_NOT_FOUND)
+        sys.exit(1)
+
+    cmd = [fetch_content, 'task-artifacts'] + fetches.split()
+    subprocess.run(cmd, check=True, env=os.environ)
+    print_line(b'fetches', b'finished fetching artifacts\n')
+
+
 def main(args):
     print_line(b'setup', b'run-task started in %s\n' % os.getcwd().encode('utf-8'))
     running_as_root = IS_POSIX and os.getuid() == 0
 
     # Arguments up to '--' are ours. After are for the main task
     # to be executed.
     try:
         i = args.index('--')
@@ -703,38 +732,24 @@ def main(args):
             revision=os.environ.get('COMM_HEAD_REV'),
             branch=os.environ.get('COMM_HEAD_REF'))
 
     elif not os.environ.get('COMM_HEAD_REV') and \
             os.environ.get('COMM_HEAD_REF'):
         print('task should be defined in terms of non-symbolic revision')
         return 1
 
-    use_artifact_path = os.environ.get('USE_ARTIFACT_PATH')
-
-    def prepare_use_artifact(key, url):
-        print_line(b'setup', b'fetching artifact from %s\n' % url.encode('utf-8'))
-        path = os.path.join(use_artifact_path, key)
-        if not os.path.isdir(path):
-            os.makedirs(path)
-
-        url = url.rstrip('/')
-        path = os.path.join(path, os.path.basename(url))
-        response = urllib.request.urlopen(url)
-        with open(path, 'wb') as fh:
-            fh.write(response.read())
-
-    use_artifacts = os.environ.get('USE_ARTIFACT_URLS')
-    if use_artifacts:
-        use_artifacts = json.loads(use_artifacts)
-        for key, urls in use_artifacts.items():
-            for url in urls:
-                prepare_use_artifact(key, url)
+    fetches = os.environ.get('MOZ_FETCHES')
+    if fetches:
+        fetch_artifacts(fetches)
 
     try:
         return run_and_prefix_output(b'task', task_args)
     finally:
-        if use_artifact_path and os.path.isdir(use_artifact_path):
-            shutil.rmtree(use_artifact_path)
+        fetches_dir = os.environ.get('MOZ_FETCHES_DIR')
+        if fetches_dir and os.path.isdir(fetches_dir):
+            print_line(b'fetches', b'removing %s\n' % fetches_dir.encode('utf-8'))
+            shutil.rmtree(fetches_dir)
+            print_line(b'fetches', b'finished\n')
 
 
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
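For context, a small illustration (not part of the patch) of what fetch_artifacts() above hands to fetch-content: MOZ_FETCHES is a space-separated list of specs, so fetches.split() yields one argument per artifact. The task id below is a placeholder; in a real task the <build> reference has already been substituted before run-task reads the environment:

    fetches = ('public/build/target.tar.bz2@SomeTaskId '
               'public/build/target.common.tests.zip>tests@SomeTaskId')
    cmd = ['fetch-content', 'task-artifacts'] + fetches.split()
    # cmd == ['fetch-content', 'task-artifacts',
    #         'public/build/target.tar.bz2@SomeTaskId',
    #         'public/build/target.common.tests.zip>tests@SomeTaskId']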
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -192,17 +192,17 @@ def use_fetches(config, jobs):
                 for path in artifacts:
                     job_fetches.append('{prefix}/{path}@<{dep}>'.format(
                         prefix=prefix, path=path, dep=kind))
 
         env = job.setdefault('worker', {}).setdefault('env', {})
         env['MOZ_FETCHES'] = {'task-reference': ' '.join(job_fetches)}
 
         workdir = job['run'].get('workdir', '/builds/worker')
-        env['MOZ_FETCHES_DIR'] = '{}/fetches'.format(workdir)
+        env.setdefault('MOZ_FETCHES_DIR', '{}/fetches'.format(workdir))
         yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     """Given a build description, create a task description"""
     # import plugin modules first, before iterating over jobs
     import_all()
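Note the switch to env.setdefault() for MOZ_FETCHES_DIR: a value already present in the job's worker environment is left alone, and only jobs that don't specify one fall back to the {workdir}/fetches default. A tiny illustration with made-up paths:

    # A job that already carries its own MOZ_FETCHES_DIR keeps it...
    env = {'MOZ_FETCHES_DIR': '/builds/worker/workspace/build'}
    env.setdefault('MOZ_FETCHES_DIR', '/builds/worker/fetches')  # no-op
    assert env['MOZ_FETCHES_DIR'] == '/builds/worker/workspace/build'

    # ...while a job without one gets the default under its workdir.
    env = {}
    env.setdefault('MOZ_FETCHES_DIR', '/builds/worker/fetches')
    assert env['MOZ_FETCHES_DIR'] == '/builds/worker/fetches'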
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -4,18 +4,17 @@
 """
 Common support for various job types.  These functions are all named after the
 worker implementation they operate on, and take the same three parameters, for
 consistency.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-import json
-from taskgraph.util.taskcluster import get_artifact_url, get_artifact_prefix
+from taskgraph.util.taskcluster import get_artifact_prefix
 
 SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'
 
 
 def docker_worker_add_workspace_cache(config, job, taskdesc, extra=None):
     """Add the workspace cache.
 
     ``extra`` is an optional kwarg passed in that supports extending the cache
@@ -188,31 +187,8 @@ def docker_worker_add_tooltool(config, j
     taskdesc['scopes'].extend([
         'docker-worker:relengapi-proxy:tooltool.download.public',
     ])
 
     if internal:
         taskdesc['scopes'].extend([
             'docker-worker:relengapi-proxy:tooltool.download.internal',
         ])
-
-
-def support_use_artifacts(config, job, taskdesc, use_artifacts):
-    """Set a JSON object of artifact URLs in an environment variable.
-
-    This will tell the run-task script to download the artifacts.
-    """
-    urls = {}
-    prefix = get_artifact_prefix(taskdesc)
-    for kind, artifacts in use_artifacts.items():
-        if kind not in taskdesc['dependencies']:
-            raise Exception("{label} can't use '{kind}' artifacts because it has no '{kind}' "
-                            "dependency!".format(label=job['label'], kind=kind))
-        task_id = '<{}>'.format(kind)
-        urls[kind] = []
-
-        for artifact in artifacts:
-            path = '/'.join([prefix, artifact])
-            urls[kind].append(get_artifact_url(task_id, path))
-
-    env = taskdesc['worker'].setdefault('env', {})
-    env['USE_ARTIFACT_URLS'] = {'task-reference': json.dumps(urls)}
-    env['USE_ARTIFACT_PATH'] = '{workdir}/use-artifacts'.format(**job['run'])
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -4,17 +4,17 @@
 """
 Support for running jobs that are invoked via the `run-task` script.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.util.schema import Schema
-from taskgraph.transforms.job.common import support_use_artifacts, support_vcs_checkout
+from taskgraph.transforms.job.common import support_vcs_checkout
 from voluptuous import Required, Any
 
 run_task_schema = Schema({
     Required('using'): 'run-task',
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
     Required('cache-dotcache'): bool,
@@ -25,43 +25,32 @@ run_task_schema = Schema({
     # The sparse checkout profile to use. Value is the filename relative to the
     # directory where sparse profiles are defined (build/sparse-profiles/).
     Required('sparse-profile'): Any(basestring, None),
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
     Required('comm-checkout'): bool,
 
-    # maps a dependency to a list of artifact names to use from that dependency.
-    # E.g: {"build": ["target.tar.bz2"]}
-    # In the above example, the artifact would be downloaded to:
-    # $USE_ARTIFACT_PATH/build/target.tar.bz2
-    Required('use-artifacts'): Any(None, {
-        basestring: [basestring],
-    }),
-
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 
     # Base work directory used to set up the task.
     Required('workdir'): basestring,
 })
 
 
 def common_setup(config, job, taskdesc):
     run = job['run']
     if run['checkout']:
         support_vcs_checkout(config, job, taskdesc,
                              sparse=bool(run['sparse-profile']))
 
-    if run['use-artifacts']:
-        support_use_artifacts(config, job, taskdesc, run['use-artifacts'])
-
     taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
 
 
 def add_checkout_to_command(run, command):
     if not run['checkout']:
         return
 
     command.append('--vcs-checkout={workdir}/checkouts/gecko'.format(**run))
@@ -71,17 +60,16 @@ def add_checkout_to_command(run, command
                        run['sparse-profile'])
 
 
 defaults = {
     'cache-dotcache': False,
     'checkout': True,
     'comm-checkout': False,
     'sparse-profile': None,
-    'use-artifacts': None,
 }
 
 
 @run_job_using("docker-worker", "run-task", schema=run_task_schema, defaults=defaults)
 def docker_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     common_setup(config, job, taskdesc)
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -126,46 +126,52 @@ def docker_worker_toolchain(config, job,
     # Allow the job to specify where artifacts come from, but add
     # public/build if it's not there already.
     artifacts = worker.setdefault('artifacts', [])
     if not any(artifact.get('name') == 'public/build' for artifact in artifacts):
         docker_worker_add_artifacts(config, job, taskdesc)
 
     support_vcs_checkout(config, job, taskdesc, sparse=True)
 
+    # Toolchain checkouts don't live under {workdir}/checkouts
+    workspace = '{workdir}/workspace/build'.format(**run)
+    gecko_path = '{}/src'.format(workspace)
+
     env = worker['env']
     env.update({
         'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
         'TOOLS_DISABLE': 'true',
         'MOZ_AUTOMATION': '1',
+        'MOZ_FETCHES_DIR': workspace,
+        'GECKO_PATH': gecko_path,
     })
 
     if run['tooltool-downloads']:
         internal = run['tooltool-downloads'] == 'internal'
         docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
 
     # Use `mach` to invoke python scripts so in-tree libraries are available.
     if run['script'].endswith('.py'):
-        wrapper = 'workspace/build/src/mach python '
+        wrapper = '{}/mach python '.format(gecko_path)
     else:
         wrapper = ''
 
     args = run.get('arguments', '')
     if args:
         args = ' ' + shell_quote(*args)
 
     sparse_profile = []
     if run.get('sparse-profile'):
         sparse_profile = ['--sparse-profile',
                           'build/sparse-profiles/{}'.format(run['sparse-profile'])]
 
     worker['command'] = [
         '{workdir}/bin/run-task'.format(**run),
-        '--vcs-checkout={workdir}/workspace/build/src'.format(**run),
+        '--vcs-checkout={}'.format(gecko_path),
     ] + sparse_profile + [
         '--',
         'bash',
         '-c',
         'cd {} && '
         '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
             run['workdir'], wrapper, run['script'], args)
     ]