Bug 1492664 - set TASKCLUSTER_ROOT_URL and TASKCLUSTER_PROXY_URL; r=tomprince,glandium
backed out by 28f92797e661
author: Dustin J. Mitchell <dustin@mozilla.com>
date: Tue, 18 Dec 2018 05:37:41 +0000
changeset 511142 6dc9522ee0bf672fdb407fbfe48643d1b393ae3d
parent 511141 85d7f8b330eb802d1ac9431590a5177bc93de88f
child 511143 b93a0fcc86f36926cdb229f63ba8be9421e3fd00
push id: 1953
push user: ffxbld-merge
push date: Mon, 11 Mar 2019 12:10:20 +0000
reviewers: tomprince, glandium
bugs: 1492664, 1460015
milestone: 66.0a1
Bug 1492664 - set TASKCLUSTER_ROOT_URL and TASKCLUSTER_PROXY_URL; r=tomprince,glandium

Eventually, workers will provide these variables directly (https://bugzilla.mozilla.org/show_bug.cgi?id=1460015). For now, this ensures that TASKCLUSTER_ROOT_URL is set everywhere, and that TASKCLUSTER_PROXY_URL is set wherever the proxy is active.

The setup for the mach commands defaults to https://taskcluster.net for user convenience. When the production instance's URL changes, we can simply change that default.

This changes the docker build process to propagate TASKCLUSTER_ROOT_URL into the docker images and, for good measure, includes some code that uses that value to generate Debian repo paths.

Differential Revision: https://phabricator.services.mozilla.com/D14196
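The heart of the change is the root-URL scheme from taskcluster-lib-urls: every service URL is derived from TASKCLUSTER_ROOT_URL, with the old production root (https://taskcluster.net) special-cased to its legacy per-service hostnames. A minimal sketch of that mapping, assuming the taskcluster_urls Python package used elsewhere in this patch; tc.example.com is a hypothetical root URL for illustration:

    import taskcluster_urls

    # The legacy production root keeps per-service hostnames...
    print(taskcluster_urls.api('https://taskcluster.net', 'queue', 'v1', ''))
    # -> https://queue.taskcluster.net/v1/

    # ...while any other deployment nests services under the root URL.
    print(taskcluster_urls.api('https://tc.example.com', 'queue', 'v1', ''))
    # -> https://tc.example.com/api/queue/v1/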
.taskcluster.yml
python/mozrelease/mozrelease/buglist_creator.py
taskcluster/docker/debian-base/Dockerfile
taskcluster/docker/debian-base/setup_packages.sh
taskcluster/docker/debian7-build/Dockerfile
taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
taskcluster/docker/toolchain-build/Dockerfile
taskcluster/mach_commands.py
taskcluster/taskgraph/__init__.py
taskcluster/taskgraph/test/test_util_docker.py
taskcluster/taskgraph/transforms/docker_image.py
taskcluster/taskgraph/transforms/job/debian_package.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/util/docker.py
tools/tryselect/tasks.py
--- a/.taskcluster.yml
+++ b/.taskcluster.yml
@@ -141,16 +141,19 @@ tasks:
             $merge:
               - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
                 GECKO_HEAD_REPOSITORY: '${repoUrl}'
                 GECKO_HEAD_REF: '${push.revision}'
                 GECKO_HEAD_REV: '${push.revision}'
                 GECKO_COMMIT_MSG: {$if: 'tasks_for != "action"', then: '${push.comment}'}
                 HG_STORE_PATH: /builds/worker/checkouts/hg-store
                 TASKCLUSTER_CACHES: /builds/worker/checkouts
+                # someday, these will be provided by the worker - Bug 1460015
+                TASKCLUSTER_ROOT_URL: https://taskcluster.net
+                TASKCLUSTER_PROXY_URL: http://taskcluster
               - $if: 'tasks_for == "action"'
                 then:
                   ACTION_TASK_GROUP_ID: '${action.taskGroupId}'     # taskGroupId of the target task
                   ACTION_TASK_ID: {$json: {$eval: 'taskId'}} # taskId of the target task (JSON-encoded)
                   ACTION_INPUT: {$json: {$eval: 'input'}}
                   ACTION_CALLBACK: '${action.cb_name}'
                   ACTION_PARAMETERS: {$json: {$eval: 'parameters'}}
 
--- a/python/mozrelease/mozrelease/buglist_creator.py
+++ b/python/mozrelease/mozrelease/buglist_creator.py
@@ -213,16 +213,17 @@ Task group: [{task_group_id}](https://to
         subject_prefix = "[mobile] "
     if product in {"firefox", "devedition"}:
         subject_prefix = "[desktop] "
 
     subject = '{} Build of {} {} build {}'.format(subject_prefix, product, version, build_number)
 
     notify_options = {}
     if 'TASKCLUSTER_PROXY_URL' in os.environ:
+        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
         base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
         notify_options['baseUrl'] = '{}/notify/v1'.format(base_url)
     notify = Notify(notify_options)
     for address in addresses:
         notify.email({
             'address': address,
             'subject': subject,
             'content': content,
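For illustration, the interim proxy-URL handling above resolves as follows (a standalone sketch; the environment value mirrors what this patch sets for decision tasks in .taskcluster.yml):

    import os

    # value this patch sets for decision tasks; hypothetical here
    os.environ['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
    base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
    print('{}/notify/v1'.format(base_url))  # -> http://taskcluster/notify/v1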
--- a/taskcluster/docker/debian-base/Dockerfile
+++ b/taskcluster/docker/debian-base/Dockerfile
@@ -44,18 +44,19 @@ RUN for s in debian_$DIST debian_$DIST-u
 RUN apt-get update && \
     apt-get install \
       apt-transport-https \
       ca-certificates
 
 COPY setup_packages.sh /usr/local/sbin/
 COPY cloud-mirror-workaround.sh /usr/local/sbin/
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
     echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";' > /etc/apt/apt.conf.d/99cloud-mirror-workaround && \
     apt-get update && \
     apt-get install \
       git \
       less \
       make \
       mercurial \
       patch \
--- a/taskcluster/docker/debian-base/setup_packages.sh
+++ b/taskcluster/docker/debian-base/setup_packages.sh
@@ -1,5 +1,17 @@
 #!/bin/sh
 
+TASKCLUSTER_ROOT_URL=$1
+shift
+
+# duplicate the functionality of taskcluster-lib-urls, but in POSIX shell...
+if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
+    queue_base='https://queue.taskcluster.net/v1'
+else
+    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+fi
+
+
 for task in "$@"; do
-  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
+  echo "adding package source $queue_base/task/$task/artifacts/public/build/"
+  echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
 done
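Given a hypothetical package task id such as 'abc123', the script writes an apt source pointing at that task's artifacts. A Python sketch of the resulting sources.list line under the production root URL:

    queue_base = 'https://queue.taskcluster.net/v1'  # per the special case above
    task = 'abc123'  # hypothetical package task id
    print('deb [trusted=yes] {}/task/{}/artifacts/public/build/ debian/'.format(
        queue_base, task))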
--- a/taskcluster/docker/debian7-build/Dockerfile
+++ b/taskcluster/docker/debian7-build/Dockerfile
@@ -3,18 +3,19 @@ FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
 
 # %ARG ARCH
 RUN dpkg --add-architecture $ARCH
 
 # Ideally, we wouldn't need gcc-multilib and the extra linux-libc-dev,
 # but the latter is required to make the former installable, and the former
 # because of bug 1409276.
 # We exclude /usr/share/doc/*/changelog.Debian* files because they might differ
--- a/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
+++ b/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
@@ -1,12 +1,13 @@
 # %ARG DOCKER_IMAGE_PARENT
 FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
     apt-get update && \
     apt-get install cmake
--- a/taskcluster/docker/toolchain-build/Dockerfile
+++ b/taskcluster/docker/toolchain-build/Dockerfile
@@ -4,17 +4,18 @@ MAINTAINER Mike Hommey <mhommey@mozilla.
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
+# %ARG TASKCLUSTER_ROOT_URL
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
 
 RUN apt-get update && \
     apt-get install \
       autoconf \
       automake \
       bison \
       build-essential \
       curl \
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -177,17 +177,17 @@ class MachCommands(MachCommandBase):
         """Run the decision task: generate a task graph and submit to
         TaskCluster.  This is only meant to be called within decision tasks,
         and requires a great many arguments.  Commands like `mach taskgraph
         optimized` are better suited to use on the command line, and can take
         the parameters file generated by a decision task.  """
 
         import taskgraph.decision
         try:
-            self.setup_logging()
+            self.setup()
             return taskgraph.decision.taskgraph_decision(options)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
     @SubCommand('taskgraph', 'cron',
                 description="Run the cron task")
     @CommandArgument('--base-repository',
@@ -216,30 +216,30 @@ class MachCommands(MachCommandBase):
     @CommandArgument('--root', '-r',
                      required=False,
                      help="root of the repository to get cron task definitions from")
     def taskgraph_cron(self, **options):
         """Run the cron task; this task creates zero or more decision tasks.  It is run
         from the hooks service on a regular basis."""
         import taskgraph.cron
         try:
-            self.setup_logging()
+            self.setup()
             return taskgraph.cron.taskgraph_cron(options)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
     @SubCommand('taskgraph', 'action-callback',
                 description='Run action callback used by action tasks')
     @CommandArgument('--root', '-r', default='taskcluster/ci',
                      help="root of the taskgraph definition relative to topsrcdir")
     def action_callback(self, **options):
         import taskgraph.actions
         try:
-            self.setup_logging()
+            self.setup()
 
             # the target task for this action (or null if it's a group action)
             task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))
             # the target task group for this action
             task_group_id = os.environ.get('ACTION_TASK_GROUP_ID', None)
             input = json.loads(os.environ.get('ACTION_INPUT', 'null'))
             callback = os.environ.get('ACTION_CALLBACK', None)
             parameters = json.loads(os.environ.get('ACTION_PARAMETERS', '{}'))
@@ -282,17 +282,18 @@ class MachCommands(MachCommandBase):
                 if filename.endswith('.yml'):
                     return yaml.safe_load(f)
                 elif filename.endswith('.json'):
                     return json.load(f)
                 else:
                     raise Exception("unknown filename {}".format(filename))
 
         try:
-            self.setup_logging()
+            self.setup()
+
             task_id = options['task_id']
 
             if options['input']:
                 input = load_data(options['input'])
             else:
                 input = None
 
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
@@ -307,17 +308,17 @@ class MachCommands(MachCommandBase):
                     callback=options['callback'],
                     parameters=parameters,
                     root=root,
                     test=True)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
-    def setup_logging(self, quiet=False, verbose=True):
+    def setup(self, quiet=False, verbose=True):
         """
         Set up Python logging for all loggers, sending results to stderr (so
         that command output can be redirected easily) and adding the typical
         mach timestamp.
         """
         # remove the old terminal handler
         old = self.log_manager.replace_terminal_handler(None)
 
@@ -327,25 +328,30 @@ class MachCommands(MachCommandBase):
             self.log_manager.add_terminal_logging(
                 fh=sys.stderr, level=level,
                 write_interval=old.formatter.write_interval,
                 write_times=old.formatter.write_times)
 
         # all of the taskgraph logging is unstructured logging
         self.log_manager.enable_unstructured()
 
+        # Ensure that TASKCLUSTER_ROOT_URL is set
+        import taskgraph
+        taskgraph.set_root_url_env()
+
     def show_taskgraph(self, graph_attr, options):
         import taskgraph.parameters
         import taskgraph.generator
         import taskgraph
+
         if options['fast']:
             taskgraph.fast = True
 
         try:
-            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
+            self.setup(quiet=options['quiet'], verbose=options['verbose'])
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
             parameters.check()
 
             tgg = taskgraph.generator.TaskGraphGenerator(
                 root_dir=options.get('root'),
                 parameters=parameters,
                 target_kind=options.get('target_kind'),
             )
@@ -360,17 +366,17 @@ class MachCommands(MachCommandBase):
             sys.exit(1)
 
     def show_taskgraph_labels(self, taskgraph):
         for index in taskgraph.graph.visit_postorder():
             print(taskgraph.tasks[index].label)
 
     def show_taskgraph_json(self, taskgraph):
         print(json.dumps(taskgraph.to_json(),
-              sort_keys=True, indent=2, separators=(',', ': ')))
+                         sort_keys=True, indent=2, separators=(',', ': ')))
 
     def get_filtered_taskgraph(self, taskgraph, tasksregex):
         from taskgraph.graph import Graph
         from taskgraph.taskgraph import TaskGraph
         """
        This class method filters all the tasks on the basis of a regular expression
         and returns a new TaskGraph object
         """
@@ -394,17 +400,17 @@ class MachCommands(MachCommandBase):
 
     def show_actions(self, options):
         import taskgraph.parameters
         import taskgraph.generator
         import taskgraph
         import taskgraph.actions
 
         try:
-            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
+            self.setup(quiet=options['quiet'], verbose=options['verbose'])
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
             parameters.check()
 
             tgg = taskgraph.generator.TaskGraphGenerator(
                 root_dir=options.get('root'),
                 parameters=parameters)
 
             actions = taskgraph.actions.render_actions_json(parameters, tgg.graph_config)
--- a/taskcluster/taskgraph/__init__.py
+++ b/taskcluster/taskgraph/__init__.py
@@ -1,20 +1,39 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
+import logging
 
 GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
 
 # Maximum number of dependencies a single task can have
 # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask
 # specifies 100, but we also optionally add the decision task id as a dep in
 # taskgraph.create, so let's set this to 99.
 MAX_DEPENDENCIES = 99
 
 # Enable fast task generation for local debugging
 # This is normally switched on via the --fast/-F flag to `mach taskgraph`
 # Currently this skips toolchain task optimizations and schema validation
 fast = False
+
+# Default rootUrl to use for command-line invocations
+PRODUCTION_TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'
+
+
+def set_root_url_env():
+    """Ensure that TASKCLUSTER_ROOT_URL is set, defaulting when run outside of a task."""
+    logger = logging.getLogger('set_root_url_env')
+
+    if 'TASKCLUSTER_ROOT_URL' not in os.environ:
+        if 'TASK_ID' in os.environ:
+            raise RuntimeError('TASKCLUSTER_ROOT_URL must be set when running in a task')
+        else:
+            logger.info('Setting TASKCLUSTER_ROOT_URL to default value (Firefox CI production)')
+            os.environ['TASKCLUSTER_ROOT_URL'] = PRODUCTION_TASKCLUSTER_ROOT_URL
+    logger.info('Running in Taskcluster instance {}{}'.format(
+        os.environ['TASKCLUSTER_ROOT_URL'],
+        ' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
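A quick sketch of how the mach commands use this (mirroring the behavior defined above; run outside of a task, so no TASK_ID is present and the production default applies):

    import logging
    import os

    import taskgraph

    logging.basicConfig(level=logging.INFO)
    os.environ.pop('TASK_ID', None)              # simulate a local invocation
    os.environ.pop('TASKCLUSTER_ROOT_URL', None)
    taskgraph.set_root_url_env()
    print(os.environ['TASKCLUSTER_ROOT_URL'])    # -> https://taskcluster.net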
--- a/taskcluster/taskgraph/test/test_util_docker.py
+++ b/taskcluster/taskgraph/test/test_util_docker.py
@@ -5,24 +5,27 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import shutil
 import stat
 import tarfile
 import tempfile
 import unittest
+import mock
+import taskcluster_urls as liburls
 
 from taskgraph.util import docker
 from mozunit import main, MockedOpen
 
 
 MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
 
 
+@mock.patch.dict('os.environ', {'TASKCLUSTER_ROOT_URL': liburls.test_root_url()})
 class TestDocker(unittest.TestCase):
 
     def test_generate_context_hash(self):
         tmpdir = tempfile.mkdtemp()
         old_GECKO = docker.GECKO
         docker.GECKO = tmpdir
         try:
             os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -106,16 +106,18 @@ def fill_template(config, tasks):
         # task-reference value, see further below). We add the package routes
         # containing a hash to get the overall docker image hash, so changes
         # to packages will be reflected in the docker image hash.
         args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
                                                  for p in packages)
         if parent:
             args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])
 
+        args['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']
+
         if not taskgraph.fast:
             context_path = os.path.join('taskcluster', 'docker', definition)
             context_hash = generate_context_hash(
                 GECKO, context_path, image_name, args)
         else:
             context_hash = '0'*40
         digest_data = [context_hash]
         context_hashes[image_name] = context_hash
--- a/taskcluster/taskgraph/transforms/job/debian_package.py
+++ b/taskcluster/taskgraph/transforms/job/debian_package.py
@@ -4,16 +4,17 @@
 """
 Support for running spidermonkey jobs via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import re
+import taskcluster_urls
 
 from taskgraph.util.schema import Schema
 from voluptuous import Any, Optional, Required
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.transforms.job.common import add_artifacts
 
 from taskgraph.util.hash import hash_path
@@ -147,16 +148,18 @@ def docker_worker_debian_package(config,
         )
     if 'patch' not in run and 'pre-build-command' not in run:
         adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                    ' "Mozilla backport for {dist}." < /dev/null && ').format(
             prefix=name.split('-', 1)[0],
             dist=run['dist'],
         )
 
+    queue_url = taskcluster_urls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', '')
+
     # We can't depend on docker images (since docker images depend on packages),
     # so we inline the whole script here.
     worker['command'] = [
         'sh',
         '-x',
         '-c',
         # Fill /etc/apt/sources.list with the relevant snapshot repository.
         'echo "deb http://snapshot.debian.org/archive/debian'
@@ -166,18 +169,17 @@ def docker_worker_debian_package(config,
         'echo "deb http://snapshot.debian.org/archive/debian'
         '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && '
         'echo "deb http://snapshot.debian.org/archive/debian-security'
         '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
         'apt-get update -o Acquire::Check-Valid-Until=false -q && '
         # Add sources for packages coming from other package tasks.
         'apt-get install -yyq apt-transport-https ca-certificates && '
         'for task in $PACKAGES; do '
-        '  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
-        '/$task/artifacts/public/build/ debian/" '
+        '  echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" '
         '>> /etc/apt/sources.list; '
         'done && '
         # Install the base utilities required to build debian packages.
         'apt-get update -o Acquire::Check-Valid-Until=false -q && '
         'apt-get install -yyq {base_deps} && '
         'cd /tmp && '
         # Get, validate and extract the package source.
         'dget -d -u {src_url} && '
@@ -193,16 +195,17 @@ def docker_worker_debian_package(config,
         # Copy the artifacts
         'mkdir -p {artifacts}/debian && '
         'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
         'cd {artifacts} && '
         # Make the artifacts directory usable as an APT repository.
         'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
         'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
         .format(
+            queue_url=queue_url,
             package=package,
             snapshot=run['snapshot'],
             dist=run['dist'],
             src_url=src_url,
             src_file=src_file,
             src_sha256=src_sha256,
             unpack=unpack,
             adjust=adjust,
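The trailing empty path passed to taskcluster_urls.api() leaves queue_url with a trailing slash, so the command string can append task paths directly. A sketch with a hypothetical task id:

    import taskcluster_urls

    queue_url = taskcluster_urls.api('https://taskcluster.net', 'queue', 'v1', '')
    print(queue_url)  # -> https://queue.taskcluster.net/v1/
    print('{}task/abc123/artifacts/public/build/ debian/'.format(queue_url))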
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -494,16 +494,21 @@ def build_docker_worker_payload(config, 
             image = {
                 "path": "public/image.tar.zst",
                 "namespace": image['indexed'],
                 "type": "indexed-image",
             }
         else:
             raise Exception("unknown docker image type")
 
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    worker['env']['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']
+
     features = {}
 
     if worker.get('relengapi-proxy'):
         features['relengAPIProxy'] = True
 
     if worker.get('taskcluster-proxy'):
         features['taskclusterProxy'] = True
         worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'
@@ -526,16 +531,21 @@ def build_docker_worker_payload(config, 
                 level=config.params['level'])
         )
         worker['env']['USE_SCCACHE'] = '1'
         # Disable sccache idle shutdown.
         worker['env']['SCCACHE_IDLE_TIMEOUT'] = '0'
     else:
         worker['env']['SCCACHE_DISABLE'] = '1'
 
+    # this will soon be provided directly by the worker:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    if features.get('taskclusterProxy'):
+        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
+
     capabilities = {}
 
     for lo in 'audio', 'video':
         if worker.get('loopback-' + lo):
             capitalized = 'loopback' + lo.capitalize()
             devices = capabilities.setdefault('devices', {})
             devices[capitalized] = True
             task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
@@ -758,16 +768,21 @@ def build_generic_worker_payload(config,
 
     task_def['payload'] = {
         'command': worker['command'],
         'maxRunTime': worker['max-run-time'],
     }
 
     env = worker.get('env', {})
 
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    env['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']
+
     if task.get('needs-sccache'):
         env['USE_SCCACHE'] = '1'
         # Disable sccache idle shutdown.
         env['SCCACHE_IDLE_TIMEOUT'] = '0'
     else:
         env['SCCACHE_DISABLE'] = '1'
 
     if env:
@@ -812,17 +827,19 @@ def build_generic_worker_payload(config,
 
     features = {}
 
     if worker.get('chain-of-trust'):
         features['chainOfTrust'] = True
 
     if worker.get('taskcluster-proxy'):
         features['taskclusterProxy'] = True
-        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'
+        # this will soon be provided directly by the worker:
+        # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
 
     if worker.get('run-as-administrator', False):
         features['runAsAdministrator'] = True
 
     if features:
         task_def['payload']['features'] = features
 
     # coalesce / superseding
@@ -1310,16 +1327,21 @@ def build_always_optimized_payload(confi
         # type=directory)
         Required('name'): basestring,
     }],
    # Whether any artifacts are assigned to this worker
     Optional('skip-artifacts'): bool,
 })
 def build_macosx_engine_payload(config, task, task_def):
     worker = task['worker']
+
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant
+    worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']
+
     artifacts = map(lambda artifact: {
         'name': artifact['name'],
         'path': artifact['path'],
         'type': artifact['type'],
         'expires': task_def['expires'],
     }, worker.get('artifacts', []))
 
     task_def['payload'] = {
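Taken together, the payload builders leave a task generated against the production instance with env entries like these (illustrative values, not output of the patch):

    env = {
        # propagated from the decision task's own environment
        'TASKCLUSTER_ROOT_URL': 'https://taskcluster.net',
        # only present when the taskclusterProxy feature is enabled
        'TASKCLUSTER_PROXY_URL': 'http://taskcluster',
    }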
--- a/taskcluster/taskgraph/util/docker.py
+++ b/taskcluster/taskgraph/util/docker.py
@@ -203,28 +203,28 @@ def create_context_tar(topsrcdir, contex
         return stream_context_tar(topsrcdir, context_dir, fh, prefix, args)
 
 
 def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
     """Like create_context_tar, but streams the tar file to the `out_file` file
     object."""
     archive_files = {}
     replace = []
+    content = []
 
     context_dir = os.path.join(topsrcdir, context_dir)
 
     for root, dirs, files in os.walk(context_dir):
         for f in files:
             source_path = os.path.join(root, f)
             rel = source_path[len(context_dir) + 1:]
             archive_path = os.path.join(prefix, rel)
             archive_files[archive_path] = source_path
 
     # Parse Dockerfile for special syntax of extra files to include.
-    content = []
     with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
         for line in fh:
             if line.startswith('# %ARG'):
                 p = line[len('# %ARG '):].strip()
                 if not args or p not in args:
                     raise Exception('missing argument: {}'.format(p))
                 replace.append((re.compile(r'\${}\b'.format(p)),
                                 args[p].encode('ascii')))
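The '# %ARG' lines in the Dockerfiles above are consumed by this parser: each declared argument becomes a regex substitution over the Dockerfile content before it is hashed into the image context. A sketch of that rewrite with hypothetical values:

    import re

    args = {'TASKCLUSTER_ROOT_URL': 'https://taskcluster.net'}
    line = b'RUN setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES'
    for p, value in args.items():
        line = re.sub(r'\${}\b'.format(p).encode('ascii'),
                      value.encode('ascii'), line)
    print(line)  # $TASKCLUSTER_ROOT_URL replaced; $DOCKER_IMAGE_PACKAGES untouched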
--- a/tools/tryselect/tasks.py
+++ b/tools/tryselect/tasks.py
@@ -50,16 +50,19 @@ def invalidate(cache, root):
     tmod = max(os.path.getmtime(os.path.join(tc_dir, p)) for p, _ in FileFinder(tc_dir))
     cmod = os.path.getmtime(cache)
 
     if tmod > cmod:
         os.remove(cache)
 
 
 def generate_tasks(params, full, root):
+    # Ensure that TASKCLUSTER_ROOT_URL is set
+    taskgraph.set_root_url_env()
+
     params = params or "project=mozilla-central"
 
     # Try to delete the old taskgraph cache directory.
     old_cache_dir = os.path.join(get_state_dir()[0], 'cache', 'taskgraph')
     if os.path.isdir(old_cache_dir):
         shutil.rmtree(old_cache_dir)
 
     root_hash = hashlib.sha256(os.path.abspath(root)).hexdigest()