Bug 1492664 - set TASKCLUSTER_ROOT_URL and TASKCLUSTER_PROXY_URL; r=tomprince,glandium
authorDustin J. Mitchell <dustin@mozilla.com>
Tue, 25 Sep 2018 20:18:19 +0000
changeset 452054 bdaa57b4a2fdbc596f61ec77a42c4322ef9b48c1
parent 452053 cc63b775369504c9299a88d9649e37f3f1d4c792
child 452055 5a7bea3fb23b1c208725be620aa881645af40f8a
push id35282
push userbtara@mozilla.com
push dateFri, 28 Dec 2018 21:50:42 +0000
treeherdermozilla-central@0cf7daf34a37 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerstomprince, glandium
bugs1492664, 1460015
milestone66.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1492664 - set TASKCLUSTER_ROOT_URL and TASKCLUSTER_PROXY_URL; r=tomprince,glandium Eventually, workers will provide these variables directly (https://bugzilla.mozilla.org/show_bug.cgi?id=1460015). But for now, this ensures that TASKCLUSTER_ROOT_URL is set everywhere in production, and TASKCLUSTER_PROXY_URL is set wherever the proxy is active. The taskgraph Taskcluster utils module gets a `get_root_url()` that gets the root URL for the current run, either from an environment variable in production or, on the command line, defaulting to https://taskcluster.net for user convenience. When the production instance's URL changes, we can simply change that default. Other changes to use this function are reserved for later commits. This changes the docker build process to propagate TASKCLUSTER_ROOT_URL into the docker images where necessary (using %ARG), specifically to create URLs for debian repo paths.
.taskcluster.yml
python/mozrelease/mozrelease/buglist_creator.py
taskcluster/docker/debian-base/Dockerfile
taskcluster/docker/debian-base/setup_packages.sh
taskcluster/docker/debian7-build/Dockerfile
taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
taskcluster/docker/toolchain-build/Dockerfile
taskcluster/taskgraph/test/test_util_docker.py
taskcluster/taskgraph/transforms/docker_image.py
taskcluster/taskgraph/transforms/job/debian_package.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/util/docker.py
taskcluster/taskgraph/util/taskcluster.py
--- a/.taskcluster.yml
+++ b/.taskcluster.yml
@@ -141,16 +141,19 @@ tasks:
             $merge:
               - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
                 GECKO_HEAD_REPOSITORY: '${repoUrl}'
                 GECKO_HEAD_REF: '${push.revision}'
                 GECKO_HEAD_REV: '${push.revision}'
                 GECKO_COMMIT_MSG: {$if: 'tasks_for != "action"', then: '${push.comment}'}
                 HG_STORE_PATH: /builds/worker/checkouts/hg-store
                 TASKCLUSTER_CACHES: /builds/worker/checkouts
+                # someday, these will be provided by the worker - Bug 1492664
+                TASKCLUSTER_ROOT_URL: https://taskcluster.net
+                TASKCLUSTER_PROXY_URL: http://taskcluster
               - $if: 'tasks_for == "action"'
                 then:
                   ACTION_TASK_GROUP_ID: '${action.taskGroupId}'     # taskGroupId of the target task
                   ACTION_TASK_ID: {$json: {$eval: 'taskId'}} # taskId of the target task (JSON-encoded)
                   ACTION_INPUT: {$json: {$eval: 'input'}}
                   ACTION_CALLBACK: '${action.cb_name}'
                   ACTION_PARAMETERS: {$json: {$eval: 'parameters'}}
 
--- a/python/mozrelease/mozrelease/buglist_creator.py
+++ b/python/mozrelease/mozrelease/buglist_creator.py
@@ -213,16 +213,17 @@ Task group: [{task_group_id}](https://to
         subject_prefix = "[mobile] "
     if product in {"firefox", "devedition"}:
         subject_prefix = "[desktop] "
 
     subject = '{} Build of {} {} build {}'.format(subject_prefix, product, version, build_number)
 
     notify_options = {}
     if 'TASKCLUSTER_PROXY_URL' in os.environ:
+        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
         base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
         notify_options['baseUrl'] = '{}/notify/v1'.format(base_url)
     notify = Notify(notify_options)
     for address in addresses:
         notify.email({
             'address': address,
             'subject': subject,
             'content': content,
--- a/taskcluster/docker/debian-base/Dockerfile
+++ b/taskcluster/docker/debian-base/Dockerfile
@@ -44,18 +44,19 @@ RUN for s in debian_$DIST debian_$DIST-u
 RUN apt-get update && \
     apt-get install \
       apt-transport-https \
       ca-certificates
 
 COPY setup_packages.sh /usr/local/sbin/
 COPY cloud-mirror-workaround.sh /usr/local/sbin/
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
     echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";' > /etc/apt/apt.conf.d/99cloud-mirror-workaround && \
     apt-get update && \
     apt-get install \
       git \
       less \
       make \
       mercurial \
       patch \
--- a/taskcluster/docker/debian-base/setup_packages.sh
+++ b/taskcluster/docker/debian-base/setup_packages.sh
@@ -1,5 +1,17 @@
 #!/bin/sh
 
+TASKCLUSTER_ROOT_URL=$1
+shift
+
+# duplicate the functionality of taskcluster-lib-urls, but in POSIX sh..
+if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
+    queue_base='https://queue.taskcluster.net/v1'
+else
+    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+fi
+
+
 for task in "$@"; do
-  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
+  echo "adding package source $queue_base/task/$task/artifacts/public/build/"
+  echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
 done
--- a/taskcluster/docker/debian7-build/Dockerfile
+++ b/taskcluster/docker/debian7-build/Dockerfile
@@ -3,18 +3,19 @@ FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
 
 # %ARG ARCH
 RUN dpkg --add-architecture $ARCH
 
 # Ideally, we wouldn't need gcc-multilib and the extra linux-libc-dev,
 # but the latter is required to make the former installable, and the former
 # because of bug 1409276.
 # We exclude /usr/share/doc/*/changelog.Debian* files because they might differ
--- a/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
+++ b/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
@@ -1,12 +1,13 @@
 # %ARG DOCKER_IMAGE_PARENT
 FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
+# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
     apt-get update && \
     apt-get install cmake
--- a/taskcluster/docker/toolchain-build/Dockerfile
+++ b/taskcluster/docker/toolchain-build/Dockerfile
@@ -4,17 +4,18 @@ MAINTAINER Mike Hommey <mhommey@mozilla.
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
+# %ARG TASKCLUSTER_ROOT_URL
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
 
 RUN apt-get update && \
     apt-get install \
       autoconf \
       automake \
       bison \
       build-essential \
       curl \
--- a/taskcluster/taskgraph/test/test_util_docker.py
+++ b/taskcluster/taskgraph/test/test_util_docker.py
@@ -5,24 +5,27 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import shutil
 import stat
 import tarfile
 import tempfile
 import unittest
+import mock
+import taskcluster_urls as liburls
 
 from taskgraph.util import docker
 from mozunit import main, MockedOpen
 
 
 MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
 
 
+@mock.patch.dict('os.environ', {'TASKCLUSTER_ROOT_URL': liburls.test_root_url()})
 class TestDocker(unittest.TestCase):
 
     def test_generate_context_hash(self):
         tmpdir = tempfile.mkdtemp()
         old_GECKO = docker.GECKO
         docker.GECKO = tmpdir
         try:
             os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -10,16 +10,17 @@ import re
 from collections import deque
 import taskgraph
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import _run_task_suffix
 from .. import GECKO
 from taskgraph.util.docker import (
     generate_context_hash,
 )
+from taskgraph.util.taskcluster import get_root_url
 from taskgraph.util.schema import (
     Schema,
 )
 from voluptuous import (
     Optional,
     Required,
 )
 
@@ -106,16 +107,18 @@ def fill_template(config, tasks):
         # task-reference value, see further below). We add the package routes
         # containing a hash to get the overall docker image hash, so changes
         # to packages will be reflected in the docker image hash.
         args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
                                                  for p in packages)
         if parent:
             args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])
 
+        args['TASKCLUSTER_ROOT_URL'] = get_root_url()
+
         if not taskgraph.fast:
             context_path = os.path.join('taskcluster', 'docker', definition)
             context_hash = generate_context_hash(
                 GECKO, context_path, image_name, args)
         else:
             context_hash = '0'*40
         digest_data = [context_hash]
         context_hashes[image_name] = context_hash
--- a/taskcluster/taskgraph/transforms/job/debian_package.py
+++ b/taskcluster/taskgraph/transforms/job/debian_package.py
@@ -4,24 +4,26 @@
 """
 Support for running spidermonkey jobs via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import re
+import taskcluster_urls
 
 from taskgraph.util.schema import Schema
 from voluptuous import Any, Optional, Required
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.transforms.job.common import add_artifacts
 
 from taskgraph.util.hash import hash_path
+from taskgraph.util.taskcluster import get_root_url
 from taskgraph import GECKO
 import taskgraph
 
 DSC_PACKAGE_RE = re.compile('.*(?=_)')
 SOURCE_PACKAGE_RE = re.compile('.*(?=[-_]\d)')
 
 source_definition = {
     Required('url'): basestring,
@@ -147,16 +149,18 @@ def docker_worker_debian_package(config,
         )
     if 'patch' not in run and 'pre-build-command' not in run:
         adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                    ' "Mozilla backport for {dist}." < /dev/null && ').format(
             prefix=name.split('-', 1)[0],
             dist=run['dist'],
         )
 
+    queue_url = taskcluster_urls.api(get_root_url(), 'queue', 'v1', '')
+
     # We can't depend on docker images (since docker images depend on packages),
     # so we inline the whole script here.
     worker['command'] = [
         'sh',
         '-x',
         '-c',
         # Fill /etc/apt/sources.list with the relevant snapshot repository.
         'echo "deb http://snapshot.debian.org/archive/debian'
@@ -166,18 +170,17 @@ def docker_worker_debian_package(config,
         'echo "deb http://snapshot.debian.org/archive/debian'
         '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && '
         'echo "deb http://snapshot.debian.org/archive/debian-security'
         '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
         'apt-get update -o Acquire::Check-Valid-Until=false -q && '
         # Add sources for packages coming from other package tasks.
         'apt-get install -yyq apt-transport-https ca-certificates && '
         'for task in $PACKAGES; do '
-        '  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
-        '/$task/artifacts/public/build/ debian/" '
+        '  echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" '
         '>> /etc/apt/sources.list; '
         'done && '
         # Install the base utilities required to build debian packages.
         'apt-get update -o Acquire::Check-Valid-Until=false -q && '
         'apt-get install -yyq {base_deps} && '
         'cd /tmp && '
         # Get, validate and extract the package source.
         'dget -d -u {src_url} && '
@@ -193,16 +196,17 @@ def docker_worker_debian_package(config,
         # Copy the artifacts
         'mkdir -p {artifacts}/debian && '
         'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
         'cd {artifacts} && '
         # Make the artifacts directory usable as an APT repository.
         'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
         'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
         .format(
+            queue_url=queue_url,
             package=package,
             snapshot=run['snapshot'],
             dist=run['dist'],
             src_url=src_url,
             src_file=src_file,
             src_sha256=src_sha256,
             unpack=unpack,
             adjust=adjust,
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -16,16 +16,17 @@ import re
 import time
 from copy import deepcopy
 
 from mozbuild.util import memoize
 from taskgraph.util.attributes import TRUNK_PROJECTS
 from taskgraph.util.hash import hash_path
 from taskgraph.util.treeherder import split_symbol
 from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_root_url
 from taskgraph.util.schema import (
     validate_schema,
     Schema,
     optionally_keyed_by,
     resolve_keyed_by,
     OptimizationSchema,
 )
 from taskgraph.util.scriptworker import (
@@ -494,16 +495,21 @@ def build_docker_worker_payload(config, 
             image = {
                 "path": "public/image.tar.zst",
                 "namespace": image['indexed'],
                 "type": "indexed-image",
             }
         else:
             raise Exception("unknown docker image type")
 
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    worker['env']['TASKCLUSTER_ROOT_URL'] = get_root_url()
+
     features = {}
 
     if worker.get('relengapi-proxy'):
         features['relengAPIProxy'] = True
 
     if worker.get('taskcluster-proxy'):
         features['taskclusterProxy'] = True
         worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'
@@ -526,16 +532,21 @@ def build_docker_worker_payload(config, 
                 level=config.params['level'])
         )
         worker['env']['USE_SCCACHE'] = '1'
         # Disable sccache idle shutdown.
         worker['env']['SCCACHE_IDLE_TIMEOUT'] = '0'
     else:
         worker['env']['SCCACHE_DISABLE'] = '1'
 
+    # this will soon be provided directly by the worker:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    if features.get('taskclusterProxy'):
+        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
+
     capabilities = {}
 
     for lo in 'audio', 'video':
         if worker.get('loopback-' + lo):
             capitalized = 'loopback' + lo.capitalize()
             devices = capabilities.setdefault('devices', {})
             devices[capitalized] = True
             task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
@@ -758,16 +769,21 @@ def build_generic_worker_payload(config,
 
     task_def['payload'] = {
         'command': worker['command'],
         'maxRunTime': worker['max-run-time'],
     }
 
     env = worker.get('env', {})
 
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+    env['TASKCLUSTER_ROOT_URL'] = get_root_url()
+
     if task.get('needs-sccache'):
         env['USE_SCCACHE'] = '1'
         # Disable sccache idle shutdown.
         env['SCCACHE_IDLE_TIMEOUT'] = '0'
     else:
         env['SCCACHE_DISABLE'] = '1'
 
     if env:
@@ -812,17 +828,19 @@ def build_generic_worker_payload(config,
 
     features = {}
 
     if worker.get('chain-of-trust'):
         features['chainOfTrust'] = True
 
     if worker.get('taskcluster-proxy'):
         features['taskclusterProxy'] = True
-        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'
+        # this will soon be provided directly by the worker:
+        # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
+        worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
 
     if worker.get('run-as-administrator', False):
         features['runAsAdministrator'] = True
 
     if features:
         task_def['payload']['features'] = features
 
     # coalesce / superseding
@@ -1310,16 +1328,21 @@ def build_always_optimized_payload(confi
         # type=directory)
         Required('name'): basestring,
     }],
     # Wether any artifacts are assigned to this worker
     Optional('skip-artifacts'): bool,
 })
 def build_macosx_engine_payload(config, task, task_def):
     worker = task['worker']
+
+    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
+    # be provided directly by the worker, making this redundant
+    worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = get_root_url()
+
     artifacts = map(lambda artifact: {
         'name': artifact['name'],
         'path': artifact['path'],
         'type': artifact['type'],
         'expires': task_def['expires'],
     }, worker.get('artifacts', []))
 
     task_def['payload'] = {
--- a/taskcluster/taskgraph/util/docker.py
+++ b/taskcluster/taskgraph/util/docker.py
@@ -203,28 +203,28 @@ def create_context_tar(topsrcdir, contex
         return stream_context_tar(topsrcdir, context_dir, fh, prefix, args)
 
 
 def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
     """Like create_context_tar, but streams the tar file to the `out_file` file
     object."""
     archive_files = {}
     replace = []
+    content = []
 
     context_dir = os.path.join(topsrcdir, context_dir)
 
     for root, dirs, files in os.walk(context_dir):
         for f in files:
             source_path = os.path.join(root, f)
             rel = source_path[len(context_dir) + 1:]
             archive_path = os.path.join(prefix, rel)
             archive_files[archive_path] = source_path
 
     # Parse Dockerfile for special syntax of extra files to include.
-    content = []
     with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
         for line in fh:
             if line.startswith('# %ARG'):
                 p = line[len('# %ARG '):].strip()
                 if not args or p not in args:
                     raise Exception('missing argument: {}'.format(p))
                 replace.append((re.compile(r'\${}\b'.format(p)),
                                 args[p].encode('ascii')))
--- a/taskcluster/taskgraph/util/taskcluster.py
+++ b/taskcluster/taskgraph/util/taskcluster.py
@@ -1,16 +1,17 @@
 # -*- coding: utf-8 -*-
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+import os
 import datetime
 import functools
 import yaml
 import requests
 import logging
 from mozbuild.util import memoize
 from requests.packages.urllib3.util.retry import Retry
 from requests.adapters import HTTPAdapter
@@ -22,16 +23,37 @@ from taskgraph.task import Task
 _PRIVATE_TC_ARTIFACT_LOCATION = \
     'http://taskcluster/queue/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}'
 
 logger = logging.getLogger(__name__)
 
 # this is set to true for `mach taskgraph action-callback --test`
 testing = False
 
+# Default rootUrl to use if none is given in the environment; this should point
+# to the production Taskcluster deployment used for CI.
+PRODUCTION_TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'
+
+
+@memoize
+def get_root_url():
+    """Get the current TASKCLUSTER_ROOT_URL.  When running in a task, this must
+    come from $TASKCLUSTER_ROOT_URL; when run on the command line, we apply a
+    default that points to the production deployment of Taskcluster."""
+    if 'TASKCLUSTER_ROOT_URL' not in os.environ:
+        if 'TASK_ID' in os.environ:
+            raise RuntimeError('$TASKCLUSTER_ROOT_URL must be set when running in a task')
+        else:
+            logger.debug('Using default TASKCLUSTER_ROOT_URL (Firefox CI production)')
+            return PRODUCTION_TASKCLUSTER_ROOT_URL
+    logger.debug('Running in Taskcluster instance {}{}'.format(
+        os.environ['TASKCLUSTER_ROOT_URL'],
+        ' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
+    return os.environ['TASKCLUSTER_ROOT_URL']
+
 
 @memoize
 def get_session():
     session = requests.Session()
     retry = Retry(total=5, backoff_factor=0.1,
                   status_forcelist=[500, 502, 503, 504])
     session.mount('http://', HTTPAdapter(max_retries=retry))
     session.mount('https://', HTTPAdapter(max_retries=retry))