Backed out 2 changesets (bug 1342392) for adding .orig file and flake lint failures. r=backout on a CLOSED TREE
author Sebastian Hengst <archaeopteryx@coole-files.de>
Mon, 11 Sep 2017 10:38:08 +0200
changeset 429557 0c0a373e16a6351e1309e6d0579ade47bce192b5
parent 429556 bf0bcdc7482ea84542f5500735e7f219e417c0b7
child 429558 01bf0c29331e6bbee4a7bf674010ea1ebc41cb55
push id 7761
push user jlund@mozilla.com
push date Fri, 15 Sep 2017 00:19:52 +0000
treeherder mozilla-beta@c38455951db4
reviewers backout
bugs 1342392
milestone 57.0a1
backs out bf0bcdc7482ea84542f5500735e7f219e417c0b7
7f5ae5ff11812721ae7521715729489ddb2643f3
Backed out 2 changesets (bug 1342392) for adding .orig file and flake lint failures. r=backout on a CLOSED TREE

Backed out changeset bf0bcdc7482e (bug 1342392)
Backed out changeset 7f5ae5ff1181 (bug 1342392)
taskcluster/ci/balrog/kind.yml
taskcluster/ci/beetmover-partials/kind.yml
taskcluster/ci/docker-image/kind.yml
taskcluster/ci/partials-signing/kind.yml
taskcluster/ci/partials/kind.yml
taskcluster/docker/funsize-update-generator/Dockerfile
taskcluster/docker/funsize-update-generator/Makefile
taskcluster/docker/funsize-update-generator/README
taskcluster/docker/funsize-update-generator/dep.pubkey
taskcluster/docker/funsize-update-generator/nightly_sha1.pubkey
taskcluster/docker/funsize-update-generator/nightly_sha384.pubkey
taskcluster/docker/funsize-update-generator/release_sha1.pubkey
taskcluster/docker/funsize-update-generator/release_sha384.pubkey
taskcluster/docker/funsize-update-generator/requirements.txt
taskcluster/docker/funsize-update-generator/runme.sh
taskcluster/docker/funsize-update-generator/scripts/funsize.py
taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
taskcluster/docker/partial-update-generator/Dockerfile
taskcluster/docker/partial-update-generator/Makefile
taskcluster/docker/partial-update-generator/README
taskcluster/docker/partial-update-generator/dep.pubkey
taskcluster/docker/partial-update-generator/nightly_sha1.pubkey
taskcluster/docker/partial-update-generator/nightly_sha384.pubkey
taskcluster/docker/partial-update-generator/release_sha1.pubkey
taskcluster/docker/partial-update-generator/release_sha384.pubkey
taskcluster/docker/partial-update-generator/requirements.txt
taskcluster/docker/partial-update-generator/runme.sh
taskcluster/docker/partial-update-generator/scripts/funsize.py
taskcluster/docker/partial-update-generator/scripts/funsize.py.orig
taskcluster/docker/partial-update-generator/scripts/mbsdiff_hook.sh
taskcluster/docs/kinds.rst
taskcluster/docs/parameters.rst
taskcluster/mach_commands.py
taskcluster/taskgraph/decision.py
taskcluster/taskgraph/parameters.py
taskcluster/taskgraph/transforms/balrog.py
taskcluster/taskgraph/transforms/beetmover_partials.py
taskcluster/taskgraph/transforms/beetmover_repackage.py
taskcluster/taskgraph/transforms/partials.py
taskcluster/taskgraph/transforms/partials_signing.py
taskcluster/taskgraph/transforms/repackage.py
taskcluster/taskgraph/transforms/repackage_signing.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/task.py.orig
taskcluster/taskgraph/util/partials.py
taskcluster/taskgraph/util/taskcluster.py
--- a/taskcluster/ci/balrog/kind.yml
+++ b/taskcluster/ci/balrog/kind.yml
@@ -8,13 +8,12 @@ transforms:
    - taskgraph.transforms.name_sanity:transforms
    - taskgraph.transforms.balrog:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - beetmover
   - beetmover-l10n
   - beetmover-repackage
-  - beetmover-partials
 
 only-for-attributes:
   - nightly
   - signed
deleted file mode 100644
--- a/taskcluster/ci/beetmover-partials/kind.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-loader: taskgraph.loader.single_dep:loader
-
-transforms:
-   - taskgraph.transforms.name_sanity:transforms
-   - taskgraph.transforms.beetmover_repackage_l10n:transforms
-   - taskgraph.transforms.beetmover_repackage:transforms
-   - taskgraph.transforms.beetmover_partials:transforms
-   - taskgraph.transforms.task:transforms
-
-kind-dependencies:
-  - partials-signing
--- a/taskcluster/ci/docker-image/kind.yml
+++ b/taskcluster/ci/docker-image/kind.yml
@@ -21,10 +21,8 @@ jobs:
   valgrind-build:
     symbol: I(vb)
   lint:
     symbol: I(lnt)
   android-gradle-build:
     symbol: I(agb)
   index-task:
     symbol: I(idx)
-  partial-update-generator:
-    symbol: I(pg)
deleted file mode 100644
--- a/taskcluster/ci/partials-signing/kind.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-loader: taskgraph.loader.single_dep:loader
-
-transforms:
-  - taskgraph.transforms.name_sanity:transforms
-  - taskgraph.transforms.partials_signing:transforms
-  - taskgraph.transforms.task:transforms
-
-kind-dependencies:
-  - partials
deleted file mode 100644
--- a/taskcluster/ci/partials/kind.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-loader: taskgraph.loader.single_dep:loader
-
-transforms:
-  - taskgraph.transforms.name_sanity:transforms
-  - taskgraph.transforms.partials:transforms
-  - taskgraph.transforms.task:transforms
-
-kind-dependencies:
-  - repackage-signing
-
-only-for-attributes:
-  - nightly
-
-only-for-build-platforms:
-  - macosx64-nightly/opt
-  - win32-nightly/opt
-  - win64-nightly/opt
-  - linux-nightly/opt
-  - linux64-nightly/opt
rename from taskcluster/docker/partial-update-generator/Dockerfile
rename to taskcluster/docker/funsize-update-generator/Dockerfile
--- a/taskcluster/docker/partial-update-generator/Dockerfile
+++ b/taskcluster/docker/funsize-update-generator/Dockerfile
@@ -20,20 +20,17 @@ RUN for i in 1 2 3 4 5; do freshclam --v
 # python-pip installs a lot of dependencies increasing the size of an image
 # drastically. Using easy_install saves us almost 200M.
 RUN easy_install pip
 RUN pip install -r /tmp/requirements.txt
 
 # scripts
 RUN mkdir /home/worker/bin
 COPY scripts/* /home/worker/bin/
-
 COPY runme.sh /runme.sh
 RUN chmod 755 /home/worker/bin/* /runme.sh
 RUN mkdir /home/worker/keys
 COPY *.pubkey /home/worker/keys/
 
 ENV           HOME          /home/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
-
-CMD ["/runme.sh"]
rename from taskcluster/docker/partial-update-generator/Makefile
rename to taskcluster/docker/funsize-update-generator/Makefile
rename from taskcluster/docker/partial-update-generator/README
rename to taskcluster/docker/funsize-update-generator/README
rename from taskcluster/docker/partial-update-generator/dep.pubkey
rename to taskcluster/docker/funsize-update-generator/dep.pubkey
rename from taskcluster/docker/partial-update-generator/nightly_sha1.pubkey
rename to taskcluster/docker/funsize-update-generator/nightly_sha1.pubkey
rename from taskcluster/docker/partial-update-generator/nightly_sha384.pubkey
rename to taskcluster/docker/funsize-update-generator/nightly_sha384.pubkey
rename from taskcluster/docker/partial-update-generator/release_sha1.pubkey
rename to taskcluster/docker/funsize-update-generator/release_sha1.pubkey
rename from taskcluster/docker/partial-update-generator/release_sha384.pubkey
rename to taskcluster/docker/funsize-update-generator/release_sha384.pubkey
rename from taskcluster/docker/partial-update-generator/requirements.txt
rename to taskcluster/docker/funsize-update-generator/requirements.txt
rename from taskcluster/docker/partial-update-generator/runme.sh
rename to taskcluster/docker/funsize-update-generator/runme.sh
rename from taskcluster/docker/partial-update-generator/scripts/funsize.py
rename to taskcluster/docker/funsize-update-generator/scripts/funsize.py
--- a/taskcluster/docker/partial-update-generator/scripts/funsize.py
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -22,17 +22,16 @@ log = logging.getLogger(__name__)
 ALLOWED_URL_PREFIXES = [
     "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
     "http://download.cdn.mozilla.net/pub/firefox/nightly/",
     "https://mozilla-nightly-updates.s3.amazonaws.com",
     "https://queue.taskcluster.net/",
     "http://ftp.mozilla.org/",
     "http://download.mozilla.org/",
     "https://archive.mozilla.org/",
-    "https://queue.taskcluster.net/v1/task/",
 ]
 
 DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
                             "{locale}-{from_buildid}-{to_buildid}.partial.mar"
 
 
 def verify_signature(mar, certs):
     log.info("Checking %s signature", mar)
@@ -282,21 +281,17 @@ def main():
                       "previousBuildNumber", "toVersion",
                       "toBuildNumber"):
             if field in e:
                 mar_data[field] = e[field]
         mar_data.update(complete_mars)
         # if branch not set explicitly use repo-name
         mar_data["branch"] = e.get("branch",
                                    mar_data["repo"].rstrip("/").split("/")[-1])
-        if 'dest_mar' in e:
-            mar_name = e['dest_mar']
-        else:
-            # default to formatted name if not specified
-            mar_name = args.filename_template.format(**mar_data)
+        mar_name = args.filename_template.format(**mar_data)
         mar_data["mar"] = mar_name
         dest_mar = os.path.join(work_env.workdir, mar_name)
         # TODO: download these once
         work_env.download_buildsystem_bits(repo=mar_data["repo"],
                                            revision=mar_data["revision"])
         generate_partial(work_env, from_path, path, dest_mar,
                          mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
                          mar_data["version"],
rename from taskcluster/docker/partial-update-generator/scripts/mbsdiff_hook.sh
rename to taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
deleted file mode 100755
--- a/taskcluster/docker/partial-update-generator/scripts/funsize.py.orig
+++ /dev/null
@@ -1,311 +0,0 @@
-#!/usr/bin/env python
-from __future__ import absolute_import, print_function
-
-import ConfigParser
-import argparse
-import functools
-import hashlib
-import json
-import logging
-import os
-import shutil
-import tempfile
-import requests
-import sh
-
-import redo
-from mardor.reader import MarReader
-from mardor.signing import get_keysize
-
-
-log = logging.getLogger(__name__)
-ALLOWED_URL_PREFIXES = [
-    "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
-    "http://download.cdn.mozilla.net/pub/firefox/nightly/",
-    "https://mozilla-nightly-updates.s3.amazonaws.com",
-    "https://queue.taskcluster.net/",
-    "http://ftp.mozilla.org/",
-    "http://download.mozilla.org/",
-    "https://archive.mozilla.org/",
-]
-
-DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
-                            "{locale}-{from_buildid}-{to_buildid}.partial.mar"
-
-
-def verify_signature(mar, certs):
-    log.info("Checking %s signature", mar)
-    with open(mar, 'rb') as mar_fh:
-        m = MarReader(mar_fh)
-        m.verify(verify_key=certs.get(m.signature_type))
-
-
-def is_lzma_compressed_mar(mar):
-    log.info("Checking %s for lzma compression", mar)
-    result = MarReader(open(mar, 'rb')).compression_type == 'xz'
-    if result:
-        log.info("%s is lzma compressed", mar)
-    else:
-        log.info("%s is not lzma compressed", mar)
-    return result
-
-
-@redo.retriable()
-def download(url, dest, mode=None):
-    log.debug("Downloading %s to %s", url, dest)
-    r = requests.get(url)
-    r.raise_for_status()
-
-    bytes_downloaded = 0
-    with open(dest, 'wb') as fd:
-        for chunk in r.iter_content(4096):
-            fd.write(chunk)
-            bytes_downloaded += len(chunk)
-
-    log.debug('Downloaded %s bytes', bytes_downloaded)
-    if 'content-length' in r.headers:
-        log.debug('Content-Length: %s bytes', r.headers['content-length'])
-        if bytes_downloaded != int(r.headers['content-length']):
-            raise IOError('Unexpected number of bytes downloaded')
-
-    if mode:
-        log.debug("chmod %o %s", mode, dest)
-        os.chmod(dest, mode)
-
-
-def unpack(work_env, mar, dest_dir):
-    os.mkdir(dest_dir)
-    unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
-                                         "unwrap_full_update.pl"))
-    log.debug("Unwrapping %s", mar)
-    env = work_env.env
-    if not is_lzma_compressed_mar(mar):
-        env['MAR_OLD_FORMAT'] = '1'
-    elif 'MAR_OLD_FORMAT' in env:
-        del env['MAR_OLD_FORMAT']
-    out = unwrap_cmd(mar, _cwd=dest_dir, _env=env, _timeout=240,
-                     _err_to_out=True)
-    if out:
-        log.debug(out)
-
-
-def find_file(directory, filename):
-    log.debug("Searching for %s in %s", filename, directory)
-    for root, dirs, files in os.walk(directory):
-        if filename in files:
-            f = os.path.join(root, filename)
-            log.debug("Found %s", f)
-            return f
-
-
-def get_option(directory, filename, section, option):
-    log.debug("Exctracting [%s]: %s from %s/**/%s", section, option, directory,
-              filename)
-    f = find_file(directory, filename)
-    config = ConfigParser.ConfigParser()
-    config.read(f)
-    rv = config.get(section, option)
-    log.debug("Found %s", rv)
-    return rv
-
-
-def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
-                     version, use_old_format):
-    log.debug("Generating partial %s", dest_mar)
-    env = work_env.env
-    env["MOZ_PRODUCT_VERSION"] = version
-    env["MOZ_CHANNEL_ID"] = channel_ids
-    if use_old_format:
-        env['MAR_OLD_FORMAT'] = '1'
-    elif 'MAR_OLD_FORMAT' in env:
-        del env['MAR_OLD_FORMAT']
-    make_incremental_update = os.path.join(work_env.workdir,
-                                           "make_incremental_update.sh")
-    out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
-                  _cwd=work_env.workdir, _env=env, _timeout=900,
-                  _err_to_out=True)
-    if out:
-        log.debug(out)
-
-
-def get_hash(path, hash_type="sha512"):
-    h = hashlib.new(hash_type)
-    with open(path, "rb") as f:
-        for chunk in iter(functools.partial(f.read, 4096), ''):
-            h.update(chunk)
-    return h.hexdigest()
-
-
-class WorkEnv(object):
-
-    def __init__(self):
-        self.workdir = tempfile.mkdtemp()
-
-    def setup(self):
-        self.download_unwrap()
-        self.download_martools()
-
-    def download_unwrap(self):
-        # unwrap_full_update.pl is not too sensitive to the revision
-        url = "https://hg.mozilla.org/mozilla-central/raw-file/default/" \
-            "tools/update-packaging/unwrap_full_update.pl"
-        download(url, dest=os.path.join(self.workdir, "unwrap_full_update.pl"),
-                 mode=0o755)
-
-    def download_buildsystem_bits(self, repo, revision):
-        prefix = "{repo}/raw-file/{revision}/tools/update-packaging"
-        prefix = prefix.format(repo=repo, revision=revision)
-        for f in ("make_incremental_update.sh", "common.sh"):
-            url = "{prefix}/{f}".format(prefix=prefix, f=f)
-            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
-
-    def download_martools(self):
-        # TODO: check if the tools have to be branch specific
-        prefix = "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/" \
-            "latest-mozilla-central/mar-tools/linux64"
-        for f in ("mar", "mbsdiff"):
-            url = "{prefix}/{f}".format(prefix=prefix, f=f)
-            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
-
-    def cleanup(self):
-        shutil.rmtree(self.workdir)
-
-    @property
-    def env(self):
-        my_env = os.environ.copy()
-        my_env['LC_ALL'] = 'C'
-        my_env['MAR'] = os.path.join(self.workdir, "mar")
-        my_env['MBSDIFF'] = os.path.join(self.workdir, "mbsdiff")
-        return my_env
-
-
-def verify_allowed_url(mar):
-    if not any(mar.startswith(prefix) for prefix in ALLOWED_URL_PREFIXES):
-        raise ValueError("{mar} is not in allowed URL prefixes: {p}".format(
-            mar=mar, p=ALLOWED_URL_PREFIXES
-        ))
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--artifacts-dir", required=True)
-    parser.add_argument("--sha1-signing-cert", required=True)
-    parser.add_argument("--sha384-signing-cert", required=True)
-    parser.add_argument("--task-definition", required=True,
-                        type=argparse.FileType('r'))
-    parser.add_argument("--filename-template",
-                        default=DEFAULT_FILENAME_TEMPLATE)
-    parser.add_argument("--no-freshclam", action="store_true", default=False,
-                        help="Do not refresh ClamAV DB")
-    parser.add_argument("-q", "--quiet", dest="log_level",
-                        action="store_const", const=logging.WARNING,
-                        default=logging.DEBUG)
-    args = parser.parse_args()
-
-    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
-                        level=args.log_level)
-    task = json.load(args.task_definition)
-    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
-
-    signing_certs = {
-        'sha1': open(args.sha1_signing_cert, 'rb').read(),
-        'sha384': open(args.sha384_signing_cert, 'rb').read(),
-    }
-
-    assert(get_keysize(signing_certs['sha1']) == 2048)
-    assert(get_keysize(signing_certs['sha384']) == 4096)
-
-    if args.no_freshclam:
-        log.info("Skipping freshclam")
-    else:
-        log.info("Refreshing clamav db...")
-        try:
-            redo.retry(lambda: sh.freshclam("--stdout", "--verbose",
-                                            _timeout=300, _err_to_out=True))
-            log.info("Done.")
-        except sh.ErrorReturnCode:
-            log.warning("Freshclam failed, skipping DB update")
-    manifest = []
-    for e in task["extra"]["funsize"]["partials"]:
-        for mar in (e["from_mar"], e["to_mar"]):
-            verify_allowed_url(mar)
-
-        work_env = WorkEnv()
-        # TODO: run setup once
-        work_env.setup()
-        complete_mars = {}
-        use_old_format = False
-        for mar_type, f in (("from", e["from_mar"]), ("to", e["to_mar"])):
-            dest = os.path.join(work_env.workdir, "{}.mar".format(mar_type))
-            unpack_dir = os.path.join(work_env.workdir, mar_type)
-            download(f, dest)
-            if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
-                verify_signature(dest, signing_certs)
-            complete_mars["%s_size" % mar_type] = os.path.getsize(dest)
-            complete_mars["%s_hash" % mar_type] = get_hash(dest)
-            if mar_type == 'to' and not is_lzma_compressed_mar(dest):
-                use_old_format = True
-            unpack(work_env, dest, unpack_dir)
-            log.info("AV-scanning %s ...", unpack_dir)
-            sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
-            log.info("Done.")
-
-        path = os.path.join(work_env.workdir, "to")
-        from_path = os.path.join(work_env.workdir, "from")
-        mar_data = {
-            "ACCEPTED_MAR_CHANNEL_IDS": get_option(
-                path, filename="update-settings.ini", section="Settings",
-                option="ACCEPTED_MAR_CHANNEL_IDS"),
-            "version": get_option(path, filename="application.ini",
-                                  section="App", option="Version"),
-            "to_buildid": get_option(path, filename="application.ini",
-                                     section="App", option="BuildID"),
-            "from_buildid": get_option(from_path, filename="application.ini",
-                                       section="App", option="BuildID"),
-            "appName": get_option(from_path, filename="application.ini",
-                                  section="App", option="Name"),
-            # Use Gecko repo and rev from platform.ini, not application.ini
-            "repo": get_option(path, filename="platform.ini", section="Build",
-                               option="SourceRepository"),
-            "revision": get_option(path, filename="platform.ini",
-                                   section="Build", option="SourceStamp"),
-            "from_mar": e["from_mar"],
-            "to_mar": e["to_mar"],
-            "platform": e["platform"],
-            "locale": e["locale"],
-        }
-        # Override ACCEPTED_MAR_CHANNEL_IDS if needed
-        if "ACCEPTED_MAR_CHANNEL_IDS" in os.environ:
-            mar_data["ACCEPTED_MAR_CHANNEL_IDS"] = os.environ["ACCEPTED_MAR_CHANNEL_IDS"]
-        for field in ("update_number", "previousVersion",
-                      "previousBuildNumber", "toVersion",
-                      "toBuildNumber"):
-            if field in e:
-                mar_data[field] = e[field]
-        mar_data.update(complete_mars)
-        # if branch not set explicitly use repo-name
-        mar_data["branch"] = e.get("branch",
-                                   mar_data["repo"].rstrip("/").split("/")[-1])
-        mar_name = args.filename_template.format(**mar_data)
-        mar_data["mar"] = mar_name
-        dest_mar = os.path.join(work_env.workdir, mar_name)
-        # TODO: download these once
-        work_env.download_buildsystem_bits(repo=mar_data["repo"],
-                                           revision=mar_data["revision"])
-        generate_partial(work_env, from_path, path, dest_mar,
-                         mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
-                         mar_data["version"],
-                         use_old_format)
-        mar_data["size"] = os.path.getsize(dest_mar)
-        mar_data["hash"] = get_hash(dest_mar)
-
-        shutil.copy(dest_mar, args.artifacts_dir)
-        work_env.cleanup()
-        manifest.append(mar_data)
-    manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
-    with open(manifest_file, "w") as fp:
-        json.dump(manifest, fp, indent=2, sort_keys=True)
-
-
-if __name__ == '__main__':
-    main()
--- a/taskcluster/docs/kinds.rst
+++ b/taskcluster/docs/kinds.rst
@@ -188,24 +188,16 @@ the language in the final artifact names
 
 beetmover-repackage
 -------------------
 
 Beetmover-repackage is beetmover but for tasks that need an intermediate step
 between signing and packaging, such as OSX. For more details see the definitions
 of the Beetmover kind above and the repackage kind below.
 
-beetmover-partials
-------------------
-
-Beetmover-partials is beetmover but for the partial updates that have been
-generated. Not every build produces partial updates, and so these are kept
-separate from the regular beetmover jobs to avoid situations where the completes
-are not uploaded.
-
 checksums-signing
 -----------------
 Checksums-signing take as input the checksums file generated by beetmover tasks
 and sign it via the signing scriptworkers. Returns the same file signed and
 additional detached signature.
 
 beetmover-checksums
 -------------------
@@ -236,18 +228,8 @@ repackage-l10n
 --------------
 Repackage-L10n is a ```Repackage``` task split up to be suitable for use after l10n repacks.
 
 
 repackage-signing
 -----------------
 Repackage-signing take the repackaged installers (windows) and update packaging (with
 the signed internal bits) and signs them.
-
-partials
---------
-Partials takes the complete.mar files produced in previous tasks and generates partial
-updates between previous nightly releases and the new one. Requires a release_history
-in the parameters. See ``mach release-history`` if doing this manually.
-
-partials-signing
-----------------
-Partials-signing takes the partial updates produced in Partials and signs them.
--- a/taskcluster/docs/parameters.rst
+++ b/taskcluster/docs/parameters.rst
@@ -102,22 +102,16 @@ syntax or reading a project-specific con
     one of the functions in ``taskcluster/taskgraph/target_tasks.py``.
 
 ``optimize_target_tasks``
     If true, then target tasks are eligible for optimization.
 
 ``include_nightly``
     If true, then nightly tasks are eligible for optimization.
 
-``release_history``
-   History of recent releases by platform and locale, used when generating
-   partial updates for nightly releases.
-   Suitable contents can be generated with ``mach release-history``,
-   which will print to the console by default.
-
 Morphed Set
 -----------
 
 ``morph_templates``
     Dict of JSON-e templates to apply to each task, keyed by template name.
     Values are extra context that will be available to the template under the
     ``input.<template>`` key. Available templates live in
     ``taskcluster/taskgraph/templates``. Enabled on try only.
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -499,27 +499,8 @@ class TaskClusterImagesProvider(object):
         try:
             if context_only is None:
                 build_image(image_name)
             else:
                 build_context(image_name, context_only)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
-
-@CommandProvider
-class TaskClusterPartialsData(object):
-    @Command('release-history', category="ci",
-             description="Query balrog for release history used by enable partials generation")
-    @CommandArgument('-b', '--branch',
-                     help="The gecko project branch used in balrog, such as "
-                          "mozilla-central, release, date")
-    @CommandArgument('--product', default='Firefox',
-                     help="The product identifier, such as 'Firefox'")
-    def generate_partials_builds(self, product, branch):
-        from taskgraph.util.partials import populate_release_history
-        try:
-            import yaml
-            release_history = {'release_history': populate_release_history(product, branch)}
-            print(yaml.safe_dump(release_history, allow_unicode=True, default_flow_style=False))
-        except Exception:
-            traceback.print_exc()
-            sys.exit(1)
--- a/taskcluster/taskgraph/decision.py
+++ b/taskcluster/taskgraph/decision.py
@@ -13,17 +13,16 @@ import re
 import time
 import yaml
 
 from .generator import TaskGraphGenerator
 from .create import create_tasks
 from .parameters import Parameters
 from .taskgraph import TaskGraph
 from .actions import render_actions_json
-from taskgraph.util.partials import populate_release_history
 from . import GECKO
 
 from taskgraph.util.templates import Templates
 from taskgraph.util.time import (
     json_time_from_now,
     current_json_time,
 )
 
@@ -103,17 +102,16 @@ def taskgraph_decision(options):
      * processing decision task command-line options into parameters
      * running task-graph generation exactly the same way the other `mach
        taskgraph` commands do
      * generating a set of artifacts to memorialize the graph
      * calling TaskCluster APIs to create the graph
     """
 
     parameters = get_decision_parameters(options)
-
     # create a TaskGraphGenerator instance
     tgg = TaskGraphGenerator(
         root_dir=options['root'],
         parameters=parameters)
 
     # write out the parameters used to generate this graph
     write_artifact('parameters.yml', dict(**parameters))
 
@@ -199,23 +197,16 @@ def get_decision_parameters(options):
             task_config = json.load(fh)
         parameters['morph_templates'] = task_config.get('templates', {})
         parameters['target_task_labels'] = task_config.get('tasks')
 
     # `target_tasks_method` has higher precedence than `project` parameters
     if options.get('target_tasks_method'):
         parameters['target_tasks_method'] = options['target_tasks_method']
 
-    # If the target method is nightly, we should build partials. This means
-    # knowing what has been released previously.
-    # An empty release_history is fine, it just means no partials will be built
-    parameters.setdefault('release_history', dict())
-    if 'nightly' in parameters.get('target_tasks_method', ''):
-        parameters['release_history'] = populate_release_history('Firefox', project)
-
     return Parameters(parameters)
 
 
 def write_artifact(filename, data):
     logger.info('writing artifact file `{}`'.format(filename))
     if not os.path.isdir(ARTIFACTS_DIR):
         os.mkdir(ARTIFACTS_DIR)
     path = os.path.join(ARTIFACTS_DIR, filename)
--- a/taskcluster/taskgraph/parameters.py
+++ b/taskcluster/taskgraph/parameters.py
@@ -23,17 +23,16 @@ PARAMETER_NAMES = set([
     'message',
     'morph_templates',
     'moz_build_date',
     'optimize_target_tasks',
     'owner',
     'project',
     'pushdate',
     'pushlog_id',
-    'release_history',
     'target_task_labels',
     'target_tasks_method',
 ])
 
 TRY_ONLY_PARAMETERS = set([
     'morph_templates',
     'target_task_labels',
 ])
--- a/taskcluster/taskgraph/transforms/balrog.py
+++ b/taskcluster/taskgraph/transforms/balrog.py
@@ -51,38 +51,31 @@ def validate(config, jobs):
 
 
 @transforms.add
 def make_task_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
 
         treeherder = job.get('treeherder', {})
-        treeherder.setdefault('symbol', 'c-Up(N)')
+        treeherder.setdefault('symbol', 'tc-Up(N)')
         dep_th_platform = dep_job.task.get('extra', {}).get(
             'treeherder', {}).get('machine', {}).get('platform', '')
         treeherder.setdefault('platform',
                               "{}/opt".format(dep_th_platform))
         treeherder.setdefault('tier', 1)
         treeherder.setdefault('kind', 'build')
 
         attributes = copy_attributes_from_dependent_job(dep_job)
 
-        treeherder_job_symbol = dep_job.attributes.get('locale', 'N')
-
         if dep_job.attributes.get('locale'):
-            treeherder['symbol'] = 'c-Up({})'.format(treeherder_job_symbol)
+            treeherder['symbol'] = 'tc-Up({})'.format(dep_job.attributes.get('locale'))
             attributes['locale'] = dep_job.attributes.get('locale')
 
         label = job['label']
-        if 'partials' in dep_job.kind:
-            label = "partials-{}".format(label)
-            treeherder['symbol'] = 'cp-Up({})'.format(treeherder_job_symbol)
-            treeherder['tier'] = 3  # remove once proven stable
-
         description = (
             "Balrog submission for locale '{locale}' for build '"
             "{build_platform}/{build_type}'".format(
                 locale=attributes.get('locale', 'en-US'),
                 build_platform=attributes.get('build_platform'),
                 build_type=attributes.get('build_type')
             )
         )
@@ -96,16 +89,17 @@ def make_task_description(config, jobs):
         }]
 
         server_scope = get_balrog_server_scope(config)
         channel_scopes = get_balrog_channel_scopes(config)
 
         task = {
             'label': label,
             'description': description,
+            # do we have to define worker type somewhere?
             'worker-type': 'scriptworker-prov-v1/balrogworker-v1',
             'worker': {
                 'implementation': 'balrog',
                 'upstream-artifacts': upstream_artifacts,
             },
             'scopes': [server_scope] + channel_scopes,
             'dependencies': {'beetmover': dep_job.label},
             'attributes': attributes,
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/beetmover_partials.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-Add partial update artifacts to a beetmover task.
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.partials import (get_balrog_platform_name,
-                                     get_partials_artifacts,
-                                     get_partials_artifact_map)
-
-import logging
-logger = logging.getLogger(__name__)
-
-transforms = TransformSequence()
-
-
-def generate_upstream_artifacts(release_history, platform, locale=None):
-    if not locale or locale == 'en-US':
-        artifact_prefix = 'public/build'
-    else:
-        artifact_prefix = 'public/build/{}'.format(locale)
-
-    artifacts = get_partials_artifacts(release_history, platform, locale)
-
-    upstream_artifacts = [{
-        'taskId': {'task-reference': '<partials-signing>'},
-        'taskType': 'signing',
-        'paths': ["{}/{}".format(artifact_prefix, p)
-                  for p in artifacts],
-        'locale': locale or 'en-US',
-    }]
-
-    return upstream_artifacts
-
-
-@transforms.add
-def make_partials_artifacts(config, jobs):
-    for job in jobs:
-        locale = job["attributes"].get("locale")
-        if locale:
-            job['treeherder']['symbol'] = 'pBM({})'.format(locale)
-        else:
-            locale = 'en-US'
-            job['treeherder']['symbol'] = 'pBM(N)'
-
-        # Remove when proved reliable
-        job['treeherder']['tier'] = 3
-
-        platform = job["attributes"]["build_platform"]
-
-        platform = get_balrog_platform_name(platform)
-        upstream_artifacts = generate_upstream_artifacts(
-            config.params.get('release_history'), platform, locale
-        )
-
-        job['worker']['upstream-artifacts'].extend(upstream_artifacts)
-
-        extra = list()
-
-        artifact_map = get_partials_artifact_map(
-            config.params.get('release_history'), platform, locale)
-        for artifact in artifact_map:
-            extra.append({
-                'locale': locale,
-                'artifact_name': artifact,
-                'buildid': artifact_map[artifact],
-                'platform': platform,
-            })
-
-        job.setdefault('extra', {})
-        job['extra']['partials'] = extra
-
-        yield job
--- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -210,24 +210,16 @@ def make_task_description(config, jobs):
 
         repackage_name = "repackage"
         # repackage-l10n actually uses the repackage depname here
         repackage_dependencies = {"repackage":
                                   dep_job.dependencies[repackage_name]
                                   }
         dependencies.update(repackage_dependencies)
 
-        # If this isn't a direct dependency, it won't be in there.
-        if 'repackage-signing' not in dependencies:
-            repackage_signing_name = "repackage-signing"
-            repackage_signing_deps = {"repackage-signing":
-                                      dep_job.dependencies[repackage_signing_name]
-                                      }
-            dependencies.update(repackage_signing_deps)
-
         attributes = copy_attributes_from_dependent_job(dep_job)
         if job.get('locale'):
             attributes['locale'] = job['locale']
 
         bucket_scope = get_beetmover_bucket_scope(config)
         action_scope = get_beetmover_action_scope(config)
 
         task = {
@@ -276,16 +268,17 @@ def generate_upstream_artifacts(build_ta
 
     for ref, tasktype, mapping in zip(task_refs, tasktypes, mapping):
         plarform_was_previously_matched_by_regex = None
         for platform_regex, paths in mapping.iteritems():
             if platform_regex.match(platform) is not None:
                 _check_platform_matched_only_one_regex(
                     tasktype, platform, plarform_was_previously_matched_by_regex, platform_regex
                 )
+
                 upstream_artifacts.append({
                     "taskId": {"task-reference": ref},
                     "taskType": tasktype,
                     "paths": ["{}/{}".format(artifact_prefix, path) for path in paths],
                     "locale": locale or "en-US",
                 })
                 plarform_was_previously_matched_by_regex = platform_regex
 
@@ -301,22 +294,18 @@ least 2 regular expressions. First match
 "{second_matched}"'.format(
             task_type=task_type, platform=platform,
             first_matched=plarform_was_previously_matched_by_regex.pattern,
             second_matched=platform_regex.pattern
         ))
 
 
 def is_valid_beetmover_job(job):
-    # beetmover after partials-signing should have six dependencies.
-    # windows builds w/o partials don't have docker-image, so fewer
-    # dependencies
-    if 'partials-signing' in job['dependencies'].keys():
-        expected_dep_count = 6
-    elif any(b in job['attributes']['build_platform'] for b in _WINDOWS_BUILD_PLATFORMS):
+    # windows builds don't have docker-image, so fewer dependencies
+    if any(b in job['attributes']['build_platform'] for b in _WINDOWS_BUILD_PLATFORMS):
         expected_dep_count = 4
     else:
         expected_dep_count = 5
 
     return (len(job["dependencies"]) == expected_dep_count and
             any(['repackage' in j for j in job['dependencies']]))
 
 
@@ -327,17 +316,16 @@ def make_task_worker(config, jobs):
             raise NotImplementedError("Beetmover_repackage must have five dependencies.")
 
         locale = job["attributes"].get("locale")
         platform = job["attributes"]["build_platform"]
         build_task = None
         build_signing_task = None
         repackage_task = None
         repackage_signing_task = None
-
         for dependency in job["dependencies"].keys():
             if 'repackage-signing' in dependency:
                 repackage_signing_task = dependency
             elif 'repackage' in dependency:
                 repackage_task = dependency
             elif 'signing' in dependency:
                 # catches build-signing and nightly-l10n-signing
                 build_signing_task = dependency
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/partials.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-Transform the partials task into an actual task description.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-
-from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.attributes import copy_attributes_from_dependent_job
-from taskgraph.util.partials import get_balrog_platform_name, get_builds
-from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix
-
-import logging
-logger = logging.getLogger(__name__)
-
-transforms = TransformSequence()
-
-
-def _generate_task_output_files(filenames, locale=None):
-    locale_output_path = '{}/'.format(locale) if locale else ''
-
-    data = list()
-    for filename in filenames:
-        data.append({
-            'type': 'file',
-            'path': '/home/worker/artifacts/{}'.format(filename),
-            'name': 'public/build/{}{}'.format(locale_output_path, filename)
-        })
-    data.append({
-        'type': 'file',
-        'path': '/home/worker/artifacts/manifest.json',
-        'name': 'public/build/{}manifest.json'.format(locale_output_path)
-    })
-    return data
-
-
-@transforms.add
-def make_task_description(config, jobs):
-    # If no balrog release history, then don't generate partials
-    if not config.params.get('release_history'):
-        return
-    for job in jobs:
-        dep_job = job['dependent-task']
-
-        treeherder = job.get('treeherder', {})
-        treeherder.setdefault('symbol', 'p(N)')
-
-        label = job.get('label', "partials-{}".format(dep_job.label))
-        dep_th_platform = dep_job.task.get('extra', {}).get(
-            'treeherder', {}).get('machine', {}).get('platform', '')
-
-        treeherder.setdefault('platform',
-                              "{}/opt".format(dep_th_platform))
-        treeherder.setdefault('kind', 'build')
-        treeherder.setdefault('tier', 3)
-
-        dependent_kind = str(dep_job.kind)
-        dependencies = {dependent_kind: dep_job.label}
-        signing_dependencies = dep_job.dependencies
-        # This is so we get the build task etc in our dependencies to
-        # have better beetmover support.
-        dependencies.update(signing_dependencies)
-
-        attributes = copy_attributes_from_dependent_job(dep_job)
-        locale = dep_job.attributes.get('locale')
-        if locale:
-            attributes['locale'] = locale
-            treeherder['symbol'] = "p({})".format(locale)
-
-        build_locale = locale or 'en-US'
-
-        builds = get_builds(config.params['release_history'], dep_th_platform,
-                            build_locale)
-
-        # If the list is empty there's no available history for this platform
-        # and locale combination, so we can't build any partials.
-        if not builds:
-            continue
-
-        signing_task = None
-        for dependency in dependencies.keys():
-            if 'repackage-signing' in dependency:
-                signing_task = dependency
-        signing_task_ref = '<{}>'.format(signing_task)
-
-        extra = {'funsize': {'partials': list()}}
-        update_number = 1
-        artifact_path = "{}{}".format(get_taskcluster_artifact_prefix(signing_task_ref, locale=locale), 'target.complete.mar')
-        for build in builds:
-            extra['funsize']['partials'].append({
-                'locale': build_locale,
-                'from_mar': builds[build]['mar_url'],
-                'to_mar': {'task-reference': artifact_path},
-                'platform': get_balrog_platform_name(dep_th_platform),
-                'branch': config.params['project'],
-                'update_number': update_number,
-                'dest_mar': build,
-            })
-            update_number += 1
-
-        cot = extra.setdefault('chainOfTrust', {})
-        cot.setdefault('inputs', {})['docker-image'] = {"task-reference": "<docker-image>"}
-
-        worker = {
-            'artifacts': _generate_task_output_files(builds.keys(), locale),
-            'implementation': 'docker-worker',
-            'docker-image': {'in-tree': 'partial-update-generator'},
-            'os': 'linux',
-            'max-run-time': 3600,
-            'chain-of-trust': True,
-            'env': {
-                'SHA1_SIGNING_CERT': 'nightly_sha1',
-                'SHA384_SIGNING_CERT': 'nightly_sha384'
-            }
-        }
-
-        level = config.params['level']
-
-        task = {
-            'label': label,
-            'description': "{} Partials".format(
-                dep_job.task["metadata"]["description"]),
-            'worker-type': 'aws-provisioner-v1/gecko-%s-b-linux' % level,
-            'dependencies': dependencies,
-            'attributes': attributes,
-            'run-on-projects': dep_job.attributes.get('run_on_projects'),
-            'treeherder': treeherder,
-            'extra': extra,
-            'worker': worker,
-        }
-
-        yield task
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/partials_signing.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-Transform the partials task into an actual task description.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-
-from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.attributes import copy_attributes_from_dependent_job
-from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
-from taskgraph.util.partials import get_balrog_platform_name, get_partials_artifacts
-
-import logging
-logger = logging.getLogger(__name__)
-
-transforms = TransformSequence()
-
-
-def generate_upstream_artifacts(release_history, platform, locale=None):
-    artifact_prefix = 'public/build'
-    if locale:
-        artifact_prefix = 'public/build/{}'.format(locale)
-    else:
-        locale = 'en-US'
-
-    artifacts = get_partials_artifacts(release_history, platform, locale)
-
-    upstream_artifacts = [{
-        "taskId": {"task-reference": '<partials>'},
-        "taskType": 'partials',
-        "paths": ["{}/{}".format(artifact_prefix, p)
-                  for p in artifacts],
-        "formats": ["mar_sha384"],
-    }]
-
-    return upstream_artifacts
-
-
-@transforms.add
-def make_task_description(config, jobs):
-    for job in jobs:
-        dep_job = job['dependent-task']
-
-        treeherder = job.get('treeherder', {})
-        treeherder.setdefault('symbol', 'ps(N)')
-
-        dep_th_platform = dep_job.task.get('extra', {}).get(
-            'treeherder', {}).get('machine', {}).get('platform', '')
-        label = job.get('label', "partials-signing-{}".format(dep_job.label))
-        dep_th_platform = dep_job.task.get('extra', {}).get(
-            'treeherder', {}).get('machine', {}).get('platform', '')
-        treeherder.setdefault('platform',
-                              "{}/opt".format(dep_th_platform))
-        treeherder.setdefault('kind', 'build')
-        treeherder.setdefault('tier', 3)
-
-        dependent_kind = str(dep_job.kind)
-        dependencies = {dependent_kind: dep_job.label}
-        signing_dependencies = dep_job.dependencies
-        # This is so we get the build task etc in our dependencies to
-        # have better beetmover support.
-        dependencies.update(signing_dependencies)
-
-        attributes = copy_attributes_from_dependent_job(dep_job)
-        locale = dep_job.attributes.get('locale')
-        if locale:
-            attributes['locale'] = locale
-            treeherder['symbol'] = 'ps({})'.format(locale)
-
-        balrog_platform = get_balrog_platform_name(dep_th_platform)
-        upstream_artifacts = generate_upstream_artifacts(config.params['release_history'], balrog_platform, locale)
-
-        build_platform = dep_job.attributes.get('build_platform')
-        is_nightly = dep_job.attributes.get('nightly')
-        signing_cert_scope = get_signing_cert_scope_per_platform(
-            build_platform, is_nightly, config
-        )
-        scopes = [signing_cert_scope, 'project:releng:signing:format:mar_sha384']
-        task = {
-            'label': label,
-            'description': "{} Partials".format(
-                dep_job.task["metadata"]["description"]),
-            'worker-type': 'scriptworker-prov-v1/signing-linux-v1',
-            'worker': {'implementation': 'scriptworker-signing',
-                           'upstream-artifacts': upstream_artifacts,
-                           'max-run-time': 3600},
-            'dependencies': dependencies,
-            'attributes': attributes,
-            'scopes': scopes,
-            'run-on-projects': dep_job.attributes.get('run_on_projects'),
-            'treeherder': treeherder,
-        }
-
-        yield task
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -5,20 +5,22 @@
 Transform the repackage task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import validate_schema, Schema
-from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Any, Required, Optional
 
+_TC_ARTIFACT_LOCATION = \
+        'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/public/build/{postfix}'
+
 transforms = TransformSequence()
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
 # shortcut for a string where task references are allowed
 taskref_or_string = Any(
@@ -196,18 +198,18 @@ def _generate_task_mozharness_config(bui
             return ['repackage/linux{}_signed.py'.format(bits)]
         elif build_platform.startswith('win'):
             return ['repackage/win{}_signed.py'.format(bits)]
 
     raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
 
 
 def _generate_task_env(build_platform, build_task_ref, signing_task_ref, locale=None):
-    mar_prefix = get_taskcluster_artifact_prefix(build_task_ref, postfix='host/bin/', locale=None)
-    signed_prefix = get_taskcluster_artifact_prefix(signing_task_ref, locale=locale)
+    mar_prefix = _generate_taskcluster_prefix(build_task_ref, postfix='host/bin/', locale=None)
+    signed_prefix = _generate_taskcluster_prefix(signing_task_ref, locale=locale)
 
     if build_platform.startswith('linux') or build_platform.startswith('macosx'):
         tarball_extension = 'bz2' if build_platform.startswith('linux') else 'gz'
         return {
             'SIGNED_INPUT': {'task-reference': '{}target.tar.{}'.format(
                 signed_prefix, tarball_extension
             )},
             'UNSIGNED_MAR': {'task-reference': '{}mar'.format(mar_prefix)},
@@ -224,16 +226,23 @@ def _generate_task_env(build_platform, b
             task_env['SIGNED_SETUP_STUB'] = {
                 'task-reference': '{}setup-stub.exe'.format(signed_prefix),
             }
         return task_env
 
     raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
 
 
+def _generate_taskcluster_prefix(task_id, postfix='', locale=None):
+    if locale:
+        postfix = '{}/{}'.format(locale, postfix)
+
+    return _TC_ARTIFACT_LOCATION.format(task_id=task_id, postfix=postfix)
+
+
 def _generate_task_output_files(build_platform, locale=None):
     locale_output_path = '{}/'.format(locale) if locale else ''
 
     if build_platform.startswith('linux') or build_platform.startswith('macosx'):
         output_files = [{
             'type': 'file',
             'path': '/builds/worker/workspace/build/artifacts/{}target.complete.mar'
                     .format(locale_output_path),
--- a/taskcluster/taskgraph/transforms/repackage_signing.py
+++ b/taskcluster/taskgraph/transforms/repackage_signing.py
@@ -124,14 +124,28 @@ def make_repackage_signing_description(c
                        'max-run-time': 3600},
             'scopes': scopes,
             'dependencies': dependencies,
             'attributes': attributes,
             'run-on-projects': dep_job.attributes.get('run_on_projects'),
             'treeherder': treeherder,
         }
 
+        funsize_platforms = [
+            'linux-nightly',
+            'linux64-nightly',
+            'macosx64-nightly',
+            'win32-nightly',
+            'win64-nightly'
+        ]
+        if build_platform in funsize_platforms and is_nightly:
+            route_template = "project.releng.funsize.level-{level}.{project}"
+            task['routes'] = [
+                route_template.format(project=config.params['project'],
+                                      level=config.params['level'])
+            ]
+
         yield task
 
 
 def _generate_worker_type(signing_cert_scope):
     worker_type = 'depsigning' if 'dep-signing' in signing_cert_scope else 'signing-linux-v1'
     return 'scriptworker-prov-v1/{}'.format(worker_type)
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -494,33 +494,29 @@ GROUP_NAMES = {
     'tc-W': 'Web platform tests executed by TaskCluster',
     'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
     'tc-X': 'Xpcshell tests executed by TaskCluster',
     'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
     'tc-L10n': 'Localised Repacks executed by Taskcluster',
     'tc-L10n-Rpk': 'Localized Repackaged Repacks executed by Taskcluster',
     'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster',
     'tc-BMR-L10n': 'Beetmover repackages for locales executed by Taskcluster',
-    'c-Up': 'Balrog submission of complete updates',
+    'tc-Up': 'Balrog submission of updates, executed by Taskcluster',
     'tc-cs': 'Checksum signing executed by Taskcluster',
     'tc-rs': 'Repackage signing executed by Taskcluster',
     'tc-BMcs': 'Beetmover checksums, executed by Taskcluster',
     'Aries': 'Aries Device Image',
     'Nexus 5-L': 'Nexus 5-L Device Image',
     'I': 'Docker Image Builds',
     'TL': 'Toolchain builds for Linux 64-bits',
     'TM': 'Toolchain builds for OSX',
     'TW32': 'Toolchain builds for Windows 32-bits',
     'TW64': 'Toolchain builds for Windows 64-bits',
     'SM-tc': 'Spidermonkey builds',
     'pub': 'APK publishing',
-    'p': 'Partial generation',
-    'ps': 'Partials signing',
-    'pBM': 'Beetmover for partials',
-    'cp-Up': 'Balrog submission of updates, completes and partials',
 }
 UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
 
 V2_ROUTE_TEMPLATES = [
     "index.gecko.v2.{project}.latest.{product}.{job-name}",
     "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
     "index.gecko.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
     "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name}",
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/task.py.orig
+++ /dev/null
@@ -1,1328 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-These transformations take a task description and turn it into a TaskCluster
-task definition (along with attributes, label, etc.).  The input to these
-transformations is generic to any kind of task, but abstracts away some of the
-complexities of worker implementations, scopes, and treeherder annotations.
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import hashlib
-import json
-import os
-import re
-import time
-from copy import deepcopy
-
-from mozbuild.util import memoize
-from taskgraph.util.attributes import TRUNK_PROJECTS
-from taskgraph.util.hash import hash_path
-from taskgraph.util.treeherder import split_symbol
-from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.schema import validate_schema, Schema
-from taskgraph.util.scriptworker import get_release_config
-from voluptuous import Any, Required, Optional, Extra
-from taskgraph import GECKO
-from ..util import docker as dockerutil
-
-from .gecko_v2_whitelist import JOB_NAME_WHITELIST, JOB_NAME_WHITELIST_ERROR
-
-
-RUN_TASK = os.path.join(GECKO, 'taskcluster', 'docker', 'recipes', 'run-task')
-
-
-@memoize
-def _run_task_suffix():
-    """String to append to cache names under control of run-task."""
-    return hash_path(RUN_TASK)[0:20]
-
-
-# shortcut for a string where task references are allowed
-taskref_or_string = Any(
-    basestring,
-    {Required('task-reference'): basestring})
-
-# A task description is a general description of a TaskCluster task
-task_description_schema = Schema({
-    # the label for this task
-    Required('label'): basestring,
-
-    # description of the task (for metadata)
-    Required('description'): basestring,
-
-    # attributes for this task
-    Optional('attributes'): {basestring: object},
-
-    # relative path (from config.path) to the file task was defined in
-    Optional('job-from'): basestring,
-
-    # dependencies of this task, keyed by name; these are passed through
-    # verbatim and subject to the interpretation of the Task's get_dependencies
-    # method.
-    Optional('dependencies'): {basestring: object},
-
-    # expiration and deadline times, relative to task creation, with units
-    # (e.g., "14 days").  Defaults are set based on the project.
-    Optional('expires-after'): basestring,
-    Optional('deadline-after'): basestring,
-
-    # custom routes for this task; the default treeherder routes will be added
-    # automatically
-    Optional('routes'): [basestring],
-
-    # custom scopes for this task; any scopes required for the worker will be
-    # added automatically. The following parameters will be substituted in each
-    # scope:
-    #  {level} -- the scm level of this push
-    Optional('scopes'): [basestring],
-
-    # Tags
-    Optional('tags'): {basestring: basestring},
-
-    # custom "task.extra" content
-    Optional('extra'): {basestring: object},
-
-    # treeherder-related information; see
-    # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
-    # If not specified, no treeherder extra information or routes will be
-    # added to the task
-    Optional('treeherder'): {
-        # either a bare symbol, or "grp(sym)".
-        'symbol': basestring,
-
-        # the job kind
-        'kind': Any('build', 'test', 'other'),
-
-        # tier for this task
-        'tier': int,
-
-        # task platform, in the form platform/collection, used to set
-        # treeherder.machine.platform and treeherder.collection or
-        # treeherder.labels
-        'platform': basestring,
-
-        # treeherder environments (defaults to both staging and production)
-        Required('environments', default=['production', 'staging']): ['production', 'staging'],
-    },
-
-    # information for indexing this build so its artifacts can be discovered;
-    # if omitted, the build will not be indexed.
-    Optional('index'): {
-        # the name of the product this build produces
-        'product': Any('firefox', 'mobile', 'static-analysis', 'devedition'),
-
-        # the names to use for this job in the TaskCluster index
-        'job-name': basestring,
-
-        # Type of gecko v2 index to use
-        'type': Any('generic', 'nightly', 'l10n', 'nightly-with-multi-l10n'),
-
-        # The rank that the task will receive in the TaskCluster
-        # index.  A newly completed task supersedes the currently
-        # indexed task iff it has a higher rank.  If unspecified,
-        # 'by-tier' behavior will be used.
-        'rank': Any(
-            # Rank is equal to the timestamp of the build_date for tier-1
-            # tasks, and zero for non-tier-1.  This sorts tier-{2,3}
-            # builds below tier-1 in the index.
-            'by-tier',
-
-            # Rank is given as an integer constant (e.g. zero to make
-            # sure a task is last in the index).
-            int,
-
-            # Rank is equal to the timestamp of the build_date.  This
-            # option can be used to override the 'by-tier' behavior
-            # for non-tier-1 tasks.
-            'build_date',
-        ),
-    },
-
-    # The `run_on_projects` attribute, defaulting to "all".  This dictates the
-    # projects on which this task should be included in the target task set.
-    # See the attributes documentation for details.
-    Optional('run-on-projects'): [basestring],
-
-    # If the task can be coalesced, this is the name used in the coalesce key;
-    # the project, etc. will be added automatically.  Note that try (level 1)
-    # tasks are never coalesced.
-    Optional('coalesce-name'): basestring,
-
-    # Optimizations to perform on this task during the optimization phase,
-    # specified in order.  These optimizations are defined in
-    # taskcluster/taskgraph/optimize.py.
-    Optional('optimizations'): [Any(
-        # search the index for the given index namespace, and replace this task if found
-        ['index-search', basestring],
-        # consult SETA and skip this task if it is low-value
-        ['seta'],
-        # skip this task if none of the given file patterns match
-        ['skip-unless-changed', [basestring]],
-    )],
-
-    # the provisioner-id/worker-type for the task.  The following parameters will
-    # be substituted in this string:
-    #  {level} -- the scm level of this push
-    'worker-type': basestring,
-
-    # Whether the job should use sccache compiler caching.
-    Required('needs-sccache', default=False): bool,
-
-    # information specific to the worker implementation that will run this task
-    'worker': Any({
-        Required('implementation'): Any('docker-worker', 'docker-engine'),
-        Required('os'): 'linux',
-
-        # For tasks that will run in docker-worker or docker-engine, this is the
-        # name of the docker image or in-tree docker image to run the task in.  If
-        # in-tree, then a dependency will be created automatically.  This is
-        # generally `desktop-test`, or an image that acts an awful lot like it.
-        Required('docker-image'): Any(
-            # a raw Docker image path (repo/image:tag)
-            basestring,
-            # an in-tree generated docker image (from `taskcluster/docker/<name>`)
-            {'in-tree': basestring},
-            # an indexed docker image
-            {'indexed': basestring},
-        ),
-
-        # worker features that should be enabled
-        Required('relengapi-proxy', default=False): bool,
-        Required('chain-of-trust', default=False): bool,
-        Required('taskcluster-proxy', default=False): bool,
-        Required('allow-ptrace', default=False): bool,
-        Required('loopback-video', default=False): bool,
-        Required('loopback-audio', default=False): bool,
-        Required('docker-in-docker', default=False): bool,  # (aka 'dind')
-
-        # Paths to Docker volumes.
-        #
-        # For in-tree Docker images, volumes can be parsed from Dockerfile.
-        # This only works for the Dockerfile itself: if a volume is defined in
-        # a base image, it will need to be declared here. Out-of-tree Docker
-        # images will also require explicit volume annotation.
-        #
-        # Caches are often mounted to the same path as Docker volumes. In this
-        # case, they take precedence over a Docker volume. But a volume still
-        # needs to be declared for the path.
-        Optional('volumes', default=[]): [basestring],
-
-        # caches to set up for the task
-        Optional('caches'): [{
-            # only one type is supported by any of the workers right now
-            'type': 'persistent',
-
-            # name of the cache, allowing re-use by subsequent tasks naming the
-            # same cache
-            'name': basestring,
-
-            # location in the task image where the cache will be mounted
-            'mount-point': basestring,
-
-            # Whether to skip using this cache in untrusted environments
-            # (like the Try repo).
-            Optional('skip-untrusted', default=False): bool,
-        }],
-
-        # artifacts to extract from the task image after completion
-        Optional('artifacts'): [{
-            # type of artifact -- simple file, or recursive directory
-            'type': Any('file', 'directory'),
-
-            # task image path from which to read artifact
-            'path': basestring,
-
-            # name of the produced artifact (root of the names for
-            # type=directory)
-            'name': basestring,
-        }],
-
-        # environment variables
-        Required('env', default={}): {basestring: taskref_or_string},
-
-        # the command to run; if not given, docker-worker will default to the
-        # command in the docker image
-        Optional('command'): [taskref_or_string],
-
-        # the maximum time to run, in seconds
-        Required('max-run-time'): int,
-
-        # the exit status code that indicates the task should be retried
-        Optional('retry-exit-status'): int,
-    }, {
-        Required('implementation'): 'generic-worker',
-        Required('os'): Any('windows', 'macosx'),
-        # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
-        # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
-
-        # command is a list of commands to run sequentially.
-        # On Windows each command is a string; on OS X and Linux each command is
-        # a string array.
-        Required('command'): Any(
-            [taskref_or_string],   # Windows
-            [[taskref_or_string]]  # Linux / OS X
-        ),
-
-        # artifacts to extract from the task image after completion; note that artifacts
-        # for the generic worker cannot have names
-        Optional('artifacts'): [{
-            # type of artifact -- simple file, or recursive directory
-            'type': Any('file', 'directory'),
-
-            # filesystem path from which to read artifact
-            'path': basestring,
-
-            # if not specified, path is used for artifact name
-            Optional('name'): basestring
-        }],
-
-        # Directories and/or files to be mounted.
-        # The actual allowed combinations are stricter than the model below,
-        # but this provides a simple starting point.
-        # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
-        Optional('mounts'): [{
-            # A unique name for the cache volume, implies writable cache directory
-            # (otherwise mount is a read-only file or directory).
-            Optional('cache-name'): basestring,
-            # Optional content for pre-loading cache, or mandatory content for
-            # read-only file or directory. Pre-loaded content can come from either
-            # a task artifact or from a URL.
-            Optional('content'): {
-
-                # *** Either (artifact and task-id) or url must be specified. ***
-
-                # Artifact name that contains the content.
-                Optional('artifact'): basestring,
-                # Task ID that has the artifact that contains the content.
-                Optional('task-id'): taskref_or_string,
-                # URL that supplies the content in response to an unauthenticated
-                # GET request.
-                Optional('url'): basestring
-            },
-
-            # *** Either file or directory must be specified. ***
-
-            # If mounting a cache or read-only directory, the filesystem location of
-            # the directory should be specified as a relative path to the task
-            # directory here.
-            Optional('directory'): basestring,
-            # If mounting a file, specify the relative path within the task
-            # directory to mount the file (the file will be read only).
-            Optional('file'): basestring,
-            # Required if and only if `content` is specified and mounting a
-            # directory (not a file). This should be the archive format of the
-            # content (either pre-loaded cache or read-only directory).
-            Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
-        }],
-
-        # environment variables
-        Required('env', default={}): {basestring: taskref_or_string},
-
-        # the maximum time to run, in seconds
-        Required('max-run-time'): int,
-
-        # os user groups for test task workers
-        Optional('os-groups', default=[]): [basestring],
-
-        # optional features
-        Required('chain-of-trust', default=False): bool,
-    }, {
-        Required('implementation'): 'buildbot-bridge',
-
-        # see
-        # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
-        Required('buildername'): basestring,
-        Required('sourcestamp'): {
-            'branch': basestring,
-            Optional('revision'): basestring,
-            Optional('repository'): basestring,
-            Optional('project'): basestring,
-        },
-        Required('properties'): {
-            'product': basestring,
-            Extra: taskref_or_string,  # additional properties are allowed
-        },
-    }, {
-        Required('implementation'): 'native-engine',
-        Required('os'): Any('macosx', 'linux'),
-
-        # A link for an executable to download
-        Optional('context'): basestring,
-
-        # Tells the worker whether machine should reboot
-        # after the task is finished.
-        Optional('reboot'):
-            Any('always', 'on-exception', 'on-failure'),
-
-        # the command to run
-        Optional('command'): [taskref_or_string],
-
-        # environment variables
-        Optional('env'): {basestring: taskref_or_string},
-
-        # artifacts to extract from the task image after completion
-        Optional('artifacts'): [{
-            # type of artifact -- simple file, or recursive directory
-            Required('type'): Any('file', 'directory'),
-
-            # task image path from which to read artifact
-            Required('path'): basestring,
-
-            # name of the produced artifact (root of the names for
-            # type=directory)
-            Required('name'): basestring,
-        }],
-    }, {
-        Required('implementation'): 'scriptworker-signing',
-
-        # the maximum time to spend signing, in seconds
-        Required('max-run-time', default=600): int,
-
-        # list of artifact URLs for the artifacts that should be signed
-        Required('upstream-artifacts'): [{
-            # taskId of the task with the artifact
-            Required('taskId'): taskref_or_string,
-
-            # type of signing task (for CoT)
-            Required('taskType'): basestring,
-
-            # Paths to the artifacts to sign
-            Required('paths'): [basestring],
-
-            # Signing formats to use on each of the paths
-            Required('formats'): [basestring],
-        }],
-    }, {
-        Required('implementation'): 'beetmover',
-
-        # the maximum time to run, in seconds
-        Required('max-run-time', default=600): int,
-
-        # locale key, if this is a locale beetmover job
-        Optional('locale'): basestring,
-
-        # list of artifact URLs for the artifacts that should be beetmoved
-        Required('upstream-artifacts'): [{
-            # taskId of the task with the artifact
-            Required('taskId'): taskref_or_string,
-
-            # type of signing task (for CoT)
-            Required('taskType'): basestring,
-
-            # Paths to the artifacts to sign
-            Required('paths'): [basestring],
-
-            # locale is used to map the upload path and to allow duplicate simple names
-            Required('locale'): basestring,
-        }],
-    }, {
-        Required('implementation'): 'balrog',
-
-        # list of artifact URLs for the artifacts that should be submitted to balrog
-        Required('upstream-artifacts'): [{
-            # taskId of the task with the artifact
-            Required('taskId'): taskref_or_string,
-
-            # type of signing task (for CoT)
-            Required('taskType'): basestring,
-
-            # Paths to the artifacts to sign
-            Required('paths'): [basestring],
-        }],
-    }, {
-        Required('implementation'): 'push-apk-breakpoint',
-        Required('payload'): object,
-
-    }, {
-        Required('implementation'): 'invalid',
-        # an invalid task is one which should never actually be created; this is used in
-        # release automation on branches where the task just doesn't make sense
-        Extra: object,
-
-    }, {
-        Required('implementation'): 'push-apk',
-
-        # list of artifact URLs for the artifacts that should be pushed to Google Play
-        Required('upstream-artifacts'): [{
-            # taskId of the task with the artifact
-            Required('taskId'): taskref_or_string,
-
-            # type of signing task (for CoT)
-            Required('taskType'): basestring,
-
-            # Paths to the artifacts to sign
-            Required('paths'): [basestring],
-        }],
-
-        # "Invalid" is a noop for try and other non-supported branches
-        Required('google-play-track'): Any('production', 'beta', 'alpha', 'rollout', 'invalid'),
-        Required('dry-run', default=True): bool,
-        Optional('rollout-percentage'): int,
-    }),
-})
-
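A minimal task description accepted by this schema might look like the following sketch (every value is illustrative and not taken from this patch; optional keys whose defaults are filled in by the schema are omitted):

    task = {
        'label': 'build-linux64/opt',
        'description': 'Linux64 opt build',
        'worker-type': 'aws-provisioner-v1/gecko-{level}-b-linux',  # illustrative
        'worker': {
            'implementation': 'docker-worker',
            'os': 'linux',
            'docker-image': {'in-tree': 'desktop-build'},  # illustrative image name
            'max-run-time': 3600,
        },
        'treeherder': {
            'symbol': 'B',
            'kind': 'build',
            'tier': 1,
            'platform': 'linux64/opt',
        },
    }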
-GROUP_NAMES = {
-    'cram': 'Cram tests',
-    'mocha': 'Mocha unit tests',
-    'py': 'Python unit tests',
-    'tc': 'Executed by TaskCluster',
-    'tc-e10s': 'Executed by TaskCluster with e10s',
-    'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
-    'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
-    'tc-Fxfn-r': 'Firefox functional tests (remote) executed by TaskCluster',
-    'tc-Fxfn-r-e10s': 'Firefox functional tests (remote) executed by TaskCluster with e10s',
-    'tc-M': 'Mochitests executed by TaskCluster',
-    'tc-M-e10s': 'Mochitests executed by TaskCluster with e10s',
-    'tc-M-V': 'Mochitests on Valgrind executed by TaskCluster',
-    'tc-R': 'Reftests executed by TaskCluster',
-    'tc-R-e10s': 'Reftests executed by TaskCluster with e10s',
-    'tc-T': 'Talos performance tests executed by TaskCluster',
-    'tc-Tsd': 'Talos performance tests executed by TaskCluster with Stylo disabled',
-    'tc-Tss': 'Talos performance tests executed by TaskCluster with Stylo sequential',
-    'tc-T-e10s': 'Talos performance tests executed by TaskCluster with e10s',
-    'tc-Tsd-e10s': 'Talos performance tests executed by TaskCluster with e10s, Stylo disabled',
-    'tc-Tss-e10s': 'Talos performance tests executed by TaskCluster with e10s, Stylo sequential',
-    'tc-tt-c': 'Telemetry client marionette tests',
-    'tc-tt-c-e10s': 'Telemetry client marionette tests with e10s',
-    'tc-SY-e10s': 'Are we slim yet tests by TaskCluster with e10s',
-    'tc-SYsd-e10s': 'Are we slim yet tests by TaskCluster with e10s, Stylo disabled',
-    'tc-SYss-e10s': 'Are we slim yet tests by TaskCluster with e10s, Stylo sequential',
-    'tc-VP': 'VideoPuppeteer tests executed by TaskCluster',
-    'tc-W': 'Web platform tests executed by TaskCluster',
-    'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
-    'tc-X': 'Xpcshell tests executed by TaskCluster',
-    'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
-    'tc-L10n': 'Localised Repacks executed by Taskcluster',
-    'tc-L10n-Rpk': 'Localized Repackaged Repacks executed by Taskcluster',
-    'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster',
-    'tc-BMR-L10n': 'Beetmover repackages for locales executed by Taskcluster',
-    'tc-Up': 'Balrog submission of updates, executed by Taskcluster',
-    'tc-cs': 'Checksum signing executed by Taskcluster',
-    'tc-rs': 'Repackage signing executed by Taskcluster',
-    'tc-BMcs': 'Beetmover checksums, executed by Taskcluster',
-    'Aries': 'Aries Device Image',
-    'Nexus 5-L': 'Nexus 5-L Device Image',
-    'I': 'Docker Image Builds',
-    'TL': 'Toolchain builds for Linux 64-bits',
-    'TM': 'Toolchain builds for OSX',
-    'TW32': 'Toolchain builds for Windows 32-bits',
-    'TW64': 'Toolchain builds for Windows 64-bits',
-    'SM-tc': 'Spidermonkey builds',
-    'pub': 'APK publishing',
-}
-UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
-
-V2_ROUTE_TEMPLATES = [
-    "index.gecko.v2.{project}.latest.{product}.{job-name}",
-    "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
-    "index.gecko.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
-    "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name}",
-]
-
-# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates
-# walking of tasks with similar configurations.
-V2_TRUNK_ROUTE_TEMPLATES = [
-    "index.gecko.v2.trunk.revision.{head_rev}.{product}.{job-name}",
-]
-
-V2_NIGHTLY_TEMPLATES = [
-    "index.gecko.v2.{project}.nightly.latest.{product}.{job-name}",
-    "index.gecko.v2.{project}.nightly.{build_date}.revision.{head_rev}.{product}.{job-name}",
-    "index.gecko.v2.{project}.nightly.{build_date}.latest.{product}.{job-name}",
-    "index.gecko.v2.{project}.nightly.revision.{head_rev}.{product}.{job-name}",
-]
-
-V2_L10N_TEMPLATES = [
-    "index.gecko.v2.{project}.revision.{head_rev}.{product}-l10n.{job-name}.{locale}",
-    "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}",
-    "index.gecko.v2.{project}.latest.{product}-l10n.{job-name}.{locale}",
-]
-
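These index route templates (V2_ROUTE_TEMPLATES and the variants above) are expanded with str.format using the decision task's parameters; a minimal sketch of the expansion, with illustrative parameter values:

    template = "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name}"
    subs = {
        'project': 'mozilla-central',   # illustrative
        'head_rev': 'abcdef123456',     # illustrative
        'product': 'firefox',
        'job-name': 'linux64-opt',
    }
    # 'job-name' contains a hyphen, so it has to be supplied via **subs
    route = template.format(**subs)
    # -> "index.gecko.v2.mozilla-central.revision.abcdef123456.firefox.linux64-opt"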
-# the roots of the treeherder routes, keyed by treeherder environment
-TREEHERDER_ROUTE_ROOTS = {
-    'production': 'tc-treeherder',
-    'staging': 'tc-treeherder-stage',
-}
-
-COALESCE_KEY = 'builds.{project}.{name}'
-
-DEFAULT_BRANCH_PRIORITY = 'low'
-BRANCH_PRIORITIES = {
-    'mozilla-release': 'highest',
-    'comm-esr45': 'highest',
-    'comm-esr52': 'highest',
-    'mozilla-esr45': 'very-high',
-    'mozilla-esr52': 'very-high',
-    'mozilla-beta': 'high',
-    'comm-beta': 'high',
-    'mozilla-central': 'medium',
-    'comm-central': 'medium',
-    'comm-aurora': 'medium',
-    'autoland': 'low',
-    'mozilla-inbound': 'low',
-    'try': 'very-low',
-    'try-comm-central': 'very-low',
-    'alder': 'very-low',
-    'ash': 'very-low',
-    'birch': 'very-low',
-    'cedar': 'very-low',
-    'cypress': 'very-low',
-    'date': 'very-low',
-    'elm': 'very-low',
-    'fig': 'very-low',
-    'gum': 'very-low',
-    'holly': 'very-low',
-    'jamun': 'very-low',
-    'larch': 'very-low',
-    'maple': 'very-low',
-    'oak': 'very-low',
-    'pine': 'very-low',
-    'graphics': 'very-low',
-    'ux': 'very-low',
-}
-
-# define a collection of payload builders, depending on the worker implementation
-payload_builders = {}
-
-
-def payload_builder(name):
-    def wrap(func):
-        payload_builders[name] = func
-        return func
-    return wrap
-
-
-# define a collection of index builders, depending on the type implementation
-index_builders = {}
-
-
-def index_builder(name):
-    def wrap(func):
-        index_builders[name] = func
-        return func
-    return wrap
-
-
-@payload_builder('docker-worker')
-def build_docker_worker_payload(config, task, task_def):
-    worker = task['worker']
-    level = int(config.params['level'])
-
-    image = worker['docker-image']
-    if isinstance(image, dict):
-        if 'in-tree' in image:
-            name = image['in-tree']
-            docker_image_task = 'build-docker-image-' + image['in-tree']
-            task.setdefault('dependencies', {})['docker-image'] = docker_image_task
-
-            image = {
-                "path": "public/image.tar.zst",
-                "taskId": {"task-reference": "<docker-image>"},
-                "type": "task-image",
-            }
-
-            # Find VOLUME in Dockerfile.
-            volumes = dockerutil.parse_volumes(name)
-            for v in sorted(volumes):
-                if v in worker['volumes']:
-                    raise Exception('volume %s already defined; '
-                                    'if it is defined in a Dockerfile, '
-                                    'it does not need to be specified in the '
-                                    'worker definition' % v)
-
-                worker['volumes'].append(v)
-
-        elif 'indexed' in image:
-            image = {
-                "path": "public/image.tar.zst",
-                "namespace": image['indexed'],
-                "type": "indexed-image",
-            }
-        else:
-            raise Exception("unknown docker image type")
-
-    features = {}
-
-    if worker.get('relengapi-proxy'):
-        features['relengAPIProxy'] = True
-
-    if worker.get('taskcluster-proxy'):
-        features['taskclusterProxy'] = True
-
-    if worker.get('allow-ptrace'):
-        features['allowPtrace'] = True
-        task_def['scopes'].append('docker-worker:feature:allowPtrace')
-
-    if worker.get('chain-of-trust'):
-        features['chainOfTrust'] = True
-
-    if worker.get('docker-in-docker'):
-        features['dind'] = True
-
-    if task.get('needs-sccache'):
-        features['taskclusterProxy'] = True
-        task_def['scopes'].append(
-            'assume:project:taskcluster:level-{level}-sccache-buckets'.format(
-                level=config.params['level'])
-        )
-        worker['env']['USE_SCCACHE'] = '1'
-    else:
-        worker['env']['SCCACHE_DISABLE'] = '1'
-
-    capabilities = {}
-
-    for lo in 'audio', 'video':
-        if worker.get('loopback-' + lo):
-            capitalized = 'loopback' + lo.capitalize()
-            devices = capabilities.setdefault('devices', {})
-            devices[capitalized] = True
-            task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
-
-    task_def['payload'] = payload = {
-        'image': image,
-        'env': worker['env'],
-    }
-    if 'command' in worker:
-        payload['command'] = worker['command']
-
-    if 'max-run-time' in worker:
-        payload['maxRunTime'] = worker['max-run-time']
-
-    if 'retry-exit-status' in worker:
-        payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
-
-    if 'artifacts' in worker:
-        artifacts = {}
-        for artifact in worker['artifacts']:
-            artifacts[artifact['name']] = {
-                'path': artifact['path'],
-                'type': artifact['type'],
-                'expires': task_def['expires'],  # always expire with the task
-            }
-        payload['artifacts'] = artifacts
-
-    if isinstance(worker.get('docker-image'), basestring):
-        out_of_tree_image = worker['docker-image']
-    else:
-        out_of_tree_image = None
-
-    run_task = any([
-        payload.get('command', [''])[0].endswith('run-task'),
-        # image_builder is special and doesn't get detected like other tasks.
-        # It uses run-task so it needs our cache manipulations.
-        (out_of_tree_image or '').startswith('taskcluster/image_builder'),
-    ])
-
-    if 'caches' in worker:
-        caches = {}
-
-        # run-task knows how to validate caches.
-        #
-        # To help ensure new run-task features and bug fixes don't interfere
-        # with existing caches, we seed the hash of run-task into cache names.
-        # So, any time run-task changes, we should get a fresh set of caches.
-        # This means run-task can make changes to cache interaction at any time
-        # without regards for backwards or future compatibility.
-        #
-        # But this mechanism only works for in-tree Docker images that are built
-        # with the current run-task! For out-of-tree Docker images, we have no
-        # way of knowing the contents of their run-task. So, in addition to varying
-        # cache names by the contents of run-task, we also take the Docker image
-        # name into consideration. This means that different Docker images will
-        # never share the same cache. This is a bit unfortunate. But it is the
-        # safest thing to do. Fortunately, most images are defined in-tree.
-        #
-        # For out-of-tree Docker images, we don't strictly need to incorporate
-        # the run-task content into the cache name. However, doing so preserves
-        # the mechanism whereby changing run-task results in new caches
-        # everywhere.
-        if run_task:
-            suffix = '-%s' % _run_task_suffix()
-
-            if out_of_tree_image:
-                name_hash = hashlib.sha256(out_of_tree_image).hexdigest()
-                suffix += name_hash[0:12]
-
-        else:
-            suffix = ''
-
-        skip_untrusted = config.params['project'] == 'try' or level == 1
-
-        for cache in worker['caches']:
-            # Some caches aren't enabled in environments where we can't
-            # guarantee certain behavior. Filter those out.
-            if cache.get('skip-untrusted') and skip_untrusted:
-                continue
-
-            name = '%s%s' % (cache['name'], suffix)
-            caches[name] = cache['mount-point']
-            task_def['scopes'].append('docker-worker:cache:%s' % name)
-
-        # Assertion: only run-task is interested in this.
-        if run_task:
-            payload['env']['TASKCLUSTER_CACHES'] = ';'.join(sorted(
-                caches.values()))
-
-        payload['cache'] = caches
-
-    # And send down volumes information to run-task as well.
-    if run_task and worker.get('volumes'):
-        payload['env']['TASKCLUSTER_VOLUMES'] = ';'.join(
-            sorted(worker['volumes']))
-
-    if payload.get('cache') and skip_untrusted:
-        payload['env']['TASKCLUSTER_UNTRUSTED_CACHES'] = '1'
-
-    if features:
-        payload['features'] = features
-    if capabilities:
-        payload['capabilities'] = capabilities
-
-    # coalesce / superseding
-    if 'coalesce-name' in task and level > 1:
-        key = COALESCE_KEY.format(
-            project=config.params['project'],
-            name=task['coalesce-name'])
-        payload['supersederUrl'] = "https://coalesce.mozilla-releng.net/v1/list/" + key
-
-    check_caches_are_volumes(task)
-
-
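To make the cache-naming scheme described in the comments above concrete, here is a minimal sketch of how the suffix is derived (hash_path and RUN_TASK come from this module; the image name is illustrative):

    import hashlib

    # Changing run-task changes every cache name (see _run_task_suffix above).
    suffix = '-' + hash_path(RUN_TASK)[0:20]

    # Out-of-tree images additionally mix the image name into the suffix, so
    # two different images never share a cache.
    out_of_tree_image = 'taskcluster/image_builder:0.1.5'  # illustrative
    suffix += hashlib.sha256(out_of_tree_image).hexdigest()[0:12]

    cache_name = 'level-3-checkouts' + suffix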
-@payload_builder('generic-worker')
-def build_generic_worker_payload(config, task, task_def):
-    worker = task['worker']
-
-    artifacts = []
-
-    for artifact in worker['artifacts']:
-        a = {
-            'path': artifact['path'],
-            'type': artifact['type'],
-            'expires': task_def['expires'],  # always expire with the task
-        }
-        if 'name' in artifact:
-            a['name'] = artifact['name']
-        artifacts.append(a)
-
-    # Need to copy over mounts, but rename keys to respect naming convention
-    #   * 'cache-name' -> 'cacheName'
-    #   * 'task-id'    -> 'taskId'
-    # All other key names are already suitable, and don't need renaming.
-    mounts = deepcopy(worker.get('mounts', []))
-    for mount in mounts:
-        if 'cache-name' in mount:
-            mount['cacheName'] = mount.pop('cache-name')
-        if 'content' in mount:
-            if 'task-id' in mount['content']:
-                mount['content']['taskId'] = mount['content'].pop('task-id')
-
-    task_def['payload'] = {
-        'command': worker['command'],
-        'artifacts': artifacts,
-        'env': worker.get('env', {}),
-        'mounts': mounts,
-        'maxRunTime': worker['max-run-time'],
-        'osGroups': worker.get('os-groups', []),
-    }
-
-    # needs-sccache is handled in mozharness_on_windows
-
-    if 'retry-exit-status' in worker:
-        raise Exception("retry-exit-status not supported in generic-worker")
-
-    # currently only support one feature (chain of trust) but this will likely grow
-    features = {}
-
-    if worker.get('chain-of-trust'):
-        features['chainOfTrust'] = True
-
-    if features:
-        task_def['payload']['features'] = features
-
-
-@payload_builder('scriptworker-signing')
-def build_scriptworker_signing_payload(config, task, task_def):
-    worker = task['worker']
-
-    task_def['payload'] = {
-        'maxRunTime': worker['max-run-time'],
-        'upstreamArtifacts':  worker['upstream-artifacts']
-    }
-
-
-@payload_builder('beetmover')
-def build_beetmover_payload(config, task, task_def):
-    worker = task['worker']
-    release_config = get_release_config(config)
-
-    task_def['payload'] = {
-        'maxRunTime': worker['max-run-time'],
-        'upload_date': config.params['build_date'],
-        'upstreamArtifacts':  worker['upstream-artifacts']
-    }
-    if worker.get('locale'):
-        task_def['payload']['locale'] = worker['locale']
-    if release_config:
-        task_def['payload'].update(release_config)
-
-
-@payload_builder('balrog')
-def build_balrog_payload(config, task, task_def):
-    worker = task['worker']
-
-    task_def['payload'] = {
-        'upstreamArtifacts':  worker['upstream-artifacts']
-    }
-
-
-@payload_builder('push-apk')
-def build_push_apk_payload(config, task, task_def):
-    worker = task['worker']
-
-    task_def['payload'] = {
-        'dry_run': worker['dry-run'],
-        'upstreamArtifacts':  worker['upstream-artifacts'],
-        'google_play_track': worker['google-play-track'],
-    }
-
-    if worker.get('rollout-percentage', None):
-        task_def['payload']['rollout_percentage'] = worker['rollout-percentage']
-
-
-@payload_builder('push-apk-breakpoint')
-def build_push_apk_breakpoint_payload(config, task, task_def):
-    task_def['payload'] = task['worker']['payload']
-
-
-@payload_builder('invalid')
-def build_invalid_payload(config, task, task_def):
-    task_def['payload'] = 'invalid task - should never be created'
-
-
-@payload_builder('native-engine')
-def build_macosx_engine_payload(config, task, task_def):
-    worker = task['worker']
-    artifacts = map(lambda artifact: {
-        'name': artifact['name'],
-        'path': artifact['path'],
-        'type': artifact['type'],
-        'expires': task_def['expires'],
-    }, worker.get('artifacts', []))
-
-    task_def['payload'] = {
-        'context': worker['context'],
-        'command': worker['command'],
-        'env': worker['env'],
-        'artifacts': artifacts,
-    }
-    if worker.get('reboot'):
-        task_def['payload']['reboot'] = worker['reboot']
-
-    if task.get('needs-sccache'):
-        raise Exception('needs-sccache not supported in native-engine')
-
-
-@payload_builder('buildbot-bridge')
-def build_buildbot_bridge_payload(config, task, task_def):
-    del task['extra']['treeherder']
-    del task['extra']['treeherderEnv']
-    worker = task['worker']
-    task_def['payload'] = {
-        'buildername': worker['buildername'],
-        'sourcestamp': worker['sourcestamp'],
-        'properties': worker['properties'],
-    }
-
-
-transforms = TransformSequence()
-
-
-@transforms.add
-def validate(config, tasks):
-    for task in tasks:
-        yield validate_schema(
-            task_description_schema, task,
-            "In task {!r}:".format(task.get('label', '?no-label?')))
-
-
-@index_builder('generic')
-def add_generic_index_routes(config, task):
-    index = task.get('index')
-    routes = task.setdefault('routes', [])
-
-    job_name = index['job-name']
-    if job_name not in JOB_NAME_WHITELIST:
-        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
-
-    subs = config.params.copy()
-    subs['job-name'] = job_name
-    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
-                                            time.gmtime(config.params['build_date']))
-    subs['product'] = index['product']
-
-    project = config.params.get('project')
-
-    for tpl in V2_ROUTE_TEMPLATES:
-        routes.append(tpl.format(**subs))
-
-    # Additionally alias all tasks for "trunk" repos into a common
-    # namespace.
-    if project and project in TRUNK_PROJECTS:
-        for tpl in V2_TRUNK_ROUTE_TEMPLATES:
-            routes.append(tpl.format(**subs))
-
-    return task
-
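As a worked example of the build_date substitutions used here and in the nightly builder below (the epoch value is illustrative):

    import time

    build_date = 1505088000  # 2017-09-11 00:00:00 UTC, illustrative
    time.strftime("%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(build_date))
    # -> '2017.09.11.20170911000000'  (build_date_long)
    time.strftime("%Y.%m.%d", time.gmtime(build_date))
    # -> '2017.09.11'                 (build_date)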
-
-@index_builder('nightly')
-def add_nightly_index_routes(config, task):
-    index = task.get('index')
-    routes = task.setdefault('routes', [])
-
-    job_name = index['job-name']
-    if job_name not in JOB_NAME_WHITELIST:
-        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
-
-    subs = config.params.copy()
-    subs['job-name'] = job_name
-    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
-                                            time.gmtime(config.params['build_date']))
-    subs['build_date'] = time.strftime("%Y.%m.%d",
-                                       time.gmtime(config.params['build_date']))
-    subs['product'] = index['product']
-
-    for tpl in V2_NIGHTLY_TEMPLATES:
-        routes.append(tpl.format(**subs))
-
-    # Also add routes for en-US
-    task = add_l10n_index_routes(config, task, force_locale="en-US")
-
-    return task
-
-
-@index_builder('nightly-with-multi-l10n')
-def add_nightly_multi_index_routes(config, task):
-    task = add_nightly_index_routes(config, task)
-    task = add_l10n_index_routes(config, task, force_locale="multi")
-    return task
-
-
-@index_builder('l10n')
-def add_l10n_index_routes(config, task, force_locale=None):
-    index = task.get('index')
-    routes = task.setdefault('routes', [])
-
-    job_name = index['job-name']
-    if job_name not in JOB_NAME_WHITELIST:
-        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
-
-    subs = config.params.copy()
-    subs['job-name'] = job_name
-    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
-                                            time.gmtime(config.params['build_date']))
-    subs['product'] = index['product']
-
-    locales = task['attributes'].get('chunk_locales',
-                                     task['attributes'].get('all_locales'))
-    # Some tasks have only one locale set
-    if task['attributes'].get('locale'):
-        locales = [task['attributes']['locale']]
-
-    if force_locale:
-        # Used for en-US and multi-locale
-        locales = [force_locale]
-
-    if not locales:
-        raise Exception("Error: Unable to use l10n index for tasks without locales")
-
-    # If there are too many locales, we can't write a route for all of them
-    # See Bug 1323792
-    if len(locales) > 18:  # 18 * 3 = 54, max routes = 64
-        return task
-
-    for locale in locales:
-        for tpl in V2_L10N_TEMPLATES:
-            routes.append(tpl.format(locale=locale, **subs))
-
-    return task
-
-
-@transforms.add
-def add_index_routes(config, tasks):
-    for task in tasks:
-        index = task.get('index')
-
-        if not index:
-            yield task
-            continue
-
-        index_type = index.get('type', 'generic')
-        task = index_builders[index_type](config, task)
-
-        # The default behavior is to rank tasks according to their tier
-        extra_index = task.setdefault('extra', {}).setdefault('index', {})
-        rank = index.get('rank', 'by-tier')
-
-        if rank == 'by-tier':
-            # rank is zero for non-tier-1 tasks and based on the build date for others;
-            # this sorts tier-{2,3} builds below tier-1 in the index
-            tier = task.get('treeherder', {}).get('tier', 3)
-            extra_index['rank'] = 0 if tier > 1 else int(config.params['build_date'])
-        elif rank == 'build_date':
-            extra_index['rank'] = int(config.params['build_date'])
-        else:
-            extra_index['rank'] = rank
-
-        del task['index']
-        yield task
-
-
-@transforms.add
-def build_task(config, tasks):
-    for task in tasks:
-        level = str(config.params['level'])
-        worker_type = task['worker-type'].format(level=level)
-        provisioner_id, worker_type = worker_type.split('/', 1)
-
-        routes = task.get('routes', [])
-        scopes = [s.format(level=level) for s in task.get('scopes', [])]
-
-        # set up extra
-        extra = task.get('extra', {})
-        task_th = task.get('treeherder')
-        if task_th:
-            extra['treeherderEnv'] = task_th['environments']
-
-            treeherder = extra.setdefault('treeherder', {})
-
-            machine_platform, collection = task_th['platform'].split('/', 1)
-            treeherder['machine'] = {'platform': machine_platform}
-            treeherder['collection'] = {collection: True}
-
-            groupSymbol, symbol = split_symbol(task_th['symbol'])
-            if groupSymbol != '?':
-                treeherder['groupSymbol'] = groupSymbol
-                if groupSymbol not in GROUP_NAMES:
-                    raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
-                treeherder['groupName'] = GROUP_NAMES[groupSymbol]
-            treeherder['symbol'] = symbol
-            treeherder['jobKind'] = task_th['kind']
-            treeherder['tier'] = task_th['tier']
-
-            routes.extend([
-                '{}.v2.{}.{}.{}'.format(TREEHERDER_ROUTE_ROOTS[env],
-                                        config.params['project'],
-                                        config.params['head_rev'],
-                                        config.params['pushlog_id'])
-                for env in task_th['environments']
-            ])
-
-        if 'expires-after' not in task:
-            task['expires-after'] = '28 days' if config.params['project'] == 'try' else '1 year'
-
-        if 'deadline-after' not in task:
-            task['deadline-after'] = '1 day'
-
-        if 'coalesce-name' in task and int(config.params['level']) > 1:
-            key = COALESCE_KEY.format(
-                project=config.params['project'],
-                name=task['coalesce-name'])
-            routes.append('coalesce.v1.' + key)
-
-        if 'priority' not in task:
-            task['priority'] = BRANCH_PRIORITIES.get(
-                config.params['project'],
-                DEFAULT_BRANCH_PRIORITY)
-
-        tags = task.get('tags', {})
-        tags.update({
-            'createdForUser': config.params['owner'],
-            'kind': config.kind,
-        })
-
-        task_def = {
-            'provisionerId': provisioner_id,
-            'workerType': worker_type,
-            'routes': routes,
-            'created': {'relative-datestamp': '0 seconds'},
-            'deadline': {'relative-datestamp': task['deadline-after']},
-            'expires': {'relative-datestamp': task['expires-after']},
-            'scopes': scopes,
-            'metadata': {
-                'description': task['description'],
-                'name': task['label'],
-                'owner': config.params['owner'],
-                'source': '{}/file/{}/{}'.format(
-                    config.params['head_repository'],
-                    config.params['head_rev'],
-                    config.path),
-            },
-            'extra': extra,
-            'tags': tags,
-            'priority': task['priority'],
-        }
-
-        if task_th:
-            # link back to treeherder in description
-            th_push_link = 'https://treeherder.mozilla.org/#/jobs?repo={}&revision={}'.format(
-                config.params['project'], config.params['head_rev'])
-            task_def['metadata']['description'] += ' ([Treeherder push]({}))'.format(
-                th_push_link)
-
-        # add the payload and adjust anything else as required (e.g., scopes)
-        payload_builders[task['worker']['implementation']](config, task, task_def)
-
-        attributes = task.get('attributes', {})
-        attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
-
-        # Set MOZ_AUTOMATION on all jobs.
-        if task['worker']['implementation'] in (
-            'generic-worker',
-            'docker-engine',
-            'native-engine',
-            'docker-worker',
-        ):
-            payload = task_def.get('payload')
-            if payload:
-                env = payload.setdefault('env', {})
-                env['MOZ_AUTOMATION'] = '1'
-
-        yield {
-            'label': task['label'],
-            'task': task_def,
-            'dependencies': task.get('dependencies', {}),
-            'attributes': attributes,
-            'optimizations': task.get('optimizations', []),
-        }
-
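For example, with the production treeherder root the route added above takes this shape (the parameter values are illustrative):

    '{}.v2.{}.{}.{}'.format('tc-treeherder', 'mozilla-central',
                            'abcdef123456', '12345')
    # -> 'tc-treeherder.v2.mozilla-central.abcdef123456.12345'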
-
-def check_caches_are_volumes(task):
-    """Ensures that all cache paths are defined as volumes.
-
-    Caches and volumes are the only filesystem locations whose content
-    isn't defined by the Docker image itself. Some caches are optional
-    depending on the job environment. We want paths that are potentially
-    caches to behave as similarly as possible regardless of whether a cache
-    is used. To help enforce this, we require that all paths used as caches
-    be declared as Docker volumes. This check won't catch all offenders,
-    but it is better than nothing.
-    """
-    volumes = set(task['worker']['volumes'])
-    paths = set(c['mount-point'] for c in task['worker'].get('caches', []))
-    missing = paths - volumes
-
-    if not missing:
-        return
-
-    raise Exception('task %s (image %s) has caches that are not declared as '
-                    'Docker volumes: %s' % (task['label'],
-                                            task['worker']['docker-image'],
-                                            ', '.join(sorted(missing))))
-
-
-@transforms.add
-def check_run_task_caches(config, tasks):
-    """Audit for caches requiring run-task.
-
-    run-task manages caches in certain ways. If a cache managed by run-task
-    is used by a non run-task task, it could cause problems. So we audit for
-    that and make sure certain cache names are exclusive to run-task.
-
-    IF YOU ARE TEMPTED TO MAKE EXCLUSIONS TO THIS POLICY, YOU ARE LIKELY
-    CONTRIBUTING TECHNICAL DEBT AND WILL HAVE TO SOLVE MANY OF THE PROBLEMS
-    THAT RUN-TASK ALREADY SOLVES. THINK LONG AND HARD BEFORE DOING THAT.
-    """
-    re_reserved_caches = re.compile('''^
-        (level-\d+-checkouts|level-\d+-tooltool-cache)
-    ''', re.VERBOSE)
-
-    re_sparse_checkout_cache = re.compile('^level-\d+-checkouts-sparse')
-
-    suffix = _run_task_suffix()
-
-    for task in tasks:
-        payload = task['task'].get('payload', {})
-        command = payload.get('command') or ['']
-
-        main_command = command[0] if isinstance(command[0], basestring) else ''
-        run_task = main_command.endswith('run-task')
-
-        require_sparse_cache = False
-        have_sparse_cache = False
-
-        if run_task:
-            for arg in command[1:]:
-                if not isinstance(arg, basestring):
-                    continue
-
-                if arg == '--':
-                    break
-
-                if arg.startswith('--sparse-profile'):
-                    require_sparse_cache = True
-                    break
-
-        for cache in payload.get('cache', {}):
-            if re_sparse_checkout_cache.match(cache):
-                have_sparse_cache = True
-
-            if not re_reserved_caches.match(cache):
-                continue
-
-            if not run_task:
-                raise Exception(
-                    '%s is using a cache (%s) reserved for run-task; '
-                    'change the task to use run-task or use a different '
-                    'cache name' % (task['label'], cache))
-
-            if not cache.endswith(suffix):
-                raise Exception(
-                    '%s is using a cache (%s) reserved for run-task '
-                    'but the cache name is not dependent on the contents '
-                    'of run-task; change the cache name to conform to the '
-                    'naming requirements' % (task['label'], cache))
-
-        if require_sparse_cache and not have_sparse_cache:
-            raise Exception('%s is using a sparse checkout but not using '
-                            'a sparse checkout cache; change the checkout '
-                            'cache name so it is sparse aware' % task['label'])
-
-        yield task
-
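A few cache names and how the two patterns defined in the function above classify them (the names are illustrative but follow the level-N convention used in-tree):

    re_reserved_caches.match('level-3-checkouts-v1-abcdef0123456789abcd')        # matches
    re_reserved_caches.match('level-1-tooltool-cache-v1-abcdef0123456789abcd')   # matches
    re_sparse_checkout_cache.match('level-3-checkouts-sparse-v1-abcdef01234')    # matches
    re_reserved_caches.match('level-3-build-workspace')                          # None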
-
-# Check that the v2 route templates match those used by Mozharness.  This can
-# go away once Mozharness builds are no longer performed in Buildbot, and the
-# Mozharness code referencing routes.json is deleted.
-def check_v2_routes():
-    with open(os.path.join(GECKO, "testing/mozharness/configs/routes.json"), "rb") as f:
-        routes_json = json.load(f)
-
-    for key in ('routes', 'nightly', 'l10n'):
-        if key == 'routes':
-            tc_template = V2_ROUTE_TEMPLATES
-        elif key == 'nightly':
-            tc_template = V2_NIGHTLY_TEMPLATES
-        elif key == 'l10n':
-            tc_template = V2_L10N_TEMPLATES
-
-        routes = routes_json[key]
-
-        # we use different variables than mozharness
-        for mh, tg in [
-                ('{index}', 'index'),
-                ('{build_product}', '{product}'),
-                ('{build_name}-{build_type}', '{job-name}'),
-                ('{year}.{month}.{day}.{pushdate}', '{build_date_long}'),
-                ('{pushid}', '{pushlog_id}'),
-                ('{year}.{month}.{day}', '{build_date}')]:
-            routes = [r.replace(mh, tg) for r in routes]
-
-        if sorted(routes) != sorted(tc_template):
-            raise Exception("V2 TEMPLATES do not match Mozharness's routes.json: "
-                            "(tc):%s vs (mh):%s" % (tc_template, routes))
-
-
-check_v2_routes()
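To illustrate the variable translation above, one mozharness-style route (reconstructed by reversing the mapping; the actual routes.json contents are not shown in this patch) maps onto the corresponding taskgraph template like so:

    mh = ("{index}.gecko.v2.{project}.pushdate."
          "{year}.{month}.{day}.{pushdate}.{build_product}.{build_name}-{build_type}")
    for old, new in [('{index}', 'index'),
                     ('{build_product}', '{product}'),
                     ('{build_name}-{build_type}', '{job-name}'),
                     ('{year}.{month}.{day}.{pushdate}', '{build_date_long}')]:
        mh = mh.replace(old, new)
    # mh == "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}"
    # i.e. the second entry of V2_ROUTE_TEMPLATES.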
deleted file mode 100644
--- a/taskcluster/taskgraph/util/partials.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-import json
-import os
-
-import requests
-import redo
-
-import logging
-logger = logging.getLogger(__name__)
-
-BALROG_API_ROOT = 'https://aus5.mozilla.org/api/v1'
-
-PLATFORM_RENAMES = {
-    'windows2012-32': 'win32',
-    'windows2012-64': 'win64',
-    'osx-cross': 'macosx64',
-}
-
-BALROG_PLATFORM_MAP = {
-    "linux": [
-        "Linux_x86-gcc3"
-    ],
-    "linux64": [
-        "Linux_x86_64-gcc3"
-    ],
-    "macosx64": [
-        "Darwin_x86_64-gcc3-u-i386-x86_64",
-        "Darwin_x86-gcc3-u-i386-x86_64",
-        "Darwin_x86-gcc3",
-        "Darwin_x86_64-gcc3"
-    ],
-    "win32": [
-        "WINNT_x86-msvc",
-        "WINNT_x86-msvc-x86",
-        "WINNT_x86-msvc-x64"
-    ],
-    "win64": [
-        "WINNT_x86_64-msvc",
-        "WINNT_x86_64-msvc-x64"
-    ]
-}
-
-
-def get_balrog_platform_name(platform):
-    """Convert build platform names into balrog platform names"""
-    if '-nightly' in platform:
-        platform = platform.replace('-nightly', '')
-    if '-devedition' in platform:
-        platform = platform.replace('-devedition', '')
-    return PLATFORM_RENAMES.get(platform, platform)
-
-
-def _sanitize_platform(platform):
-    platform = get_balrog_platform_name(platform)
-    if platform not in BALROG_PLATFORM_MAP:
-        return platform
-    return BALROG_PLATFORM_MAP[platform][0]
-
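A quick illustration of the platform normalisation above, using values from the two maps defined in this module (the android platform is an illustrative unknown):

    get_balrog_platform_name('windows2012-64-nightly')  # -> 'win64'
    _sanitize_platform('windows2012-64-nightly')         # -> 'WINNT_x86_64-msvc'
    _sanitize_platform('android-api-16')                 # unknown, returned unchanged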
-
-def get_builds(release_history, platform, locale):
-    """Examine cached balrog release history and return the list of
-    builds we need to generate diffs from"""
-    platform = _sanitize_platform(platform)
-    return release_history.get(platform, {}).get(locale, {})
-
-
-def get_partials_artifacts(release_history, platform, locale):
-    platform = _sanitize_platform(platform)
-    return release_history.get(platform, {}).get(locale, {}).keys()
-
-
-def get_partials_artifact_map(release_history, platform, locale):
-    platform = _sanitize_platform(platform)
-    return {
-        k: release_history[platform][locale][k]['buildid']
-        for k in release_history.get(platform, {}).get(locale, {})
-    }
-
-
-def _retry_on_http_errors(url, verify, params, errors):
-    if params:
-        params_str = "&".join("=".join([k, str(v)])
-                              for k, v in params.iteritems())
-    else:
-        params_str = ''
-    logger.info("Connecting to %s?%s", url, params_str)
-    for _ in redo.retrier(sleeptime=5, max_sleeptime=30, attempts=10):
-        try:
-            req = requests.get(url, verify=verify, params=params)
-            req.raise_for_status()
-            return req
-        except requests.HTTPError as e:
-            if e.response.status_code in errors:
-                logger.exception("Got HTTP %s trying to reach %s",
-                                 e.response.status_code, url)
-            else:
-                raise
-    else:
-        raise
-
-
-def get_sorted_releases(product, branch):
-    """Returns a list of release names from Balrog.
-    :param product: product name, AKA appName
-    :param branch: branch name, e.g. mozilla-central
-    :return: a sorted list of release names, most recent first.
-    """
-    url = "{}/releases".format(BALROG_API_ROOT)
-    params = {
-        "product": product,
-        # Adding -nightly-2 (2 stands for the beginning of build ID
-        # based on date) should filter out release and latest blobs.
-        # This should be changed to -nightly-3 in 3000 ;)
-        "name_prefix": "{}-{}-nightly-2".format(product, branch),
-        "names_only": True
-    }
-    req = _retry_on_http_errors(
-        url=url, verify=True, params=params,
-        errors=[500])
-    releases = req.json()["names"]
-    releases = sorted(releases, reverse=True)
-    return releases
-
-
-def get_release_builds(release):
-    url = "{}/releases/{}".format(BALROG_API_ROOT, release)
-    req = _retry_on_http_errors(
-        url=url, verify=True, params=None,
-        errors=[500])
-    return req.json()
-
-
-def populate_release_history(product, branch, maxbuilds=4, maxsearch=10):
-    """Find relevant releases in Balrog.
-    Not all releases have all platforms and locales, due
-    to the Taskcluster migration.
-
-        Args:
-            product (str): capitalized product name, AKA appName, e.g. Firefox
-            branch (str): branch name (mozilla-central)
-            maxbuilds (int): Maximum number of historical releases to populate
-            maxsearch (int): Traverse at most this many releases, to avoid
-                working through the entire history.
-        Returns:
-            json object based on data from balrog api
-
-            results = {
-                'platform1': {
-                    'locale1': {
-                        'target.partial-1.mar': {
-                            'buildid': 'buildid1',
-                            'mar_url': 'mar_url1',
-                        },
-                        'target.partial-2.mar': {
-                            'buildid': 'buildid2',
-                            'mar_url': 'mar_url2',
-                        },
-                    },
-                    'locale2': {
-                        ...
-                    }
-                },
-                'platform2': {
-                }
-            }
-        """
-    last_releases = get_sorted_releases(product, branch)
-
-    partial_mar_tmpl = 'target.partial-{}.mar'
-
-    builds = dict()
-    for release in last_releases[:maxsearch]:
-        # Once we have maxbuilds in every category, don't make any more queries
-        full = len(builds) > 0 and all(
-            len(builds[platform][locale]) >= maxbuilds
-            for platform in builds
-            for locale in builds[platform])
-        if full:
-            break
-        history = get_release_builds(release)
-
-        for platform in history['platforms']:
-            if 'alias' in history['platforms'][platform]:
-                continue
-            if platform not in builds:
-                builds[platform] = dict()
-            for locale in history['platforms'][platform]['locales']:
-                if locale not in builds[platform]:
-                    builds[platform][locale] = dict()
-                if len(builds[platform][locale]) >= maxbuilds:
-                    continue
-                buildid = history['platforms'][platform]['locales'][locale]['buildID']
-                url = history['platforms'][platform]['locales'][locale]['completes'][0]['fileUrl']
-                nextkey = len(builds[platform][locale]) + 1
-                builds[platform][locale][partial_mar_tmpl.format(nextkey)] = {
-                    'buildid': buildid,
-                    'mar_url': url,
-                }
-    return builds
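A sketch of how these helpers fit together (network access to the Balrog API is required; the platform, locale and build IDs shown are illustrative):

    history = populate_release_history('Firefox', 'mozilla-central', maxbuilds=4)
    get_partials_artifacts(history, 'win64-nightly', 'en-US')
    # -> ['target.partial-1.mar', 'target.partial-2.mar', ...]
    get_partials_artifact_map(history, 'win64-nightly', 'en-US')
    # -> {'target.partial-1.mar': '20170910100226', ...}  (artifact -> buildid)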
--- a/taskcluster/taskgraph/util/taskcluster.py
+++ b/taskcluster/taskgraph/util/taskcluster.py
@@ -8,19 +8,16 @@ from __future__ import absolute_import, 
 
 import functools
 import yaml
 import requests
 from mozbuild.util import memoize
 from requests.packages.urllib3.util.retry import Retry
 from requests.adapters import HTTPAdapter
 
-_TC_ARTIFACT_LOCATION = \
-        'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/public/build/{postfix}'
-
 
 @memoize
 def get_session():
     session = requests.Session()
     retry = Retry(total=5, backoff_factor=0.1,
                   status_forcelist=[500, 502, 503, 504])
     session.mount('http://', HTTPAdapter(max_retries=retry))
     session.mount('https://', HTTPAdapter(max_retries=retry))
@@ -99,15 +96,8 @@ def get_task_url(task_id, use_proxy=Fals
     else:
         TASK_URL = 'https://queue.taskcluster.net/v1/task/{}'
     return TASK_URL.format(task_id)
 
 
 def get_task_definition(task_id, use_proxy=False):
     response = _do_request(get_task_url(task_id, use_proxy))
     return response.json()
-
-
-def get_taskcluster_artifact_prefix(task_id, postfix='', locale=None):
-    if locale:
-        postfix = '{}/{}'.format(locale, postfix)
-
-    return _TC_ARTIFACT_LOCATION.format(task_id=task_id, postfix=postfix)
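For reference, the removed helper composed artifact URLs like this (the task id and locale are illustrative):

    get_taskcluster_artifact_prefix('abc123', postfix='target.complete.mar', locale='de')
    # -> 'https://queue.taskcluster.net/v1/task/abc123/artifacts/public/build/de/target.complete.mar'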