Bug 1450029: Remove buildbot/buildbot-bridge references from taskcluster r=aki,tomprince
author: Chris AtLee <catlee@mozilla.com>
Tue, 01 May 2018 16:07:19 -0400
changeset 416919 00180532de6175ce239dde65e21b4b9a959fdef2
parent 416918 d7b2ccd7006778ad1208c09e9316a5cca32e45ca
child 416920 35fc3ebc645466ba5a90b4a548f0ea432495e1b3
push id: 33943
push user: csabou@mozilla.com
push date: Fri, 04 May 2018 17:19:55 +0000
treeherder: mozilla-central@ef1db4e8bf06 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: aki, tomprince
bugs: 1450029
milestone: 61.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1450029: Remove buildbot/buildbot-bridge references from taskcluster r=aki,tomprince MozReview-Commit-ID: Hu9ju4XVQpA
taskcluster/docker/desktop1604-test/Dockerfile
taskcluster/docker/periodic-updates/README.md
taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
taskcluster/docs/transforms.rst
taskcluster/taskgraph/morph.py
taskcluster/taskgraph/optimize.py
taskcluster/taskgraph/transforms/job/buildbot.py
taskcluster/taskgraph/transforms/job/mozharness_test.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/tests.py
taskcluster/taskgraph/util/bbb_validation.py
taskcluster/taskgraph/util/schema.py
taskcluster/taskgraph/util/seta.py
taskcluster/taskgraph/util/verify.py
taskcluster/taskgraph/util/workertypes.py
--- a/taskcluster/docker/desktop1604-test/Dockerfile
+++ b/taskcluster/docker/desktop1604-test/Dockerfile
@@ -61,22 +61,16 @@ ENV           LOGNAME       worker
 ENV           HOSTNAME      taskcluster-worker
 ENV           LANG          en_US.UTF-8
 ENV           LC_ALL        en_US.UTF-8
 
 # Add utilities and configuration
 COPY           dot-files/config              /builds/worker/.config
 COPY           dot-files/pulse               /builds/worker/.pulse
 RUN            chmod +x bin/*
-# TODO: remove this when buildbot is gone
-COPY           buildprops.json               /builds/worker/buildprops.json
-
-# TODO: remove
-ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /builds/worker/bin/buildbot_step
-RUN chmod u+x /builds/worker/bin/buildbot_step
 
 # allow the worker user to access video devices
 RUN usermod -a -G video worker
 
 RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
 
 ENV PATH $PATH:/builds/worker/bin
 
--- a/taskcluster/docker/periodic-updates/README.md
+++ b/taskcluster/docker/periodic-updates/README.md
@@ -17,18 +17,19 @@ docker run -e DO_HSTS=1 -e DO_HPKP=1 -e 
 
 HSTS checks will only be run if the `DO_HSTS` environment variable is set.
 Likewise for `DO_HPKP` and the HPKP checks, and `DO_BLOCKLIST` and the
 blocklist checks. Environment variables are used rather than command line
 arguments to make constructing taskcluster tasks easier.
 
 ==Background==
 
-These scripts have been moved from `https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and
-`security/manager/tools/` in the main repos, as part of the buildbot to taskcluster migration.
+These scripts have been moved from
+`https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and
+`security/manager/tools/` in the main repos.
 
 ==HSTS Checks==
 
 `scripts/getHSTSPreloadList.js` will examine the current contents of
 nsSTSPreloadList.inc from whichever `BRANCH` is specified, add in the mandatory
 hosts, and those from the Chromium source, and check them all to see if their
 SSL configuration is valid, and whether or not they have the
 Strict-Transport-Security header set with an appropriate `max-age`. 
--- a/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
+++ b/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
@@ -89,17 +89,17 @@ apt-get update
 export DEBIAN_FRONTEND=noninteractive
 apt-get install -y -f "${apt_packages[@]}"
 
 dpkg-reconfigure locales
 
 . /setup/common.sh
 . /setup/install-mercurial.sh
 
-pip install --upgrade pip
+pip install --upgrade 'pip<10.0'
 
 pip install virtualenv
 
 . /setup/install-node.sh
 
 # Install custom-built Debian packages.  These come from a set of repositories
 # packaged in tarballs on tooltool to make them replicable.  Because they have
 # inter-dependenices, we install all repositories first, then perform the
--- a/taskcluster/docs/transforms.rst
+++ b/taskcluster/docs/transforms.rst
@@ -132,17 +132,16 @@ verbatim, although it is augmented by th
 The run-using implementations are all located in
 ``taskcluster/taskgraph/transforms/job``, along with the schemas for their
 implementations.  Those well-commented source files are the canonical
 documentation for what constitutes a job description, and should be considered
 part of the documentation.
 
 following ``run-using`` are available
 
-  * ``buildbot``
   * ``hazard``
   * ``mach``
   * ``mozharness``
   * ``mozharness-test``
   * ``run-task``
   * ``spidermonkey`` or ``spidermonkey-package`` or ``spidermonkey-mozjs-crate`` or ``spidermonkey-rust-bindings``
   * ``debian-package``
   * ``toolchain-script``
--- a/taskcluster/taskgraph/morph.py
+++ b/taskcluster/taskgraph/morph.py
@@ -98,18 +98,16 @@ def derive_misc_task(task, purpose, imag
     task.task_id = slugid()
     return task
 
 
 # these regular expressions capture route prefixes for which we have a star
 # scope, allowing them to be summarized.  Each should correspond to a star scope
 # in each Gecko `assume:repo:hg.mozilla.org/...` role.
 SCOPE_SUMMARY_REGEXPS = [
-    re.compile(r'(index:insert-task:buildbot\.branches\.[^.]*\.).*'),
-    re.compile(r'(index:insert-task:buildbot\.revisions\.).*'),
     re.compile(r'(index:insert-task:docker\.images\.v1\.[^.]*\.).*'),
     re.compile(r'(index:insert-task:gecko\.v2\.[^.]*\.).*'),
 ]
 
 
 def make_index_task(parent_task, taskgraph, label_to_taskid):
     index_paths = [r.split('.', 1)[1] for r in parent_task.task['routes']
                    if r.startswith('index.')]
@@ -156,100 +154,16 @@ def add_index_tasks(taskgraph, label_to_
     if added:
         taskgraph, label_to_taskid = amend_taskgraph(
             taskgraph, label_to_taskid, added)
         logger.info('Added {} index tasks'.format(len(added)))
 
     return taskgraph, label_to_taskid
 
 
-def make_s3_uploader_task(parent_task):
-    if parent_task.task['payload']['sourcestamp']['branch'] == 'try':
-        worker_type = 'buildbot-try'
-    else:
-        worker_type = 'buildbot'
-
-    task_def = {
-        # The null-provisioner and buildbot worker type don't actually exist.
-        # So this task doesn't actually run - we just need to create the task so
-        # we have something to attach artifacts to.
-        "provisionerId": "null-provisioner",
-        "workerType": worker_type,
-        "created": {'relative-datestamp': '0 seconds'},
-        "deadline": parent_task.task['deadline'],
-        "routes": parent_task.task['routes'],
-        "payload": {},
-        "extra": {
-            "index": {
-                "rank": 1493912914,
-            }
-        },
-        "metadata": {
-            "name": "Buildbot/mozharness S3 uploader",
-            "description": "Upload outputs of buildbot/mozharness builds to S3",
-            "owner": "mshal@mozilla.com",
-            "source": "http://hg.mozilla.org/build/mozharness/",
-        }
-    }
-    parent_task.task['routes'] = []
-    label = 's3-uploader-{}'.format(parent_task.label)
-    dependencies = {}
-    task = Task(kind='misc', label=label, attributes={}, task=task_def,
-                dependencies=dependencies)
-    task.task_id = parent_task.task['payload']['properties']['upload_to_task_id']
-    return task
-
-
-def update_test_tasks(taskid, build_taskid, taskgraph):
-    """Tests task must download artifacts from uploader task."""
-    # Notice we handle buildbot-bridge, native, and generic-worker payloads
-    # We can do better here in terms of graph searching
-    # We could do post order search and stop as soon as we
-    # reach the build task. Not worring about it because this is
-    # (supposed to be) a temporary solution.
-    for task in taskgraph.tasks.itervalues():
-        if build_taskid in task.task.get('dependencies', []):
-            payload = task.task['payload']
-            task.task['dependencies'].append(taskid)
-            taskgraph.graph.edges.add((task.task_id, taskid, 'uploader'))
-            if 'command' in payload:
-                try:
-                    payload['command'] = [
-                        cmd.replace(build_taskid, taskid) for cmd in payload['command']
-                    ]
-                except AttributeError:
-                    # generic-worker command attribute is an list of lists
-                    payload['command'] = [
-                        [cmd.replace(build_taskid, taskid) for cmd in x]
-                        for x in payload['command']
-                    ]
-            if 'mounts' in payload:
-                for mount in payload['mounts']:
-                    if mount.get('content', {}).get('taskId', '') == build_taskid:
-                        mount['content']['taskId'] = taskid
-            if 'env' in payload:
-                payload['env'] = {
-                    k: v.replace(build_taskid, taskid) for k, v in payload['env'].iteritems()
-                }
-            if 'properties' in payload:
-                payload['properties']['parent_task_id'] = taskid
-
-
-def add_s3_uploader_task(taskgraph, label_to_taskid):
-    """The S3 uploader task is used by mozharness to upload buildbot artifacts."""
-    for task in taskgraph.tasks.itervalues():
-        if 'upload_to_task_id' in task.task.get('payload', {}).get('properties', {}):
-            added = make_s3_uploader_task(task)
-            taskgraph, label_to_taskid = amend_taskgraph(
-                taskgraph, label_to_taskid, [added])
-            update_test_tasks(added.task_id, task.task_id, taskgraph)
-            logger.info('Added s3-uploader task for %s' % task.task_id)
-    return taskgraph, label_to_taskid
-
-
 class apply_jsone_templates(object):
     """Apply a set of JSON-e templates to each task's `task` attribute.
 
     :param templates: A dict with the template name as the key, and extra context
                       to use (in addition to task.to_json()) as the value.
     """
     template_dir = os.path.join(here, 'templates')
 
@@ -285,16 +199,15 @@ class apply_jsone_templates(object):
 
         return taskgraph, label_to_taskid
 
 
 def morph(taskgraph, label_to_taskid, parameters):
     """Apply all morphs"""
     morphs = [
         add_index_tasks,
-        add_s3_uploader_task,
     ]
     if parameters['try_mode'] == 'try_task_config':
         morphs.append(apply_jsone_templates(parameters['try_task_config']))
 
     for m in morphs:
         taskgraph, label_to_taskid = m(taskgraph, label_to_taskid)
     return taskgraph, label_to_taskid
--- a/taskcluster/taskgraph/optimize.py
+++ b/taskcluster/taskgraph/optimize.py
@@ -317,32 +317,24 @@ class IndexSearch(OptimizationStrategy):
                 # 404 will end up here and go on to the next index path
                 pass
 
         return False
 
 
 class SETA(OptimizationStrategy):
     def should_remove_task(self, task, params, _):
-        bbb_task = False
-
-        # for bbb tasks we need to send in the buildbot buildername
-        if task.task.get('provisionerId', '') == 'buildbot-bridge':
-            label = task.task.get('payload').get('buildername')
-            bbb_task = True
-        else:
-            label = task.label
+        label = task.label
 
         # we would like to return 'False, None' while it's high_value_task
         # and we wouldn't optimize it. Otherwise, it will return 'True, None'
         if is_low_value_task(label,
                              params.get('project'),
                              params.get('pushlog_id'),
-                             params.get('pushdate'),
-                             bbb_task):
+                             params.get('pushdate')):
             # Always optimize away low-value tasks
             return True
         else:
             return False
 
 
 class SkipUnlessChanged(OptimizationStrategy):
     def should_remove_task(self, task, params, file_patterns):
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/job/buildbot.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-
-Support for running jobs via buildbot.
-
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-import slugid
-from urlparse import urlparse
-
-from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
-from taskgraph.util.scriptworker import get_release_config
-from voluptuous import Optional, Required, Any
-
-from taskgraph.transforms.job import run_job_using
-
-
-buildbot_run_schema = Schema({
-    Required('using'): 'buildbot',
-
-    # the buildername to use for buildbot-bridge, will expand {branch} in name from
-    # the current project.
-    Required('buildername'): basestring,
-
-    # the product to use
-    Required('product'): Any('firefox', 'mobile', 'fennec', 'devedition', 'thunderbird'),
-
-    Optional('channels'): optionally_keyed_by('project', basestring),
-
-    Optional('release-promotion'): bool,
-
-    Optional('release-eta'): basestring,
-})
-
-
-def _get_balrog_api_root(branch):
-    if branch in ('mozilla-beta', 'mozilla-release') or branch.startswith('mozilla-esr'):
-        return 'https://aus4-admin.mozilla.org/api'
-    else:
-        return 'https://balrog-admin.stage.mozaws.net/api'
-
-
-def bb_release_worker(config, worker, run):
-    # props
-    release_props = get_release_config(config)
-    repo_path = urlparse(config.params['head_repository']).path.lstrip('/')
-    revision = config.params['head_rev']
-    branch = config.params['project']
-    product = run['product']
-
-    release_props.update({
-        'release_promotion': True,
-        'repo_path': repo_path,
-        'revision': revision,
-    })
-
-    if 'channels' in run:
-        release_props['channels'] = run['channels']
-        resolve_keyed_by(release_props, 'channels', 'channels', **config.params)
-
-    if product in ('devedition', 'firefox'):
-        release_props['balrog_api_root'] = _get_balrog_api_root(branch)
-
-    if run.get('release-eta'):
-        # TODO Use same property name when we move away from BuildBot
-        release_props['schedule_at'] = run['release-eta']
-
-    worker['properties'].update(release_props)
-    # Setting script_repo_revision to the gecko revision doesn't work for
-    # jobs that clone build/tools or other repos instead of gecko.
-    if 'script_repo_revision' not in worker['properties']:
-        worker['properties']['script_repo_revision'] = revision
-
-
-def bb_ci_worker(config, worker):
-    worker['properties'].update({
-        'who': config.params['owner'],
-        'upload_to_task_id': slugid.nice(),
-    })
-
-
-@run_job_using('buildbot-bridge', 'buildbot', schema=buildbot_run_schema)
-def mozharness_on_buildbot_bridge(config, job, taskdesc):
-    run = job['run']
-    worker = taskdesc['worker']
-    branch = config.params['project']
-    product = run['product']
-
-    buildername = run['buildername'].format(branch=branch)
-    revision = config.params['head_rev']
-
-    worker.update({
-        'buildername': buildername,
-        'sourcestamp': {
-            'branch': branch,
-            'repository': config.params['head_repository'],
-            'revision': revision,
-        },
-    })
-    worker.setdefault('properties', {})['product'] = product
-
-    if run.get('release-promotion'):
-        bb_release_worker(config, worker, run)
-    else:
-        bb_ci_worker(config, worker)
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -418,126 +418,8 @@ def mozharness_test_on_native_engine(con
             for i, c in enumerate(command):
                 if isinstance(c, basestring) and c.startswith('--test-suite'):
                     command[i] += suffix
 
     if 'download-symbols' in mozharness:
         download_symbols = mozharness['download-symbols']
         download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
         command.append('--download-symbols=' + download_symbols)
-
-
-@run_job_using('buildbot-bridge', 'mozharness-test', schema=mozharness_test_run_schema)
-def mozharness_test_buildbot_bridge(config, job, taskdesc):
-    test = taskdesc['run']['test']
-    mozharness = test['mozharness']
-    worker = taskdesc['worker']
-
-    branch = config.params['project']
-    build_platform, build_type = test['build-platform'].split('/')
-    test_platform = test['test-platform'].split('/')[0]
-    test_name = test.get('try-name', test['test-name'])
-    mozharness = test['mozharness']
-
-    # mochitest e10s follows the pattern mochitest-e10s-<suffix>
-    # in buildbot, except for these special cases
-    buildbot_specials = [
-        'mochitest-webgl',
-        'mochitest-clipboard',
-        'mochitest-media',
-        'mochitest-gpu',
-        'mochitest-e10s',
-    ]
-    test_name = test.get('try-name', test['test-name'])
-    if test['e10s'] and 'e10s' not in test_name:
-        test_name += '-e10s'
-
-    if test_name.startswith('mochitest') \
-            and test_name.endswith('e10s') \
-            and not any(map(
-                lambda name: test_name.startswith(name),
-                buildbot_specials
-            )):
-        split_mochitest = test_name.split('-')
-        test_name = '-'.join([
-            split_mochitest[0],
-            split_mochitest[-1],
-            '-'.join(split_mochitest[1:-1])
-        ])
-
-    # in buildbot, mochitest-webgl is called mochitest-gl
-    test_name = test_name.replace('webgl', 'gl')
-
-    if mozharness.get('chunked', False):
-        this_chunk = test.get('this-chunk')
-        test_name = '{}-{}'.format(test_name, this_chunk)
-    elif test.get('this-chunk', 1) != 1:
-        raise Exception("Unexpected chunking when 'chunked' attribute is 'false'"
-                        " for {}".format(test_name))
-
-    if test.get('suite', '') == 'talos':
-        variant = get_variant(test['test-platform'])
-
-        # On beta and release, we run nightly builds on-push; the talos
-        # builders need to run against non-nightly buildernames
-        if variant == 'nightly':
-            variant = ''
-
-        # this variant name has branch after the variant type in BBB bug 1338871
-        if variant in ('qr', 'stylo', 'stylo-sequential', 'devedition', 'stylo-disabled'):
-            name = '{prefix} {variant} {branch} talos {test_name}'
-        elif variant:
-            name = '{prefix} {branch} {variant} talos {test_name}'
-        else:
-            name = '{prefix} {branch} talos {test_name}'
-
-        buildername = name.format(
-            prefix=BUILDER_NAME_PREFIX[test_platform],
-            variant=variant,
-            branch=branch,
-            test_name=test_name
-        )
-
-        if buildername.startswith('Ubuntu'):
-            buildername = buildername.replace('VM', 'HW')
-    else:
-        variant = get_variant(test['test-platform'])
-        # If we are a pgo type, munge the build_type for the
-        # Unittest builder name generation
-        if 'pgo' in variant:
-            build_type = variant
-        prefix = BUILDER_NAME_PREFIX.get(
-            (test_platform, test.get('virtualization')),
-            BUILDER_NAME_PREFIX[test_platform])
-        if variant in ['stylo-disabled']:
-            buildername = '{prefix} {variant} {branch} {build_type} test {test_name}'.format(
-                prefix=prefix,
-                variant=variant,
-                branch=branch,
-                build_type=build_type,
-                test_name=test_name
-            )
-        else:
-            buildername = '{prefix} {branch} {build_type} test {test_name}'.format(
-                prefix=prefix,
-                branch=branch,
-                build_type=build_type,
-                test_name=test_name
-            )
-
-    worker.update({
-        'buildername': buildername,
-        'sourcestamp': {
-            'branch': branch,
-            'repository': config.params['head_repository'],
-            'revision': config.params['head_rev'],
-        },
-        'properties': {
-            'product': test.get('product', 'firefox'),
-            'who': config.params['owner'],
-            'installer_path': mozharness['build-artifact-name'],
-        }
-    })
-
-    if mozharness['requires-signed-builds']:
-        upstream_task = '<build-signing>'
-        installer_url = get_artifact_url(upstream_task, mozharness['build-artifact-name'])
-        worker['properties']['signed_installer_url'] = {'task-reference': installer_url}
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -373,37 +373,16 @@ task_description_schema = Schema({
 
         # os user groups for test task workers
         Optional('os-groups'): [basestring],
 
         # optional features
         Required('chain-of-trust'): bool,
         Optional('taskcluster-proxy'): bool,
     }, {
-        Required('implementation'): 'buildbot-bridge',
-
-        # see
-        # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
-        Required('buildername'): basestring,
-        Required('sourcestamp'): {
-            'branch': basestring,
-            Optional('revision'): basestring,
-            Optional('repository'): basestring,
-            Optional('project'): basestring,
-        },
-        Required('properties'): {
-            'product': basestring,
-            Optional('build_number'): int,
-            Optional('release_promotion'): bool,
-            Optional('generate_bz2_blob'): bool,
-            Optional('tuxedo_server_url'): optionally_keyed_by('project', basestring),
-            Optional('release_eta'): basestring,
-            Extra: taskref_or_string,  # additional properties are allowed
-        },
-    }, {
         Required('implementation'): 'native-engine',
         Required('os'): Any('macosx', 'linux'),
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
         # A link for an executable to download
         Optional('context'): basestring,
@@ -1202,39 +1181,16 @@ def build_macosx_engine_payload(config, 
     }
     if worker.get('reboot'):
         task_def['payload'] = worker['reboot']
 
     if task.get('needs-sccache'):
         raise Exception('needs-sccache not supported in native-engine')
 
 
-@payload_builder('buildbot-bridge')
-def build_buildbot_bridge_payload(config, task, task_def):
-    task['extra'].pop('treeherder', None)
-    worker = task['worker']
-
-    if worker['properties'].get('tuxedo_server_url'):
-        resolve_keyed_by(
-            worker, 'properties.tuxedo_server_url', worker['buildername'],
-            **config.params
-        )
-
-    task_def['payload'] = {
-        'buildername': worker['buildername'],
-        'sourcestamp': worker['sourcestamp'],
-        'properties': worker['properties'],
-    }
-    task_def.setdefault('scopes', [])
-    if worker['properties'].get('release_promotion'):
-        task_def['scopes'].append(
-            "project:releng:buildbot-bridge:builder-name:{}".format(worker['buildername'])
-        )
-
-
 transforms = TransformSequence()
 
 
 @transforms.add
 def set_defaults(config, tasks):
     for task in tasks:
         task.setdefault('shipping-phase', None)
         task.setdefault('shipping-product', None)
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -103,32 +103,16 @@ WINDOWS_WORKER_TYPES = {
       'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
       'hardware': 'releng-hardware/gecko-t-win10-64-hw',
     },
     'windows10-64-qr': {
       'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
       'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
       'hardware': 'releng-hardware/gecko-t-win10-64-hw',
     },
-    # These values don't really matter since BBB will be executing them
-    'windows8-64': {
-      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
-      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
-      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
-    },
-    'windows8-64-pgo': {
-      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
-      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
-      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
-    },
-    'windows8-64-nightly': {
-      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
-      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
-      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
-    },
 }
 
 # os x worker types keyed by test-platform
 MACOSX_WORKER_TYPES = {
     'macosx64': 'releng-hardware/gecko-t-osx-1010',
 }
 
 logger = logging.getLogger(__name__)
deleted file mode 100644
--- a/taskcluster/taskgraph/util/bbb_validation.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import json
-import logging
-import requests
-
-from mozbuild.util import memoize
-from redo import retry
-from requests import exceptions
-
-ALLTHETHINGS_URL = "https://secure.pub.build.mozilla.org/builddata/reports/allthethings.json.gz"
-
-logger = logging.getLogger(__name__)
-
-
-def fetch_all_the_things():
-    response = retry(requests.get, attempts=2, sleeptime=10,
-                     args=(ALLTHETHINGS_URL,),
-                     kwargs={'timeout': 60})
-    return response.content
-
-
-@memoize
-def valid_bbb_builders():
-    try:
-        allthethings = fetch_all_the_things()
-        builders = set(json.loads(allthethings).get('builders', {}).keys())
-        return builders
-
-    # In the event of request times out, requests will raise a TimeoutError.
-    except exceptions.Timeout:
-        logger.warning("Timeout fetching list of buildbot builders.")
-
-    # In the event of a network problem (e.g. DNS failure, refused connection, etc),
-    # requests will raise a ConnectionError.
-    except exceptions.ConnectionError:
-        logger.warning("Connection Error while fetching list of buildbot builders")
-
-    # We just print the error out as a debug message if we failed to catch the exception above
-    except exceptions.RequestException as error:
-        logger.warning(error)
-
-    # When we get invalid JSON (i.e. 500 error), it results in a ValueError
-    except ValueError as error:
-        logger.warning("Invalid JSON, possible server error: {}".format(error))
-
-    # None returned to treat as "All Builders Valid"
-    return None
--- a/taskcluster/taskgraph/util/schema.py
+++ b/taskcluster/taskgraph/util/schema.py
@@ -142,22 +142,16 @@ def resolve_keyed_by(item, field, item_n
 
 
 # Schemas for YAML files should use dashed identifiers by default.  If there are
 # components of the schema for which there is a good reason to use another format,
 # they can be whitelisted here.
 WHITELISTED_SCHEMA_IDENTIFIERS = [
     # upstream-artifacts are handed directly to scriptWorker, which expects interCaps
     lambda path: "[u'upstream-artifacts']" in path,
-    # bbb release promotion properties
-    lambda path: path.endswith("[u'build_number']"),
-    lambda path: path.endswith("[u'tuxedo_server_url']"),
-    lambda path: path.endswith("[u'release_promotion']"),
-    lambda path: path.endswith("[u'generate_bz2_blob']"),
-    lambda path: path.endswith("[u'release_eta']"),
 ]
 
 
 def check_schema(schema):
     identifier_re = re.compile('^[a-z][a-z0-9-]*$')
 
     def whitelisted(path):
         return any(f(path) for f in WHITELISTED_SCHEMA_IDENTIFIERS)
--- a/taskcluster/taskgraph/util/seta.py
+++ b/taskcluster/taskgraph/util/seta.py
@@ -44,27 +44,23 @@ class SETA(object):
 
         if len(task_tuple) == 0:
             return ''
         if len(task_tuple) != 3:
             return ' '.join(task_tuple)
 
         return 'test-%s/%s-%s' % (task_tuple[0], task_tuple[1], task_tuple[2])
 
-    def query_low_value_tasks(self, project, bbb=False):
-        # Request the set of low value tasks from the SETA service.  Low value tasks will be
-        # optimized out of the task graph.
+    def query_low_value_tasks(self, project):
+        # Request the set of low value tasks from the SETA service.  Low value
+        # tasks will be optimized out of the task graph.
         low_value_tasks = []
 
-        if not bbb:
-            # we want to get low priority taskcluster jobs
-            url = SETA_ENDPOINT % (project, 'taskcluster')
-        else:
-            # we want low priority buildbot jobs
-            url = SETA_ENDPOINT % (project, 'buildbot&priority=5')
+        # we want to get low priority taskcluster jobs
+        url = SETA_ENDPOINT % (project, 'taskcluster')
 
         # Try to fetch the SETA data twice, falling back to an empty list of low value tasks.
         # There are 10 seconds between each try.
         try:
             logger.debug("Retrieving low-value jobs list from SETA")
             response = retry(requests.get, attempts=2, sleeptime=10,
                              args=(url, ),
                              kwargs={'timeout': 60, 'headers': ''})
@@ -167,17 +163,17 @@ class SETA(object):
 
         # We just print the error out as a debug message if we failed to catch the exception above
         except exceptions.RequestException as error:
             logger.warning(error)
             self.failed_json_push_calls.append(prev_push_id)
 
         return min_between_pushes
 
-    def is_low_value_task(self, label, project, pushlog_id, push_date, bbb_task=False):
+    def is_low_value_task(self, label, project, pushlog_id, push_date):
         # marking a task as low_value means it will be optimized out by tc
         if project not in SETA_PROJECTS:
             return False
 
         schedule_all_every = PROJECT_SCHEDULE_ALL_EVERY_PUSHES.get(project, 5)
         # on every Nth push, want to run all tasks
         if int(pushlog_id) % schedule_all_every == 0:
             return False
@@ -185,24 +181,17 @@ class SETA(object):
         # Nth push, so time to call seta based on number of pushes; however
         # we also want to ensure we run all tasks at least once per N minutes
         if self.minutes_between_pushes(
                 project,
                 int(pushlog_id),
                 int(push_date)) >= PROJECT_SCHEDULE_ALL_EVERY_MINUTES.get(project, 60):
             return False
 
-        if not bbb_task:
-            # cache the low value tasks per project to avoid repeated SETA server queries
-            if project not in self.low_value_tasks:
-                self.low_value_tasks[project] = self.query_low_value_tasks(project)
-            return label in self.low_value_tasks[project]
-
-        # gecko decision task requesting if a bbb task is a low value task, so use bb jobs
-        # in this case, the label param sent in will be the buildbot buildername already
-        if project not in self.low_value_bb_tasks:
-            self.low_value_bb_tasks[project] = self.query_low_value_tasks(project, bbb=True)
-        return label in self.low_value_bb_tasks[project]
+        # cache the low value tasks per project to avoid repeated SETA server queries
+        if project not in self.low_value_tasks:
+            self.low_value_tasks[project] = self.query_low_value_tasks(project)
+        return label in self.low_value_tasks[project]
 
 
 # create a single instance of this class, and expose its `is_low_value_task`
 # bound method as a module-level function
 is_low_value_task = SETA().is_low_value_task
--- a/taskcluster/taskgraph/util/verify.py
+++ b/taskcluster/taskgraph/util/verify.py
@@ -6,17 +6,16 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import re
 import os
 import sys
 
 from .. import GECKO
-from taskgraph.util.bbb_validation import valid_bbb_builders
 
 logger = logging.getLogger(__name__)
 base_path = os.path.join(GECKO, 'taskcluster', 'docs')
 
 
 class VerificationSequence(object):
     """
     Container for a sequence of verifications over a TaskGraph. Each
@@ -138,58 +137,30 @@ def verify_dependency_tiers(task, taskgr
                                      .get('tier', sys.maxint)
     else:
         def printable_tier(tier):
             if tier == sys.maxint:
                 return 'unknown'
             return tier
 
         for task in taskgraph.tasks.itervalues():
-            # Buildbot bridge tasks cannot have tiers, so we cannot enforce
-            # this check for them
-            if task.task.get("workerType") == "buildbot-bridge":
-                continue
             tier = tiers[task.label]
             for d in task.dependencies.itervalues():
-                if taskgraph[d].task.get("workerType") in ("buildbot-bridge",
-                                                           "always-optimized"):
+                if taskgraph[d].task.get("workerType") == "always-optimized":
                     continue
                 if "dummy" in taskgraph[d].kind:
                     continue
                 if tier < tiers[d]:
                     raise Exception(
                         '{} (tier {}) cannot depend on {} (tier {})'
                         .format(task.label, printable_tier(tier),
                                 d, printable_tier(tiers[d])))
 
 
 @verifications.add('optimized_task_graph')
-def verify_bbb_builders_valid(task, taskgraph, scratch_pad):
-    """
-        This function ensures that any task which is run
-        in buildbot (via buildbot-bridge) is using a recognized buildername.
-
-        If you see an unexpected failure with a task due to this check, please
-        see the IRC Channel, #releng.
-    """
-    if task is None:
-        return
-    valid_builders = valid_bbb_builders()
-    if valid_builders is None:
-        return
-    if task.task.get('workerType') == 'buildbot-bridge':
-        buildername = task.task['payload']['buildername']
-        if buildername not in valid_builders:
-            logger.warning(
-                '{} uses an invalid buildbot buildername ("{}") '
-                ' - contact #releng for help'
-                .format(task.label, buildername))
-
-
-@verifications.add('optimized_task_graph')
 def verify_always_optimized(task, taskgraph, scratch_pad):
     """
         This function ensures that always-optimized tasks have been optimized.
     """
     if task is None:
         return
     if task.task.get('workerType') == 'always-optimized':
         raise Exception('Could not optimize the task {!r}'.format(task.label))
--- a/taskcluster/taskgraph/util/workertypes.py
+++ b/taskcluster/taskgraph/util/workertypes.py
@@ -32,17 +32,16 @@ WORKER_TYPES = {
     'aws-provisioner-v1/gecko-t-linux-xlarge': ('docker-worker', 'linux'),
     'aws-provisioner-v1/gecko-t-win10-64': ('generic-worker', 'windows'),
     'aws-provisioner-v1/gecko-t-win10-64-gpu': ('generic-worker', 'windows'),
     'releng-hardware/gecko-t-win10-64-hw': ('generic-worker', 'windows'),
     'aws-provisioner-v1/gecko-t-win7-32': ('generic-worker', 'windows'),
     'aws-provisioner-v1/gecko-t-win7-32-gpu': ('generic-worker', 'windows'),
     'releng-hardware/gecko-t-win7-32-hw': ('generic-worker', 'windows'),
     'aws-provisioner-v1/taskcluster-generic': ('docker-worker', 'linux'),
-    'buildbot-bridge/buildbot-bridge': ('buildbot-bridge', None),
     'invalid/invalid': ('invalid', None),
     'invalid/always-optimized': ('always-optimized', None),
     'releng-hardware/gecko-t-linux-talos': ('native-engine', 'linux'),
     'scriptworker-prov-v1/balrog-dev': ('balrog', None),
     'scriptworker-prov-v1/balrogworker-v1': ('balrog', None),
     'scriptworker-prov-v1/beetmoverworker-v1': ('beetmover', None),
     'scriptworker-prov-v1/pushapk-v1': ('push-apk', None),
     "scriptworker-prov-v1/signing-linux-v1": ('scriptworker-signing', None),