Bug 1527138: Remove references to old taskcluster worker types r=tomprince
authorChris AtLee <catlee@mozilla.com>
Tue, 12 Feb 2019 21:12:27 +0000
changeset 459447 ac36d71cc3170523b35ba018b46e82e416579689
parent 459446 25e2d821be0762477db0e87b5bc601a0b6e10b61
child 459448 4c7b508eb10d6743cc53c0b798a62569aa492cf5
push id: 78276
push user: catlee@mozilla.com
push date: Fri, 15 Feb 2019 14:25:37 +0000
treeherder: autoland@ac36d71cc317 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: tomprince
bugs: 1527138
milestone: 67.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1527138: Remove references to old taskcluster worker types r=tomprince Differential Revision: https://phabricator.services.mozilla.com/D19445
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/mach.py
taskcluster/taskgraph/transforms/job/mozharness_test.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/tests.py
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -151,17 +151,17 @@ def support_vcs_checkout(config, job, ta
     elif job['run'].get('comm-checkout', False):
         raise Exception("Can't checkout from comm-* repository if not given a repository.")
 
     # Give task access to hgfingerprint secret so it can pin the certificate
     # for hg.mozilla.org.
     taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
 
     # only some worker platforms have taskcluster-proxy enabled
-    if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
+    if job['worker']['implementation'] in ('docker-worker',):
         taskdesc['worker']['taskcluster-proxy'] = True
 
 
 def generic_worker_hg_commands(base_repo, head_repo, head_rev, path):
     """Obtain commands needed to obtain a Mercurial checkout on generic-worker.
 
     Returns two command strings. One performs the checkout. Another logs.
     """
@@ -211,17 +211,17 @@ def docker_worker_add_tooltool(config, j
 
     By default, only public tooltool access will be granted. Access to internal
     tooltool can be enabled via ``internal=True``.
 
     This can only be used with ``run-task`` tasks, as the cache name is
     reserved for use with ``run-task``.
     """
 
-    assert job['worker']['implementation'] in ('docker-worker', 'docker-engine')
+    assert job['worker']['implementation'] in ('docker-worker',)
 
     level = config.params['level']
     add_cache(job, taskdesc, 'level-{}-tooltool-cache'.format(level),
               '{workdir}/tooltool-cache'.format(**job['run']))
 
     taskdesc['worker'].setdefault('env', {}).update({
         'TOOLTOOL_CACHE': '{workdir}/tooltool-cache'.format(**job['run']),
     })
--- a/taskcluster/taskgraph/transforms/job/mach.py
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -34,17 +34,16 @@ mach_schema = Schema({
 
 
 defaults = {
     'comm-checkout': False,
 }
 
 
 @run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
-@run_job_using("native-engine", "mach", schema=mach_schema, defaults=defaults)
 @run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
 def configure_mach(config, job, taskdesc):
     run = job['run']
 
     command_prefix = 'cd $GECKO_PATH && ./mach '
     mach = run['mach']
     if isinstance(mach, dict):
         ref, pattern = next(iter(mach.items()))
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -54,17 +54,16 @@ def test_packages_url(taskdesc):
     # for android nightly we need to add 'en-US' to the artifact url
     test = taskdesc['run']['test']
     if get_variant(test['test-platform']) == "nightly" and 'android' in test['test-platform']:
         head, tail = os.path.split(artifact_url)
         artifact_url = os.path.join(head, 'en-US', tail)
     return artifact_url
 
 
-@run_job_using('docker-engine', 'mozharness-test', schema=mozharness_test_run_schema)
 @run_job_using('docker-worker', 'mozharness-test', schema=mozharness_test_run_schema)
 def mozharness_test_on_docker(config, job, taskdesc):
     run = job['run']
     test = taskdesc['run']['test']
     mozharness = test['mozharness']
     worker = taskdesc['worker']
 
     # apply some defaults
@@ -320,96 +319,16 @@ def mozharness_test_on_generic_worker(co
         mh_command_task_ref = []
         for token in mh_command:
             mh_command_task_ref.append({'task-reference': token})
         worker['command'] = [
             mh_command_task_ref
         ]
 
 
-@run_job_using('native-engine', 'mozharness-test', schema=mozharness_test_run_schema)
-def mozharness_test_on_native_engine(config, job, taskdesc):
-    test = taskdesc['run']['test']
-    mozharness = test['mozharness']
-    worker = taskdesc['worker']
-    is_talos = test['suite'] == 'talos' or test['suite'] == 'raptor'
-    is_macosx = worker['os'] == 'macosx'
-
-    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
-    mozharness_url = get_artifact_url('<build>',
-                                      get_artifact_path(taskdesc, 'mozharness.zip'))
-
-    worker['artifacts'] = [{
-        'name': prefix.rstrip('/'),
-        'path': path.rstrip('/'),
-        'type': 'directory',
-    } for (prefix, path) in [
-        # (artifact name prefix, in-image path relative to homedir)
-        ("public/logs/", "workspace/build/logs/"),
-        ("public/test", "artifacts/"),
-        ("public/test_info/", "workspace/build/blobber_upload_dir/"),
-    ]]
-
-    if test['reboot']:
-        worker['reboot'] = test['reboot']
-
-    if test['max-run-time']:
-        worker['max-run-time'] = test['max-run-time']
-
-    env = worker.setdefault('env', {})
-    env.update({
-        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
-        'GECKO_HEAD_REV': config.params['head_rev'],
-        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
-        'MOZHARNESS_SCRIPT': mozharness['script'],
-        'MOZHARNESS_URL': {'task-reference': mozharness_url},
-        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
-        "MOZ_NO_REMOTE": '1',
-        "XPCOM_DEBUG_BREAK": 'warn',
-        "NO_FAIL_ON_TEST_ERRORS": '1',
-        "MOZ_HIDE_RESULTS_TABLE": '1',
-        "MOZ_NODE_PATH": "/usr/local/bin/node",
-        'MOZ_AUTOMATION': '1',
-    })
-    # talos tests don't need Xvfb
-    if is_talos:
-        env['NEED_XVFB'] = 'false'
-
-    script = 'test-macosx.sh' if is_macosx else 'test-linux.sh'
-    worker['context'] = '{}/raw-file/{}/taskcluster/scripts/tester/{}'.format(
-        config.params['head_repository'], config.params['head_rev'], script
-    )
-
-    command = worker['command'] = ["./{}".format(script)]
-    command.extend([
-        {"task-reference": "--installer-url=" + installer_url},
-        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
-    ])
-    if mozharness.get('include-blob-upload-branch'):
-        command.append('--blob-upload-branch=' + config.params['project'])
-    command.extend(mozharness.get('extra-options', []))
-
-    # TODO: remove the need for run['chunked']
-    if mozharness.get('chunked') or test['chunks'] > 1:
-        # Implement mozharness['chunking-args'], modifying command in place
-        if mozharness['chunking-args'] == 'this-chunk':
-            command.append('--total-chunk={}'.format(test['chunks']))
-            command.append('--this-chunk={}'.format(test['this-chunk']))
-        elif mozharness['chunking-args'] == 'test-suite-suffix':
-            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
-            for i, c in enumerate(command):
-                if isinstance(c, basestring) and c.startswith('--test-suite'):
-                    command[i] += suffix
-
-    if 'download-symbols' in mozharness:
-        download_symbols = mozharness['download-symbols']
-        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
-        command.append('--download-symbols=' + download_symbols)
-
-
 @run_job_using('script-engine-autophone', 'mozharness-test', schema=mozharness_test_run_schema)
 def mozharness_test_on_script_engine_autophone(config, job, taskdesc):
     test = taskdesc['run']['test']
     mozharness = test['mozharness']
     worker = taskdesc['worker']
     is_talos = test['suite'] == 'talos' or test['suite'] == 'raptor'
     if worker['os'] != 'linux':
         raise Exception('os: {} not supported on script-engine-autophone'.format(worker['os']))
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -112,36 +112,16 @@ def docker_worker_run_task(config, job, 
     if run['comm-checkout']:
         command.append('--comm-checkout={workdir}/checkouts/gecko/comm'.format(**run))
     command.append('--fetch-hgfingerprint')
     command.append('--')
     command.extend(run_command)
     worker['command'] = command
 
 
-@run_job_using("native-engine", "run-task", schema=run_task_schema, defaults=worker_defaults)
-def native_engine_run_task(config, job, taskdesc):
-    run = job['run']
-    worker = taskdesc['worker'] = job['worker']
-    command = ['./run-task']
-    common_setup(config, job, taskdesc, command)
-
-    worker['context'] = run_task_url(config)
-
-    if run.get('cache-dotcache'):
-        raise Exception("No cache support on native-worker; can't use cache-dotcache")
-
-    run_command = run['command']
-    if isinstance(run_command, basestring):
-        run_command = ['bash', '-cx', run_command]
-    command.append('--')
-    command.extend(run_command)
-    worker['command'] = command
-
-
 @run_job_using("generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults)
 def generic_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     is_win = worker['os'] == 'windows'
     is_mac = worker['os'] == 'macosx'
 
     if is_win:
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -366,17 +366,17 @@ def verify_index(config, index):
     product = index['product']
     if product not in config.graph_config['index']['products']:
         raise Exception(UNSUPPORTED_INDEX_PRODUCT_ERROR.format(product=product))
 
 
 @payload_builder('docker-worker', schema={
     Required('os'): 'linux',
 
-    # For tasks that will run in docker-worker or docker-engine, this is the
+    # For tasks that will run in docker-worker, this is the
     # name of the docker image or in-tree docker image to run the task in.  If
     # in-tree, then a dependency will be created automatically.  This is
     # generally `desktop-test`, or an image that acts an awful lot like it.
     Required('docker-image'): Any(
         # a raw Docker image path (repo/image:tag)
         basestring,
         # an in-tree generated docker image (from `taskcluster/docker/<name>`)
         {'in-tree': basestring},
@@ -1284,79 +1284,16 @@ def build_invalid_payload(config, task, 
 
 @payload_builder('always-optimized', schema={
     Extra: object,
 })
 def build_always_optimized_payload(config, task, task_def):
     task_def['payload'] = {}
 
 
-@payload_builder('native-engine', schema={
-    Required('os'): Any('macosx', 'linux'),
-
-    # the maximum time to run, in seconds
-    Required('max-run-time'): int,
-
-    # A link for an executable to download
-    Optional('context'): basestring,
-
-    # Tells the worker whether machine should reboot
-    # after the task is finished.
-    Optional('reboot'):
-    Any('always', 'on-exception', 'on-failure'),
-
-    # the command to run
-    Optional('command'): [taskref_or_string],
-
-    # environment variables
-    Optional('env'): {basestring: taskref_or_string},
-
-    # artifacts to extract from the task image after completion
-    Optional('artifacts'): [{
-        # type of artifact -- simple file, or recursive directory
-        Required('type'): Any('file', 'directory'),
-
-        # task image path from which to read artifact
-        Required('path'): basestring,
-
-        # name of the produced artifact (root of the names for
-        # type=directory)
-        Required('name'): basestring,
-    }],
-    # Wether any artifacts are assigned to this worker
-    Optional('skip-artifacts'): bool,
-})
-def build_macosx_engine_payload(config, task, task_def):
-    worker = task['worker']
-
-    # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
-    # be provided directly by the worker, making this redundant
-    worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = get_root_url()
-
-    artifacts = map(lambda artifact: {
-        'name': artifact['name'],
-        'path': artifact['path'],
-        'type': artifact['type'],
-        'expires': task_def['expires'],
-    }, worker.get('artifacts', []))
-
-    task_def['payload'] = {
-        'context': worker['context'],
-        'command': worker['command'],
-        'env': worker['env'],
-        'artifacts': artifacts,
-        'maxRunTime': worker['max-run-time'],
-    }
-    if worker.get('reboot'):
-        task_def['payload'] = worker['reboot']
-
-    if task.get('needs-sccache'):
-        raise Exception('needs-sccache not supported in native-engine')
-
-
 @payload_builder('script-engine-autophone', schema={
     Required('os'): Any('macosx', 'linux'),
 
     # A link for an executable to download
     Optional('context'): basestring,
 
     # Tells the worker whether machine should reboot
     # after the task is finished.
@@ -1412,17 +1349,17 @@ def set_defaults(config, tasks):
     for task in tasks:
         task.setdefault('shipping-phase', None)
         task.setdefault('shipping-product', None)
         task.setdefault('always-target', False)
         task.setdefault('optimization', None)
         task.setdefault('needs-sccache', False)
 
         worker = task['worker']
-        if worker['implementation'] in ('docker-worker', 'docker-engine'):
+        if worker['implementation'] in ('docker-worker',):
             worker.setdefault('relengapi-proxy', False)
             worker.setdefault('chain-of-trust', False)
             worker.setdefault('taskcluster-proxy', False)
             worker.setdefault('allow-ptrace', False)
             worker.setdefault('loopback-video', False)
             worker.setdefault('loopback-audio', False)
             worker.setdefault('docker-in-docker', False)
             worker.setdefault('privileged', False)
@@ -1808,18 +1745,16 @@ def build_task(config, tasks):
                     task['label'], attributes['shipping_product'], task['shipping-product']
                 )
             )
         attributes.setdefault('shipping_product', task['shipping-product'])
 
         # Set MOZ_AUTOMATION on all jobs.
         if task['worker']['implementation'] in (
             'generic-worker',
-            'docker-engine',
-            'native-engine',
             'docker-worker',
         ):
             payload = task_def.get('payload')
             if payload:
                 env = payload.setdefault('env', {})
                 env['MOZ_AUTOMATION'] = '1'
 
         yield {
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -237,17 +237,17 @@ test_description_schema = Schema({
     Required('loopback-video'): bool,
 
     # Whether the test can run using a software GL implementation on Linux
     # using the GL compositor. May not be used with "legacy" sized instances
     # due to poor LLVMPipe performance (bug 1296086).  Defaults to true for
     # unit tests on linux platforms and false otherwise
     Optional('allow-software-gl-layers'): bool,
 
-    # For tasks that will run in docker-worker or docker-engine, this is the
+    # For tasks that will run in docker-worker, this is the
     # name of the docker image or in-tree docker image to run the task in.  If
     # in-tree, then a dependency will be created automatically.  This is
     # generally `desktop-test`, or an image that acts an awful lot like it.
     Required('docker-image'): optionally_keyed_by(
         'test-platform',
         Any(
             # a raw Docker image path (repo/image:tag)
             basestring,