author | Peter Moore <pmoore@mozilla.com>
date | Thu, 06 Apr 2017 18:50:41 +0200
changeset 399851 | baade5ccb7c95fb0be9a06e1b8f2a82bf9d19a98
parent 399760 | 6aa495cd990886c310b01386d8b02a17d5603890
child 399852 | 4fba86dcc6ce53fcd049c649d9ba33c6a3ee76e6
push id | 7391
push user | mtabara@mozilla.com
push date | Mon, 12 Jun 2017 13:08:53 +0000
treeherder | mozilla-beta@2191d7f87e2e
reviewers | dustin
bugs | 1349980
milestone | 55.0a1
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
--- a/AUTHORS
+++ b/AUTHORS
@@ -792,16 +792,17 @@ Paul Rouget <paul@mozilla.com>
 Paul Sandoz <paul.sandoz@sun.com>
 Pavel Cvrcek
 Pawel Chmielowski
 PenPal
 Pete Collins <petejc@collab.net>
 Peter Annema <disttsc@bart.nl>
 Peter Bajusz <hyp-x@inf.bme.hu>
 Peter Lubczynski <peterl@netscape.com>
+Peter Moore <petemoore@gmx.net>
 Peter Naulls
 Peter Parente <parente@cs.unc.edu>
 Peter Seliger
 Peter Van der Beken <peter@propagandism.org>
 Peter van der Woude
 Peter Weilbacher <mozilla@weilbacher.org>
 Pete Zha <pete.zha@sun.com>
 Petr Kostka <petr.kostka@st.com>
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -5,16 +5,18 @@
 Support for running jobs via mozharness.  Ideally, most stuff gets run this
 way, and certainly anything using mozharness should use this approach.
 
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+from textwrap import dedent
+
 from taskgraph.util.schema import Schema
 from voluptuous import Required, Optional, Any
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.transforms.job.common import (
     docker_worker_add_workspace_cache,
     docker_worker_add_gecko_vcs_env_vars,
     docker_worker_setup_secrets,
@@ -163,17 +165,17 @@ def mozharness_on_docker_worker_setup(co
         "taskcluster/scripts/builder/build-linux.sh"
     )))
 
     worker['command'] = command
 
 
 # We use the generic worker to run tasks on Windows
 @run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
-def mozharness_on_windows(config, job, taskdesc):
+def mozharness_on_generic_worker(config, job, taskdesc):
     run = job['run']
 
     # fail if invalid run options are included
     invalid = []
     for prop in ['actions', 'custom-build-variant-cfg',
                  'tooltool-downloads', 'secrets', 'taskcluster-proxy',
                  'need-xvfb']:
         if prop in run and run[prop]:
@@ -182,28 +184,33 @@ def mozharness_on_windows(config, job, t
         invalid.append('keep-artifacts')
     if invalid:
         raise Exception("Jobs run using mozharness on Windows do not support properties " +
                         ', '.join(invalid))
 
     worker = taskdesc['worker']
 
     worker['artifacts'] = [{
-        'path': r'public\build',
+        'path': r'public/build',
         'type': 'directory',
     }]
 
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
 
     env = worker['env']
     env.update({
         'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
     })
 
+    if not job['attributes']['build_platform'].startswith('win'):
+        raise Exception(
+            "Task generation for mozharness build jobs currently only supported on Windows"
+        )
+
     mh_command = [r'c:\mozilla-build\python\python.exe']
     mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
     for cfg in run['config']:
         mh_command.append('--config ' + cfg.replace('/', '\\'))
     mh_command.append('--branch ' + config.params['project'])
     mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
     for option in run.get('options', []):
         mh_command.append('--' + option)
@@ -213,22 +220,25 @@ def mozharness_on_windows(config, job, t
     hg_command.extend(['--sharebase', 'y:\\hg-shared'])
     hg_command.append('--purge')
     hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
     hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
     hg_command.append(env['GECKO_HEAD_REPOSITORY'])
     hg_command.append('.\\build\\src')
 
     worker['command'] = []
-    # sccache currently uses the full compiler commandline as input to the
-    # cache hash key, so create a symlink to the task dir and build from
-    # the symlink dir to get consistent paths.
     if taskdesc.get('needs-sccache'):
         worker['command'].extend([
-            r'if exist z:\build rmdir z:\build',
+            # Make the comment part of the first command, as it will help users to
+            # understand what is going on, and why these steps are implemented.
+            dedent('''\
+            :: sccache currently uses the full compiler commandline as input to the
+            :: cache hash key, so create a symlink to the task dir and build from
+            :: the symlink dir to get consistent paths.
+            if exist z:\\build rmdir z:\\build'''),
             r'mklink /d z:\build %cd%',
             # Grant delete permission on the link to everyone.
             r'icacls z:\build /grant *S-1-1-0:D /L',
             r'cd /d z:\build',
         ])
 
     worker['command'].extend([
         ' '.join(hg_command),
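Note on the dedent() change above: the ':: ...' lines are cmd.exe comments, so folding the explanation into the first command keeps the rationale visible in the task's command list without adding extra steps. A minimal sketch (not part of the patch) of what the worker ends up receiving:

# Sketch only: shows how dedent() strips the common leading whitespace so the
# string handed to generic-worker starts every line at column zero.
from textwrap import dedent

first_command = dedent('''\
    :: sccache currently uses the full compiler commandline as input to the
    :: cache hash key, so create a symlink to the task dir and build from
    :: the symlink dir to get consistent paths.
    if exist z:\\build rmdir z:\\build''')

# Each ':: ' line is a no-op comment for cmd.exe; only the final line acts.
print(first_command)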
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -164,83 +164,124 @@ def mozharness_test_on_docker(config, jo
     download_symbols = mozharness['download-symbols']
     download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
     command.append('--download-symbols=' + download_symbols)
 
     worker['command'] = command
 
 
 @run_job_using('generic-worker', 'mozharness-test', schema=mozharness_test_run_schema)
-def mozharness_test_on_windows(config, job, taskdesc):
+def mozharness_test_on_generic_worker(config, job, taskdesc):
     test = taskdesc['run']['test']
     mozharness = test['mozharness']
     worker = taskdesc['worker']
 
     artifacts = [
         {
-            'path': 'public\\logs\\localconfig.json',
+            'name': 'public/logs/localconfig.json',
+            'path': 'logs/localconfig.json',
             'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_critical.log',
+            'name': 'public/logs/log_critical.log',
+            'path': 'logs/log_critical.log',
             'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_error.log',
+            'name': 'public/logs/log_error.log',
+            'path': 'logs/log_error.log',
             'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_fatal.log',
+            'name': 'public/logs/log_fatal.log',
+            'path': 'logs/log_fatal.log',
             'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_info.log',
+            'name': 'public/logs/log_info.log',
+            'path': 'logs/log_info.log',
            'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_raw.log',
+            'name': 'public/logs/log_raw.log',
+            'path': 'logs/log_raw.log',
             'type': 'file'
         },
         {
-            'path': 'public\\logs\\log_warning.log',
+            'name': 'public/logs/log_warning.log',
+            'path': 'logs/log_warning.log',
             'type': 'file'
         },
         {
-            'path': 'public\\test_info',
+            'name': 'public/test_info',
+            'path': 'build/blobber_upload_dir',
             'type': 'directory'
         }
     ]
 
     build_platform = taskdesc['attributes']['build_platform']
-    target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform)
+    target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform) \
+        if build_platform.startswith('win') else 'target'
 
-    installer_url = get_artifact_url(
-        '<build>', 'public/build/{}.zip'.format(target))
+    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
+
     test_packages_url = get_artifact_url(
         '<build>', 'public/build/{}.test_packages.json'.format(target))
-    mozharness_url = get_artifact_url(
-        '<build>', 'public/build/mozharness.zip')
 
     taskdesc['scopes'].extend(
         ['generic-worker:os-group:{}'.format(group) for group in test['os-groups']])
 
     worker['os-groups'] = test['os-groups']
 
     worker['max-run-time'] = test['max-run-time']
     worker['artifacts'] = artifacts
 
-    # assemble the command line
-    mh_command = [
-        'c:\\mozilla-build\\python\\python.exe',
-        '-u',
-        'mozharness\\scripts\\' + normpath(mozharness['script'])
-    ]
+    # this list will get cleaned up / reduced / removed in bug 1354088
+    if build_platform.startswith('macosx'):
+        worker['env'] = {
+            'IDLEIZER_DISABLE_SHUTDOWN': 'true',
+            'LANG': 'en_US.UTF-8',
+            'LC_ALL': 'en_US.UTF-8',
+            'MOZ_HIDE_RESULTS_TABLE': '1',
+            'MOZ_NODE_PATH': '/usr/local/bin/node',
+            'MOZ_NO_REMOTE': '1',
+            'NO_EM_RESTART': '1',
+            'NO_FAIL_ON_TEST_ERRORS': '1',
+            'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
+            'SHELL': '/bin/bash',
+            'XPCOM_DEBUG_BREAK': 'warn',
+            'XPC_FLAGS': '0x0',
+            'XPC_SERVICE_NAME': '0'
+        }
+
+    if build_platform.startswith('macosx'):
+        mh_command = [
+            'python2.7',
+            '-u',
+            'mozharness/scripts/' + mozharness['script']
+        ]
+    elif build_platform.startswith('win'):
+        mh_command = [
+            'c:\\mozilla-build\\python\\python.exe',
+            '-u',
+            'mozharness\\scripts\\' + normpath(mozharness['script'])
+        ]
+    else:
+        mh_command = [
+            'python',
+            '-u',
+            'mozharness/scripts/' + mozharness['script']
+        ]
+
     for mh_config in mozharness['config']:
-        mh_command.extend(['--cfg', 'mozharness\\configs\\' + normpath(mh_config)])
+        cfg_path = 'mozharness/configs/' + mh_config
+        if build_platform.startswith('win'):
+            cfg_path = normpath(cfg_path)
+        mh_command.extend(['--cfg', cfg_path])
 
     mh_command.extend(mozharness.get('extra-options', []))
     if mozharness.get('no-read-buildbot-config'):
         mh_command.append('--no-read-buildbot-config')
     mh_command.extend(['--installer-url', installer_url])
     mh_command.extend(['--test-packages-url', test_packages_url])
     if mozharness.get('download-symbols'):
         if isinstance(mozharness['download-symbols'], basestring):
             mh_command.extend(['--download-symbols', mozharness['download-symbols']])
@@ -254,26 +295,38 @@ def mozharness_test_on_windows(config, j
             mh_command.append('--total-chunk={}'.format(test['chunks']))
             mh_command.append('--this-chunk={}'.format(test['this-chunk']))
         elif mozharness['chunking-args'] == 'test-suite-suffix':
             suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
             for i, c in enumerate(mh_command):
                 if isinstance(c, basestring) and c.startswith('--test-suite'):
                     mh_command[i] += suffix
 
-    # bug 1311966 - symlink to artifacts until generic worker supports virtual artifact paths
-    artifact_link_commands = ['mklink /d %cd%\\public\\test_info %cd%\\build\\blobber_upload_dir']
-    for link in [a['path'] for a in artifacts if a['path'].startswith('public\\logs\\')]:
-        artifact_link_commands.append('mklink %cd%\\{} %cd%\\{}'.format(link, link[7:]))
+    worker['mounts'] = [{
+        'directory': '.',
+        'content': {
+            'artifact': 'public/build/mozharness.zip',
+            'task-id': {
+                'task-reference': '<build>'
+            }
+        },
+        'format': 'zip'
+    }]
 
-    worker['command'] = artifact_link_commands + [
-        {'task-reference': 'c:\\mozilla-build\\wget\\wget.exe {}'.format(mozharness_url)},
-        'c:\\mozilla-build\\info-zip\\unzip.exe mozharness.zip',
-        {'task-reference': ' '.join(mh_command)}
-    ]
+    if build_platform.startswith('win'):
+        worker['command'] = [
+            {'task-reference': ' '.join(mh_command)}
+        ]
+    else:
+        mh_command_task_ref = []
+        for token in mh_command:
+            mh_command_task_ref.append({'task-reference': token})
+        worker['command'] = [
+            mh_command_task_ref
+        ]
 
 
 @run_job_using('native-engine', 'mozharness-test', schema=mozharness_test_run_schema)
 def mozharness_test_on_native_engine(config, job, taskdesc):
     test = taskdesc['run']['test']
     mozharness = test['mozharness']
     worker = taskdesc['worker']
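The net effect of the rewrite above: mozharness.zip is delivered through a generic-worker mount instead of an explicit wget/unzip step, and the command shape differs per platform — Windows gets a list of shell strings, while OS X/Linux get a list of argv arrays whose tokens may each be task-references. An illustrative sketch of the two resulting payload fragments (the script name and command text are placeholders, not taken from a real task):

# Illustrative shapes only; values are placeholders.
mozharness_mount = [{
    'directory': '.',
    'content': {'artifact': 'public/build/mozharness.zip',
                'task-id': {'task-reference': '<build>'}},
    'format': 'zip',
}]

windows_style = {
    'mounts': mozharness_mount,
    # one shell string per command, run via cmd.exe
    'command': [{'task-reference': 'c:\\mozilla-build\\python\\python.exe -u mozharness\\scripts\\...'}],
}

macosx_style = {
    'mounts': mozharness_mount,
    # one argv array per command; each token can be a task-reference
    'command': [[{'task-reference': 'python2.7'},
                 {'task-reference': '-u'},
                 {'task-reference': 'mozharness/scripts/desktop_unittest.py'}]],
}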
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -140,17 +140,17 @@ def windows_toolchain(config, job, taskd
     }]
 
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
 
     # We fetch LLVM SVN into this.
     svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
     worker['mounts'] = [{
         'cache-name': svn_cache,
-        'path': r'llvm-sources',
+        'directory': r'llvm-sources',
     }]
     taskdesc['scopes'].extend([
         'generic-worker:cache:' + svn_cache,
     ])
 
     env = worker['env']
     env.update({
         'MOZ_BUILD_DATE': config.params['moz_build_date'],
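This rename only touches the in-tree schema key ('path' becomes 'directory'); once build_generic_worker_payload (see task.py below) converts 'cache-name' to 'cacheName', the mount should come out roughly as in this sketch (the level number in the cache name is illustrative):

# Rough shape of the generic-worker payload mount produced from the block above;
# the "level-3" value is an assumption for illustration.
payload_mount = {
    'cacheName': 'level-3-toolchain-clang-cl-build-svn',
    'directory': 'llvm-sources',
}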
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -7,16 +7,17 @@ task definition (along with attributes,
 transformations is generic to any kind of task, but abstracts away some of the
 complexities of worker implementations, scopes, and treeherder annotations.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import json
 import time
+from copy import deepcopy
 
 from taskgraph.util.treeherder import split_symbol
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import validate_schema, Schema
 from taskgraph.util.scriptworker import get_release_config
 from voluptuous import Any, Required, Optional, Extra
 
 from .gecko_v2_whitelist import JOB_NAME_WHITELIST, JOB_NAME_WHITELIST_ERROR
@@ -207,48 +208,91 @@ task_description_schema = Schema({
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
         # the exit status code that indicates the task should be retried
         Optional('retry-exit-status'): int,
 
     }, {
+        # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
+        # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
         Required('implementation'): 'generic-worker',
 
         # command is a list of commands to run, sequentially
-        Required('command'): [taskref_or_string],
+        # on Windows, each command is a string, on OS X and Linux, each command is
+        # a string array
+        Required('command'): Any(
+            [taskref_or_string],    # Windows
+            [[taskref_or_string]]   # Linux / OS X
+        ),
 
         # artifacts to extract from the task image after completion; note that artifacts
         # for the generic worker cannot have names
         Optional('artifacts'): [{
             # type of artifact -- simple file, or recursive directory
             'type': Any('file', 'directory'),
 
-            # task image path from which to read artifact
+            # filesystem path from which to read artifact
             'path': basestring,
+
+            # if not specified, path is used for artifact name
+            Optional('name'): basestring
         }],
 
-        # directories and/or files to be mounted
+        # Directories and/or files to be mounted.
+        # The actual allowed combinations are stricter than the model below,
+        # but this provides a simple starting point.
+        # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
        Optional('mounts'): [{
-            # a unique name for the cache volume
-            'cache-name': basestring,
+            # A unique name for the cache volume, implies writable cache directory
+            # (otherwise mount is a read-only file or directory).
+            Optional('cache-name'): basestring,
+            # Optional content for pre-loading cache, or mandatory content for
+            # read-only file or directory. Pre-loaded content can come from either
+            # a task artifact or from a URL.
+            Optional('content'): {
+
+                # *** Either (artifact and task-id) or url must be specified. ***
-            # task image path for the cache
-            'path': basestring,
+                # Artifact name that contains the content.
+                Optional('artifact'): basestring,
+                # Task ID that has the artifact that contains the content.
+                Optional('task-id'): taskref_or_string,
+                # URL that supplies the content in response to an unauthenticated
+                # GET request.
+                Optional('url'): basestring
+            },
+
+            # *** Either file or directory must be specified. ***
+
+            # If mounting a cache or read-only directory, the filesystem location of
+            # the directory should be specified as a relative path to the task
+            # directory here.
+            Optional('directory'): basestring,
+            # If mounting a file, specify the relative path within the task
+            # directory to mount the file (the file will be read only).
+            Optional('file'): basestring,
+            # Required if and only if `content` is specified and mounting a
+            # directory (not a file). This should be the archive format of the
+            # content (either pre-loaded cache or read-only directory).
+            Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
         }],
 
         # environment variables
         Required('env', default={}): {basestring: taskref_or_string},
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
         # os user groups for test task workers
         Optional('os-groups', default=[]): [basestring],
+
+        # optional features
+        Required('chain-of-trust', default=False): bool,
     }, {
         Required('implementation'): 'buildbot-bridge',
 
         # see
         # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
         Required('buildername'): basestring,
         Required('sourcestamp'): {
             'branch': basestring,
@@ -552,44 +596,60 @@ def build_docker_worker_payload(config, 
 
 @payload_builder('generic-worker')
 def build_generic_worker_payload(config, task, task_def):
     worker = task['worker']
 
     artifacts = []
 
     for artifact in worker['artifacts']:
-        artifacts.append({
+        a = {
             'path': artifact['path'],
             'type': artifact['type'],
             'expires': task_def['expires'],  # always expire with the task
-        })
-
-    mounts = []
+        }
+        if 'name' in artifact:
+            a['name'] = artifact['name']
+        artifacts.append(a)
 
-    for mount in worker.get('mounts', []):
-        mounts.append({
-            'cacheName': mount['cache-name'],
-            'directory': mount['path']
-        })
+    # Need to copy over mounts, but rename keys to respect naming convention
+    #   * 'cache-name' -> 'cacheName'
+    #   * 'task-id'    -> 'taskId'
+    # All other key names are already suitable, and don't need renaming.
+    mounts = deepcopy(worker.get('mounts', []))
+    for mount in mounts:
+        if 'cache-name' in mount:
+            mount['cacheName'] = mount.pop('cache-name')
+        if 'content' in mount:
+            if 'task-id' in mount['content']:
+                mount['content']['taskId'] = mount['content'].pop('task-id')
 
     task_def['payload'] = {
         'command': worker['command'],
         'artifacts': artifacts,
         'env': worker.get('env', {}),
         'mounts': mounts,
         'maxRunTime': worker['max-run-time'],
         'osGroups': worker.get('os-groups', []),
     }
 
     # needs-sccache is handled in mozharness_on_windows
     if 'retry-exit-status' in worker:
         raise Exception("retry-exit-status not supported in generic-worker")
 
+    # currently only support one feature (chain of trust) but this will likely grow
+    features = {}
+
+    if worker.get('chain-of-trust'):
+        features['chainOfTrust'] = True
+
+    if features:
+        task_def['payload']['features'] = features
+
 
 @payload_builder('scriptworker-signing')
 def build_scriptworker_signing_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'upstreamArtifacts': worker['upstream-artifacts']
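The key renaming in build_generic_worker_payload is mechanical: the in-tree kebab-case keys become the camelCase names the generic-worker payload expects. A self-contained restatement with a worked example (the helper name is mine, not in the patch):

from copy import deepcopy

def _rename_mount_keys(mounts):
    # Same renaming the patch applies: 'cache-name' -> 'cacheName',
    # 'task-id' -> 'taskId'; everything else passes through untouched.
    mounts = deepcopy(mounts)
    for mount in mounts:
        if 'cache-name' in mount:
            mount['cacheName'] = mount.pop('cache-name')
        if 'content' in mount and 'task-id' in mount['content']:
            mount['content']['taskId'] = mount['content'].pop('task-id')
    return mounts

# Example: a mount as written in a task description, and the payload it becomes.
print(_rename_mount_keys([{
    'directory': '.',
    'content': {'artifact': 'public/build/mozharness.zip',
                'task-id': {'task-reference': '<build>'}},
    'format': 'zip',
}]))
# -> the same dict, but with 'taskId' in place of 'task-id'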
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -37,22 +37,23 @@ import copy
 import logging
 
 WORKER_TYPE = {
     # default worker types keyed by instance-size
     'large': 'aws-provisioner-v1/gecko-t-linux-large',
     'xlarge': 'aws-provisioner-v1/gecko-t-linux-xlarge',
     'legacy': 'aws-provisioner-v1/gecko-t-linux-medium',
     'default': 'aws-provisioner-v1/gecko-t-linux-large',
-    # windows worker types keyed by test-platform
+    # windows / os x worker types keyed by test-platform
     'windows7-32-vm': 'aws-provisioner-v1/gecko-t-win7-32',
     'windows7-32': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
     'windows10-64-vm': 'aws-provisioner-v1/gecko-t-win10-64',
     'windows10-64': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
-    'windows10-64-asan': 'aws-provisioner-v1/gecko-t-win10-64-gpu'
+    'windows10-64-asan': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
+    'macosx64': 'scl3-puppet/os-x-10-10-gw'
 }
 
 logger = logging.getLogger(__name__)
 transforms = TransformSequence()
 
 # Schema for a test description
 #
@@ -336,16 +337,21 @@ def set_defaults(config, tests):
 
 @transforms.add
 def set_target(config, tests):
     for test in tests:
         build_platform = test['build-platform']
         if build_platform.startswith('macosx'):
             target = 'target.dmg'
         elif build_platform.startswith('android'):
             target = 'target.apk'
+        elif build_platform.startswith('win'):
+            target = 'firefox-{}.en-US.{}.zip'.format(
+                get_firefox_version(),
+                build_platform.split('/')[0]
+            )
         else:
             target = 'target.tar.bz2'
         test['mozharness']['build-artifact-name'] = 'public/build/' + target
 
         yield test
 
 
 @transforms.add
 def set_treeherder_machine_platform(config, tests):
@@ -381,21 +387,26 @@ def set_asan_docker_image(config, tests)
                 test['build-platform'] == 'linux64-asan/opt':
             test['docker-image'] = {"in-tree": "desktop-test"}
         yield test
 
 
 @transforms.add
 def set_worker_implementation(config, tests):
     """Set the worker implementation based on the test platform."""
-    use_tc_worker = config.config['args'].taskcluster_worker
     for test in tests:
         if test['test-platform'].startswith('macosx'):
-            test['worker-implementation'] = \
-                'native-engine' if use_tc_worker else 'buildbot-bridge'
+            # see if '-g' appears in try syntax
+            if config.config['args'].generic_worker:
+                test['worker-implementation'] = 'generic-worker'
+            # see if '-w' appears in try syntax
+            elif config.config['args'].taskcluster_worker:
+                test['worker-implementation'] = 'native-engine'
+            else:
+                test['worker-implementation'] = 'buildbot-bridge'
         elif test.get('suite', '') == 'talos':
             test['worker-implementation'] = 'buildbot-bridge'
         elif test['test-platform'].startswith('win'):
             test['worker-implementation'] = 'generic-worker'
         else:
             test['worker-implementation'] = 'docker-worker'
         yield test
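The macOS branch of set_worker_implementation now chooses between three implementations instead of two. A standalone restatement of that decision (the _Args stand-in below is hypothetical, replacing config.config['args']):

# Sketch only: restates the macOS decision logic outside the transform.
def macosx_worker_implementation(args):
    if args.generic_worker:            # '-g' appeared in try syntax
        return 'generic-worker'
    elif args.taskcluster_worker:      # '-w' appeared in try syntax
        return 'native-engine'
    return 'buildbot-bridge'           # default: stay on buildbot

class _Args(object):
    generic_worker = False
    taskcluster_worker = False

args = _Args()
args.generic_worker = True
assert macosx_worker_implementation(args) == 'generic-worker'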
--- a/taskcluster/taskgraph/try_option_syntax.py
+++ b/taskcluster/taskgraph/try_option_syntax.py
@@ -237,16 +237,19 @@ def parse_message(message):
     parser.add_argument('--no-retry', dest='no_retry', action='store_true')
     parser.add_argument('--include-nightly', dest='include_nightly', action='store_true')
 
     # While we are transitioning from BB to TC, we want to push jobs to tc-worker
     # machines but not overload machines with every try push. Therefore, we add
     # this temporary option to be able to push jobs to tc-worker.
     parser.add_argument('-w', '--taskcluster-worker',
                         dest='taskcluster_worker', action='store_true', default=False)
+    # Similarly, an extra flag for enabling os x jobs in generic-worker
+    parser.add_argument('-g', '--generic-worker',
+                        dest='generic_worker', action='store_true', default=False)
 
     # In order to run test jobs multiple times
     parser.add_argument('--rebuild', dest='trigger_tests', type=int, default=1)
 
     parts = parts[try_idx:] if try_idx is not None else []
     args, _ = parser.parse_known_args(parts[try_idx:])
 
     return args
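A quick sketch of how the new flag parses alongside '-w' (the try message below is made up for illustration; parse_known_args simply collects the options this minimal parser does not define):

# Sketch only: verifies that '-g' sets generic_worker independently of '-w'.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-w', '--taskcluster-worker',
                    dest='taskcluster_worker', action='store_true', default=False)
parser.add_argument('-g', '--generic-worker',
                    dest='generic_worker', action='store_true', default=False)

# Hypothetical try message fragment; unknown options land in the extras list.
args, extras = parser.parse_known_args('-b o -p macosx64 -u all -g'.split())
assert args.generic_worker and not args.taskcluster_worker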