--- a/.taskcluster.yml
+++ b/.taskcluster.yml
@@ -141,19 +141,16 @@ tasks:
                   $merge:
                     - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
                       GECKO_HEAD_REPOSITORY: '${repoUrl}'
                       GECKO_HEAD_REF: '${push.revision}'
                       GECKO_HEAD_REV: '${push.revision}'
                       GECKO_COMMIT_MSG: {$if: 'tasks_for != "action"', then: '${push.comment}'}
                       HG_STORE_PATH: /builds/worker/checkouts/hg-store
                       TASKCLUSTER_CACHES: /builds/worker/checkouts
-                      # someday, these will be provided by the worker - Bug 1492664
-                      TASKCLUSTER_ROOT_URL: https://taskcluster.net
-                      TASKCLUSTER_PROXY_URL: http://taskcluster
                     - $if: 'tasks_for == "action"'
                       then:
                         ACTION_TASK_GROUP_ID: '${action.taskGroupId}'  # taskGroupId of the target task
                         ACTION_TASK_ID: {$json: {$eval: 'taskId'}}  # taskId of the target task (JSON-encoded)
                         ACTION_INPUT: {$json: {$eval: 'input'}}
                         ACTION_CALLBACK: '${action.cb_name}'
                         ACTION_PARAMETERS: {$json: {$eval: 'parameters'}}
--- a/build/virtualenv_packages.txt
+++ b/build/virtualenv_packages.txt
@@ -40,17 +40,16 @@ mozilla.pth:third_party/python/scandir
 mozilla.pth:third_party/python/slugid
 mozilla.pth:third_party/python/taskcluster
 mozilla.pth:third_party/python/taskcluster-urls
 mozilla.pth:third_party/python/py
 mozilla.pth:third_party/python/pytest/src
 mozilla.pth:third_party/python/pytoml
 mozilla.pth:third_party/python/redo
 mozilla.pth:third_party/python/six
-mozilla.pth:third_party/python/taskcluster-urls
 mozilla.pth:third_party/python/voluptuous
 mozilla.pth:third_party/python/json-e
 mozilla.pth:build
 objdir:build
 mozilla.pth:build/pymake
 mozilla.pth:config
 mozilla.pth:config/mozunit
 mozilla.pth:dom/bindings
--- a/mobile/android/mach_commands.py
+++ b/mobile/android/mach_commands.py
@@ -45,23 +45,19 @@ def REMOVED(cls):
     See https://developer.mozilla.org/en-US/docs/Simple_Firefox_for_Android_build#Developing_Firefox_for_Android_in_Android_Studio_or_IDEA_IntelliJ.  # NOQA: E501
     """
     return False
 
 
 @CommandProvider
 class MachCommands(MachCommandBase):
     def _root_url(self, artifactdir=None, objdir=None):
-        """Generate a publicly-accessible URL for the tasks's artifacts, or an objdir path"""
         if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
-            import taskcluster_urls
-            return taskcluster_urls.api(
-                os.environ['TASKCLUSTER_ROOT_URL'],
-                'queue', 'v1', 'task/{}/runs/{}/artifacts/{}'.format(
-                    os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir))
+            return 'https://queue.taskcluster.net/v1/task/{}/runs/{}/artifacts/{}'.format(
+                os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir)
         else:
             return os.path.join(self.topobjdir, objdir)
 
     @Command('android', category='devenv',
             description='Run Android-specific commands.',
             conditions=[conditions.is_android])
     def android(self):
         pass
--- a/python/mozrelease/mozrelease/buglist_creator.py
+++ b/python/mozrelease/mozrelease/buglist_creator.py
@@ -213,17 +213,16 @@ Task group: [{task_group_id}](https://to
     subject_prefix = "[mobile] "
     if product in {"firefox", "devedition"}:
         subject_prefix = "[desktop] "
 
     subject = '{} Build of {} {} build {}'.format(subject_prefix, product, version, build_number)
 
     notify_options = {}
     if 'TASKCLUSTER_PROXY_URL' in os.environ:
-        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
         base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
         notify_options['baseUrl'] = '{}/notify/v1'.format(base_url)
 
     notify = Notify(notify_options)
     for address in addresses:
         notify.email({
             'address': address,
             'subject': subject,
             'content': content,
--- a/taskcluster/ci/upload-symbols/kind.yml
+++ b/taskcluster/ci/upload-symbols/kind.yml
@@ -19,21 +19,22 @@ not-for-build-platforms:
 job-template:
     description: Upload Symbols
     worker-type: aws-provisioner-v1/gecko-{level}-b-linux
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 900
         env:
+            ARTIFACT_TASKID: {"task-reference": "<build>"}
             # {level} gets replaced in the upload_symbols transform
             SYMBOL_SECRET: "project/releng/gecko/build/level-{level}/gecko-symbol-upload"
     run:
         using: mach
-        mach: {artifact-reference: "python toolkit/crashreporter/tools/upload_symbols.py <build/public/build/target.crashreporter-symbols-full.zip>"}
+        mach: python toolkit/crashreporter/tools/upload_symbols.py https://queue.taskcluster.net/v1/task/${ARTIFACT_TASKID}/artifacts/public/build/target.crashreporter-symbols-full.zip
         sparse-profile: upload-symbols
     optimization:
         only-if-dependencies-run: null
     scopes:
         - secrets:get:project/releng/gecko/build/level-{level}/gecko-symbol-upload
     run-on-projects:
         by-build-platform:
             .*devedition.*: ['mozilla-beta', 'maple']
--- a/taskcluster/docker/debian-base/Dockerfile
+++ b/taskcluster/docker/debian-base/Dockerfile
@@ -44,19 +44,18 @@ RUN for s in debian_$DIST debian_$DIST-u
 RUN apt-get update && \
     apt-get install \
      apt-transport-https \
      ca-certificates
 
 COPY setup_packages.sh /usr/local/sbin/
 COPY cloud-mirror-workaround.sh /usr/local/sbin/
 
-# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
     echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";' > /etc/apt/apt.conf.d/99cloud-mirror-workaround && \
     apt-get update && \
     apt-get install \
      git \
      less \
      make \
      mercurial \
      patch \
--- a/taskcluster/docker/debian-base/setup_packages.sh
+++ b/taskcluster/docker/debian-base/setup_packages.sh
@@ -1,17 +1,5 @@
 #!/bin/sh
 
-TASKCLUSTER_ROOT_URL=$1
-shift
-
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
-
 for task in "$@"; do
-  echo "adding package source $queue_base/task/$task/artifacts/public/build/"
-  echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
+  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
 done
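The shell branch removed above reimplemented the URL-generation rule from taskcluster-lib-urls. For reference, the same rule as a minimal Python sketch (it mirrors the removed shell code, not any shipped helper; the example.com root URL is illustrative):

    def queue_base(root_url):
        # The legacy deployment exposes each service on its own hostname;
        # any other rootUrl nests services under <root>/api/.
        if root_url == 'https://taskcluster.net':
            return 'https://queue.taskcluster.net/v1'
        return '{}/api/queue/v1'.format(root_url.rstrip('/'))

    assert queue_base('https://taskcluster.net') == 'https://queue.taskcluster.net/v1'
    assert queue_base('https://tc.example.com') == 'https://tc.example.com/api/queue/v1'

With the backout, only the first branch's output remains, hardcoded at each call site.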
--- a/taskcluster/docker/debian7-build/Dockerfile
+++ b/taskcluster/docker/debian7-build/Dockerfile
@@ -3,19 +3,18 @@ FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
-# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
 
 # %ARG ARCH
 RUN dpkg --add-architecture $ARCH
 
 # Ideally, we wouldn't need gcc-multilib and the extra linux-libc-dev,
 # but the latter is required to make the former installable, and the former
 # because of bug 1409276.
 # We exclude /usr/share/doc/*/changelog.Debian* files because they might differ
--- a/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
+++ b/taskcluster/docker/debian7-mozjs-rust-build/Dockerfile
@@ -1,13 +1,12 @@
 # %ARG DOCKER_IMAGE_PARENT
 FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
-# %ARG TASKCLUSTER_ROOT_URL
 # %ARG DOCKER_IMAGE_PACKAGES
-RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
     apt-get update && \
     apt-get install cmake
--- a/taskcluster/docker/diffoscope/get_and_diffoscope
+++ b/taskcluster/docker/diffoscope/get_and_diffoscope
@@ -6,39 +6,32 @@ set -x
 cd /builds/worker
 
 mkdir a b
 
 # Until https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=879010 is
 # implemented, it's better to first manually extract the data.
 # Plus dmg files are not supported yet.
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
 case "$ORIG_URL" in
 */target.zip|*/target.apk)
     curl -sL "$ORIG_URL" > a.zip
     curl -sL "$NEW_URL" > b.zip
     unzip -d a a.zip
     unzip -d b b.zip
     ;;
 */target.tar.bz2)
     curl -sL "$ORIG_URL" | tar -C a -jxf -
     curl -sL "$NEW_URL" | tar -C b -jxf -
     ;;
 */target.dmg)
     # We don't have mach available to call mach artifact toolchain.
     # This is the trivial equivalent for those toolchains we use here.
     for t in $MOZ_TOOLCHAINS; do
-        curl -sL $queue_base/task/${t#*@}/artifacts/${t%@*} | tar -Jxf -
+        curl -sL https://queue.taskcluster.net/v1/task/${t#*@}/artifacts/${t%@*} | tar -Jxf -
     done
     for tool in lipo otool; do
         ln -s /builds/worker/cctools/bin/x86_64-apple-darwin*-$tool bin/$tool
     done
     export PATH=$PATH:/builds/worker/bin
     curl -sL "$ORIG_URL" > a.dmg
     curl -sL "$NEW_URL" > b.dmg
     for i in a b; do
--- a/taskcluster/docker/funsize-update-generator/runme.sh
+++ b/taskcluster/docker/funsize-update-generator/runme.sh
@@ -4,42 +4,35 @@ set -xe
 
 test "$TASK_ID"
 test "$SHA1_SIGNING_CERT"
 test "$SHA384_SIGNING_CERT"
 
 ARTIFACTS_DIR="/home/worker/artifacts"
 mkdir -p "$ARTIFACTS_DIR"
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
-curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
+    "https://queue.taskcluster.net/v1/task/$TASK_ID"
 
 # auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/
 # -> bucket of tc-gp-private-1d-us-east-1, path of releng/mbsdiff-cache/
 # Trailing slash is important, due to prefix permissions in S3.
 S3_BUCKET_AND_PATH=$(jq -r '.scopes[] | select(contains ("auth:aws-s3"))' /home/worker/task.json | awk -F: '{print $4}')
 
 # Will be empty if there's no scope for AWS S3.
 if [ -n "${S3_BUCKET_AND_PATH}" ] && getent hosts taskcluster
 then
   # Does this parse as we expect?
   S3_PATH=${S3_BUCKET_AND_PATH#*/}
   AWS_BUCKET_NAME=${S3_BUCKET_AND_PATH%/${S3_PATH}*}
   test "${S3_PATH}"
   test "${AWS_BUCKET_NAME}"
 
   set +x  # Don't echo these.
-  # Until bug 1460015 is finished, use baseUrl-style proxy URLs
-  secret_url="${TASKCLUSTER_PROXY_URL}/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
+  secret_url="taskcluster/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
   AUTH=$(curl "${secret_url}")
   AWS_ACCESS_KEY_ID=$(echo "${AUTH}" | jq -r '.credentials.accessKeyId')
   AWS_SECRET_ACCESS_KEY=$(echo "${AUTH}" | jq -r '.credentials.secretAccessKey')
   AWS_SESSION_TOKEN=$(echo "${AUTH}" | jq -r '.credentials.sessionToken')
   export AWS_ACCESS_KEY_ID
   export AWS_SECRET_ACCESS_KEY
   export AWS_SESSION_TOKEN
   AUTH=
--- a/taskcluster/docker/funsize-update-generator/scripts/funsize.py
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -30,29 +30,26 @@ from datadog import initialize, ThreadSt
 
 log = logging.getLogger(__name__)
 
 # Create this even when not sending metrics, so the context manager
 # statements work.
 ddstats = ThreadStats(namespace='releng.releases.partials')
 
-ROOT_URL = os.environ['TASKCLUSTER_ROOT_URL']
-QUEUE_PREFIX = ("https://queue.taskcluster.net/"
-                if ROOT_URL == 'https://taskcluster.net'
-                else ROOT_URL + '/api/queue/')
 ALLOWED_URL_PREFIXES = (
     "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
     "http://download.cdn.mozilla.net/pub/firefox/nightly/",
     "https://mozilla-nightly-updates.s3.amazonaws.com",
+    "https://queue.taskcluster.net/",
     "http://ftp.mozilla.org/",
     "http://download.mozilla.org/",
     "https://archive.mozilla.org/",
     "http://archive.mozilla.org/",
-    QUEUE_PREFIX,
+    "https://queue.taskcluster.net/v1/task/",
 )
 
 STAGING_URL_PREFIXES = (
     "http://ftp.stage.mozaws.net/",
 )
 
 DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
                             "{locale}-{from_buildid}-{to_buildid}.partial.mar"
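funsize validates candidate download URLs against this allow-list. A minimal sketch of such a prefix check (the helper name and the trimmed prefix tuple are illustrative, not funsize's actual code):

    ALLOWED_URL_PREFIXES = (
        "https://queue.taskcluster.net/",
        "https://archive.mozilla.org/",
    )

    def url_allowed(url, prefixes=ALLOWED_URL_PREFIXES):
        # A URL passes if it falls under any allowed prefix.
        return any(url.startswith(prefix) for prefix in prefixes)

    assert url_allowed("https://queue.taskcluster.net/v1/task/abc/artifacts/public/build/target.mar")
    assert not url_allowed("https://example.com/target.mar")

Note that with plain prefix matching, the restored "https://queue.taskcluster.net/" entry already covers the more specific ".../v1/task/" entry added at the end of the tuple.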
--- a/taskcluster/docker/periodic-updates/runme.sh
+++ b/taskcluster/docker/periodic-updates/runme.sh
@@ -46,35 +46,28 @@ if [ ! -z "${DO_SUFFIX_LIST}" ]
 then
   PARAMS="${PARAMS} --suffix-list"
 fi
 
 export ARTIFACTS_DIR="/home/worker/artifacts"
 mkdir -p "$ARTIFACTS_DIR"
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
 # Get Arcanist API token
 if [ -n "${TASK_ID}" ]
 then
-  curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+  curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
+    "https://queue.taskcluster.net/v1/task/$TASK_ID"
   ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
 fi
 if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
 then
   set +x # Don't echo these
-  # Until bug 1460015 is finished, use baseUrl-style proxy URLs
-  secrets_url="${TASKCLUSTER_PROXY_URL}/secrets/v1/secret/${ARC_SECRET}"
+  secrets_url="http://taskcluster/secrets/v1/secret/${ARC_SECRET}"
   SECRET=$(curl "${secrets_url}")
   TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
 elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
 then
   TOKEN="${ARC_TOKEN}"
 fi
 
 if [ -n "${TOKEN}" ]
--- a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
+++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
@@ -90,25 +90,16 @@ SUFFIX_LIST_UPDATED=false
 ARTIFACTS_DIR="${ARTIFACTS_DIR:-.}"
 
 # Defaults
 HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}"
 HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}"
 BLOCKLIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${BLOCKLIST_DIFF_ARTIFACT:-"blocklist.diff"}"
 REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}"
 SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}"
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-    index_base='https://index.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-    index_base="$TASKCLUSTER_ROOT_URL/api/index/v1"
-fi
-
 # Get the current in-tree version for a code branch.
 function get_version {
     VERSION_REPO=$1
     VERSION_FILE='version.txt'
 
     # TODO bypass temporary file
 
     cd "${BASEDIR}"
@@ -150,40 +141,40 @@ function download_shared_artifacts_from_
 }
 
 function download_shared_artifacts_from_tc {
     cd "${BASEDIR}"
     TASKID_FILE="taskId.json"
 
     # Download everything we need to run js with xpcshell
     echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
-    TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.latest.${PRODUCT}.linux64-opt"
+    TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.${REPODIR}.latest.${PRODUCT}.linux64-opt"
     if [ "${USE_MC}" == "true" ]; then
-        TASKID_URL="$index_base/task/gecko.v2.mozilla-central.latest.${PRODUCT}.linux64-opt"
+        TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.${PRODUCT}.linux64-opt"
     fi
-    ${WGET} -O ${TASKID_FILE} "${TASKID_URL}"
+    ${WGET} -O ${TASKID_FILE} ${TASKID_URL}
     INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
     if [ -z "${INDEX_TASK_ID}" ]; then
         echo "Failed to look up taskId at ${TASKID_URL}"
         exit 22
     else
         echo "INFO: Got taskId of $INDEX_TASK_ID"
     fi
 
     TASKSTATUS_FILE="taskstatus.json"
-    STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status"
+    STATUS_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/status"
     ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
     LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
     echo "INFO: Examining run number ${LAST_RUN_INDEX}"
 
-    BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
+    BROWSER_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
     echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
     ${WGET} "${BROWSER_ARCHIVE_URL}"
 
-    TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
+    TESTS_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
     echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
     ${WGET} "${TESTS_ARCHIVE_URL}"
 }
 
 function unpack_artifacts {
     cd "${BASEDIR}"
     if [ ! -f "${BROWSER_ARCHIVE}" ]; then
         echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2
--- a/taskcluster/docker/pipfile-updates/runme.sh
+++ b/taskcluster/docker/pipfile-updates/runme.sh
@@ -14,35 +14,28 @@ test "${PIPFILE_DIRECTORY}"
 
 PIP_ARG="-2"
 if [ -n "${PYTHON3}" ]; then
   PIP_ARG="-3"
 fi
 
 export ARTIFACTS_DIR="/home/worker/artifacts"
 mkdir -p "$ARTIFACTS_DIR"
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
 # Get Arcanist API token
 if [ -n "${TASK_ID}" ]
 then
-  curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+  curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
+    "https://queue.taskcluster.net/v1/task/$TASK_ID"
   ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
 fi
 if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
 then
   set +x # Don't echo these
-  # Until bug 1460015 is finished, use the old, baseUrl-style proxy URLs
-  secrets_url="${TASKCLUSTER_PROXY_URL}/secrets/v1/secret/${ARC_SECRET}"
+  secrets_url="http://taskcluster/secrets/v1/secret/${ARC_SECRET}"
   SECRET=$(curl "${secrets_url}")
   TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
 elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
 then
   TOKEN="${ARC_TOKEN}"
 fi
 
 if [ -n "${TOKEN}" ]
--- a/taskcluster/docker/toolchain-build/Dockerfile
+++ b/taskcluster/docker/toolchain-build/Dockerfile
@@ -4,18 +4,17 @@ MAINTAINER Mike Hommey <mhommey@mozilla.
 
 VOLUME /builds/worker/checkouts
 VOLUME /builds/worker/workspace
 VOLUME /builds/worker/tooltool-cache
 
 ENV XZ_OPT=-T0
 
 # %ARG DOCKER_IMAGE_PACKAGES
-# %ARG TASKCLUSTER_ROOT_URL
-RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
 
 RUN apt-get update && \
     apt-get install \
       autoconf \
       automake \
       bison \
       build-essential \
       curl \
--- a/taskcluster/docs/optimization-process.rst
+++ b/taskcluster/docs/optimization-process.rst
@@ -65,11 +65,11 @@ simultaneously rewrites all dependencies
 To do so, it assigns a taskId to each retained task and uses the replacement
 taskId for all replaced tasks.  The result is an optimized taskgraph with
 tasks named by taskId instead of label.  At this phase, the edges in the task
 graph diverge from the ``task.dependencies`` attributes, as the latter may
 contain dependencies outside of the taskgraph (for replacement tasks).
 
-As a side-effect, this phase also expands all ``{"task-reference": ".."}`` and
-``{"artifact-reference": ".."}`` objects within the task definitions.
+As a side-effect, this phase also expands all ``{"task-reference": ".."}``
+objects within the task definitions.
--- a/taskcluster/docs/taskgraph.rst
+++ b/taskcluster/docs/taskgraph.rst
@@ -171,21 +171,16 @@ using simple parameterized values, as fo
 ``{"task-reference": "string containing <dep-name>"}``
     The task definition may contain "task references" of this form.  These will
     be replaced during the optimization step, with the appropriate taskId for
     the named dependency substituted for ``<dep-name>`` in the string.
     Multiple labels may be substituted in a single string, and ``<<>`` can be
     used to escape a literal ``<``.
 
-``{"artifact-reference": "..<dep-name/artifact/name>.."}``
-    Similar to a ``task-reference``, but this substitutes a URL to the queue's
-    ``getLatestArtifact`` API method (for which a GET will redirect to the
-    artifact itself).
-
 .. _taskgraph-graph-config:
 
 Graph Configuration
 -------------------
 
 There are several configuration settings that pertain to the entire
 taskgraph.  These are specified in :file:`config.yml` at the root of the
 taskgraph configuration (typically :file:`taskcluster/ci/`).  The available
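To illustrate the surviving ``task-reference`` form, here is a toy resolver for the substitution rules described in the doc hunk above, including the ``<<>`` escape. The in-tree implementation lives in taskgraph/util/parameterization.py; this sketch only demonstrates the semantics:

    import re

    def resolve(s, taskids):
        # '<<>' produces a literal '<'; '<name>' produces the taskId of
        # the dependency named 'name'. Multiple references per string work.
        def repl(match):
            token = match.group(1)
            return '<' if token == '<' else taskids[token]
        return re.sub(r'<([^>]+)>', repl, s)

    assert resolve('<build>/artifacts', {'build': 'abc123'}) == 'abc123/artifacts'
    assert resolve('literal <<> here', {}) == 'literal < here'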
--- a/taskcluster/docs/transforms.rst
+++ b/taskcluster/docs/transforms.rst
@@ -197,19 +197,18 @@ Signing Descriptions
 
 Signing kinds are passed a single dependent job (from its kind dependency) to
 act on.  The transforms in ``taskcluster/taskgraph/transforms/signing.py``
 implement this common functionality.  They expect a "signing description",
 and produce a task definition.  The schema for a signing description is
 defined at the top of ``signing.py``, with copious comments.
 
-In particular you define a set of upstream artifact urls (that point at the
-dependent task) and can optionally provide a dependent name (defaults to build)
-for use in ``task-reference``/``artifact-reference``.  You also need to provide
-the signing formats to use.
+In particular you define a set of upstream artifact urls (that point at the dependent
+task) and can optionally provide a dependent name (defaults to build) for use in
+task-reference.  You also need to provide the signing formats to use.
 
 More Detail
 -----------
 
 The source files provide lots of additional detail, both in the code itself
 and in the comments and docstrings.  For the next level of detail beyond this
 file, consult the transform source under ``taskcluster/taskgraph/transforms``.
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -177,17 +177,17 @@ class MachCommands(MachCommandBase):
         """Run the decision task: generate a task graph and submit to
         TaskCluster.  This is only meant to be called within decision tasks,
         and requires a great many arguments.  Commands like `mach taskgraph
         optimized` are better suited to use on the command line, and can take
         the parameters file generated by a decision task.  """
         import taskgraph.decision
         try:
-            self.setup()
+            self.setup_logging()
             return taskgraph.decision.taskgraph_decision(options)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
     @SubCommand('taskgraph', 'cron',
                 description="Run the cron task")
     @CommandArgument('--base-repository',
@@ -216,30 +216,30 @@ class MachCommands(MachCommandBase):
     @CommandArgument('--root', '-r',
                      required=False,
                      help="root of the repository to get cron task definitions from")
     def taskgraph_cron(self, **options):
         """Run the cron task; this task creates zero or more decision tasks.
         It is run from the hooks service on a regular basis."""
         import taskgraph.cron
         try:
-            self.setup()
+            self.setup_logging()
             return taskgraph.cron.taskgraph_cron(options)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
     @SubCommand('taskgraph', 'action-callback',
                 description='Run action callback used by action tasks')
     @CommandArgument('--root', '-r', default='taskcluster/ci',
                      help="root of the taskgraph definition relative to topsrcdir")
     def action_callback(self, **options):
         import taskgraph.actions
         try:
-            self.setup()
+            self.setup_logging()
             # the target task for this action (or null if it's a group action)
             task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))
             # the target task group for this action
             task_group_id = os.environ.get('ACTION_TASK_GROUP_ID', None)
             input = json.loads(os.environ.get('ACTION_INPUT', 'null'))
             callback = os.environ.get('ACTION_CALLBACK', None)
             parameters = json.loads(os.environ.get('ACTION_PARAMETERS', '{}'))
@@ -282,18 +282,17 @@ class MachCommands(MachCommandBase):
                 if filename.endswith('.yml'):
                     return yaml.safe_load(f)
                 elif filename.endswith('.json'):
                     return json.load(f)
                 else:
                     raise Exception("unknown filename {}".format(filename))
 
         try:
-            self.setup()
-
+            self.setup_logging()
             task_id = options['task_id']
             if options['input']:
                 input = load_data(options['input'])
             else:
                 input = None
 
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
@@ -308,17 +307,17 @@ class MachCommands(MachCommandBase):
                 callback=options['callback'],
                 parameters=parameters,
                 root=root,
                 test=True)
         except Exception:
             traceback.print_exc()
             sys.exit(1)
 
-    def setup(self, quiet=False, verbose=True):
+    def setup_logging(self, quiet=False, verbose=True):
         """
         Set up Python logging for all loggers, sending results to stderr (so
         that command output can be redirected easily) and adding the typical
         mach timestamp.
         """
 
         # remove the old terminal handler
         old = self.log_manager.replace_terminal_handler(None)
@@ -328,30 +327,25 @@ class MachCommands(MachCommandBase):
         self.log_manager.add_terminal_logging(
             fh=sys.stderr, level=level,
             write_interval=old.formatter.write_interval,
             write_times=old.formatter.write_times)
 
         # all of the taskgraph logging is unstructured logging
         self.log_manager.enable_unstructured()
 
-        # Ensure that TASKCLUSTER_ROOT_URL is set
-        import taskgraph
-        taskgraph.set_root_url_env()
-
     def show_taskgraph(self, graph_attr, options):
         import taskgraph.parameters
         import taskgraph.generator
         import taskgraph
-
         if options['fast']:
             taskgraph.fast = True
 
         try:
-            self.setup(quiet=options['quiet'], verbose=options['verbose'])
+            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
             parameters.check()
 
             tgg = taskgraph.generator.TaskGraphGenerator(
                 root_dir=options.get('root'),
                 parameters=parameters,
                 target_kind=options.get('target_kind'),
             )
@@ -366,17 +360,17 @@ class MachCommands(MachCommandBase):
             sys.exit(1)
 
     def show_taskgraph_labels(self, taskgraph):
         for index in taskgraph.graph.visit_postorder():
             print(taskgraph.tasks[index].label)
 
     def show_taskgraph_json(self, taskgraph):
         print(json.dumps(taskgraph.to_json(),
-                         sort_keys=True, indent=2, separators=(',', ': ')))
+              sort_keys=True, indent=2, separators=(',', ': ')))
 
     def get_filtered_taskgraph(self, taskgraph, tasksregex):
         from taskgraph.graph import Graph
         from taskgraph.taskgraph import TaskGraph
         """
         This class method filters all the tasks on basis of a regular expression
         and returns a new TaskGraph object
         """
@@ -400,17 +394,17 @@ class MachCommands(MachCommandBase):
     def show_actions(self, options):
         import taskgraph.parameters
         import taskgraph.generator
         import taskgraph
         import taskgraph.actions
 
         try:
-            self.setup(quiet=options['quiet'], verbose=options['verbose'])
+            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
             parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
             parameters.check()
 
             tgg = taskgraph.generator.TaskGraphGenerator(
                 root_dir=options.get('root'),
                 parameters=parameters)
 
             actions = taskgraph.actions.render_actions_json(parameters, tgg.graph_config)
--- a/taskcluster/scripts/misc/build-libdmg-hfsplus.sh
+++ b/taskcluster/scripts/misc/build-libdmg-hfsplus.sh
@@ -26,21 +26,14 @@ git checkout $LIBDMG_REV
 git archive --prefix=libdmg-hfsplus/ ${LIBDMG_REV} | xz > $UPLOAD_DIR/libdmg-hfsplus.tar.xz
 
 cmake -DOPENSSL_USE_STATIC_LIBS=1 .
 make -j$(getconf _NPROCESSORS_ONLN)
 
 # We only need the dmg and hfsplus tools.
 strip dmg/dmg hfs/hfsplus
 cp dmg/dmg hfs/hfsplus $STAGE
 
-# duplicate the functionality of taskcluster-lib-urls, but in bash..
-if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
-    queue_base='https://queue.taskcluster.net/v1'
-else
-    queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
-fi
-
 cat >$STAGE/README<<EOF
 Built from ${LIBDMG_REPOSITORY} rev `git rev-parse ${LIBDMG_REV}`.
 Source is available as a taskcluster artifact:
-$queue_base/task/$TASK_ID/artifacts/public/libdmg-hfsplus.tar.xz
+https://queue.taskcluster.net/v1/task/$TASK_ID/artifacts/public/libdmg-hfsplus.tar.xz
 EOF
 tar cf - -C $WORKSPACE `basename $STAGE` | xz > $UPLOAD_DIR/dmg.tar.xz
--- a/taskcluster/scripts/misc/fetch-content
+++ b/taskcluster/scripts/misc/fetch-content
@@ -21,16 +21,21 @@ import time
 import urllib.request
 
 try:
     import zstandard
 except ImportError:
     zstandard = None
 
 
+PUBLIC_ARTIFACT_URL = ('https://queue.taskcluster.net/v1/task/{task}/artifacts/'
+                       '{artifact}')
+PRIVATE_ARTIFACT_URL = ('http://taskcluster/queue/v1/task/{task}/artifacts/'
+                        '{artifact}')
+
 CONCURRENCY = multiprocessing.cpu_count()
 
 
 def log(msg):
     print(msg, file=sys.stderr)
     sys.stderr.flush()
@@ -367,45 +372,30 @@ def command_static_url(args):
         try:
             dest.unlink()
         except FileNotFoundError:
             pass
 
         raise
 
 
-def api(root_url, service, version, path):
-    # taskcluster-lib-urls is not available when this script runs, so
-    # simulate its behavior:
-    if root_url == 'https://taskcluster.net':
-        return 'https://{service}.taskcluster.net/{version}/{path}'.format(
-            service=service, version=version, path=path)
-    return '{root_url}/api/{service}/{version}/{path}'.format(
-        root_url=root_url, service=service, version=version, path=path)
-
-
 def command_task_artifacts(args):
     fetches = json.loads(os.environ['MOZ_FETCHES'])
     downloads = []
     for fetch in fetches:
         extdir = pathlib.Path(args.dest)
         if 'dest' in fetch:
             extdir = extdir.joinpath(fetch['dest'])
         extdir.mkdir(parents=True, exist_ok=True)
-        root_url = os.environ['TASKCLUSTER_ROOT_URL']
         if fetch['artifact'].startswith('public/'):
-            path = 'task/{task}/artifacts/{artifact}'.format(
-                task=fetch['task'], artifact=fetch['artifact'])
-            url = api(root_url, 'queue', 'v1', path)
+            url = PUBLIC_ARTIFACT_URL.format(task=fetch['task'],
+                                             artifact=fetch['artifact'])
         else:
-            # Until bug 1460015 is finished, use the old baseUrl style proxy URLs
-            url = ('{proxy_url}/queue/v1/task/{task}/artifacts/{artifact}').format(
-                proxy_url=os.environ['TASKCLUSTER_PROXY_URL'],
-                task=fetch['task'],
-                artifact=fetch['artifact'])
+            url = PRIVATE_ARTIFACT_URL.format(task=fetch['task'],
+                                              artifact=fetch['artifact'])
         downloads.append((url, extdir, fetch['extract']))
 
     fetch_urls(downloads)
 
 
 def main():
     parser = argparse.ArgumentParser()
     subparsers = parser.add_subparsers(title='sub commands')
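A sketch of how a MOZ_FETCHES entry now maps onto these two templates: public artifacts go straight to the queue, while private ones must be fetched through the taskcluster proxy for authentication. The payload values are illustrative; the real list is supplied via the MOZ_FETCHES environment variable:

    import json

    PUBLIC_ARTIFACT_URL = ('https://queue.taskcluster.net/v1/task/{task}/artifacts/'
                           '{artifact}')
    PRIVATE_ARTIFACT_URL = ('http://taskcluster/queue/v1/task/{task}/artifacts/'
                            '{artifact}')

    fetches = json.loads(
        '[{"task": "abc123", "artifact": "public/build/target.tar.bz2", "extract": true}]')
    for fetch in fetches:
        # Artifacts under public/ need no credentials; everything else
        # is routed through the proxy host.
        template = (PUBLIC_ARTIFACT_URL if fetch['artifact'].startswith('public/')
                    else PRIVATE_ARTIFACT_URL)
        print(template.format(task=fetch['task'], artifact=fetch['artifact']))
        # -> https://queue.taskcluster.net/v1/task/abc123/artifacts/public/build/target.tar.bz2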
--- a/taskcluster/taskgraph/__init__.py
+++ b/taskcluster/taskgraph/__init__.py
@@ -1,39 +1,20 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
-import logging
 
 GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
 
 # Maximum number of dependencies a single task can have
 # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask
 # specifies 100, but we also optionally add the decision task id as a dep in
 # taskgraph.create, so let's set this to 99.
 MAX_DEPENDENCIES = 99
 
 # Enable fast task generation for local debugging
 # This is normally switched on via the --fast/-F flag to `mach taskgraph`
 # Currently this skips toolchain task optimizations and schema validation
 fast = False
-
-# Default rootUrl to use for command-line invocations
-PRODUCTION_TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'
-
-
-def set_root_url_env():
-    """Ensure that TASKCLUSTER_ROOT_URL is set, defaulting when run outside of a task."""
-    logger = logging.getLogger('set_root_url_env')
-
-    if 'TASKCLUSTER_ROOT_URL' not in os.environ:
-        if 'TASK_ID' in os.environ:
-            raise RuntimeError('TASKCLUSTER_ROOT_URL must be set when running in a task')
-        else:
-            logger.info('Setting TASKCLUSTER_ROOT_URL to default value (Firefox CI production)')
-            os.environ['TASKCLUSTER_ROOT_URL'] = PRODUCTION_TASKCLUSTER_ROOT_URL
-    logger.info('Running in Taskcluster instance {}{}'.format(
-        os.environ['TASKCLUSTER_ROOT_URL'],
-        ' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
--- a/taskcluster/taskgraph/actions/cancel_all.py
+++ b/taskcluster/taskgraph/actions/cancel_all.py
@@ -5,39 +5,58 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import concurrent.futures as futures
 import logging
 import os
 
-from taskgraph.util.taskcluster import list_task_group, cancel_task
+from taskgraph.util.taskcluster import get_session, cancel_task
 from .registry import register_callback_action
 
 # the maximum number of parallel cancelTask calls to make
 CONCURRENCY = 50
 
+base_url = 'https://queue.taskcluster.net/v1/{}'
+
 logger = logging.getLogger(__name__)
 
 
+def list_group(task_group_id, session):
+    params = {}
+    while True:
+        url = base_url.format('task-group/{}/list'.format(task_group_id))
+        response = session.get(url, stream=True, params=params)
+        response.raise_for_status()
+        response = response.json()
+        for task in [t['status'] for t in response['tasks']]:
+            if task['state'] in ['running', 'pending', 'unscheduled']:
+                yield task['taskId']
+        if response.get('continuationToken'):
+            params = {'continuationToken': response.get('continuationToken')}
+        else:
+            break
+
+
 @register_callback_action(
     title='Cancel All',
     name='cancel-all',
     kind='hook',
     generic=True,
     symbol='cAll',
     description=(
         'Cancel all running and pending tasks created by the decision task '
         'this action task is associated with.'
     ),
     order=400,
     context=[]
 )
 def cancel_all_action(parameters, graph_config, input, task_group_id, task_id, task):
+    session = get_session()
     own_task_id = os.environ.get('TASK_ID', '')
     with futures.ThreadPoolExecutor(CONCURRENCY) as e:
         cancels_jobs = [
             e.submit(cancel_task, t, use_proxy=True)
-            for t in list_task_group(task_group_id) if t != own_task_id
+            for t in list_group(task_group_id, session) if t != own_task_id
         ]
         for job in cancels_jobs:
             job.result()
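The reintroduced list_group helper pages through the queue's task-group/<id>/list endpoint, following continuationToken until it is exhausted, and yields only tasks still in a cancellable state (running, pending, or unscheduled). A usage sketch, run inside the module above; the task group id is illustrative:

    from taskgraph.util.taskcluster import get_session

    session = get_session()
    for task_id in list_group('fVsLKLd2SVSpx8mJ-cVXGw', session):
        print('would cancel', task_id)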
--- a/taskcluster/taskgraph/actions/create_interactive.py
+++ b/taskcluster/taskgraph/actions/create_interactive.py
@@ -3,18 +3,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import re
-import os
-import taskcluster_urls
 
 from .util import (
     create_tasks,
     fetch_graph_and_labels
 )
 from taskgraph.util.taskcluster import send_email
 from .registry import register_callback_action
 
@@ -150,19 +148,17 @@ def create_interactive_action(parameters
     if input and 'notify' in input:
         email = input['notify']
 
         # no point sending to a noreply address!
         if email == 'noreply@noreply.mozilla.org':
             return
 
         info = {
-            'url': taskcluster_urls.ui(
-                os.environ['TASKCLUSTER_ROOT_URL'],
-                'tasks/{}/connect'.format(taskId)),
+            'url': 'https://tools.taskcluster.net/tasks/{}/connect'.format(taskId),
             'label': label,
             'revision': parameters['head_rev'],
             'repo': parameters['head_repository'],
         }
         send_email(
             email,
             subject=EMAIL_SUBJECT.format(**info),
             content=EMAIL_CONTENT.format(**info),
--- a/taskcluster/taskgraph/morph.py
+++ b/taskcluster/taskgraph/morph.py
@@ -6,21 +6,20 @@
 Graph morphs are modifications to task-graphs that take place *after* the
 optimization phase.
 
 These graph morphs are largely invisible to developers running `./mach`
 locally, so they should be limited to changes that do not modify the meaning
 of the graph.
 """
 
-# Note that the translation of `{'task-reference': '..'}` and
-# `artifact-reference` are handled in the optimization phase (since
-# optimization involves dealing with taskIds directly). Similarly,
-# `{'relative-datestamp': '..'}` is handled at the last possible moment during
-# task creation.
+# Note that the translation of `{'task-reference': '..'}` is handled in the
+# optimization phase (since optimization involves dealing with taskIds
+# directly). Similarly, `{'relative-datestamp': '..'}` is handled at the last
+# possible moment during task creation.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import os
 import re
 
 import jsone
--- a/taskcluster/taskgraph/test/test_util_docker.py
+++ b/taskcluster/taskgraph/test/test_util_docker.py
@@ -5,27 +5,24 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import shutil
 import stat
 import tarfile
 import tempfile
 import unittest
-import mock
-import taskcluster_urls as liburls
 
 from taskgraph.util import docker
 from mozunit import main, MockedOpen
 
 
 MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
 
 
-@mock.patch.dict('os.environ', {'TASKCLUSTER_ROOT_URL': liburls.test_root_url()})
 class TestDocker(unittest.TestCase):
 
     def test_generate_context_hash(self):
         tmpdir = tempfile.mkdtemp()
         old_GECKO = docker.GECKO
         docker.GECKO = tmpdir
         try:
             os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
--- a/taskcluster/taskgraph/test/test_util_parameterization.py
+++ b/taskcluster/taskgraph/test/test_util_parameterization.py
@@ -1,18 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 import datetime
-import mock
-import os
 
 from mozunit import main
 
 from taskgraph.util.parameterization import (
     resolve_timestamps,
     resolve_task_references,
 )
 
@@ -85,51 +83,10 @@ class TestTaskRefs(unittest.TestCase):
         "resolve_task_references raises a KeyError on reference to an invalid task"
         self.assertRaisesRegexp(
             KeyError,
             "task 'subject' has no dependency named 'no-such'",
             lambda: resolve_task_references('subject', {'task-reference': '<no-such>'}, {})
         )
 
 
-class TestArtifactRefs(unittest.TestCase):
-
-    def do(self, input, output):
-        taskid_for_edge_name = {'edge%d' % n: 'tid%d' % n for n in range(1, 4)}
-        with mock.patch.dict(os.environ, {'TASKCLUSTER_ROOT_URL': 'https://tc-tests.localhost'}):
-            self.assertEqual(resolve_task_references('subject', input, taskid_for_edge_name),
-                             output)
-
-    def test_in_list(self):
-        "resolve_task_references resolves artifact references in a list"
-        self.do(
-            {'in-a-list': [
-                'stuff', {'artifact-reference': '<edge1/public/foo/bar>'}]},
-            {'in-a-list': [
-                'stuff', 'https://tc-tests.localhost/api/queue/v1'
-                '/task/tid1/artifacts/public/foo/bar']})
-
-    def test_in_dict(self):
-        "resolve_task_references resolves artifact references in a dict"
-        self.do(
-            {'in-a-dict':
-                {'stuff': {'artifact-reference': '<edge2/public/bar/foo>'}}},
-            {'in-a-dict':
-                {'stuff': 'https://tc-tests.localhost/api/queue/v1'
-                    '/task/tid2/artifacts/public/bar/foo'}})
-
-    def test_in_string(self):
-        "resolve_task_references resolves artifact references embedded in a string"
-        self.do(
-            {'stuff': {'artifact-reference': '<edge1/public/filename> and <edge2/public/bar>'}},
-            {'stuff': 'https://tc-tests.localhost/api/queue/v1'
-                '/task/tid1/artifacts/public/filename and '
-                'https://tc-tests.localhost/api/queue/v1/task/tid2/artifacts/public/bar'})
-
-    def test_invalid(self):
-        "resolve_task_references ignores badly-formatted artifact references"
-        for inv in ['<edge1>', 'edge1/foo>', '<edge1>/foo', '<edge1>foo']:
-            resolved = resolve_task_references('subject', {'artifact-reference': inv}, {})
-            self.assertEqual(resolved, inv)
-
-
 if __name__ == '__main__':
     main()
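For contrast with the deleted artifact-reference tests: task references are still resolved, but to bare taskIds rather than queue URLs. A usage sketch in the style of the surviving tests above (the dependency name and taskId are illustrative):

    from taskgraph.util.parameterization import resolve_task_references

    resolved = resolve_task_references(
        'subject',
        {'command': {'task-reference': 'fetch-content task-artifacts <fetch>'}},
        {'fetch': 'tid1'})
    # -> {'command': 'fetch-content task-artifacts tid1'}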
--- a/taskcluster/taskgraph/transforms/balrog_submit.py
+++ b/taskcluster/taskgraph/transforms/balrog_submit.py
@@ -12,23 +12,28 @@ from taskgraph.transforms.base import Tr
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import (
     optionally_keyed_by, resolve_keyed_by,
 )
 from taskgraph.util.scriptworker import (
     get_balrog_server_scope, get_worker_type_for_scope
 )
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Optional
+from voluptuous import Any, Required, Optional
 
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 balrog_description_schema = schema.extend({
     # unique label to describe this balrog task, defaults to balrog-{dep.label}
     Optional('label'): basestring,
 
     Optional(
         'update-no-wnp',
         description="Whether the parallel `-No-WNP` blob should be updated as well.",
--- a/taskcluster/taskgraph/transforms/beetmover.py
+++ b/taskcluster/taskgraph/transforms/beetmover.py
@@ -2,17 +2,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from voluptuous import Optional, Required
+from voluptuous import Any, Optional, Required
 
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_bucket_scope,
@@ -114,16 +114,21 @@ UPSTREAM_SOURCE_ARTIFACTS = [
 ]
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
 transforms = TransformSequence()
 
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_description_schema = schema.extend({
     # depname is used in taskref's to identify the taskID of the unsigned things
     Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
     Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover. See
--- a/taskcluster/taskgraph/transforms/beetmover_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_checksums.py
@@ -13,23 +13,27 @@ from taskgraph.transforms.base import Tr
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_action_scope,
                                          get_beetmover_bucket_scope,
                                          get_worker_type_for_scope,
                                          should_use_artifact_map)
-from voluptuous import Optional, Required
+from voluptuous import Any, Optional, Required
 
 from taskgraph.transforms.task import task_description_schema
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_checksums_description_schema = schema.extend({
     Required('depname', default='build'): basestring,
     Required('attributes'): {basestring: object},
     Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
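With the shared helper gone from taskgraph.util.schema, the same taskref_or_string shortcut is now copied into each of these transforms. What it accepts, sketched with voluptuous directly (str stands in for the in-tree basestring so the snippet also runs on Python 3; the URLs are illustrative):

    from voluptuous import Any, Required, Schema

    taskref_or_string = Any(
        str,
        {Required('task-reference'): str})

    schema = Schema(taskref_or_string)
    schema('https://example.com/target.zip')          # a plain string validates
    schema({'task-reference': '<build>/target.zip'})  # so does a deferred task reference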
--- a/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
@@ -7,22 +7,26 @@ Transform release-beetmover-source-check
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_checksums_description_schema = schema.extend({
     Required('depname', default='build'): basestring,
     Optional('label'): basestring,
     Optional('extra'): object,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
--- a/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
@@ -10,22 +10,26 @@ from __future__ import absolute_import,
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope)
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_checksums_description_schema = schema.extend({
     Required('depname', default='build'): basestring,
     Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
--- a/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
+++ b/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
@@ -3,34 +3,35 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover-push-to-release task into a task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.schema import (
-    Schema,
-    taskref_or_string,
-)
+from taskgraph.util.schema import Schema
 from taskgraph.util.scriptworker import (
     get_beetmover_bucket_scope, add_scope_prefix,
     get_worker_type_for_scope,
 )
 from taskgraph.transforms.job import job_description_schema
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_push_to_release_description_schema = Schema({
     Required('name'): basestring,
     Required('product'): basestring,
     Required('treeherder-platform'): basestring,
     Optional('attributes'): {basestring: object},
     Optional('job-from'): task_description_schema['job-from'],
     Optional('run'): {basestring: object},
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
--- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -14,17 +14,17 @@ from taskgraph.util.attributes import co
 from taskgraph.util.partials import (get_balrog_platform_name,
                                      get_partials_artifacts,
                                      get_partials_artifact_map)
 from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope)
 from taskgraph.util.taskcluster import get_artifact_prefix
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 import logging
 import re
 
 logger = logging.getLogger(__name__)
 
 
 def _compile_regex_mapping(mapping):
@@ -141,16 +141,21 @@ UPSTREAM_ARTIFACT_SIGNED_MAR_PATHS = [
     'target.complete.mar',
     'target.bz2.complete.mar',
 ]
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_description_schema = schema.extend({
     # depname is used in taskref's to identify the taskID of the unsigned things
     Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
     Required('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover. See
--- a/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
@@ -34,16 +34,21 @@ import logging
 
 logger = logging.getLogger(__name__)
 
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_description_schema = schema.extend({
     # depname is used in taskref's to identify the taskID of the unsigned things
     Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
     Optional('label'): basestring,
 
     Required('partner-bucket-scope'): optionally_keyed_by('release-level', basestring),
--- a/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
@@ -13,22 +13,26 @@ from taskgraph.transforms.beetmover impo
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope,
                                          should_use_artifact_map)
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 beetmover_checksums_description_schema = schema.extend({
     Required('depname', default='build'): basestring,
     Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('attributes'): task_description_schema['attributes'],
--- a/taskcluster/taskgraph/transforms/checksums_signing.py
+++ b/taskcluster/taskgraph/transforms/checksums_signing.py
@@ -11,22 +11,26 @@ from taskgraph.loader.single_dep import
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope,
     get_worker_type_for_scope,
     add_scope_prefix,
 )
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Required, Optional
+from voluptuous import Any, Required, Optional
 
 # Voluptuous uses marker objects as dictionary *keys*, but they are not
 # comparable, so we cast all of the keys back to regular strings
 task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
 
+taskref_or_string = Any(
+    basestring,
+    {Required('task-reference'): basestring})
+
 checksums_signing_description_schema = schema.extend({
     Required('depname', default='beetmover'): basestring,
     Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
--- a/taskcluster/taskgraph/transforms/diffoscope.py
+++ b/taskcluster/taskgraph/transforms/diffoscope.py
@@ -7,17 +7,17 @@ defined in kind.yml
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import (
     Schema,
 )
-from taskgraph.util.taskcluster import get_artifact_path
+from taskgraph.util.taskcluster import get_artifact_path, get_artifact_url
 from voluptuous import (
     Any,
     Optional,
     Required,
 )
 
 index_or_string = Any(
     basestring,
@@ -59,17 +59,17 @@ def fill_template(config, tasks):
         deps = {}
         urls = {}
         previous_artifact = None
         for k in ('original', 'new'):
             value = task[k]
 
             if isinstance(value, basestring):
                 deps[k] = value
-                dep_name = k
+                task_id = '<{}>'.format(k)
                 os_hint = value
             else:
                 index = value['index-search']
                 if index not in dummy_tasks:
                     dummy_tasks[index] = {
                         'label': 'index-search-' + index,
                         'description': index,
                         'worker-type': 'invalid/always-optimized',
@@ -77,36 +77,34 @@ def fill_template(config, tasks):
                             'using': 'always-optimized',
                         },
                         'optimization': {
                             'index-search': [index],
                         }
                     }
                     yield dummy_tasks[index]
                 deps[index] = 'index-search-' + index
-                dep_name = index
+                task_id = '<{}>'.format(index)
                 os_hint = index.split('.')[-1]
 
             if 'linux' in os_hint:
                 artifact = 'target.tar.bz2'
             elif 'macosx' in os_hint:
                 artifact = 'target.dmg'
             elif 'android' in os_hint:
                 artifact = 'target.apk'
             elif 'win' in os_hint:
                 artifact = 'target.zip'
             else:
                 raise Exception(
                     'Cannot figure out the OS for {!r}'.format(value))
 
             if previous_artifact is not None and previous_artifact != artifact:
                 raise Exception(
                     'Cannot compare builds from different OSes')
-            urls[k] = {
-                'artifact-reference': '<{}/{}>'.format(
-                    dep_name, get_artifact_path(task, artifact)),
-            }
+            url = get_artifact_url(task_id, get_artifact_path(task, artifact))
+            urls[k] = {'task-reference': url}
             previous_artifact = artifact
 
         taskdesc = {
             'label': 'diff-' + name,
             'description': name,
             'treeherder': {
                 'symbol': task['symbol'],
                 'platform': 'diff/opt',
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -106,18 +106,16 @@ def fill_template(config, tasks):
         # task-reference value, see further below). We add the package routes
         # containing a hash to get the overall docker image hash, so changes
         # to packages will be reflected in the docker image hash.
         args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
                                                  for p in packages)
 
         if parent:
             args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])
 
-        args['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']
-
         if not taskgraph.fast:
             context_path = os.path.join('taskcluster', 'docker', definition)
             context_hash = generate_context_hash(
                 GECKO, context_path, image_name, args)
         else:
             context_hash = '0'*40
         digest_data = [context_hash]
         context_hashes[image_name] = context_hash
--- a/taskcluster/taskgraph/transforms/final_verify.py
+++ b/taskcluster/taskgraph/transforms/final_verify.py
@@ -4,37 +4,40 @@
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix
 
 transforms = TransformSequence()
 
 
 @transforms.add
 def add_command(config, tasks):
     for task in tasks:
         if not task["worker"].get("env"):
             task["worker"]["env"] = {}
         final_verify_configs = []
         for upstream in task.get("dependencies", {}).keys():
             if 'update-verify-config' in upstream:
                 final_verify_configs.append(
-                    "<{}/public/build/update-verify.cfg>".format(upstream),
+                    "{}update-verify.cfg".format(
+                        get_taskcluster_artifact_prefix(task, "<{}>".format(upstream))
+                    ),
                 )
         task['run'] = {
             'using': 'run-task',
             'command': {
-                'artifact-reference': 'cd /builds/worker/checkouts/gecko && '
-                                      'tools/update-verify/release/final-verification.sh '
-                                      + ' '.join(final_verify_configs),
+                'task-reference': 'cd /builds/worker/checkouts/gecko && '
+                                  'tools/update-verify/release/final-verification.sh '
+                                  + ' '.join(final_verify_configs),
             },
             'sparse-profile': 'update-verify',
         }
         for thing in ("BUILD_TOOLS_REPO",):
             thing = "worker.env.{}".format(thing)
             resolve_keyed_by(task, thing, thing, **config.params)
         yield task
--- a/taskcluster/taskgraph/transforms/job/debian_package.py +++ b/taskcluster/taskgraph/transforms/job/debian_package.py @@ -4,17 +4,16 @@ """ Support for running spidermonkey jobs via dedicated scripts """ from __future__ import absolute_import, print_function, unicode_literals import os import re -import taskcluster_urls from taskgraph.util.schema import Schema from voluptuous import Any, Optional, Required from taskgraph.transforms.job import run_job_using from taskgraph.transforms.job.common import add_artifacts from taskgraph.util.hash import hash_path @@ -148,18 +147,16 @@ def docker_worker_debian_package(config, ) if 'patch' not in run and 'pre-build-command' not in run: adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"' ' "Mozilla backport for {dist}." < /dev/null && ').format( prefix=name.split('-', 1)[0], dist=run['dist'], ) - queue_url = taskcluster_urls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', '') - # We can't depend on docker images (since docker images depend on packages), # so we inline the whole script here. worker['command'] = [ 'sh', '-x', '-c', # Fill /etc/apt/sources.list with the relevant snapshot repository. 'echo "deb http://snapshot.debian.org/archive/debian' @@ -169,17 +166,18 @@ def docker_worker_debian_package(config, 'echo "deb http://snapshot.debian.org/archive/debian' '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && ' 'echo "deb http://snapshot.debian.org/archive/debian-security' '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && ' 'apt-get update -o Acquire::Check-Valid-Until=false -q && ' # Add sources for packages coming from other package tasks. 'apt-get install -yyq apt-transport-https ca-certificates && ' 'for task in $PACKAGES; do ' - ' echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" ' + ' echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task' + '/$task/artifacts/public/build/ debian/" ' '>> /etc/apt/sources.list; ' 'done && ' # Install the base utilities required to build debian packages. 'apt-get update -o Acquire::Check-Valid-Until=false -q && ' 'apt-get install -yyq {base_deps} && ' 'cd /tmp && ' # Get, validate and extract the package source. 'dget -d -u {src_url} && ' @@ -195,17 +193,16 @@ def docker_worker_debian_package(config, # Copy the artifacts 'mkdir -p {artifacts}/debian && ' 'dcmd cp ../{package}_*.changes {artifacts}/debian/ && ' 'cd {artifacts} && ' # Make the artifacts directory usable as an APT repository. 'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && ' 'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz' .format( - queue_url=queue_url, package=package, snapshot=run['snapshot'], dist=run['dist'], src_url=src_url, src_file=src_file, src_sha256=src_sha256, unpack=unpack, adjust=adjust,
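Since the shell snippet above splits the hardcoded URL across two string literals, here is the `sources.list` entry it emits per package task, reconstructed in Python for a made-up taskId:

```python
task = 'dSlITZ4yQgKgofDSMdQJFQ'  # hypothetical package task id taken from $PACKAGES
line = ('deb [trusted=yes] https://queue.taskcluster.net/v1/task'
        '/{}/artifacts/public/build/ debian/'.format(task))
```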
--- a/taskcluster/taskgraph/transforms/job/mach.py +++ b/taskcluster/taskgraph/transforms/job/mach.py @@ -3,27 +3,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Support for running mach tasks (via run-task) """ from __future__ import absolute_import, print_function, unicode_literals from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run -from taskgraph.util.schema import ( - Schema, - taskref_or_string, -) +from taskgraph.util.schema import Schema from voluptuous import Required, Optional, Any mach_schema = Schema({ Required('using'): 'mach', # The mach command (omitting `./mach`) to run - Required('mach'): taskref_or_string, + Required('mach'): basestring, # The sparse checkout profile to use. Value is the filename relative to the # directory where sparse profiles are defined (build/sparse-profiles/). Optional('sparse-profile'): Any(basestring, None), # if true, perform a checkout of a comm-central based branch inside the # gecko checkout Required('comm-checkout'): bool, @@ -39,21 +36,13 @@ defaults = { @run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults) @run_job_using("native-engine", "mach", schema=mach_schema, defaults=defaults) @run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults) def configure_mach(config, job, taskdesc): run = job['run'] - command_prefix = 'cd $GECKO_PATH && ./mach ' - mach = run['mach'] - if isinstance(mach, dict): - ref, pattern = next(iter(mach.items())) - command = {ref: command_prefix + pattern} - else: - command = command_prefix + mach - # defer to the run_task implementation - run['command'] = command + run['command'] = 'cd $GECKO_PATH && ./mach {mach}'.format(**run) run['using'] = 'run-task' del run['mach'] configure_taskdesc_for_run(config, job, taskdesc, job['worker']['implementation'])
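With `run['mach']` now restricted to a plain string, the transform reduces to straightforward `str.format` interpolation; a quick illustration with a hypothetical mach command:

```python
run = {'using': 'mach', 'mach': 'python build/example-script.py',
       'comm-checkout': False}
command = 'cd $GECKO_PATH && ./mach {mach}'.format(**run)
# 'cd $GECKO_PATH && ./mach python build/example-script.py'
```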
--- a/taskcluster/taskgraph/transforms/l10n.py +++ b/taskcluster/taskgraph/transforms/l10n.py @@ -13,17 +13,16 @@ import json from mozbuild.chunkify import chunkify from taskgraph.loader.multi_dep import schema from taskgraph.transforms.base import ( TransformSequence, ) from taskgraph.util.schema import ( optionally_keyed_by, resolve_keyed_by, - taskref_or_string, ) from taskgraph.util.attributes import copy_attributes_from_dependent_job from taskgraph.util.taskcluster import get_artifact_prefix from taskgraph.util.treeherder import add_suffix from taskgraph.transforms.job import job_description_schema from taskgraph.transforms.task import task_description_schema from voluptuous import ( Any, @@ -31,16 +30,21 @@ from voluptuous import ( Required, ) def _by_platform(arg): return optionally_keyed_by('build-platform', arg) +# shortcut for a string where task references are allowed +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}) + # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()} task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()} l10n_description_schema = schema.extend({ # Name for this job, inferred from the dependent job before validation Required('name'): basestring,
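The `taskref_or_string` shortcut, now duplicated into each consumer instead of living in `taskgraph.util.schema`, accepts either a bare string or a single-key `task-reference` dict. A standalone check with voluptuous, using `str` in place of the tree's Python 2 `basestring`:

```python
from voluptuous import Any, Required, Schema

taskref_or_string = Any(
    str,  # the in-tree code uses basestring (Python 2)
    {Required('task-reference'): str})

schema = Schema({'to_mar': taskref_or_string})
schema({'to_mar': 'https://example.com/target.complete.mar'})          # valid
schema({'to_mar': {'task-reference': '<build>/target.complete.mar'}})  # valid
```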
--- a/taskcluster/taskgraph/transforms/partials.py +++ b/taskcluster/taskgraph/transforms/partials.py @@ -4,17 +4,17 @@ """ Transform the partials task into an actual task description. """ from __future__ import absolute_import, print_function, unicode_literals from taskgraph.transforms.base import TransformSequence from taskgraph.util.attributes import copy_attributes_from_dependent_job from taskgraph.util.partials import get_balrog_platform_name, get_builds -from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix, get_artifact_prefix import logging logger = logging.getLogger(__name__) transforms = TransformSequence() def _generate_task_output_files(job, filenames, locale=None): @@ -71,30 +71,29 @@ def make_task_description(config, jobs): builds = get_builds(config.params['release_history'], dep_th_platform, build_locale) # If the list is empty there's no available history for this platform # and locale combination, so we can't build any partials. if not builds: continue + dep_task_ref = '<{}>'.format(dependent_kind) + extra = {'funsize': {'partials': list()}} update_number = 1 - - locale_suffix = '' - if locale: - locale_suffix = '{}/'.format(locale) - artifact_path = "<{}/{}/{}target.complete.mar>".format( - dependent_kind, get_artifact_prefix(dep_job), locale_suffix, + artifact_path = "{}{}".format( + get_taskcluster_artifact_prefix(dep_job, dep_task_ref, locale=locale), + 'target.complete.mar' ) for build in sorted(builds): partial_info = { 'locale': build_locale, 'from_mar': builds[build]['mar_url'], - 'to_mar': {'artifact-reference': artifact_path}, + 'to_mar': {'task-reference': artifact_path}, 'platform': get_balrog_platform_name(dep_th_platform), 'branch': config.params['project'], 'update_number': update_number, 'dest_mar': build, } if 'product' in builds[build]: partial_info['product'] = builds[build]['product'] if 'previousVersion' in builds[build]:
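With the helper, a locale folds into the URL as a path component ahead of the file name. For a hypothetical `de` locale and a dependent kind `repackage-signing`, the value handed to funsize as `to_mar` would look like:

```python
dep_task_ref = '<repackage-signing>'  # hypothetical dependent kind
artifact_path = ('https://queue.taskcluster.net/v1/task/{}'
                 '/artifacts/public/build/de/target.complete.mar'
                 .format(dep_task_ref))
to_mar = {'task-reference': artifact_path}
```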
--- a/taskcluster/taskgraph/transforms/repackage.py +++ b/taskcluster/taskgraph/transforms/repackage.py @@ -15,23 +15,28 @@ from taskgraph.util.attributes import co from taskgraph.util.schema import ( optionally_keyed_by, resolve_keyed_by, ) from taskgraph.util.taskcluster import get_artifact_prefix from taskgraph.util.platforms import archive_format, executable_extension from taskgraph.util.workertypes import worker_type_implementation from taskgraph.transforms.job import job_description_schema -from voluptuous import Required, Optional +from voluptuous import Any, Required, Optional # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()} +# shortcut for a string where task references are allowed +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}) + packaging_description_schema = schema.extend({ # depname is used in taskref's to identify the taskID of the signed things Required('depname', default='build'): basestring, # unique label to describe this repackaging task Optional('label'): basestring, # treeherder is allowed here to override any defaults we use for repackaging. See
--- a/taskcluster/taskgraph/transforms/repackage_partner.py +++ b/taskcluster/taskgraph/transforms/repackage_partner.py @@ -17,27 +17,32 @@ from taskgraph.util.schema import ( resolve_keyed_by, ) from taskgraph.util.taskcluster import get_artifact_prefix from taskgraph.util.partners import check_if_partners_enabled from taskgraph.util.platforms import archive_format, executable_extension from taskgraph.util.workertypes import worker_type_implementation from taskgraph.transforms.task import task_description_schema from taskgraph.transforms.repackage import PACKAGE_FORMATS -from voluptuous import Required, Optional +from voluptuous import Any, Required, Optional # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()} def _by_platform(arg): return optionally_keyed_by('build-platform', arg) +# shortcut for a string where task references are allowed +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}) + packaging_description_schema = schema.extend({ # depname is used in taskref's to identify the taskID of the signed things Required('depname', default='build'): basestring, # unique label to describe this repackaging task Optional('label'): basestring, # Routes specific to this task, if defined
--- a/taskcluster/taskgraph/transforms/signing.py +++ b/taskcluster/taskgraph/transforms/signing.py @@ -5,32 +5,36 @@ Transform the signing task into an actual task description. """ from __future__ import absolute_import, print_function, unicode_literals from taskgraph.loader.single_dep import schema from taskgraph.transforms.base import TransformSequence from taskgraph.util.attributes import copy_attributes_from_dependent_job -from taskgraph.util.schema import taskref_or_string from taskgraph.util.scriptworker import ( add_scope_prefix, get_signing_cert_scope_per_platform, get_worker_type_for_scope, ) from taskgraph.transforms.task import task_description_schema -from voluptuous import Required, Optional +from voluptuous import Any, Required, Optional # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()} transforms = TransformSequence() +# shortcut for a string where task references are allowed +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}) + signing_description_schema = schema.extend({ # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py # because this is passed directly into the signingscript worker Required('upstream-artifacts'): [{ # taskId of the task with the artifact Required('taskId'): taskref_or_string, # type of signing task (for CoT)
--- a/taskcluster/taskgraph/transforms/source_checksums_signing.py +++ b/taskcluster/taskgraph/transforms/source_checksums_signing.py @@ -11,22 +11,26 @@ from taskgraph.loader.single_dep import from taskgraph.transforms.base import TransformSequence from taskgraph.util.attributes import copy_attributes_from_dependent_job from taskgraph.util.scriptworker import ( get_signing_cert_scope, get_worker_type_for_scope, add_scope_prefix, ) from taskgraph.transforms.task import task_description_schema -from voluptuous import Required, Optional +from voluptuous import Any, Required, Optional # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()} +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}) + checksums_signing_description_schema = schema.extend({ Required('depname', default='beetmover'): basestring, Optional('label'): basestring, Optional('treeherder'): task_description_schema['treeherder'], Optional('shipping-product'): task_description_schema['shipping-product'], Optional('shipping-phase'): task_description_schema['shipping-phase'], })
--- a/taskcluster/taskgraph/transforms/source_test.py +++ b/taskcluster/taskgraph/transforms/source_test.py @@ -22,16 +22,18 @@ from taskgraph.util.treeherder import jo from voluptuous import ( Any, Extra, Optional, Required, Schema, ) +ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}' + job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()} source_test_description_schema = Schema({ # most fields are passed directly through as job fields, and are not # repeated here Extra: object, # The platform on which this task runs. This will be used to set up attributes
--- a/taskcluster/taskgraph/transforms/task.py +++ b/taskcluster/taskgraph/transforms/task.py @@ -22,17 +22,16 @@ from taskgraph.util.hash import hash_pat from taskgraph.util.treeherder import split_symbol from taskgraph.transforms.base import TransformSequence from taskgraph.util.schema import ( validate_schema, Schema, optionally_keyed_by, resolve_keyed_by, OptimizationSchema, - taskref_or_string, ) from taskgraph.util.scriptworker import ( BALROG_ACTIONS, get_release_config, add_scope_prefix, ) from taskgraph.util.signed_artifacts import get_signed_artifacts from voluptuous import Any, Required, Optional, Extra @@ -43,16 +42,22 @@ RUN_TASK = os.path.join(GECKO, 'taskclus @memoize def _run_task_suffix(): """String to append to cache names under control of run-task.""" return hash_path(RUN_TASK)[0:20] +# shortcut for a string where task references are allowed +taskref_or_string = Any( + basestring, + {Required('task-reference'): basestring}, +) + # A task description is a general description of a TaskCluster task task_description_schema = Schema({ # the label for this task Required('label'): basestring, # description of the task (for metadata) Required('description'): basestring, @@ -489,21 +494,16 @@ def build_docker_worker_payload(config, image = { "path": "public/image.tar.zst", "namespace": image['indexed'], "type": "indexed-image", } else: raise Exception("unknown docker image type") - # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon - # be provided directly by the worker, making this redundant: - # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015 - worker['env']['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL'] - features = {} if worker.get('relengapi-proxy'): features['relengAPIProxy'] = True if worker.get('taskcluster-proxy'): features['taskclusterProxy'] = True worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/' @@ -526,21 +526,16 @@ def build_docker_worker_payload(config, level=config.params['level']) ) worker['env']['USE_SCCACHE'] = '1' # Disable sccache idle shutdown. worker['env']['SCCACHE_IDLE_TIMEOUT'] = '0' else: worker['env']['SCCACHE_DISABLE'] = '1' - # this will soon be provided directly by the worker: - # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015 - if features.get('taskclusterProxy'): - worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster' - capabilities = {} for lo in 'audio', 'video': if worker.get('loopback-' + lo): capitalized = 'loopback' + lo.capitalize() devices = capabilities.setdefault('devices', {}) devices[capitalized] = True task_def['scopes'].append('docker-worker:capability:device:' + capitalized) @@ -763,21 +758,16 @@ def build_generic_worker_payload(config, task_def['payload'] = { 'command': worker['command'], 'maxRunTime': worker['max-run-time'], } env = worker.get('env', {}) - # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon - # be provided directly by the worker, making this redundant: - # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015 - env['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL'] - if task.get('needs-sccache'): env['USE_SCCACHE'] = '1' # Disable sccache idle shutdown. 
env['SCCACHE_IDLE_TIMEOUT'] = '0' else: env['SCCACHE_DISABLE'] = '1' if env: @@ -822,19 +812,17 @@ def build_generic_worker_payload(config, features = {} if worker.get('chain-of-trust'): features['chainOfTrust'] = True if worker.get('taskcluster-proxy'): features['taskclusterProxy'] = True - # this will soon be provided directly by the worker: - # https://bugzilla.mozilla.org/show_bug.cgi?id=1460015 - worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster' + worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/' if worker.get('run-as-administrator', False): features['runAsAdministrator'] = True if features: task_def['payload']['features'] = features # coalesce / superseding @@ -1322,21 +1310,16 @@ def build_always_optimized_payload(confi # type=directory) Required('name'): basestring, }], # Wether any artifacts are assigned to this worker Optional('skip-artifacts'): bool, }) def build_macosx_engine_payload(config, task, task_def): worker = task['worker'] - - # propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon - # be provided directly by the worker, making this redundant - worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL'] - artifacts = map(lambda artifact: { 'name': artifact['name'], 'path': artifact['path'], 'type': artifact['type'], 'expires': task_def['expires'], }, worker.get('artifacts', [])) task_def['payload'] = {
--- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -203,28 +203,28 @@ def create_context_tar(topsrcdir, contex return stream_context_tar(topsrcdir, context_dir, fh, prefix, args) def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None): """Like create_context_tar, but streams the tar file to the `out_file` file object.""" archive_files = {} replace = [] - content = [] context_dir = os.path.join(topsrcdir, context_dir) for root, dirs, files in os.walk(context_dir): for f in files: source_path = os.path.join(root, f) rel = source_path[len(context_dir) + 1:] archive_path = os.path.join(prefix, rel) archive_files[archive_path] = source_path # Parse Dockerfile for special syntax of extra files to include. + content = [] with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh: for line in fh: if line.startswith('# %ARG'): p = line[len('# %ARG '):].strip() if not args or p not in args: raise Exception('missing argument: {}'.format(p)) replace.append((re.compile(r'\${}\b'.format(p)), args[p].encode('ascii')))
--- a/taskcluster/taskgraph/util/parameterization.py +++ b/taskcluster/taskgraph/util/parameterization.py @@ -2,75 +2,47 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import, print_function, unicode_literals import re from taskgraph.util.time import json_time_from_now -from taskgraph.util.taskcluster import get_artifact_url TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>') -ARTIFACT_REFERENCE_PATTERN = re.compile('<([^/]+)/([^>]+)>') -def _recurse(val, param_fns): +def _recurse(val, param_name, param_fn): + param_keys = [param_name] + def recurse(val): if isinstance(val, list): return [recurse(v) for v in val] elif isinstance(val, dict): - if len(val) == 1: - for param_key, param_fn in param_fns.items(): - if val.keys() == [param_key]: - return param_fn(val[param_key]) - return {k: recurse(v) for k, v in val.iteritems()} + if val.keys() == param_keys: + return param_fn(val[param_name]) + else: + return {k: recurse(v) for k, v in val.iteritems()} else: return val return recurse(val) def resolve_timestamps(now, task_def): """Resolve all instances of `{'relative-datestamp': '..'}` in the given task definition""" - return _recurse(task_def, { - 'relative-datestamp': lambda v: json_time_from_now(v, now), - }) + return _recurse(task_def, 'relative-datestamp', lambda v: json_time_from_now(v, now)) def resolve_task_references(label, task_def, dependencies): - """Resolve all instances of - {'task-reference': '..<..>..'} - and - {'artifact-reference`: '..<dependency/artifact/path>..'} - in the given task definition, using the given dependencies""" - - def task_reference(val): - def repl(match): - key = match.group(1) - try: - return dependencies[key] - except KeyError: - # handle escaping '<' - if key == '<': - return key - raise KeyError("task '{}' has no dependency named '{}'".format(label, key)) - - return TASK_REFERENCE_PATTERN.sub(repl, val) + """Resolve all instances of `{'task-reference': '..<..>..'}` in the given task + definition, using the given dependencies""" + def repl(match): + key = match.group(1) + try: + return dependencies[key] + except KeyError: + # handle escaping '<' + if key == '<': + return key + raise KeyError("task '{}' has no dependency named '{}'".format(label, key)) - def artifact_reference(val): - def repl(match): - dependency, artifact_name = match.group(1, 2) - - try: - task_id = dependencies[dependency] - except KeyError: - raise KeyError("task '{}' has no dependency named '{}'".format(label, dependency)) - - assert artifact_name.startswith('public/'), \ - "artifact-reference only supports public artifacts, not `{}`".format(artifact_name) - return get_artifact_url(task_id, artifact_name) - - return ARTIFACT_REFERENCE_PATTERN.sub(repl, val) - - return _recurse(task_def, { - 'task-reference': task_reference, - 'artifact-reference': artifact_reference, - }) + return _recurse(task_def, 'task-reference', lambda v: TASK_REFERENCE_PATTERN.sub(repl, v))
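Collapsing the two-pattern resolver back to a single parameter leaves a small generic traversal. A Python 3 restatement for reference (the in-tree version compares `val.keys()` against a list, which only works on Python 2), exercised with a hypothetical resolved timestamp:

```python
def _recurse(val, param_name, param_fn):
    # rewrite any single-key dict {param_name: x} into param_fn(x);
    # recurse through lists and other dicts unchanged
    if isinstance(val, list):
        return [_recurse(v, param_name, param_fn) for v in val]
    if isinstance(val, dict):
        if list(val) == [param_name]:
            return param_fn(val[param_name])
        return {k: _recurse(v, param_name, param_fn) for k, v in val.items()}
    return val

task_def = {'expires': {'relative-datestamp': '1 year'},
            'payload': {'command': ['true']}}
print(_recurse(task_def, 'relative-datestamp',
               lambda v: '2019-11-01T00:00:00.000Z'))
# {'expires': '2019-11-01T00:00:00.000Z', 'payload': {'command': ['true']}}
```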
--- a/taskcluster/taskgraph/util/schema.py +++ b/taskcluster/taskgraph/util/schema.py @@ -229,15 +229,8 @@ OptimizationSchema = voluptuous.Any( # skip this task if unless the change files' SCHEDULES contains any of these components {'skip-unless-schedules': list(schedules.ALL_COMPONENTS)}, # skip if SETA or skip-unless-schedules says to {'skip-unless-schedules-or-seta': list(schedules.ALL_COMPONENTS)}, # only run this task if its dependencies will run (useful for follow-on tasks that # are unnecessary if the parent tasks are not run) {'only-if-dependencies-run': None} ) - -# shortcut for a string where task references are allowed -taskref_or_string = voluptuous.Any( - basestring, - {voluptuous.Required('task-reference'): basestring}, - {voluptuous.Required('artifact-reference'): basestring}, -)
--- a/taskcluster/taskgraph/util/taskcluster.py +++ b/taskcluster/taskgraph/util/taskcluster.py @@ -1,28 +1,32 @@ # -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import, print_function, unicode_literals -import os import datetime import functools import yaml import requests import logging -import taskcluster_urls as liburls from mozbuild.util import memoize from requests.packages.urllib3.util.retry import Retry from requests.adapters import HTTPAdapter from taskgraph.task import Task +_PUBLIC_TC_ARTIFACT_LOCATION = \ + 'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}' + +_PRIVATE_TC_ARTIFACT_LOCATION = \ + 'http://taskcluster/queue/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}' + logger = logging.getLogger(__name__) # this is set to true for `mach taskgraph action-callback --test` testing = False @memoize def get_session(): @@ -54,32 +58,27 @@ def _handle_artifact(path, response): if path.endswith('.yml'): return yaml.safe_load(response.text) response.raw.read = functools.partial(response.raw.read, decode_content=True) return response.raw def get_artifact_url(task_id, path, use_proxy=False): - artifact_tmpl = liburls.api( - os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', - 'task/{}/artifacts/{}') - data = artifact_tmpl.format(task_id, path) + ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}' if use_proxy: # Until Bug 1405889 is deployed, we can't download directly # from the taskcluster-proxy. Work around by using the /bewit # endpoint instead. + data = ARTIFACT_URL.format(task_id, path) # The bewit URL is the body of a 303 redirect, which we don't # want to follow (which fetches a potentially large resource). - response = _do_request( - os.environ['TASKCLUSTER_PROXY_URL'] + '/bewit', - data=data, - allow_redirects=False) + response = _do_request('http://taskcluster/bewit', data=data, allow_redirects=False) return response.text - return data + return ARTIFACT_URL.format(task_id, path) def get_artifact(task_id, path, use_proxy=False): """ Returns the artifact with the given path for the given task id. 
If the path ends with ".json" or ".yml", the content is deserialized as, respectively, json or yaml, and the corresponding python data (usually @@ -105,22 +104,20 @@ def get_artifact_prefix(task): def get_artifact_path(task, path): return "{}/{}".format(get_artifact_prefix(task), path) def get_index_url(index_path, use_proxy=False, multiple=False): if use_proxy: - # Until bug 1460015 is finished, use the old baseUrl style of proxy URL - index_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/index/v1/task{}/{}' + INDEX_URL = 'http://taskcluster/index/v1/task{}/{}' else: - index_tmpl = liburls.api( - os.environ['TASKCLUSTER_ROOT_URL'], 'index', 'v1', 'task{}/{}') - return index_tmpl.format('s' if multiple else '', index_path) + INDEX_URL = 'https://index.taskcluster.net/v1/task{}/{}' + return INDEX_URL.format('s' if multiple else '', index_path) def find_task_id(index_path, use_proxy=False): try: response = _do_request(get_index_url(index_path, use_proxy)) except requests.exceptions.HTTPError as e: if e.response.status_code == 404: raise KeyError("index path {} not found".format(index_path)) @@ -160,22 +157,20 @@ def list_tasks(index_path, use_proxy=Fal def parse_time(timestamp): """Turn a "JSON timestamp" as used in TC APIs into a datetime""" return datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ') def get_task_url(task_id, use_proxy=False): if use_proxy: - # Until bug 1460015 is finished, use the old baseUrl style of proxy URL - task_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/queue/v1/task/{}' + TASK_URL = 'http://taskcluster/queue/v1/task/{}' else: - task_tmpl = liburls.api( - os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', 'task/{}') - return task_tmpl.format(task_id) + TASK_URL = 'https://queue.taskcluster.net/v1/task/{}' + return TASK_URL.format(task_id) def get_task_definition(task_id, use_proxy=False): response = _do_request(get_task_url(task_id, use_proxy)) return response.json() def cancel_task(task_id, use_proxy=False): @@ -205,63 +200,58 @@ def rerun_task(task_id): logger.info('Would have rerun {}.'.format(task_id)) else: _do_request(get_task_url(task_id, use_proxy=True) + '/rerun', json={}) def get_current_scopes(): """Get the current scopes. 
This only makes sense in a task with the Taskcluster proxy enabled, where it returns the actual scopes accorded to the task.""" - # Until bug 1460015 is finished, use the old baseUrl style of proxy URL - resp = _do_request(os.environ['TASKCLUSTER_PROXY_URL'] + '/auth/v1/scopes/current') + resp = _do_request('http://taskcluster/auth/v1/scopes/current') return resp.json().get("scopes", []) def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False): if use_proxy: - # Until bug 1460015 is finished, use the old baseUrl style of proxy URL - url_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/purge-cache/v1/purge-cache/{}/{}' + TASK_URL = 'http://taskcluster/purge-cache/v1/purge-cache/{}/{}' else: - url_tmpl = liburls.api( - os.environ['TASKCLUSTER_ROOT_URL'], 'purge-cache', 'v1', 'purge-cache/{}/{}') - return url_tmpl.format(provisioner_id, worker_type) + TASK_URL = 'https://purge-cache.taskcluster.net/v1/purge-cache/{}/{}' + return TASK_URL.format(provisioner_id, worker_type) def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False): """Requests a cache purge from the purge-caches service.""" if testing: logger.info('Would have purged {}/{}/{}.'.format(provisioner_id, worker_type, cache_name)) else: logger.info('Purging {}/{}/{}.'.format(provisioner_id, worker_type, cache_name)) purge_cache_url = get_purge_cache_url(provisioner_id, worker_type, use_proxy) _do_request(purge_cache_url, json={'cacheName': cache_name}) +def get_taskcluster_artifact_prefix(task, task_id, postfix='', locale=None, force_private=False): + if locale: + postfix = '{}/{}'.format(locale, postfix) + + artifact_prefix = get_artifact_prefix(task) + if artifact_prefix == 'public/build' and not force_private: + tmpl = _PUBLIC_TC_ARTIFACT_LOCATION + else: + tmpl = _PRIVATE_TC_ARTIFACT_LOCATION + + return tmpl.format( + task_id=task_id, postfix=postfix, artifact_prefix=artifact_prefix + ) + + def send_email(address, subject, content, link, use_proxy=False): """Sends an email using the notify service""" logger.info('Sending email to {}.'.format(address)) if use_proxy: - # Until bug 1460015 is finished, use the old baseUrl style of proxy URL - url = os.environ['TASKCLUSTER_PROXY_URL'] + '/notify/v1/email' + url = 'http://taskcluster/notify/v1/email' else: - url = liburls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'notify', 'v1', 'email') + url = 'https://notify.taskcluster.net/v1/email' _do_request(url, json={ 'address': address, 'subject': subject, 'content': content, 'link': link, }) - - -def list_task_group(task_group_id): - """Generate the tasks in a task group""" - params = {} - while True: - url = liburls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', - 'task-group/{}/list'.format(task_group_id)) - resp = _do_request(url, params=params).json() - for task in [t['status'] for t in resp['tasks']]: - if task['state'] in ['running', 'pending', 'unscheduled']: - yield task['taskId'] - if resp.get('continuationToken'): - params = {'continuationToken': resp.get('continuationToken')} - else: - break
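For reference, the public-prefix behavior of the new `get_taskcluster_artifact_prefix`, restated standalone (the in-tree function derives `artifact_prefix` from the task via `get_artifact_prefix` and switches to the proxied `_PRIVATE_TC_ARTIFACT_LOCATION` template for non-public prefixes or `force_private=True`):

```python
_PUBLIC_TC_ARTIFACT_LOCATION = \
    'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}'

def taskcluster_artifact_prefix(task_id, postfix='', locale=None,
                                artifact_prefix='public/build'):
    # sketch: the locale becomes a path component ahead of the postfix
    if locale:
        postfix = '{}/{}'.format(locale, postfix)
    return _PUBLIC_TC_ARTIFACT_LOCATION.format(
        task_id=task_id, postfix=postfix, artifact_prefix=artifact_prefix)

print(taskcluster_artifact_prefix('<build>', locale='en-US'))
# https://queue.taskcluster.net/v1/task/<build>/artifacts/public/build/en-US/
```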
--- a/third_party/python/requirements.in +++ b/third_party/python/requirements.in @@ -8,11 +8,10 @@ pip-tools==3.0.0 pipenv==2018.5.18 psutil==5.4.3 pytest==3.6.2 python-hglib==2.4 redo==2.0.2 requests==2.9.1 six==1.10.0 taskcluster==4.0.1 -taskcluster-urls==11.0.0 virtualenv==15.2.0 voluptuous==0.11.5
--- a/third_party/python/requirements.txt +++ b/third_party/python/requirements.txt @@ -95,20 +95,16 @@ scandir==1.9.0 \ --hash=sha256:f5c71e29b4e2af7ccdc03a020c626ede51da471173b4a6ad1e904f2b2e04b4bd \ # via pathlib2 six==1.10.0 \ --hash=sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1 \ --hash=sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a slugid==1.0.7 \ --hash=sha256:6dab3c7eef0bb423fb54cb7752e0f466ddd0ee495b78b763be60e8a27f69e779 \ # via taskcluster -taskcluster-urls==11.0.0 \ - --hash=sha256:18dcaa9c2412d34ff6c78faca33f0dd8f2384e3f00a98d5832c62d6d664741f0 \ - --hash=sha256:2aceab7cf5b1948bc197f2e5e50c371aa48181ccd490b8bada00f1e3baf0c5cc \ - --hash=sha256:74bd2110b5daaebcec5e1d287bf137b61cb8cf6b2d8f5f2b74183e32bc4e7c87 taskcluster==4.0.1 \ --hash=sha256:27256511044346ac71a495d3c636f2add95c102b9b09f90d6fb1ea3e9949d311 \ --hash=sha256:99dd90bc1c566968868c8b07ede32f8e031cbccd52c7195a61e802679d461447 \ --hash=sha256:d0360063c1a3fcaaa514bb31c03954ba573d2b671df40a2ecfdfd9339cc8e93e virtualenv-clone==0.3.0 \ --hash=sha256:4507071d81013fd03ea9930ec26bc8648b997927a11fa80e8ee81198b57e0ac7 \ --hash=sha256:b5cfe535d14dc68dfc1d1bb4ac1209ea28235b91156e2bba8e250d291c3fb4f8 \ # via pipenv
deleted file mode 100644 --- a/third_party/python/taskcluster-urls/LICENSE +++ /dev/null @@ -1,373 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. 
Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. 
However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. 
* -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0.
deleted file mode 100644 --- a/third_party/python/taskcluster-urls/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE -global-exclude *.py[co] -include specification.yml -include package.json
deleted file mode 100644 --- a/third_party/python/taskcluster-urls/PKG-INFO +++ /dev/null @@ -1,253 +0,0 @@ -Metadata-Version: 2.1 -Name: taskcluster-urls -Version: 11.0.0 -Summary: Standardized url generator for taskcluster resources. -Home-page: https://github.com/taskcluster/taskcluster-lib-urls -Author: Brian Stack -Author-email: bstack@mozilla.com -License: MPL2 -Description: # Taskcluster URL Building Library - - [](http://mozilla.org/MPL/2.0) - - A simple library to generate URLs for various Taskcluster resources across our various deployment methods. - - This serves as both a simple shim for projects that use JavaScript but also is the reference implementation for - how we define these paths. - - URLs are defined in the 'Taskcluster URL Format' document. - - Changelog - --------- - View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases). - - Requirements - ------------ - - This is tested on and should run on any of Node.js `{8, 10}`. - - JS Usage - -------- - [](https://travis-ci.org/taskcluster/taskcluster-lib-urls) - [](https://www.npmjs.com/package/taskcluster-lib-urls) - - This package exports several methods for generating URLs conditionally based on - a root URL, as well as a few helper classes for generating URLs for a pre-determined - root URL: - - * `api(rootUrl, service, version, path)` -> `String` - * `apiReference(rootUrl, service, version)` -> `String` - * `docs(rootUrl, path)` -> `String` - * `exchangeReference(rootUrl, service, version)` -> `String` - * `schema(rootUrl, service, schema)` -> `String` - * `ui(rootUrl, path)` -> `String` - * `servicesManifest(rootUrl)` -> `String` - * `testRootUrl()` -> `String` - * `withRootUrl(rootUrl)` -> `Class` instance for above methods - - When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the - [spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md). - - `testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing. - The URL does not resolve. 
- - ```js - // Specifying root URL every time: - const libUrls = require('taskcluster-lib-urls'); - - libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar'); - libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them - libUrls.apiReference(rootUrl, 'auth', 'v1'); - libUrls.exchangeReference(rootUrl, 'auth', 'v1'); - libUrls.ui(rootUrl, 'foo/bar'); - libUrls.servicesManifest(rootUrl); - libUrls.docs(rootUrl, 'foo/bar'); - ``` - - ```js - // Specifying root URL in advance: - const libUrls = require('taskcluster-lib-urls'); - - const urls = libUrls.withRoot(rootUrl); - - urls.api('auth', 'v1', 'foo/bar'); - urls.schema('auth', 'v1/foo.yml'); - urls.apiReference('auth', 'v1'); - urls.exchangeReference('auth', 'v1'); - urls.ui('foo/bar'); - urls.servicesManifest(); - urls.docs('foo/bar'); - ``` - - If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows: - - ```js - { - libUrlss: { - require: ['cfg'], - setup: ({cfg}) => withRootUrl(cfg.rootURl), - }, - } - ``` - - Test with: - - ``` - yarn install - yarn test - ``` - - - Go Usage - -------- - - [](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls) - - The go package exports the following functions: - - ```go - func API(rootURL string, service string, version string, path string) string - func APIReference(rootURL string, service string, version string) string - func Docs(rootURL string, path string) string - func ExchangeReference(rootURL string, service string, version string) string - func Schema(rootURL string, service string, name string) string - func UI(rootURL string, path string) string - func ServicesManifest(rootURL string) string - ``` - - Install with: - - ``` - go install ./.. - ``` - - Test with: - - ``` - go test -v ./... - ``` - - Python Usage - ------------ - - You can install the python client with `pip install taskcluster-urls`; - - ```python - import taskcluster_urls - - taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar') - taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') # Note that schema names have versions in them - taskcluster_urls.api_reference(root_url, 'auth', 'v1') - taskcluster_urls.exchange_reference(root_url, 'auth', 'v1') - taskcluster_urls.ui(root_url, 'foo/bar') - taskcluster_urls.servicesManifest(root_url) - taskcluster_urls.docs(root_url, 'foo/bar') - - And for testing, - ```python - taskcluster_urls.test_root_url() - ``` - - Test with: - - ``` - tox - ``` - - Java Usage - ---------- - - [](http://taskcluster.github.io/taskcluster-lib-urls/apidocs) - - In order to use this library from your maven project, simply include it as a project dependency: - - ``` - <project> - ... - <dependencies> - ... - <dependency> - <groupId>org.mozilla.taskcluster</groupId> - <artifactId>taskcluster-lib-urls</artifactId> - <version>1.0.0</version> - </dependency> - </dependencies> - </project> - ``` - - The taskcluster-lib-urls artifacts are now available from the [maven central repository](http://central.sonatype.org/): - - * [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22) - * [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/) - - To use the library, do as follows: - - ```java - import org.mozilla.taskcluster.urls.*; - - ... 
- - URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org"); - - String fooBarAPI = urlProvider.api("auth", "v1", "foo/bar"); - String fooSchema = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them - String authAPIRef = urlProvider.apiReference("auth", "v1"); - String authExchangesRef = urlProvider.exchangeReference("auth", "v1"); - String uiFooBar = urlProvider.ui("foo/bar"); - String servicesManifest = urlProvider.servicesManifest(); - String docsFooBar = urlProvider.docs("foo/bar"); - - ... - ``` - - Install with: - - ``` - mvn install - ``` - - Test with: - - ``` - mvn test - ``` - - - Releasing - --------- - - New releases should be tested on Travis and Taskcluster to allow for all supported versions of various languages to be tested. Once satisfied that it works, new versions should be created with - `npm version` rather than by manually editing `package.json` and tags should be pushed to Github. - - Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach: - - ```sh - $ npm version minor # or patch, or major - $ git push upstream - ``` - - Once that's done, build the Python sdists (only possible by the [maintainers on pypi](https://pypi.org/project/taskcluster-urls/#files)): - - ```sh - rm -rf dist/* - python setup.py sdist bdist_wheel - python3 setup.py bdist_wheel - pip install twine - twine upload dist/* - ``` - - Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)! - - License - ------- - - [Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE) - -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Description-Content-Type: text/markdown
deleted file mode 100644 --- a/third_party/python/taskcluster-urls/README.md +++ /dev/null @@ -1,236 +0,0 @@ -# Taskcluster URL Building Library - -[](http://mozilla.org/MPL/2.0) - -A simple library to generate URLs for various Taskcluster resources across our various deployment methods. - -This serves as both a simple shim for projects that use JavaScript but also is the reference implementation for -how we define these paths. - -URLs are defined in the 'Taskcluster URL Format' document. - -Changelog ---------- -View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases). - -Requirements ------------- - -This is tested on and should run on any of Node.js `{8, 10}`. - -JS Usage --------- -[](https://travis-ci.org/taskcluster/taskcluster-lib-urls) -[](https://www.npmjs.com/package/taskcluster-lib-urls) - -This package exports several methods for generating URLs conditionally based on -a root URL, as well as a few helper classes for generating URLs for a pre-determined -root URL: - -* `api(rootUrl, service, version, path)` -> `String` -* `apiReference(rootUrl, service, version)` -> `String` -* `docs(rootUrl, path)` -> `String` -* `exchangeReference(rootUrl, service, version)` -> `String` -* `schema(rootUrl, service, schema)` -> `String` -* `ui(rootUrl, path)` -> `String` -* `servicesManifest(rootUrl)` -> `String` -* `testRootUrl()` -> `String` -* `withRootUrl(rootUrl)` -> `Class` instance for above methods - -When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the -[spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md). - -`testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing. -The URL does not resolve. 
-
-```js
-// Specifying root URL every time:
-const libUrls = require('taskcluster-lib-urls');
-
-libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar');
-libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them
-libUrls.apiReference(rootUrl, 'auth', 'v1');
-libUrls.exchangeReference(rootUrl, 'auth', 'v1');
-libUrls.ui(rootUrl, 'foo/bar');
-libUrls.servicesManifest(rootUrl);
-libUrls.docs(rootUrl, 'foo/bar');
-```
-
-```js
-// Specifying root URL in advance:
-const libUrls = require('taskcluster-lib-urls');
-
-const urls = libUrls.withRootUrl(rootUrl);
-
-urls.api('auth', 'v1', 'foo/bar');
-urls.schema('auth', 'v1/foo.yml');
-urls.apiReference('auth', 'v1');
-urls.exchangeReference('auth', 'v1');
-urls.ui('foo/bar');
-urls.servicesManifest();
-urls.docs('foo/bar');
-```
-
-If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows:
-
-```js
-{
-  libUrls: {
-    require: ['cfg'],
-    setup: ({cfg}) => withRootUrl(cfg.rootUrl),
-  },
-}
-```
-
-Test with:
-
-```
-yarn install
-yarn test
-```
-
-
-Go Usage
---------
-
-[](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls)
-
-The Go package exports the following functions:
-
-```go
-func API(rootURL string, service string, version string, path string) string
-func APIReference(rootURL string, service string, version string) string
-func Docs(rootURL string, path string) string
-func ExchangeReference(rootURL string, service string, version string) string
-func Schema(rootURL string, service string, name string) string
-func UI(rootURL string, path string) string
-func ServicesManifest(rootURL string) string
-```
-
-Install with:
-
-```
-go install ./...
-```
-
-Test with:
-
-```
-go test -v ./...
-```
-
-Python Usage
-------------
-
-You can install the Python client with `pip install taskcluster-urls`:
-
-```python
-import taskcluster_urls
-
-taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar')
-taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml')  # Note that schema names have versions in them
-taskcluster_urls.api_reference(root_url, 'auth', 'v1')
-taskcluster_urls.exchange_reference(root_url, 'auth', 'v1')
-taskcluster_urls.ui(root_url, 'foo/bar')
-taskcluster_urls.services_manifest(root_url)
-taskcluster_urls.docs(root_url, 'foo/bar')
-```
-
-And for testing:
-
-```python
-taskcluster_urls.test_root_url()
-```
-
-Test with:
-
-```
-tox
-```
-
-Java Usage
-----------
-
-[](http://taskcluster.github.io/taskcluster-lib-urls/apidocs)
-
-To use this library from your Maven project, include it as a project dependency:
-
-```
-<project>
-  ...
-  <dependencies>
-    ...
-    <dependency>
-      <groupId>org.mozilla.taskcluster</groupId>
-      <artifactId>taskcluster-lib-urls</artifactId>
-      <version>1.0.0</version>
-    </dependency>
-  </dependencies>
-</project>
-```
-
-The taskcluster-lib-urls artifacts are now available from the [Maven Central repository](http://central.sonatype.org/):
-
-* [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22)
-* [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/)
-
-To use the library, do as follows:
-
-```java
-import org.mozilla.taskcluster.urls.*;
-
-...
-
-    URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org");
-
-    String fooBarAPI = urlProvider.api("auth", "v1", "foo/bar");
-    String fooSchema = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them
-    String authAPIRef = urlProvider.apiReference("auth", "v1");
-    String authExchangesRef = urlProvider.exchangeReference("auth", "v1");
-    String uiFooBar = urlProvider.ui("foo/bar");
-    String servicesManifest = urlProvider.servicesManifest();
-    String docsFooBar = urlProvider.docs("foo/bar");
-
-...
-```
-
-Install with:
-
-```
-mvn install
-```
-
-Test with:
-
-```
-mvn test
-```
-
-
-Releasing
----------
-
-New releases should be tested on Travis and Taskcluster so that all supported versions of each language are exercised. Once satisfied that it works, create new versions with
-`npm version` rather than by manually editing `package.json`, and push the tags to GitHub.
-
-Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach:
-
-```sh
-$ npm version minor # or patch, or major
-$ git push upstream
-```
-
-Once that's done, build and upload the Python sdist and wheels (something only the [maintainers on PyPI](https://pypi.org/project/taskcluster-urls/#files) can do):
-
-```sh
-rm -rf dist/*
-python setup.py sdist bdist_wheel
-python3 setup.py bdist_wheel
-pip install twine
-twine upload dist/*
-```
-
-Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)!
-
-License
--------
-
-[Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE)
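The behavioural split the README above describes is easiest to see by example: every helper checks whether the root URL is the legacy `https://taskcluster.net` deployment and emits subdomain-style URLs for it, path-style URLs for everything else. A minimal sketch using the vendored Python implementation whose deletion follows below (the `mytaskcluster.acme.org` root URL is illustrative):

```python
import taskcluster_urls

# Legacy Heroku deployment: per-service subdomains of taskcluster.net.
assert taskcluster_urls.api('https://taskcluster.net', 'auth', 'v1', 'foo/bar') \
    == 'https://auth.taskcluster.net/v1/foo/bar'

# Any other root URL follows the URL spec: one host, path-based routing.
assert taskcluster_urls.api('https://mytaskcluster.acme.org', 'auth', 'v1', 'foo/bar') \
    == 'https://mytaskcluster.acme.org/api/auth/v1/foo/bar'
```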
deleted file mode 100644
--- a/third_party/python/taskcluster-urls/package.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-  "name": "taskcluster-lib-urls",
-  "version": "11.0.0",
-  "author": "Brian Stack <bstack@mozilla.com>",
-  "description": "Build urls for taskcluster resources.",
-  "license": "MPL-2.0",
-  "scripts": {
-    "lint": "eslint src/*.js test/*.js",
-    "pretest": "yarn lint",
-    "test": "mocha test/*_test.js"
-  },
-  "files": [
-    "src"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/taskcluster/taskcluster-lib-urls.git"
-  },
-  "main": "./src/index.js",
-  "devDependencies": {
-    "eslint-config-taskcluster": "^3.1.0",
-    "js-yaml": "^3.11.0",
-    "mocha": "^5.1.1"
-  }
-}
deleted file mode 100644
--- a/third_party/python/taskcluster-urls/setup.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-[tool:pytest]
-flake8-max-line-length = 120
-
-[egg_info]
-tag_build =
-tag_date = 0
-
deleted file mode 100644
--- a/third_party/python/taskcluster-urls/setup.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import json
-import os
-from setuptools import setup
-
-package_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json')
-with open(package_json) as f:
-    version = json.load(f)['version']
-
-setup(
-    name='taskcluster-urls',
-    description='Standardized url generator for taskcluster resources.',
-    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
-    long_description_content_type='text/markdown',
-    url='https://github.com/taskcluster/taskcluster-lib-urls',
-    version=version,
-    packages=['taskcluster_urls'],
-    author='Brian Stack',
-    author_email='bstack@mozilla.com',
-    license='MPL2',
-    classifiers=[
-        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-    ],
-)
deleted file mode 100644
--- a/third_party/python/taskcluster-urls/taskcluster_urls/__init__.py
+++ /dev/null
@@ -1,66 +0,0 @@
-OLD_ROOT_URL = 'https://taskcluster.net'
-
-def api(root_url, service, version, path):
-    """Generate URL for path in a Taskcluster service."""
-    root_url = root_url.rstrip('/')
-    path = path.lstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://{}.taskcluster.net/{}/{}'.format(service, version, path)
-    else:
-        return '{}/api/{}/{}/{}'.format(root_url, service, version, path)
-
-def api_reference(root_url, service, version):
-    """Generate URL for a Taskcluster api reference."""
-    root_url = root_url.rstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://references.taskcluster.net/{}/{}/api.json'.format(service, version)
-    else:
-        return '{}/references/{}/{}/api.json'.format(root_url, service, version)
-
-def docs(root_url, path):
-    """Generate URL for path in the Taskcluster docs."""
-    root_url = root_url.rstrip('/')
-    path = path.lstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://docs.taskcluster.net/{}'.format(path)
-    else:
-        return '{}/docs/{}'.format(root_url, path)
-
-def exchange_reference(root_url, service, version):
-    """Generate URL for a Taskcluster exchange reference."""
-    root_url = root_url.rstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://references.taskcluster.net/{}/{}/exchanges.json'.format(service, version)
-    else:
-        return '{}/references/{}/{}/exchanges.json'.format(root_url, service, version)
-
-def schema(root_url, service, name):
-    """Generate URL for a schema in a Taskcluster service."""
-    root_url = root_url.rstrip('/')
-    name = name.lstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://schemas.taskcluster.net/{}/{}'.format(service, name)
-    else:
-        return '{}/schemas/{}/{}'.format(root_url, service, name)
-
-def ui(root_url, path):
-    """Generate URL for a path in the Taskcluster ui."""
-    root_url = root_url.rstrip('/')
-    path = path.lstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://tools.taskcluster.net/{}'.format(path)
-    else:
-        return '{}/{}'.format(root_url, path)
-
-def services_manifest(root_url):
-    """Returns a URL for the service manifest of a taskcluster deployment."""
-    root_url = root_url.rstrip('/')
-    if root_url == OLD_ROOT_URL:
-        return 'https://references.taskcluster.net/manifest.json'
-    else:
-        return '{}/references/manifest.json'.format(root_url)
-
-def test_root_url():
-    """Returns a standardized "testing" rootUrl that does not resolve but
-    is easily recognizable in test failures."""
-    return 'https://tc-tests.example.com'
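Because every function above is a pure string formatter, the module can be exercised without any network access; `test_root_url()` exists precisely so that tests share a recognizable root URL that never resolves. A short usage sketch, with expected values derived directly from the code above:

```python
import taskcluster_urls

root_url = taskcluster_urls.test_root_url()  # 'https://tc-tests.example.com'

# New-style deployments route everything through one host, by path.
assert taskcluster_urls.services_manifest(root_url) \
    == 'https://tc-tests.example.com/references/manifest.json'
assert taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') \
    == 'https://tc-tests.example.com/schemas/auth/v1/foo.yml'
assert taskcluster_urls.docs(root_url, 'tutorial') \
    == 'https://tc-tests.example.com/docs/tutorial'
```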
--- a/tools/tryselect/tasks.py
+++ b/tools/tryselect/tasks.py
@@ -50,19 +50,16 @@ def invalidate(cache, root):
     tmod = max(os.path.getmtime(os.path.join(tc_dir, p)) for p, _ in FileFinder(tc_dir))
     cmod = os.path.getmtime(cache)
 
     if tmod > cmod:
         os.remove(cache)
 
 
 def generate_tasks(params, full, root):
-    # Ensure that TASKCLUSTER_ROOT_URL is set
-    taskgraph.set_root_url_env()
-
     params = params or "project=mozilla-central"
 
     # Try to delete the old taskgraph cache directory.
     old_cache_dir = os.path.join(get_state_dir()[0], 'cache', 'taskgraph')
     if os.path.isdir(old_cache_dir):
         shutil.rmtree(old_cache_dir)
 
     root_hash = hashlib.sha256(os.path.abspath(root)).hexdigest()
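For context, the `invalidate` helper in the hunk above implements simple mtime-based cache invalidation: if any file under the in-tree taskcluster directory is newer than the cached taskgraph, the cache is deleted so it gets regenerated on the next run. A self-contained sketch of that pattern, with `os.walk` standing in for mozbuild's `FileFinder` and all names illustrative:

```python
import os

def invalidate(cache_path, source_dir):
    """Remove cache_path if any file under source_dir is newer than it."""
    if not os.path.isfile(cache_path):
        return
    # Newest modification time among all source files.
    mtimes = [
        os.path.getmtime(os.path.join(dirpath, name))
        for dirpath, _, names in os.walk(source_dir)
        for name in names
    ]
    if mtimes and max(mtimes) > os.path.getmtime(cache_path):
        os.remove(cache_path)
```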