Bug 1748926 - [taskgraph] Move Task.release_artifacts to an attribute, r=aki
authorAndrew Halberstadt <ahal@mozilla.com>
Thu, 03 Mar 2022 16:51:19 +0000
changeset 609515 28ac458827ee72c2d212cb8c0796ac907f4268b7
parent 609514 50f221b3b6144844fb7ba4592ecec7f454ba90ba
child 609516 b1d3464943fa4a4675e3dac8634890d46a44abd0
push id 158855
push user ahalberstadt@mozilla.com
push date Thu, 03 Mar 2022 16:53:42 +0000
treeherder autoland@28ac458827ee [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers aki
bugs 1748926
milestone 99.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1748926 - [taskgraph] Move Task.release_artifacts to an attribute, r=aki The 'release_artifacts' attribute of the Task object doesn't exist in upstream taskgraph. This attribute is only needed by certain kinds and likely doesn't belong on the general purpose Task container. Move it to an attribute instead. This was tested via `taskgraph --diff` on all release tasks. While there is a diff in that 'release_artifacts' moves from the top-level to under an attribute, there is no change to the payload of any tasks. So this shouldn't affect any release graphs. Depends on D140081 Differential Revision: https://phabricator.services.mozilla.com/D140082
taskcluster/docs/attributes.rst
taskcluster/gecko_taskgraph/generator.py
taskcluster/gecko_taskgraph/task.py
taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
taskcluster/gecko_taskgraph/transforms/job/__init__.py
taskcluster/gecko_taskgraph/transforms/mar_signing.py
taskcluster/gecko_taskgraph/transforms/partner_attribution.py
taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
taskcluster/gecko_taskgraph/transforms/repackage.py
taskcluster/gecko_taskgraph/transforms/repackage_signing.py
taskcluster/gecko_taskgraph/transforms/signing.py
taskcluster/gecko_taskgraph/transforms/task.py
taskcluster/gecko_taskgraph/util/scriptworker.py
--- a/taskcluster/docs/attributes.rst
+++ b/taskcluster/docs/attributes.rst
@@ -290,16 +290,21 @@ lot of places. To support private artifa
 ``artifact_prefix`` attribute. It will default to ``public/build`` but will be
 overridable per-task.
 
 artifact_map
 ===============
 For beetmover jobs, this indicates which yaml file should be used to
 generate the upstream artifacts and payload instructions to the task.
 
+release_artifacts
+=================
+A set of artifacts that are candidates for downstream release tasks to operate
+on.
+
 batch
 =====
 Used by `perftest` to indicates that a task can be run as a batch.
 
 
 enable-full-crashsymbols
 ========================
 In automation, full crashsymbol package generation is normally disabled.  For
--- a/taskcluster/gecko_taskgraph/generator.py
+++ b/taskcluster/gecko_taskgraph/generator.py
@@ -81,17 +81,16 @@ class Kind:
                 label=task_dict["label"],
                 description=task_dict["description"],
                 attributes=task_dict["attributes"],
                 task=task_dict["task"],
                 optimization=task_dict.get("optimization"),
                 dependencies=task_dict.get("dependencies"),
                 soft_dependencies=task_dict.get("soft-dependencies"),
                 if_dependencies=task_dict.get("if-dependencies"),
-                release_artifacts=task_dict.get("release-artifacts"),
             )
             for task_dict in transforms(trans_config, inputs)
         ]
         return tasks
 
     @classmethod
     def load(cls, root_dir, graph_config, kind_name):
         path = os.path.join(root_dir, kind_name)
--- a/taskcluster/gecko_taskgraph/task.py
+++ b/taskcluster/gecko_taskgraph/task.py
@@ -36,20 +36,16 @@ class Task:
     attributes = attr.ib()
     task = attr.ib()
     description = attr.ib(default="")
     task_id = attr.ib(default=None, init=False)
     optimization = attr.ib(default=None)
     dependencies = attr.ib(factory=dict)
     soft_dependencies = attr.ib(factory=list)
     if_dependencies = attr.ib(factory=list)
-    release_artifacts = attr.ib(
-        converter=attr.converters.optional(frozenset),
-        default=None,
-    )
 
     def __attrs_post_init__(self):
         self.attributes["kind"] = self.kind
 
     def to_json(self):
         rv = {
             "kind": self.kind,
             "label": self.label,
@@ -58,18 +54,16 @@ class Task:
             "dependencies": self.dependencies,
             "soft_dependencies": sorted(self.soft_dependencies),
             "if_dependencies": self.if_dependencies,
             "optimization": self.optimization,
             "task": self.task,
         }
         if self.task_id:
             rv["task_id"] = self.task_id
-        if self.release_artifacts:
-            rv["release_artifacts"] = sorted(self.release_artifacts)
         return rv
 
     @classmethod
     def from_json(cls, task_dict):
         """
         Given a data structure as produced by taskgraph.to_json, re-construct
         the original Task object.  This is used to "resume" the task-graph
         generation process, for example in Action tasks.
@@ -79,13 +73,12 @@ class Task:
             label=task_dict["label"],
             description=task_dict.get("description", ""),
             attributes=task_dict["attributes"],
             task=task_dict["task"],
             optimization=task_dict["optimization"],
             dependencies=task_dict.get("dependencies"),
             soft_dependencies=task_dict.get("soft_dependencies"),
             if_dependencies=task_dict.get("if_dependencies"),
-            release_artifacts=task_dict.get("release-artifacts"),
         )
         if "task_id" in task_dict:
             rv.task_id = task_dict["task_id"]
         return rv
--- a/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
+++ b/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
@@ -51,17 +51,16 @@ def generate_tasks(*tasks):
         task.setdefault("task", {})
         task.setdefault("attributes", {})
         task["attributes"].setdefault("e10s", True)
 
         for attr in (
             "optimization",
             "dependencies",
             "soft_dependencies",
-            "release_artifacts",
         ):
             task.setdefault(attr, None)
 
         task["task"].setdefault("label", task["label"])
         yield Task.from_json(task)
 
 
 # task sets
--- a/taskcluster/gecko_taskgraph/transforms/job/__init__.py
+++ b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
@@ -64,17 +64,16 @@ job_description_schema = Schema(
         Optional("run-on-projects"): task_description_schema["run-on-projects"],
         Optional("shipping-phase"): task_description_schema["shipping-phase"],
         Optional("shipping-product"): task_description_schema["shipping-product"],
         Optional("always-target"): task_description_schema["always-target"],
         Exclusive("optimization", "optimization"): task_description_schema[
             "optimization"
         ],
         Optional("use-sccache"): task_description_schema["use-sccache"],
-        Optional("release-artifacts"): task_description_schema["release-artifacts"],
         Optional("priority"): task_description_schema["priority"],
         # The "when" section contains descriptions of the circumstances under which
         # this task should be included in the task graph.  This will be converted
         # into an optimization, so it cannot be specified in a job description that
         # also gives 'optimization'.
         Exclusive("when", "optimization"): {
             # This task only needs to be run if a file matching one of the given
             # patterns has changed in the push.  The patterns use the mozpack
--- a/taskcluster/gecko_taskgraph/transforms/mar_signing.py
+++ b/taskcluster/gecko_taskgraph/transforms/mar_signing.py
@@ -77,17 +77,17 @@ def generate_partials_artifacts(job, rel
 
     return upstream_artifacts
 
 
 def generate_complete_artifacts(job, kind):
     upstream_artifacts = []
     if kind not in SIGNING_FORMATS:
         kind = "default"
-    for artifact in job.release_artifacts:
+    for artifact in job.attributes["release_artifacts"]:
         basename = os.path.basename(artifact)
         if basename in SIGNING_FORMATS[kind]:
             upstream_artifacts.append(
                 {
                     "taskId": {"task-reference": f"<{job.kind}>"},
                     "taskType": "build",
                     "paths": [artifact],
                     "formats": SIGNING_FORMATS[kind][basename],
--- a/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
@@ -117,12 +117,12 @@ def add_command_arguments(config, tasks)
         )
         worker["artifacts"] = [
             {
                 "name": "releng/partner",
                 "path": "/builds/worker/artifacts/releng/partner",
                 "type": "directory",
             }
         ]
-        task["release-artifacts"] = release_artifacts
+        task.setdefault("attributes", {})["release_artifacts"] = release_artifacts
         task["label"] = config.kind
 
         yield task
--- a/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
@@ -67,17 +67,19 @@ def resolve_keys(config, jobs):
 
 
 @transforms.add
 def split_public_and_private(config, jobs):
     # we need to separate private vs public destinations because beetmover supports one
     # in a single task. Only use a single task for each type though.
     partner_config = get_partner_config_by_kind(config, config.kind)
     for job in jobs:
-        upstream_artifacts = job["primary-dependency"].release_artifacts
+        upstream_artifacts = job["primary-dependency"].attributes.get(
+            "release_artifacts"
+        )
         attribution_task_ref = "<{}>".format(job["primary-dependency"].label)
         prefix = get_artifact_prefix(job["primary-dependency"])
         artifacts = defaultdict(list)
         for artifact in upstream_artifacts:
             partner, sub_partner, platform, locale, _ = artifact.replace(
                 prefix + "/", ""
             ).split("/", 4)
             destination = "private"
--- a/taskcluster/gecko_taskgraph/transforms/repackage.py
+++ b/taskcluster/gecko_taskgraph/transforms/repackage.py
@@ -476,16 +476,19 @@ def make_job_description(config, jobs):
             worker.setdefault("env", {})["LOCALE"] = locale
 
         worker["artifacts"] = _generate_task_output_files(
             dep_job,
             worker_type_implementation(config.graph_config, worker_type),
             repackage_config=repackage_config,
             locale=locale,
         )
+        attributes["release_artifacts"] = [
+            artifact["name"] for artifact in worker["artifacts"]
+        ]
 
         task = {
             "label": job["label"],
             "description": description,
             "worker-type": worker_type,
             "dependencies": dependencies,
             "if-dependencies": [dep_job.kind],
             "attributes": attributes,
@@ -502,17 +505,16 @@ def make_job_description(config, jobs):
                 dep_job,
                 build_platform,
                 signing_task,
                 repackage_signing_task,
                 locale=locale,
                 project=config.params["project"],
                 existing_fetch=job.get("fetches"),
             ),
-            "release-artifacts": [artifact["name"] for artifact in worker["artifacts"]],
         }
 
         if build_platform.startswith("macosx"):
             task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                 [
                     "linux64-libdmg",
                     "linux64-hfsplus",
                     "linux64-node",
--- a/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
@@ -100,17 +100,17 @@ def make_repackage_signing_description(c
         build_platform = dep_job.attributes.get("build_platform")
         is_shippable = dep_job.attributes.get("shippable")
         signing_cert_scope = get_signing_cert_scope_per_platform(
             build_platform, is_shippable, config
         )
         scopes = [signing_cert_scope]
 
         upstream_artifacts = []
-        for artifact in sorted(dep_job.release_artifacts):
+        for artifact in sorted(dep_job.attributes.get("release_artifacts")):
             basename = os.path.basename(artifact)
             if basename in SIGNING_FORMATS:
                 upstream_artifacts.append(
                     {
                         "taskId": {"task-reference": f"<{dep_kind}>"},
                         "taskType": "repackage",
                         "paths": [artifact],
                         "formats": SIGNING_FORMATS[os.path.basename(artifact)],
--- a/taskcluster/gecko_taskgraph/transforms/signing.py
+++ b/taskcluster/gecko_taskgraph/transforms/signing.py
@@ -168,16 +168,19 @@ def make_task_description(config, jobs):
 
         attributes = (
             job["attributes"]
             if job.get("attributes")
             else copy_attributes_from_dependent_job(dep_job)
         )
         attributes["signed"] = True
 
+        if "linux" in build_platform:
+            attributes["release_artifacts"] = ["public/build/KEY"]
+
         if dep_job.attributes.get("chunk_locales"):
             # Used for l10n attribute passthrough
             attributes["chunk_locales"] = dep_job.attributes.get("chunk_locales")
 
         signing_cert_scope = get_signing_cert_scope_per_platform(
             build_platform, is_shippable, config
         )
         worker_type_alias = "linux-signing" if is_shippable else "linux-depsigning"
@@ -197,19 +200,16 @@ def make_task_description(config, jobs):
             "optimization": dep_job.optimization,
             "routes": job.get("routes", []),
             "shipping-product": job.get("shipping-product"),
             "shipping-phase": job.get("shipping-phase"),
         }
         if dep_job.kind in task["dependencies"]:
             task["if-dependencies"] = [dep_job.kind]
 
-        if "linux" in build_platform:
-            task["release-artifacts"] = ["public/build/KEY"]
-
         if "macosx" in build_platform:
             shippable = "false"
             if "shippable" in attributes and attributes["shippable"]:
                 shippable = "true"
             mac_behavior = evaluate_keyed_by(
                 config.graph_config["mac-notarization"]["mac-behavior"],
                 "mac behavior",
                 {
--- a/taskcluster/gecko_taskgraph/transforms/task.py
+++ b/taskcluster/gecko_taskgraph/transforms/task.py
@@ -187,18 +187,16 @@ task_description_schema = Schema(
         # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
         Required("optimization"): OptimizationSchema,
         # the provisioner-id/worker-type for the task.  The following parameters will
         # be substituted in this string:
         #  {level} -- the scm level of this push
         "worker-type": str,
         # Whether the job should use sccache compiler caching.
         Required("use-sccache"): bool,
-        # Set of artifacts relevant to release tasks
-        Optional("release-artifacts"): [str],
         # information specific to the worker implementation that will run this task
         Optional("worker"): {
             Required("implementation"): str,
             Extra: object,
         },
         # Override the default priority for the project
         Optional("priority"): str,
     }
@@ -846,27 +844,28 @@ def build_scriptworker_signing_payload(c
         "maxRunTime": worker["max-run-time"],
         "upstreamArtifacts": worker["upstream-artifacts"],
     }
     if worker.get("mac-behavior"):
         task_def["payload"]["behavior"] = worker["mac-behavior"]
         for attribute in ("entitlements-url", "requirements-plist-url"):
             if worker.get(attribute):
                 task_def["payload"][attribute] = worker[attribute]
-    artifacts = set(task.get("release-artifacts", []))
+
+    artifacts = set(task.setdefault("attributes", {}).get("release_artifacts", []))
     for upstream_artifact in worker["upstream-artifacts"]:
         for path in upstream_artifact["paths"]:
             artifacts.update(
                 get_signed_artifacts(
                     input=path,
                     formats=upstream_artifact["formats"],
                     behavior=worker.get("mac-behavior"),
                 )
             )
-    task["release-artifacts"] = list(artifacts)
+    task["attributes"]["release_artifacts"] = list(artifacts)
 
 
 @payload_builder(
     "notarization-poller",
     schema={
         Required("uuid-manifest"): taskref_or_string,
         # the maximum time to run, in seconds
         Optional("max-run-time"): int,
@@ -2003,17 +2002,16 @@ def build_task(config, tasks):
             "label": task["label"],
             "description": task["description"],
             "task": task_def,
             "dependencies": dependencies,
             "if-dependencies": if_dependencies,
             "soft-dependencies": task.get("soft-dependencies", []),
             "attributes": attributes,
             "optimization": task.get("optimization", None),
-            "release-artifacts": task.get("release-artifacts", []),
         }
 
 
 @transforms.add
 def chain_of_trust(config, tasks):
     for task in tasks:
         if task["task"].get("payload", {}).get("features", {}).get("chainOfTrust"):
             image = task.get("dependencies", {}).get("docker-image")
--- a/taskcluster/gecko_taskgraph/util/scriptworker.py
+++ b/taskcluster/gecko_taskgraph/util/scriptworker.py
@@ -452,23 +452,25 @@ def generate_beetmover_upstream_artifact
             paths.append(
                 os.path.join(
                     base_artifact_prefix,
                     jsone.render(file_config["source_path_modifier"], kwargs),
                     jsone.render(filename, kwargs),
                 )
             )
 
-        if job.get("dependencies") and getattr(
-            job["dependencies"][dep], "release_artifacts", None
+        if (
+            job.get("dependencies")
+            and getattr(job["dependencies"][dep], "attributes", None)
+            and job["dependencies"][dep].attributes.get("release_artifacts")
         ):
             paths = [
                 path
                 for path in paths
-                if path in job["dependencies"][dep].release_artifacts
+                if path in job["dependencies"][dep].attributes["release_artifacts"]
             ]
 
         if not paths:
             continue
 
         upstream_artifacts.append(
             {
                 "taskId": {"task-reference": f"<{dep}>"},