Bug 1481121: [release] Add support for tasks declaring the release-relevant artifacts, for downstream tasks to inspect; r=Callek
author Tom Prince <mozilla@hocat.ca>
Mon, 20 Aug 2018 12:29:15 -0600
changeset 433945 a702b39447f197ed7ec5d89416c6642505a5411d
parent 433944 d70b429fda86b2dd606454043bdd42fe7f7d4407
child 433946 d08523e5c65744629acae475dd3ec2734a52c2d9
push id 34526
push user aiakab@mozilla.com
push date Wed, 29 Aug 2018 21:56:30 +0000
treeherder mozilla-central@2b50a2ad969a
reviewers Callek
bugs 1481121
milestone 63.0a1
Bug 1481121: [release] Add support for tasks declaring the release-relevant artifacts, for downstream tasks to inspect; r=Callek Differential Revision: https://phabricator.services.mozilla.com/D3814
taskcluster/taskgraph/generator.py
taskcluster/taskgraph/task.py
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/task.py
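
The new 'release-artifacts' field described in the commit message is an optional list of artifact paths added to the task-description (and job-description) schema. A minimal sketch of a task description carrying the field, per the schema change in transforms/task.py below; the label, worker-type, and artifact paths are placeholders, not values from this patch:

    # Hypothetical task description as seen by the task transforms; only fields
    # relevant to this patch are shown, other required fields are omitted.
    task_description = {
        'label': 'example-build-linux64',              # placeholder label
        'worker-type': 'aws-provisioner-v1/example',   # placeholder worker-type
        'needs-sccache': False,
        'release-artifacts': [                         # new optional field
            'public/build/target.tar.bz2',             # assumed artifact paths
            'public/build/target.checksums',
        ],
    }
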
--- a/taskcluster/taskgraph/generator.py
+++ b/taskcluster/taskgraph/generator.py
@@ -64,17 +64,19 @@ class Kind(object):
         # perform the transformations on the loaded inputs
         trans_config = TransformConfig(self.name, self.path, config, parameters,
                                        kind_dependencies_tasks, self.graph_config)
         tasks = [Task(self.name,
                       label=task_dict['label'],
                       attributes=task_dict['attributes'],
                       task=task_dict['task'],
                       optimization=task_dict.get('optimization'),
-                      dependencies=task_dict.get('dependencies'))
+                      dependencies=task_dict.get('dependencies'),
+                      release_artifacts=task_dict.get('release-artifacts'),
+                      )
                  for task_dict in transforms(trans_config, inputs)]
         return tasks
 
     @classmethod
     def load(cls, root_dir, graph_config, kind_name):
         path = os.path.join(root_dir, kind_name)
         kind_yml = os.path.join(path, 'kind.yml')
         if not os.path.exists(kind_yml):
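
For reference, a minimal sketch of the mapping this loader change introduces: transforms emit the hyphenated 'release-artifacts' key, which becomes the Task object's release_artifacts attribute (normalized to a frozenset, or None when absent) in task.py below. The import path assumes the in-tree taskgraph package; the label and artifact path are placeholders:

    from taskgraph.task import Task

    task_dict = {
        'label': 'example-task',                            # placeholder label
        'attributes': {},
        'task': {},
        'release-artifacts': ['public/build/target.zip'],   # placeholder path
    }
    task = Task('build',
                label=task_dict['label'],
                attributes=task_dict['attributes'],
                task=task_dict['task'],
                release_artifacts=task_dict.get('release-artifacts'))
    assert task.release_artifacts == frozenset(['public/build/target.zip'])
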
--- a/taskcluster/taskgraph/task.py
+++ b/taskcluster/taskgraph/task.py
@@ -20,65 +20,76 @@ class Task(object):
     And later, as the task-graph processing proceeds:
 
     - task_id -- TaskCluster taskId under which this task will be created
 
     This class is just a convenience wrapper for the data type and managing
     display, comparison, serialization, etc. It has no functionality of its own.
     """
     def __init__(self, kind, label, attributes, task,
-                 optimization=None, dependencies=None):
+                 optimization=None, dependencies=None,
+                 release_artifacts=None):
         self.kind = kind
         self.label = label
         self.attributes = attributes
         self.task = task
 
         self.task_id = None
 
         self.attributes['kind'] = kind
 
         self.optimization = optimization
         self.dependencies = dependencies or {}
+        if release_artifacts:
+            self.release_artifacts = frozenset(release_artifacts)
+        else:
+            self.release_artifacts = None
 
     def __eq__(self, other):
         return self.kind == other.kind and \
             self.label == other.label and \
             self.attributes == other.attributes and \
             self.task == other.task and \
             self.task_id == other.task_id and \
             self.optimization == other.optimization and \
-            self.dependencies == other.dependencies
+            self.dependencies == other.dependencies and \
+            self.release_artifacts == other.release_artifacts
 
     def __repr__(self):
         return ('Task({kind!r}, {label!r}, {attributes!r}, {task!r}, '
                 'optimization={optimization!r}, '
-                'dependencies={dependencies!r})'.format(**self.__dict__))
+                'dependencies={dependencies!r}, '
+                'release_artifacts={release_artifacts!r})'.format(**self.__dict__))
 
     def to_json(self):
         rv = {
             'kind': self.kind,
             'label': self.label,
             'attributes': self.attributes,
             'dependencies': self.dependencies,
             'optimization': self.optimization,
             'task': self.task,
         }
         if self.task_id:
             rv['task_id'] = self.task_id
+        if self.release_artifacts:
+            rv['release_artifacts'] = sorted(self.release_artifacts)
         return rv
 
     @classmethod
     def from_json(cls, task_dict):
         """
         Given a data structure as produced by taskgraph.to_json, re-construct
         the original Task object.  This is used to "resume" the task-graph
         generation process, for example in Action tasks.
         """
         rv = cls(
             kind=task_dict['kind'],
             label=task_dict['label'],
             attributes=task_dict['attributes'],
             task=task_dict['task'],
             optimization=task_dict['optimization'],
-            dependencies=task_dict.get('dependencies'))
+            dependencies=task_dict.get('dependencies'),
+            release_artifacts=task_dict.get('release_artifacts')
+        )
         if 'task_id' in task_dict:
             rv.task_id = task_dict['task_id']
         return rv
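
A short sketch of the serialization behaviour added above: a non-empty set of release artifacts is emitted by to_json() as a sorted list under the 'release_artifacts' key, and the key is omitted entirely when no artifacts were declared (paths are placeholders):

    from taskgraph.task import Task

    task = Task('build', label='example-task', attributes={}, task={},
                release_artifacts=['public/b.txt', 'public/a.txt'])  # placeholders
    as_json = task.to_json()
    assert as_json['release_artifacts'] == ['public/a.txt', 'public/b.txt']

    bare = Task('build', label='bare-task', attributes={}, task={})
    assert 'release_artifacts' not in bare.to_json()
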
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -62,16 +62,17 @@ job_description_schema = Schema({
     Optional('index'): task_description_schema['index'],
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('coalesce'): task_description_schema['coalesce'],
     Optional('always-target'): task_description_schema['always-target'],
     Exclusive('optimization', 'optimization'): task_description_schema['optimization'],
     Optional('needs-sccache'): task_description_schema['needs-sccache'],
+    Optional('release-artifacts'): task_description_schema['release-artifacts'],
 
     # The "when" section contains descriptions of the circumstances under which
     # this task should be included in the task graph.  This will be converted
     # into an optimization, so it cannot be specified in a job description that
     # also gives 'optimization'.
     Exclusive('when', 'optimization'): Any({
         # This task only needs to be run if a file matching one of the given
         # patterns has changed in the push.  The patterns use the mozpack
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -208,16 +208,19 @@ task_description_schema = Schema({
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
     'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
     Required('needs-sccache'): bool,
 
+    # Set of artifacts relevant to release tasks
+    Optional('release-artifacts'): [basestring],
+
     # information specific to the worker implementation that will run this task
     'worker': Any({
         Required('implementation'): Any('docker-worker', 'docker-engine'),
         Required('os'): 'linux',
 
         # For tasks that will run in docker-worker or docker-engine, this is the
         # name of the docker image or in-tree docker image to run the task in.  If
         # in-tree, then a dependency will be created automatically.  This is
@@ -1731,16 +1734,17 @@ def build_task(config, tasks):
                 env['MOZ_AUTOMATION'] = '1'
 
         yield {
             'label': task['label'],
             'task': task_def,
             'dependencies': task.get('dependencies', {}),
             'attributes': attributes,
             'optimization': task.get('optimization', None),
+            'release-artifacts': task.get('release-artifacts', []),
         }
 
 
 @transforms.add
 def chain_of_trust(config, tasks):
     for task in tasks:
         if task['task'].get('payload', {}).get('features', {}).get('chainOfTrust'):
             image = task.get('dependencies', {}).get('docker-image')
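
Finally, a hedged sketch of the consumer side the commit message alludes to: a downstream transform inspecting the release_artifacts declared by tasks in the kinds it depends on, via config.kind_dependencies_tasks (provided by TransformConfig in generator.py above). The transform name and what is done with the collected paths are assumptions for illustration only, not part of this patch:

    from taskgraph.transforms.base import TransformSequence

    transforms = TransformSequence()

    @transforms.add
    def collect_release_artifacts(config, jobs):
        # Hypothetical transform: gather the release-relevant artifacts that
        # upstream tasks declared, and stash them on each job for later use.
        for job in jobs:
            upstream = []
            for dep in config.kind_dependencies_tasks:
                if dep.release_artifacts:
                    upstream.extend(sorted(dep.release_artifacts))
            job.setdefault('attributes', {})['release_artifacts'] = upstream
            yield job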