Bug 1133074 - Use routes.json for mozharness TC uploads; r=jlund
author Mike Shal <mshal@mozilla.com>
Mon, 27 Jul 2015 11:03:54 -0400
changeset 288490 8f81ad1f9fdca61bf0e1bd58e4c0ab570de6c39d
parent 288489 ac350571e7b8b0d331a81d13f3ca44a8e2b8208a
child 288491 8134a7435cffaa99f62b5e0ad33308cf353a048f
push id 5067
push user raliiev@mozilla.com
push date Mon, 21 Sep 2015 14:04:52 +0000
treeherder mozilla-beta@14221ffe5b2f
reviewers jlund
bugs 1133074
milestone 42.0a1
Bug 1133074 - Use routes.json for mozharness TC uploads; r=jlund
testing/mozharness/mozharness/mozilla/buildbot.py
testing/mozharness/mozharness/mozilla/building/buildbase.py
testing/mozharness/scripts/desktop_l10n.py
--- a/testing/mozharness/mozharness/mozilla/buildbot.py
+++ b/testing/mozharness/mozharness/mozilla/buildbot.py
@@ -176,8 +176,42 @@ class BuildbotMixin(object):
             ]
 
         for d in downloadables:
             sendchange += [d]
 
         retcode = self.run_command(buildbot + sendchange)
         if retcode != 0:
             self.info("The sendchange failed but we don't want to turn the build orange: %s" % retcode)
+
+    def query_build_name(self):
+        build_name = self.config.get('platform')
+        if not build_name:
+            self.fatal('Must specify "platform" in the mozharness config for indexing')
+
+        return build_name
+
+    def query_build_type(self):
+        if self.config.get('build_type'):
+            build_type = self.config['build_type']
+        elif self.config.get('pgo_build'):
+            build_type = 'pgo'
+        elif self.config.get('debug_build', False):
+            build_type = 'debug'
+        else:
+            build_type = 'opt'
+        return build_type
+
+    def buildid_to_dict(self, buildid):
+        """Returns a dict with the year, month, day, hour, minute, and second
+           as keys, as parsed from the buildid"""
+        # strptime is no good here because it strips leading zeros; slicing
+        # alone would silently accept a malformed buildid, so validate first.
+        buildid = str(buildid)
+        if len(buildid) != 14 or not buildid.isdigit():
+            self.fatal('Could not parse buildid into YYYYMMDDHHMMSS: %s' % buildid)
+        return {'year': buildid[0:4],
+                'month': buildid[4:6],
+                'day': buildid[6:8],
+                'hour': buildid[8:10],
+                'minute': buildid[10:12],
+                'second': buildid[12:14],
+                }
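
For reference, the two new helpers are straightforward: query_build_type() falls back through build_type, pgo_build, and debug_build before defaulting to 'opt', and buildid_to_dict() slices a YYYYMMDDHHMMSS buildid into components while preserving leading zeros. A minimal standalone sketch of that behavior, with hypothetical config values:

    # Standalone sketch of the new BuildbotMixin helpers (hypothetical inputs).
    config = {'platform': 'linux64', 'pgo_build': True}
    build_type = (config.get('build_type')
                  or ('pgo' if config.get('pgo_build')
                      else 'debug' if config.get('debug_build', False)
                      else 'opt'))
    assert build_type == 'pgo'

    # Slicing keeps the leading zeros that a strptime round-trip would drop.
    buildid = '20150727110354'
    keys = ('year', 'month', 'day', 'hour', 'minute', 'second')
    parts = dict(zip(keys, (buildid[0:4], buildid[4:6], buildid[6:8],
                            buildid[8:10], buildid[10:12], buildid[12:14])))
    assert parts == {'year': '2015', 'month': '07', 'day': '27',
                     'hour': '11', 'minute': '03', 'second': '54'}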
--- a/testing/mozharness/mozharness/mozilla/building/buildbase.py
+++ b/testing/mozharness/mozharness/mozilla/building/buildbase.py
@@ -1365,16 +1365,17 @@ or run without that action (ie: --no-{ac
                 self.return_code = self.worst_level(
                     EXIT_STATUS_DICT[TBPL_WARNING], self.return_code,
                     AUTOMATION_EXIT_CODES[::-1]
                 )
 
         self.generated_build_props = True
 
     def upload_files(self):
+        dirs = self.query_abs_dirs()
         auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
         credentials = {}
         execfile(auth, credentials)
         client_id = credentials.get('taskcluster_clientId')
         access_token = credentials.get('taskcluster_accessToken')
         if not client_id or not access_token:
             self.warning('Skipping S3 file upload: No taskcluster credentials.')
             return
@@ -1386,29 +1387,57 @@ or run without that action (ie: --no-{ac
         # which is necessary before the virtualenv can be created.
         self.create_virtualenv()
         self.activate_virtualenv()
 
         # Enable Taskcluster debug logging, so at least we get some debug
         # messages while we are testing uploads.
         logging.getLogger('taskcluster').setLevel(logging.DEBUG)
 
+        routes_json = os.path.join(dirs['abs_src_dir'],
+                                   'testing/taskcluster/routes.json')
+        with open(routes_json) as f:
+            contents = json.load(f)
+            if self.query_is_nightly():
+                templates = contents['nightly']
+
+                # Nightly builds with l10n counterparts also publish to the
+                # 'en-US' locale.
+                if self.config.get('publish_nightly_en_US_routes'):
+                    templates.extend(contents['l10n'])
+            else:
+                templates = contents['routes']
+        routes = []
+        for template in templates:
+            fmt = {
+                'index': 'index.garbage.staging.mshal-testing', # TODO: Bug 1133074
+                'project': self.buildbot_config['properties']['branch'],
+                'head_rev': self.query_revision(),
+                'build_product': self.config['stage_product'],
+                'build_name': self.query_build_name(),
+                'build_type': self.query_build_type(),
+                'locale': 'en-US',
+            }
+            fmt.update(self.buildid_to_dict(self.query_buildid()))
+            routes.append(template.format(**fmt))
+        self.info("Using routes: %s" % routes)
+
         tc = Taskcluster(self.branch,
                          self.query_pushdate(), # Use pushdate as the rank
                          client_id,
                          access_token,
                          self.log_obj,
                          )
 
         index = self.config.get('taskcluster_index', 'index.garbage.staging')
         # TODO: Bug 1165980 - these should be in tree
-        routes = [
+        routes.extend([
             "%s.buildbot.branches.%s.%s" % (index, self.branch, self.stage_platform),
             "%s.buildbot.revisions.%s.%s.%s" % (index, self.query_revision(), self.branch, self.stage_platform),
-        ]
+        ])
         task = tc.create_task(routes)
         tc.claim_task(task)
 
         # Some trees may not be setting uploadFiles, so default to []. Normally
         # we'd only expect to get here if the build completes successfully,
         # which means we should have uploadFiles.
         files = self.query_buildbot_property('uploadFiles') or []
         if not files:
@@ -1468,17 +1497,16 @@ or run without that action (ie: --no-{ac
             '.zip',
             '.json',
         )
 
         # Also upload our mozharness log files
         files.extend([os.path.join(self.log_obj.abs_log_dir, x) for x in self.log_obj.log_files.values()])
 
         # Also upload our buildprops.json file.
-        dirs = self.query_abs_dirs()
         files.extend([os.path.join(dirs['base_work_dir'], 'buildprops.json')])
 
         for upload_file in files:
             # Create an S3 artifact for each file that gets uploaded. We also
             # check the uploaded file against the property conditions so that we
             # can set the buildbot config with the correct URLs for package
             # locations.
             tc.create_artifact(task, upload_file)
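
To illustrate the templating step above: each entry in routes.json is a str.format() template, and one route is produced per template by filling it in from the fmt dict. A minimal sketch with a hypothetical routes.json shape (the real templates live in testing/taskcluster/routes.json and are not shown in this diff):

    import json

    # Hypothetical routes.json contents; the in-tree file is authoritative.
    contents = json.loads("""{
        "routes":  ["{index}.buildbot.branches.{project}.{build_name}-{build_type}"],
        "nightly": ["{index}.nightly.{year}.{month}.{day}.{project}.{build_name}-{build_type}"],
        "l10n":    ["{index}.nightly.latest.{project}-l10n.{build_name}-{build_type}.{locale}"]
    }""")

    fmt = {
        'index': 'index.garbage.staging.mshal-testing',
        'project': 'mozilla-central',
        'head_rev': 'deadbeef1234',
        'build_product': 'firefox',
        'build_name': 'linux64',
        'build_type': 'opt',
        'locale': 'en-US',
        'year': '2015', 'month': '07', 'day': '27',
        'hour': '11', 'minute': '03', 'second': '54',
    }
    # Unused keys in fmt are simply ignored by str.format(**fmt).
    routes = [template.format(**fmt) for template in contents['routes']]
    # routes == ['index.garbage.staging.mshal-testing.buildbot.branches.'
    #            'mozilla-central.linux64-opt']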
--- a/testing/mozharness/scripts/desktop_l10n.py
+++ b/testing/mozharness/scripts/desktop_l10n.py
@@ -201,20 +201,18 @@ class DesktopSingleLocale(LocalesMixin, 
         self.upload_env = None
         self.revision = None
         self.version = None
         self.upload_urls = {}
         self.locales_property = {}
         self.l10n_dir = None
         self.package_urls = {}
         self.pushdate = None
-        # Each locale adds its list of files to upload_files - some will be
-        # duplicates (like the mar binaries), so we use a set to prune those
-        # when uploading to taskcluster.
-        self.upload_files = set()
+        # upload_files is a dictionary of files to upload, keyed by locale.
+        self.upload_files = {}
 
         if 'mock_target' in self.config:
             self.enable_mock()
 
     def _pre_config_lock(self, rw_config):
         """replaces 'configuration_tokens' with their values, before the
            configuration gets locked. If some of the configuration_tokens
            are not present, stops the execution of the script"""
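
With this change, self.upload_files maps each locale to the absolute paths of its artifacts, so the upload step further down can create one TaskCluster task per locale instead of pooling everything into a single set. A hypothetical example of the resulting shape (paths made up):

    # Hypothetical contents after two locales have been packaged:
    upload_files = {
        'de': ['/builds/obj/dist/firefox-42.0a1.de.linux-x86_64.tar.bz2',
               '/builds/obj/dist/update/firefox-42.0a1.de.complete.mar'],
        'fr': ['/builds/obj/dist/firefox-42.0a1.fr.linux-x86_64.tar.bz2',
               '/builds/obj/dist/update/firefox-42.0a1.fr.complete.mar'],
    }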
@@ -744,19 +742,19 @@ class DesktopSingleLocale(LocalesMixin, 
         cwd = dirs['abs_locales_dir']
         output = self._get_output_from_make(target=target, cwd=cwd, env=env)
         self.info('UPLOAD_FILES is "%s"' % (output))
         files = shlex.split(output)
         if not files:
             self.error('failed to get upload file list for locale %s' % (locale))
             return FAILURE
 
-        for f in files:
-            abs_file = os.path.abspath(os.path.join(cwd, f))
-            self.upload_files.update([abs_file])
+        self.upload_files[locale] = [
+            os.path.abspath(os.path.join(cwd, f)) for f in files
+        ]
         return SUCCESS
 
     def make_installers(self, locale):
         """wrapper for make installers-(locale)"""
         env = self.query_l10n_env()
         self._copy_mozconfig()
         env['L10NBASEDIR'] = self.l10n_dir
         dirs = self.query_abs_dirs()
@@ -1013,41 +1011,58 @@ class DesktopSingleLocale(LocalesMixin, 
 
         # Enable Taskcluster debug logging, so at least we get some debug
         # messages while we are testing uploads.
         logging.getLogger('taskcluster').setLevel(logging.DEBUG)
 
         branch = self.config['branch']
         platform = self.config['platform']
         revision = self._query_revision()
-        tc = Taskcluster(self.config['branch'],
-                         self.query_pushdate(),
-                         client_id,
-                         access_token,
-                         self.log_obj,
-                         )
+
+        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+                                   'testing/taskcluster/routes.json')
+        with open(routes_json) as f:
+            contents = json.load(f)
+            templates = contents['l10n']
 
-        index = self.config.get('taskcluster_index', 'index.garbage.staging')
-        # TODO: Bug 1165980 - these should be in tree. Note the '.l10n' suffix.
-        routes = [
-            "%s.buildbot.branches.%s.%s.l10n" % (index, branch, platform),
-            "%s.buildbot.revisions.%s.%s.%s.l10n" % (index, revision, branch, platform),
-        ]
+        for locale, files in self.upload_files.iteritems():
+            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
+            routes = []
+            for template in templates:
+                fmt = {
+                    # TODO: Bug 1133074 - use the index from the config:
+                    # index = self.config.get('taskcluster_index', 'index.garbage.staging')
+                    'index': 'index.garbage.staging.mshal-testing',
+                    'project': branch,
+                    'head_rev': revision,
+                    'build_product': self.config['stage_product'],
+                    'build_name': self.query_build_name(),
+                    'build_type': self.query_build_type(),
+                    'locale': locale,
+                }
+                fmt.update(self.buildid_to_dict(self._query_buildid()))
+                routes.append(template.format(**fmt))
+            self.info('Using routes: %s' % routes)
 
-        task = tc.create_task(routes)
-        tc.claim_task(task)
+            tc = Taskcluster(branch,
+                             self.query_pushdate(),
+                             client_id,
+                             access_token,
+                             self.log_obj,
+                             )
+            task = tc.create_task(routes)
+            tc.claim_task(task)
 
-        self.info("Uploading files to S3: %s" % self.upload_files)
-        for upload_file in self.upload_files:
-            # Create an S3 artifact for each file that gets uploaded. We also
-            # check the uploaded file against the property conditions so that we
-            # can set the buildbot config with the correct URLs for package
-            # locations.
-            tc.create_artifact(task, upload_file)
-        tc.report_completed(task)
+            for upload_file in files:
+                # Create an S3 artifact for each file that gets uploaded. We also
+                # check the uploaded file against the property conditions so that we
+                # can set the buildbot config with the correct URLs for package
+                # locations.
+                tc.create_artifact(task, upload_file)
+            tc.report_completed(task)
 
     def query_pushdate(self):
         if self.pushdate:
             return self.pushdate
 
         mozilla_dir = self.config['mozilla_dir']
         repo = None
         for repository in self.config['repos']:
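
query_pushdate(), used above as the TaskCluster rank, is resolved lazily from the hg pushlog. A minimal sketch of that lookup, assuming the json-pushes API (Python 2, matching mozharness at the time; the helper name and error handling are illustrative only):

    import json
    import urllib2

    def lookup_pushdate(repo_url, revision):
        # json-pushes returns {push_id: {"changesets": [...], "date": epoch, ...}}
        url = '%s/json-pushes?changeset=%s' % (repo_url, revision)
        pushinfo = json.load(urllib2.urlopen(url))
        return pushinfo.values()[0]['date']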