Bug 1597616 - make mozharness::mozilla::bouncer and building python3 compatible r=aki
author Edwin Takahashi <egao@mozilla.com>
Mon, 25 Nov 2019 17:15:18 +0000
changeset 503708 ce0afeefd88f95b58f723faf23b9dd06f806c2f5
parent 503707 8a4fb397fa5cb696042eb34c75ca7cc25d675990
child 503709 229153aa0f4ea30657732e187b4868eba8a7ac66
push id 36844
push user shindli@mozilla.com
push date Tue, 26 Nov 2019 05:19:55 +0000
treeherder mozilla-central@e29eecc5147c
reviewers aki
bugs 1597616
milestone 72.0a1
Bug 1597616 - make mozharness::mozilla::bouncer and building python3 compatible r=aki

Changes:
- Lots of code formatting changes from automated tools.
- Python 2/3 compatible imports.
- Removed usage of `execfile` and replaced it with an equivalent `exec(compile(...))` call.
- Import `six` and change `unicode` comparisons to their `six` equivalents (e.g. `six.text_type`).
- Change `iteritems`/`itervalues` calls to Python 2/3 compatible methods.
- Adapt exception logging code to call the appropriate `message` member.

Differential Revision: https://phabricator.services.mozilla.com/D53854
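For reference, the Python 2/3 idioms this patch applies follow the shape below — a minimal, self-contained sketch (the helper names exec_config_file and native_update_channel are illustrative, not from the patch):

    import six

    # Renamed stdlib modules: try the Python 2 name first, then fall back
    # to the Python 3 location, so the rest of the file can keep using the
    # old name unchanged.
    try:
        import httplib                  # Python 2
    except ImportError:
        import http.client as httplib  # Python 3
    assert hasattr(httplib, "HTTPConnection")

    # execfile() no longer exists in Python 3; reading, compiling, and
    # exec()ing the file contents is the equivalent.
    def exec_config_file(path, global_dict, local_dict):
        with open(path, "rb") as f:
            exec(compile(f.read(), path, "exec"), global_dict, local_dict)

    # The unicode type is gone in Python 3; six.text_type is unicode on
    # Python 2 and str on Python 3.
    def native_update_channel(update_channel):
        if isinstance(update_channel, six.text_type):
            update_channel = update_channel.encode("utf-8")
        return update_channel

    # dict.iteritems()/itervalues() are Python 2 only; items()/values()
    # work on both, wrapped in list() where a real list is needed.
    for status, return_code in list({"success": 0, "failure": 2}.items()):
        print(status, return_code)

The try/except import keeps a single module name valid under both interpreters, which is why the exception handlers in submitter.py can continue to reference httplib.BadStatusLine without change.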
testing/mozharness/mozharness/mozilla/bouncer/submitter.py
testing/mozharness/mozharness/mozilla/building/buildbase.py
--- a/testing/mozharness/mozharness/mozilla/bouncer/submitter.py
+++ b/testing/mozharness/mozharness/mozilla/bouncer/submitter.py
@@ -1,35 +1,48 @@
 import base64
-import httplib
 import socket
 import sys
 import traceback
-import urllib
-import urllib2
 from xml.dom.minidom import parseString
 
 from mozharness.base.log import FATAL
 
+try:
+    import httplib
+except ImportError:
+    import http.client as httplib
+try:
+    from urllib import urlencode, quote
+except ImportError:
+    from urllib.parse import urlencode, quote
+try:
+    from urllib2 import HTTPError, URLError, Request, urlopen
+except ImportError:
+    from urllib.error import HTTPError, URLError
+    from urllib.request import Request, urlopen
+
 
 class BouncerSubmitterMixin(object):
     def query_credentials(self):
         if self.credentials:
             return self.credentials
         global_dict = {}
         local_dict = {}
-        execfile(self.config["credentials_file"], global_dict, local_dict)
+        with open(self.config["credentials_file"], "rb") as f:
+            exec(compile(f.read(), self.config["credentials_file"], 'exec'),
+                 global_dict, local_dict)
         self.credentials = (local_dict["tuxedoUsername"],
                             local_dict["tuxedoPassword"])
         return self.credentials
 
     def api_call(self, route, data, error_level=FATAL, retry_config=None):
         retry_args = dict(
             failure_status=None,
-            retry_exceptions=(urllib2.HTTPError, urllib2.URLError,
+            retry_exceptions=(HTTPError, URLError,
                               httplib.BadStatusLine,
                               socket.timeout, socket.error),
             error_message="call to %s failed" % (route),
             error_level=error_level,
         )
 
         if retry_config:
             retry_args.update(retry_config)
@@ -38,55 +51,55 @@ class BouncerSubmitterMixin(object):
             self._api_call,
             args=(route, data),
             **retry_args
         )
 
     def _api_call(self, route, data):
         api_prefix = self.config["bouncer-api-prefix"]
         api_url = "%s/%s" % (api_prefix, route)
-        request = urllib2.Request(api_url)
+        request = Request(api_url)
         if data:
-            post_data = urllib.urlencode(data, doseq=True)
+            post_data = urlencode(data, doseq=True)
-            request.add_data(post_data)
+            request.data = post_data  # Request.add_data() was removed in Python 3
             self.info("POST data: %s" % post_data)
         credentials = self.query_credentials()
         if credentials:
-            auth = base64.encodestring('%s:%s' % credentials)
-            request.add_header("Authorization", "Basic %s" % auth.strip())
+            auth = base64.b64encode(('%s:%s' % credentials).encode("utf-8"))
+            request.add_header("Authorization", "Basic %s" % auth.strip().decode())
         try:
             self.info("Submitting to %s" % api_url)
-            res = urllib2.urlopen(request, timeout=60).read()
+            res = urlopen(request, timeout=60).read()
             self.info("Server response")
             self.info(res)
             return res
-        except urllib2.HTTPError as e:
+        except HTTPError as e:
             self.warning("Cannot access %s" % api_url)
             traceback.print_exc(file=sys.stdout)
             self.warning("Returned page source:")
             self.warning(e.read())
             raise
-        except urllib2.URLError:
+        except URLError:
             traceback.print_exc(file=sys.stdout)
             self.warning("Cannot access %s" % api_url)
             raise
         except socket.timeout as e:
             self.warning("Timed out accessing %s: %s" % (api_url, e))
             raise
         except socket.error as e:
             self.warning("Socket error when accessing %s: %s" % (api_url, e))
             raise
         except httplib.BadStatusLine as e:
             self.warning('BadStatusLine accessing %s: %s' % (api_url, e))
             raise
 
     def product_exists(self, product_name):
         self.info("Checking if %s already exists" % product_name)
         res = self.api_call("product_show?product=%s" %
-                            urllib.quote(product_name), data=None)
+                            quote(product_name), data=None)
         try:
             xml = parseString(res)
             # API returns <products/> if the product doesn't exist
             products_found = len(xml.getElementsByTagName("product"))
             self.info("Products found: %s" % products_found)
             return bool(products_found)
         except Exception as e:
             self.warning("Error parsing XML: %s" % e)
--- a/testing/mozharness/mozharness/mozilla/building/buildbase.py
+++ b/testing/mozharness/mozharness/mozilla/building/buildbase.py
@@ -5,53 +5,44 @@
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 # ***** END LICENSE BLOCK *****
 """ buildbase.py.
 
 provides a base class for fx desktop builds
 author: Jordan Lund
 
 """
-import json
-
-import os
-import time
-import uuid
 import copy
 import glob
-
-# import the power of mozharness ;)
-import sys
-from datetime import datetime
+import json
+import os
 import re
-from mozharness.base.config import (
-    BaseConfig, parse_config_file, DEFAULT_CONFIG_PATH,
-)
+import sys
+import time
+import uuid
+from datetime import datetime
+
+import six
+
+from mozharness.base.config import (DEFAULT_CONFIG_PATH, BaseConfig,
+                                    parse_config_file)
 from mozharness.base.errors import MakefileErrorList
-from mozharness.base.log import ERROR, OutputParser, FATAL
+from mozharness.base.log import ERROR, FATAL, OutputParser
+from mozharness.base.python import (PerfherderResourceOptionsMixin,
+                                    VirtualenvMixin)
 from mozharness.base.script import PostScriptRun
 from mozharness.base.vcs.vcsbase import MercurialScript
-from mozharness.mozilla.automation import (
-    AutomationMixin,
-    EXIT_STATUS_DICT,
-    TBPL_STATUS_DICT,
-    TBPL_FAILURE,
-    TBPL_RETRY,
-    TBPL_WARNING,
-    TBPL_SUCCESS,
-    TBPL_WORST_LEVEL_TUPLE,
-)
+from mozharness.mozilla.automation import (EXIT_STATUS_DICT, TBPL_FAILURE,
+                                           TBPL_RETRY, TBPL_STATUS_DICT,
+                                           TBPL_SUCCESS, TBPL_WARNING,
+                                           TBPL_WORST_LEVEL_TUPLE,
+                                           AutomationMixin)
 from mozharness.mozilla.secrets import SecretsMixin
-from mozharness.base.python import (
-    PerfherderResourceOptionsMixin,
-    VirtualenvMixin,
-)
 
-AUTOMATION_EXIT_CODES = EXIT_STATUS_DICT.values()
-AUTOMATION_EXIT_CODES.sort()
+AUTOMATION_EXIT_CODES = sorted(EXIT_STATUS_DICT.values())
 
 MISSING_CFG_KEY_MSG = "The key '%s' could not be determined. \
 Please add this to your config."
 
 ERROR_MSGS = {
     'comments_undetermined': '"comments" could not be determined. This may be \
 because it was a forced build.',
     'tooltool_manifest_undetermined': '"tooltool_manifest_src" not set, \
@@ -116,17 +107,18 @@ def get_mozconfig_path(script, config, d
     :param config: The mozharness config to inspect.
     :type config: dict
 
     :param dirs: The directories specified for this build.
     :type dirs: dict
     """
     COMPOSITE_KEYS = {'mozconfig_variant', 'app_name', 'mozconfig_platform'}
     have_composite_mozconfig = COMPOSITE_KEYS <= set(config.keys())
-    have_partial_composite_mozconfig = len(COMPOSITE_KEYS & set(config.keys())) > 0
+    have_partial_composite_mozconfig = len(
+        COMPOSITE_KEYS & set(config.keys())) > 0
     have_src_mozconfig = 'src_mozconfig' in config
     have_src_mozconfig_manifest = 'src_mozconfig_manifest' in config
 
     # first determine the mozconfig path
     if have_partial_composite_mozconfig and not have_composite_mozconfig:
         raise MozconfigPathError(
             "All or none of 'app_name', 'mozconfig_platform' and `mozconfig_variant' must be "
             "in the config in order to determine the mozconfig.")
@@ -145,27 +137,34 @@ def get_mozconfig_path(script, config, d
     elif have_composite_mozconfig:
         src_mozconfig = '%(app_name)s/config/mozconfigs/%(platform)s/%(variant)s' % {
             'app_name': config['app_name'],
             'platform': config['mozconfig_platform'],
             'variant': config['mozconfig_variant'],
         }
         abs_mozconfig_path = os.path.join(dirs['abs_src_dir'], src_mozconfig)
     elif have_src_mozconfig:
-        abs_mozconfig_path = os.path.join(dirs['abs_src_dir'], config.get('src_mozconfig'))
+        abs_mozconfig_path = os.path.join(
+            dirs['abs_src_dir'], config.get('src_mozconfig'))
     elif have_src_mozconfig_manifest:
-        manifest = os.path.join(dirs['abs_work_dir'], config['src_mozconfig_manifest'])
+        manifest = os.path.join(
+            dirs['abs_work_dir'],
+            config['src_mozconfig_manifest'])
         if not os.path.exists(manifest):
             raise MozconfigPathError(
-                'src_mozconfig_manifest: "%s" not found. Does it exist?' % (manifest,))
+                'src_mozconfig_manifest: "%s" not found. Does it exist?' %
+                (manifest,))
         else:
             with script.opened(manifest, error_level=ERROR) as (fh, err):
                 if err:
-                    raise MozconfigPathError("%s exists but coud not read properties" % manifest)
-                abs_mozconfig_path = os.path.join(dirs['abs_src_dir'], json.load(fh)['gecko_path'])
+                    raise MozconfigPathError(
+                        "%s exists but coud not read properties" %
+                        manifest)
+                abs_mozconfig_path = os.path.join(
+                    dirs['abs_src_dir'], json.load(fh)['gecko_path'])
     else:
         raise MozconfigPathError(
             "Must provide 'app_name', 'mozconfig_platform' and 'mozconfig_variant'; "
             "or one of 'src_mozconfig' or 'src_mozconfig_manifest' in the config "
             "in order to determine the mozconfig.")
 
     return abs_mozconfig_path
 
@@ -361,18 +360,22 @@ class BuildOptionParser(object):
                     break
                 if 'linux' in cfg_file_name:
                     cls.platform = 'linux'
                     break
                 if 'android' in cfg_file_name:
                     cls.platform = 'android'
                     break
             else:
-                sys.exit(error_msg % (target_option, 'platform', '--platform',
-                                      '"linux", "windows", "mac", or "android"'))
+                sys.exit(
+                    error_msg %
+                    (target_option,
+                     'platform',
+                     '--platform',
+                     '"linux", "windows", "mac", or "android"'))
         return cls.bits, cls.platform
 
     @classmethod
     def find_variant_cfg_path(cls, opt, value, parser):
         valid_variant_cfg_path = None
         # first let's see if we were given a valid short-name
         if cls.build_variants.get(value):
             bits, pltfrm = cls._query_pltfrm_and_bits(opt, parser.values)
@@ -410,23 +413,23 @@ class BuildOptionParser(object):
         shortname coupled with known platform/bits.
         """
         valid_variant_cfg_path, prospective_cfg_path = cls.find_variant_cfg_path(
             '--custom-build-variant-cfg', value, parser)
 
         if not valid_variant_cfg_path:
             # either the value was an indeterminable path or an invalid short
             # name
-            sys.exit("Whoops!\n'--custom-build-variant' was passed but an "
-                     "appropriate config file could not be determined. Tried "
-                     "using: '%s' but it was not:"
-                     "\n\t-- a valid shortname: %s "
-                     "\n\t-- a valid variant for the given platform and bits." % (
-                         prospective_cfg_path,
-                         str(cls.build_variants.keys())))
+            sys.exit(
+                "Whoops!\n'--custom-build-variant' was passed but an "
+                "appropriate config file could not be determined. Tried "
+                "using: '%s' but it was not:"
+                "\n\t-- a valid shortname: %s "
+                "\n\t-- a valid variant for the given platform and bits." %
+                (prospective_cfg_path, str(list(cls.build_variants.keys()))))
         parser.values.config_files.append(valid_variant_cfg_path)
         setattr(parser.values, option.dest, value)  # the pool
 
     @classmethod
     def set_build_pool(cls, option, opt, value, parser):
         # first let's add the build pool file where there may be pool
         # specific keys/values. Then let's store the pool name
         parser.values.config_files.append(cls.build_pool_cfg_file)
@@ -475,17 +478,17 @@ BUILD_BASE_CONFIG_OPTIONS = [
     [['--custom-build-variant-cfg'], {
         "action": "callback",
         "callback": BuildOptionParser.set_build_variant,
         "type": "string",
         "dest": "build_variant",
         "help": "Sets the build type and will determine appropriate"
                 " additional config to use. Either pass a config path"
                 " or use a valid shortname from: "
-                "%s" % (BuildOptionParser.build_variants.keys(),)}],
+                "%s" % (list(BuildOptionParser.build_variants.keys()),)}],
     [['--build-pool'], {
         "action": "callback",
         "callback": BuildOptionParser.set_build_pool,
         "type": "string",
         "dest": "build_pool",
         "help": "This will update the config with specific pool"
                 " environment keys/values. The dicts for this are"
                 " in %s\nValid values: staging or"
@@ -603,20 +606,25 @@ items from that key's value."
                                                'config',
                                                'printconfigsetting.py')
         if not app_ini_path:
             # set the default
             app_ini_path = dirs['abs_app_ini_path']
         if (os.path.exists(print_conf_setting_path) and
                 os.path.exists(app_ini_path)):
             cmd = [
-                sys.executable, os.path.join(dirs['abs_src_dir'], 'mach'), 'python',
-                print_conf_setting_path, app_ini_path,
-                'App', prop
-            ]
+                sys.executable,
+                os.path.join(
+                    dirs['abs_src_dir'],
+                    'mach'),
+                'python',
+                print_conf_setting_path,
+                app_ini_path,
+                'App',
+                prop]
             env = self.query_build_env()
             # dirs['abs_obj_dir'] can be different from env['MOZ_OBJDIR'] on
             # mac, and that confuses mach.
             del env['MOZ_OBJDIR']
             return self.get_output_from_command(
                 cmd, cwd=dirs['abs_obj_dir'], env=env)
         else:
             return None
@@ -643,17 +651,18 @@ items from that key's value."
 
         if not self.config.get('objdir'):
             return self.fatal(MISSING_CFG_KEY_MSG % ('objdir',))
         self.objdir = self.config['objdir']
         return self.objdir
 
     def query_is_nightly_promotion(self):
         platform_enabled = self.config.get('enable_nightly_promotion')
-        branch_enabled = self.branch in self.config.get('nightly_promotion_branches')
+        branch_enabled = self.branch in self.config.get(
+            'nightly_promotion_branches')
         return platform_enabled and branch_enabled
 
     def query_build_env(self, **kwargs):
         c = self.config
 
         # let's invoke the base query_env and make a copy of it
         # as we don't always want every key below added to the same dict
         env = copy.deepcopy(
@@ -663,22 +672,24 @@ items from that key's value."
         # first grab the buildid
         env['MOZ_BUILD_DATE'] = self.query_buildid()
 
         if self.query_is_nightly() or self.query_is_nightly_promotion():
             # taskcluster sets the update channel for shipping builds
             # explicitly
             if c.get('update_channel'):
                 update_channel = c['update_channel']
-                if isinstance(update_channel, unicode):
+                if isinstance(update_channel, six.text_type):
                     update_channel = update_channel.encode("utf-8")
                 env["MOZ_UPDATE_CHANNEL"] = update_channel
             else:  # let's just give the generic channel based on branch
                 env["MOZ_UPDATE_CHANNEL"] = "nightly-%s" % (self.branch,)
-            self.info("Update channel set to: {}".format(env["MOZ_UPDATE_CHANNEL"]))
+            self.info(
+                "Update channel set to: {}".format(
+                    env["MOZ_UPDATE_CHANNEL"]))
 
         return env
 
     def query_mach_build_env(self, multiLocale=None):
         c = self.config
         if multiLocale is None and self.query_is_nightly():
             multiLocale = c.get('multi_locale', False)
         mach_env = {}
@@ -716,27 +727,35 @@ items from that key's value."
     def _get_mozconfig(self):
         """assign mozconfig."""
         dirs = self.query_abs_dirs()
 
         try:
             abs_mozconfig_path = get_mozconfig_path(
                 script=self, config=self.config, dirs=dirs)
         except MozconfigPathError as e:
-            self.fatal(e.message)
+            if six.PY2:
+                self.fatal(e.message)
+            else:
+                self.fatal(e.msg)
 
         self.info("Use mozconfig: {}".format(abs_mozconfig_path))
 
         # print its contents
         content = self.read_from_file(abs_mozconfig_path, error_level=FATAL)
         self.info("mozconfig content:")
         self.info(content)
 
-        # finally, copy the mozconfig to a path that 'mach build' expects it to be
-        self.copyfile(abs_mozconfig_path, os.path.join(dirs['abs_src_dir'], '.mozconfig'))
+        # finally, copy the mozconfig to a path that 'mach build' expects it to
+        # be
+        self.copyfile(
+            abs_mozconfig_path,
+            os.path.join(
+                dirs['abs_src_dir'],
+                '.mozconfig'))
 
     # TODO: replace with ToolToolMixin
     def _get_tooltool_auth_file(self):
         # set the default authentication file based on platform; this
         # corresponds to where puppet puts the token
         if 'tooltool_authentication_file' in self.config:
             fn = self.config['tooltool_authentication_file']
         elif self._is_windows():
@@ -862,18 +881,19 @@ items from that key's value."
             # this for nightlies since we clobber the whole work_dir in
             # clobber()
             self._rm_old_package()
         self._get_mozconfig()
         self._run_tooltool()
         self._create_mozbuild_dir()
         self._ensure_upload_path()
         mach_props = os.path.join(
-            self.query_abs_dirs()['abs_obj_dir'], 'dist', 'mach_build_properties.json'
-        )
+            self.query_abs_dirs()['abs_obj_dir'],
+            'dist',
+            'mach_build_properties.json')
         if os.path.exists(mach_props):
             self.info("Removing previous mach property file: %s" % mach_props)
             self.rmtree(mach_props)
 
     def build(self):
         """builds application."""
 
         args = ['build', '-v']
@@ -881,17 +901,18 @@ items from that key's value."
         custom_build_targets = self.config.get('build_targets')
         if custom_build_targets:
             args += custom_build_targets
 
         # This will error on non-0 exit code.
         self._run_mach_command_in_build_env(args)
 
         if not custom_build_targets:
-            self.generate_build_props(console_output=True, halt_on_failure=True)
+            self.generate_build_props(
+                console_output=True, halt_on_failure=True)
 
         self._generate_build_stats()
 
     def static_analysis_autotest(self):
         """Run mach static-analysis autotest, in order to make sure we dont regress"""
         self.preflight_build()
         self._run_mach_command_in_build_env(['configure'])
         self._run_mach_command_in_build_env(['static-analysis', 'autotest',
@@ -901,19 +922,24 @@ items from that key's value."
     def _query_mach(self):
         dirs = self.query_abs_dirs()
 
         if 'MOZILLABUILD' in os.environ:
             # We found many issues with intermittent build failures when not
             # invoking mach via bash.
             # See bug 1364651 before considering changing.
             mach = [
-                os.path.join(os.environ['MOZILLABUILD'], 'msys', 'bin', 'bash.exe'),
-                os.path.join(dirs['abs_src_dir'], 'mach')
-            ]
+                os.path.join(
+                    os.environ['MOZILLABUILD'],
+                    'msys',
+                    'bin',
+                    'bash.exe'),
+                os.path.join(
+                    dirs['abs_src_dir'],
+                    'mach')]
         else:
             mach = [sys.executable, 'mach']
         return mach
 
     def _run_mach_command_in_build_env(self, args, use_subprocess=False):
         """Run a mach command in a build context."""
         env = self.query_build_env()
         env.update(self.query_mach_build_env())
@@ -991,17 +1017,18 @@ items from that key's value."
         ]
         package_filename = self.get_output_from_command(
             package_cmd,
             cwd=objdir,
         )
         if not package_filename:
             self.fatal(
                 "Unable to determine the package filename for the multi-l10n build. "
-                "Was trying to run: %s" % package_cmd)
+                "Was trying to run: %s" %
+                package_cmd)
 
         self.info('Multi-l10n package filename is: %s' % package_filename)
 
         parser = MakeUploadOutputParser(config=self.config,
                                         log_obj=self.log_obj,
                                         )
         upload_cmd = ['make', 'upload', 'AB_CD=multi']
         self.run_command(upload_cmd,
@@ -1030,40 +1057,43 @@ items from that key's value."
         env.update(self.query_mach_build_env())
 
         command = [sys.executable, 'mach', '--log-no-times']
         command.extend(mach_command_args)
 
         self.run_command(
             command=command,
             cwd=self.query_abs_dirs()['abs_src_dir'],
-            env=env, output_timeout=self.config.get('max_build_output_timeout', 60 * 20),
+            env=env,
+            output_timeout=self.config.get(
+                'max_build_output_timeout',
+                60 * 20),
             halt_on_failure=True,
         )
 
     def preflight_package_source(self):
         self._get_mozconfig()
 
     def package_source(self):
         """generates source archives and uploads them"""
         env = self.query_build_env()
         env.update(self.query_mach_build_env())
         dirs = self.query_abs_dirs()
 
         self.run_command(
             command=[sys.executable, 'mach', '--log-no-times', 'configure'],
             cwd=dirs['abs_src_dir'],
-            env=env, output_timeout=60*3, halt_on_failure=True,
+            env=env, output_timeout=60 * 3, halt_on_failure=True,
         )
         self.run_command(
             command=[
                 'make', 'source-package', 'source-upload',
             ],
             cwd=dirs['abs_obj_dir'],
-            env=env, output_timeout=60*45, halt_on_failure=True,
+            env=env, output_timeout=60 * 45, halt_on_failure=True,
         )
 
     def _is_configuration_shipped(self):
         """Determine if the current build configuration is shipped to users.
 
         This is used to drive alerting so we don't see alerts for build
         configurations we care less about.
         """
@@ -1119,17 +1149,19 @@ items from that key's value."
 
         return data
 
     def _load_sccache_stats(self):
         stats_file = os.path.join(
             self.query_abs_dirs()['abs_obj_dir'], 'sccache-stats.json'
         )
         if not os.path.exists(stats_file):
-            self.info('%s does not exist; not loading sccache stats' % stats_file)
+            self.info(
+                '%s does not exist; not loading sccache stats' %
+                stats_file)
             return
 
         with open(stats_file, 'rb') as fh:
             stats = json.load(fh)
 
         def get_stat(key):
             val = stats['stats'][key]
             # Future versions of sccache will distinguish stats by language
@@ -1201,30 +1233,33 @@ items from that key's value."
                 subtests = {}
                 for path, size in paths_with_sizes(installer):
                     name = os.path.basename(path)
                     if name in interests:
                         # We have to be careful here: desktop Firefox installers
                         # contain two omni.ja files: one for the general runtime,
                         # and one for the browser proper.
                         if name == 'omni.ja':
-                            containing_dir = os.path.basename(os.path.dirname(path))
+                            containing_dir = os.path.basename(
+                                os.path.dirname(path))
                             if containing_dir == 'browser':
                                 name = 'browser-omni.ja'
                         if name in subtests:
                             self.fatal('should not see %s (%s) multiple times!'
                                        % (name, path))
                         subtests[name] = size
                 for name in subtests:
                     self.info('Size of %s: %s bytes' % (name,
                                                         subtests[name]))
                     size_measurements.append(
                         {'name': name, 'value': subtests[name]})
             except Exception:
-                self.info('Unable to search %s for component sizes.' % installer)
+                self.info(
+                    'Unable to search %s for component sizes.' %
+                    installer)
                 size_measurements = []
 
         if not installer_size and not size_measurements:
             return
 
         # We want to always collect metrics. But alerts for installer size are
         # only used for builds we ship. So nix the alerts for builds we don't
         # ship.
@@ -1280,18 +1315,18 @@ items from that key's value."
         # }
         try:
             parsed = json.loads(output)
         except ValueError:
             self.info("`rust-size` failed: %s" % output)
             return {}
 
         sections = {}
-        for sec_type in parsed.itervalues():
-            for name, size in sec_type.iteritems():
+        for sec_type in list(parsed.values()):
+            for name, size in list(sec_type.items()):
                 if not filter or name in filter:
                     sections[name] = size
 
         return sections
 
     def _get_binary_metrics(self):
         """
         Provides metrics on interesting components of the built binaries.
@@ -1307,37 +1342,38 @@ items from that key's value."
         section_interests = ('.text', '.data', '.rodata', '.rdata',
                              '.cstring', '.data.rel.ro', '.bss')
         lib_details = []
 
         dirs = self.query_abs_dirs()
         dist_dir = os.path.join(dirs['abs_obj_dir'], 'dist')
         bin_dir = os.path.join(dist_dir, 'bin')
 
-        for lib_type, lib_names in lib_interests.iteritems():
+        for lib_type, lib_names in list(lib_interests.items()):
             for lib_name in lib_names:
                 lib = os.path.join(bin_dir, lib_name)
                 if os.path.exists(lib):
                     lib_size = 0
-                    section_details = self._get_sections(lib, section_interests)
+                    section_details = self._get_sections(
+                        lib, section_interests)
                     section_measurements = []
                     # Build up the subtests
 
                     # Lump rodata sections together
                     # - Mach-O separates out read-only string data as .cstring
                     # - PE really uses .rdata, but XUL at least has a .rodata as well
                     for ro_alias in ('.cstring', '.rdata'):
                         if ro_alias in section_details:
                             if '.rodata' in section_details:
                                 section_details['.rodata'] += section_details[ro_alias]
                             else:
                                 section_details['.rodata'] = section_details[ro_alias]
                             del section_details[ro_alias]
 
-                    for k, v in section_details.iteritems():
+                    for k, v in list(section_details.items()):
                         section_measurements.append({'name': k, 'value': v})
                         lib_size += v
                     lib_details.append({
                         'name': lib_type,
                         'size': lib_size,
                         'sections': section_measurements
                     })
 
@@ -1414,27 +1450,33 @@ items from that key's value."
             self.info('PERFHERDER_DATA: %s' % json.dumps(perfherder_data))
 
     def valgrind_test(self):
         '''Execute mach's valgrind-test for memory leaks'''
         env = self.query_build_env()
         env.update(self.query_mach_build_env())
 
         return_code = self.run_command(
-            command=[sys.executable, 'mach', 'valgrind-test'],
+            command=[
+                sys.executable,
+                'mach',
+                'valgrind-test'],
             cwd=self.query_abs_dirs()['abs_src_dir'],
-            env=env, output_timeout=self.config.get('max_build_output_timeout', 60 * 40)
-        )
+            env=env,
+            output_timeout=self.config.get(
+                'max_build_output_timeout',
+                60 * 40))
         if return_code:
             self.return_code = self.worst_level(
-                EXIT_STATUS_DICT[TBPL_FAILURE],  self.return_code,
+                EXIT_STATUS_DICT[TBPL_FAILURE], self.return_code,
                 AUTOMATION_EXIT_CODES[::-1]
             )
-            self.fatal("'mach valgrind-test' did not run successfully. Please check "
-                       "log for errors.")
+            self.fatal(
+                "'mach valgrind-test' did not run successfully. Please check "
+                "log for errors.")
 
     def _ensure_upload_path(self):
         env = self.query_mach_build_env()
 
         # Some Taskcluster workers don't like it if an artifacts directory
         # is defined but no artifacts are uploaded. Guard against this by always
         # ensuring the artifacts directory exists.
         if 'UPLOAD_PATH' in env and not os.path.exists(env['UPLOAD_PATH']):
@@ -1466,17 +1508,17 @@ items from that key's value."
             # let's ignore all mention of tbpl status until this
             # point so it will be easier to manage
             if self.return_code not in AUTOMATION_EXIT_CODES:
                 self.error("Return code is set to: %s and is outside of "
                            "automation's known values. Setting to 2(failure). "
                            "Valid return codes %s" % (self.return_code,
                                                       AUTOMATION_EXIT_CODES))
                 self.return_code = 2
-            for status, return_code in EXIT_STATUS_DICT.iteritems():
+            for status, return_code in list(EXIT_STATUS_DICT.items()):
                 if return_code == self.return_code:
                     self.record_status(status, TBPL_STATUS_DICT[status])
         self.summary()
 
     @PostScriptRun
     def _parse_build_tests_ccov(self):
         if 'MOZ_FETCHES_DIR' not in os.environ:
             return
@@ -1493,9 +1535,13 @@ items from that key's value."
         if self._is_windows():
             grcov_path += '.exe'
         env['GRCOV_PATH'] = grcov_path
 
         cmd = self._query_mach() + [
             'python',
             os.path.join('testing', 'parse_build_tests_ccov.py'),
         ]
-        self.run_command(command=cmd, cwd=topsrcdir, env=env, halt_on_failure=True)
+        self.run_command(
+            command=cmd,
+            cwd=topsrcdir,
+            env=env,
+            halt_on_failure=True)