Bug 1406650 - Make build/*.py and a few other files flake8 compatible and add them to the list of files to check r=chmanchester
author: Sylvestre Ledru <sledru@mozilla.com>
date: Sat, 07 Oct 2017 16:45:22 +0200
changeset 436572 6074db12d685655fe5692d59471b3c32cc967dc9
parent 436571 d14dd0e5c41a54d4c5a82d7c82245c03854e5b5e
child 436573 e3018533b2fb703bc0f1da89f067253ab1176044
push id: 8114
push user: jlorenzo@mozilla.com
push date: Thu, 02 Nov 2017 16:33:21 +0000
treeherder: mozilla-beta@73e0d89a540f
reviewers: chmanchester
bugs: 1406650
milestone: 58.0a1
MozReview-Commit-ID: icmFJtbWdN
build/appini_header.py
build/build-clang/build-clang.py
build/buildconfig.py
build/checksums.py
build/gen_test_packages_manifest.py
build/mach_bootstrap.py
build/subconfigure.py
build/submit_telemetry_data.py
build/upload.py
build/upload_generated_sources.py
build/variables.py
build/windows_toolchain.py
configure.py
tools/lint/flake8.yml
--- a/build/appini_header.py
+++ b/build/appini_header.py
@@ -3,40 +3,45 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 '''Parses a given application.ini file and outputs the corresponding
    StaticXREAppData structure as a C++ header file'''
 
 import ConfigParser
 import sys
 
+
 def main(output, file):
     config = ConfigParser.RawConfigParser()
     config.read(file)
     flags = set()
     try:
         if config.getint('XRE', 'EnableProfileMigrator') == 1:
             flags.add('NS_XRE_ENABLE_PROFILE_MIGRATOR')
-    except: pass
+    except:
+        pass
     try:
         if config.getint('Crash Reporter', 'Enabled') == 1:
             flags.add('NS_XRE_ENABLE_CRASH_REPORTER')
-    except: pass
-    appdata = dict(("%s:%s" % (s, o), config.get(s, o)) for s in config.sections() for o in config.options(s))
+    except:
+        pass
+    appdata = dict(("%s:%s" % (s, o), config.get(s, o))
+                   for s in config.sections() for o in config.options(s))
     appdata['flags'] = ' | '.join(flags) if flags else '0'
-    appdata['App:profile'] = '"%s"' % appdata['App:profile'] if 'App:profile' in appdata else 'NULL'
+    appdata['App:profile'] = ('"%s"' % appdata['App:profile']
+                              if 'App:profile' in appdata else 'NULL')
     expected = ('App:vendor', 'App:name', 'App:remotingname', 'App:version', 'App:buildid',
                 'App:id', 'Gecko:minversion', 'Gecko:maxversion')
     missing = [var for var in expected if var not in appdata]
     if missing:
         print >>sys.stderr, \
             "Missing values in %s: %s" % (file, ', '.join(missing))
         sys.exit(1)
 
-    if not 'Crash Reporter:serverurl' in appdata:
+    if 'Crash Reporter:serverurl' not in appdata:
         appdata['Crash Reporter:serverurl'] = ''
 
     output.write('''#include "mozilla/XREAppData.h"
              static const mozilla::StaticXREAppData sAppData = {
                  "%(App:vendor)s",
                  "%(App:name)s",
                  "%(App:remotingname)s",
                  "%(App:version)s",
@@ -45,13 +50,14 @@ def main(output, file):
                  NULL, // copyright
                  %(flags)s,
                  "%(Gecko:minversion)s",
                  "%(Gecko:maxversion)s",
                  "%(Crash Reporter:serverurl)s",
                  %(App:profile)s
              };''' % appdata)
 
+
 if __name__ == '__main__':
     if len(sys.argv) != 1:
         main(sys.stdout, sys.argv[1])
     else:
         print >>sys.stderr, "Usage: %s /path/to/application.ini" % sys.argv[0]
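
The appini_header.py hunks show two fixes that recur throughout this patch: `except: pass` split across two lines (E701, a statement on the same line as a compound-statement colon; the bare `except:` itself is E722 and is presumably tolerated by the tree's configuration, since the patch keeps it), and blank-line normalization around top-level definitions (E302/E305). A minimal sketch of both patterns, with hypothetical names:

    # Sketch of the E701 and E302/E305 fixes; names are illustrative,
    # not from the patch.
    try:
        flags = int('not-a-number')
    except ValueError:
        flags = 0        # E701 rejects "except ValueError: flags = 0"


    # E302/E305: two blank lines around top-level definitions, hence the
    # blank lines the patch inserts before "def main" and __main__ blocks.
    def main():
        print(flags)


    if __name__ == '__main__':
        main()
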
--- a/build/build-clang/build-clang.py
+++ b/build/build-clang/build-clang.py
@@ -204,17 +204,17 @@ def build_one_stage(cc, cxx, asm, ld, ar
     if libtool is not None:
         cmake_args += ["-DCMAKE_LIBTOOL=%s" % slashify_path(libtool)]
     if osx_cross_compile:
         cmake_args += ["-DCMAKE_SYSTEM_NAME=Darwin",
                        "-DCMAKE_SYSTEM_VERSION=10.10",
                        "-DLLVM_ENABLE_THREADS=OFF",
                        "-DLIBCXXABI_LIBCXX_INCLUDES=%s" % libcxx_include_dir,
                        "-DCMAKE_OSX_SYSROOT=%s" % slashify_path(os.getenv("CROSS_SYSROOT")),
-                       "-DCMAKE_FIND_ROOT_PATH=%s" % slashify_path(os.getenv("CROSS_CCTOOLS_PATH")),
+                       "-DCMAKE_FIND_ROOT_PATH=%s" % slashify_path(os.getenv("CROSS_CCTOOLS_PATH")), # noqa
                        "-DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER",
                        "-DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY",
                        "-DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY",
                        "-DCMAKE_MACOSX_RPATH=@executable_path",
                        "-DCMAKE_OSX_ARCHITECTURES=x86_64",
                        "-DDARWIN_osx_ARCHS=x86_64",
                        "-DLLVM_DEFAULT_TARGET_TRIPLE=x86_64-apple-darwin11"]
     build_package(build_dir, cmake_args)
@@ -386,17 +386,18 @@ if __name__ == "__main__":
     if "stages" in config:
         stages = int(config["stages"])
         if stages not in (1, 2, 3):
             raise ValueError("We only know how to build 1, 2, or 3 stages")
     build_type = "Release"
     if "build_type" in config:
         build_type = config["build_type"]
         if build_type not in ("Release", "Debug", "RelWithDebInfo", "MinSizeRel"):
-            raise ValueError("We only know how to do Release, Debug, RelWithDebInfo or MinSizeRel builds")
+            raise ValueError("We only know how to do Release, Debug, RelWithDebInfo or "
+                             "MinSizeRel builds")
     build_libcxx = False
     if "build_libcxx" in config:
         build_libcxx = config["build_libcxx"]
         if build_libcxx not in (True, False):
             raise ValueError("Only boolean values are accepted for build_libcxx.")
     build_clang_tidy = False
     if "build_clang_tidy" in config:
         build_clang_tidy = config["build_clang_tidy"]
@@ -498,17 +499,18 @@ if __name__ == "__main__":
         extra_cflags = ["-static-libgcc"]
         extra_cxxflags = ["-static-libgcc", "-static-libstdc++"]
         extra_cflags2 = ["-fPIC"]
         extra_cxxflags2 = ["-fPIC", "-static-libstdc++"]
         extra_asmflags = []
         extra_ldflags = []
 
         if 'LD_LIBRARY_PATH' in os.environ:
-            os.environ['LD_LIBRARY_PATH'] = '%s/lib64/:%s' % (gcc_dir, os.environ['LD_LIBRARY_PATH'])
+            os.environ['LD_LIBRARY_PATH'] = ('%s/lib64/:%s' %
+                                             (gcc_dir, os.environ['LD_LIBRARY_PATH']))
         else:
             os.environ['LD_LIBRARY_PATH'] = '%s/lib64/' % gcc_dir
     elif is_windows():
         extra_cflags = []
         extra_cxxflags = []
         # clang-cl would like to figure out what it's supposed to be emulating
         # by looking at an MSVC install, but we don't really have that here.
         # Force things on.
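
build-clang.py demonstrates the two strategies the patch uses for E501 (line too long): rewrap the string with implicit concatenation inside parentheses, or append `# noqa` where wrapping would hurt readability, as on the CMAKE_FIND_ROOT_PATH line. A hedged sketch with stand-in values:

    # Both E501 strategies; CROSS_CCTOOLS_PATH stands in for the
    # os.getenv() call in the real script.
    CROSS_CCTOOLS_PATH = '/builds/cctools'

    # Strategy 1: suppress the check where the unwrapped line is clearer.
    flag = "-DCMAKE_FIND_ROOT_PATH=%s" % CROSS_CCTOOLS_PATH  # noqa: E501

    # Strategy 2: break the literal; adjacent strings concatenate.
    error = ("We only know how to do Release, Debug, RelWithDebInfo or "
             "MinSizeRel builds")
    print(flag, error)
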
--- a/build/buildconfig.py
+++ b/build/buildconfig.py
@@ -1,13 +1,12 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-import os
 import sys
 from mozbuild.base import MozbuildObject
 from mozbuild.backend.configenvironment import PartialConfigEnvironment
 
 config = MozbuildObject.from_environment()
 partial_config = PartialConfigEnvironment(config.topobjdir)
 
 for var in ('topsrcdir', 'topobjdir'):
--- a/build/checksums.py
+++ b/build/checksums.py
@@ -8,16 +8,17 @@ from __future__ import with_statement
 from optparse import OptionParser
 import logging
 import os
 try:
     import hashlib
 except:
     hashlib = None
 
+
 def digest_file(filename, digest, chunk_size=1024):
     '''Produce a checksum for the file specified by 'filename'.  'filename'
     is a string path to a file that is opened and read in this function.  The
     checksum algorithm is specified by 'digest' and is a valid OpenSSL
     algorithm.  If the digest used is not valid or Python's hashlib doesn't
     work, the None object will be returned instead.  The size of blocks
     that this function will read from the file object it opens based on
     'filename' can be specified by 'chunk_size', which defaults to 1K'''
@@ -45,17 +46,17 @@ def digest_file(filename, digest, chunk_
 
 def process_files(files, output_filename, digests, strip):
     '''This function takes a list of file names, 'files'.  It will then
     compute the checksum for each of the files by opening the files.
     Once each file is read and its checksum is computed, this function
     will write the information to the file specified by 'output_filename'.
     The path written in the output file will have anything specified by 'strip'
     removed from the path.  The output file is closed before returning nothing
-    The algorithm to compute checksums with can be specified by 'digests' 
+    The algorithm to compute checksums with can be specified by 'digests'
     and needs to be a list of valid OpenSSL algorithms.
 
     The output file is written in the format:
         <hash> <algorithm> <filesize> <filepath>
     Example:
         d1fa09a<snip>e4220 sha1 14250744 firefox-4.0b6pre.en-US.mac64.dmg
     '''
 
@@ -80,16 +81,17 @@ def process_files(files, output_filename
                         short_file = file[len(strip):]
                         short_file = short_file.lstrip('/')
                     else:
                         short_file = file
                     print >>output, '%s %s %s %s' % (hash, digest,
                                                      os.path.getsize(file),
                                                      short_file)
 
+
 def setup_logging(level=logging.DEBUG):
     '''This function sets up the logging module using a speficiable logging
     module logging level.  The default log level is DEBUG.
 
     The output is in the format:
         <level> - <message>
     Example:
         DEBUG - Finished reading in file
@@ -98,16 +100,17 @@ def setup_logging(level=logging.DEBUG):
     logger = logging.getLogger('checksums.py')
     logger.setLevel(logging.DEBUG)
     handler = logging.StreamHandler()
     handler.setLevel(level)
     formatter = logging.Formatter("%(levelname)s - %(message)s")
     handler.setFormatter(formatter)
     logger.addHandler(handler)
 
+
 def main():
     '''This is a main function that parses arguments, sets up logging
     and generates a checksum file'''
     # Parse command line arguments
     parser = OptionParser()
     parser.add_option('-d', '--digest', help='checksum algorithm to use',
                       action='append', dest='digests')
     parser.add_option('-o', '--output', help='output file to use',
@@ -117,42 +120,43 @@ def main():
                       action='store_true', dest='verbose', default=False)
     parser.add_option('-q', '--quiet', help='Be quiet', action='store_true',
                       dest='quiet', default=False)
     parser.add_option('-s', '--strip',
                       help='strip this path from the filenames',
                       dest='strip', default=os.getcwd())
     options, args = parser.parse_args()
 
-    #Figure out which logging level to use
+    # Figure out which logging level to use
     if options.verbose:
         loglevel = logging.DEBUG
     elif options.quiet:
         loglevel = logging.ERROR
     else:
         loglevel = logging.INFO
 
-    #Set up logging
+    # Set up logging
     setup_logging(loglevel)
     logger = logging.getLogger('checksums.py')
 
     # Validate the digest type to use
     if not options.digests:
         options.digests = ['sha1']
     try:
         for digest in options.digests:
             hashlib.new(digest)
-    except ValueError, ve:
+    except ValueError as ve:
         logger.error('Could not create a "%s" hash object (%s)' %
                      (digest, ve.args[0]))
         exit(1)
 
     # Validate the files to checksum
     files = []
     for i in args:
         if os.path.exists(i):
             files.append(i)
         else:
             logger.info('File "%s" was not found on the filesystem' % i)
     process_files(files, options.outfile, options.digests, options.strip)
 
+
 if __name__ == '__main__':
     main()
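
checksums.py carries the one change in this patch that is more than cosmetic: `except ValueError, ve` becomes `except ValueError as ve`. The comma form is a SyntaxError on Python 3, while the `as` form parses on both 2 and 3. The same hunks normalize comment style (E265 wants a space after the hash). A small sketch:

    # The except-clause and comment-style fixes; 'md5' is just an example
    # digest name.
    import hashlib

    # E265: block comments start with '# ', so '#Set up logging' becomes
    # '# Set up logging'.
    try:
        hashlib.new('md5')
    except ValueError as ve:  # 'except ValueError, ve' fails on Python 3
        print('unsupported digest (%s)' % ve.args[0])
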
--- a/build/gen_test_packages_manifest.py
+++ b/build/gen_test_packages_manifest.py
@@ -4,17 +4,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import json
 
 from argparse import ArgumentParser
 
 ALL_HARNESSES = [
-    'common', # Harnesses without a specific package will look here.
+    'common',  # Harnesses without a specific package will look here.
     'mochitest',
     'reftest',
     'xpcshell',
     'cppunittest',
     'jittest',
     'mozbase',
     'web-platform',
     'talos',
@@ -34,20 +34,23 @@ PACKAGE_SPECIFIED_HARNESSES = [
 
 # These packages are not present for every build configuration.
 OPTIONAL_PACKAGES = [
     'gtest',
 ]
 
 
 def parse_args():
-    parser = ArgumentParser(description='Generate a test_packages.json file to tell automation which harnesses require which test packages.')
+    parser = ArgumentParser(
+        description="Generate a test_packages.json file to tell automation which harnesses "
+        "require which test packages.")
     parser.add_argument("--common", required=True,
                         action="store", dest="tests_common",
-                        help="Name of the \"common\" archive, a package to be used by all harnesses.")
+                        help="Name of the \"common\" archive, a package to be used by all "
+                        "harnesses.")
     parser.add_argument("--jsshell", required=True,
                         action="store", dest="jsshell",
                         help="Name of the jsshell zip.")
     for harness in PACKAGE_SPECIFIED_HARNESSES:
         parser.add_argument("--%s" % harness, required=True,
                             action="store", dest=harness,
                             help="Name of the %s zip." % harness)
     for harness in OPTIONAL_PACKAGES:
@@ -74,13 +77,14 @@ def generate_package_data(args):
     harness_requirements['jittest'].append(jsshell)
     for harness in PACKAGE_SPECIFIED_HARNESSES + OPTIONAL_PACKAGES:
         pkg_name = getattr(args, harness, None)
         if pkg_name is None:
             continue
         harness_requirements[harness].append(pkg_name)
     return harness_requirements
 
+
 if __name__ == '__main__':
     args = parse_args()
     packages_data = generate_package_data(args)
     with open(args.destfile, 'w') as of:
         json.dump(packages_data, of, indent=4)
--- a/build/mach_bootstrap.py
+++ b/build/mach_bootstrap.py
@@ -101,17 +101,19 @@ CATEGORIES = {
     },
     'misc': {
         'short': 'Potpourri',
         'long': 'Potent potables and assorted snacks.',
         'priority': 10,
     },
     'disabled': {
         'short': 'Disabled',
-        'long': 'The disabled commands are hidden by default. Use -v to display them. These commands are unavailable for your current context, run "mach <command>" to see why.',
+        'long': 'The disabled commands are hidden by default. Use -v to display them. '
+        'These commands are unavailable for your current context, '
+        'run "mach <command>" to see why.',
         'priority': 0,
     }
 }
 
 
 # We submit data to telemetry approximately every this many mach invocations
 TELEMETRY_SUBMISSION_FREQUENCY = 10
 
@@ -201,17 +203,17 @@ def bootstrap(topsrcdir, mozilla_dir=Non
             system=platform.system(),
             version=platform.version(),
         ))
 
         if platform.system() == 'Linux':
             dist = list(platform.linux_distribution())
             data['system']['linux_distribution'] = dist
         elif platform.system() == 'Windows':
-            win32_ver=list((platform.win32_ver())),
+            win32_ver = list((platform.win32_ver())),
             data['system']['win32_ver'] = win32_ver
         elif platform.system() == 'Darwin':
             # mac version is a special Cupertino snowflake
             r, v, m = platform.mac_ver()
             data['system']['mac_ver'] = [r, list(v), m]
 
         with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                   'w') as f:
@@ -255,17 +257,17 @@ def bootstrap(topsrcdir, mozilla_dir=Non
         if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
             return
 
         with open(os.devnull, 'wb') as devnull:
             subprocess.Popen([sys.executable,
                               os.path.join(topsrcdir, 'build',
                                            'submit_telemetry_data.py'),
                               get_state_dir()[0]],
-                              stdout=devnull, stderr=devnull)
+                             stdout=devnull, stderr=devnull)
 
     def populate_context(context, key=None):
         if key is None:
             return
         if key == 'state_dir':
             state_dir, is_environ = get_state_dir()
             if is_environ:
                 if not os.path.exists(state_dir):
@@ -306,17 +308,17 @@ def bootstrap(topsrcdir, mozilla_dir=Non
     if not driver.settings_paths:
         # default global machrc location
         driver.settings_paths.append(get_state_dir()[0])
     # always load local repository configuration
     driver.settings_paths.append(mozilla_dir)
 
     for category, meta in CATEGORIES.items():
         driver.define_category(category, meta['short'], meta['long'],
-            meta['priority'])
+                               meta['priority'])
 
     repo = resolve_repository()
 
     for path in MACH_MODULES:
         # Sparse checkouts may not have all mach_commands.py files. Ignore
         # errors from missing files.
         try:
             driver.load_commands_from_file(os.path.join(mozilla_dir, path))
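
The mach_bootstrap.py hunks are mostly whitespace: E128 wants hanging arguments aligned with the opening delimiter, and E225 wants spaces around `=` in plain assignments (the `win32_ver=list(...)` line). A sketch of the alignment rule, with a stand-in for the driver method:

    # E128 continuation-line fix; define_category stands in for the mach
    # driver method of the same name.
    def define_category(category, short, long_desc, priority):
        print(category, short, long_desc, priority)

    meta = {'short': 'Potpourri', 'long': 'Potent potables.', 'priority': 10}

    # Before (E128: continuation line under-indented for visual indent):
    #     define_category('misc', meta['short'], meta['long'],
    #         meta['priority'])
    # After, aligned with the opening parenthesis:
    define_category('misc', meta['short'], meta['long'],
                    meta['priority'])
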
--- a/build/subconfigure.py
+++ b/build/subconfigure.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # This script is used to capture the content of config.status-generated
 # files and subsequently restore their timestamp if they haven't changed.
 
 import argparse
 import errno
-import itertools
 import os
 import re
 import subprocess
 import sys
 import pickle
 
 import mozpack.path as mozpath
 
@@ -107,21 +106,20 @@ def maybe_clear_cache(data):
     for precious in PRECIOUS_VARS:
         # If there is no entry at all for that precious variable, then
         # its value is not precious for that particular configure.
         if 'ac_cv_env_%s_set' % precious not in cache:
             continue
         is_set = cache.get('ac_cv_env_%s_set' % precious) == 'set'
         value = cache.get('ac_cv_env_%s_value' % precious) if is_set else None
         if value != env.get(precious):
-            print 'Removing %s because of %s value change from:' \
-                % (data['cache-file'], precious)
-            print '  %s' % (value if value is not None else 'undefined')
-            print 'to:'
-            print '  %s' % env.get(precious, 'undefined')
+            print('Removing %s because of %s value change from:' % (data['cache-file'], precious))
+            print('  %s' % (value if value is not None else 'undefined'))
+            print('to:')
+            print('  %s' % env.get(precious, 'undefined'))
             os.remove(data['cache-file'])
             return True
     return False
 
 
 def split_template(s):
     """Given a "file:template" string, returns "file", "template". If the string
     is of the form "file" (without a template), returns "file", "file.in"."""
@@ -134,17 +132,16 @@ def get_config_files(data):
     # config.status in js/src never contains the output we try to scan here.
     if data['relobjdir'] == 'js/src':
         return [], []
 
     config_status = mozpath.join(data['objdir'], 'config.status')
     if not os.path.exists(config_status):
         return [], []
 
-    configure = mozpath.join(data['srcdir'], 'configure')
     config_files = []
     command_files = []
 
     # Scan the config.status output for information about configuration files
     # it generates.
     config_status_output = subprocess.check_output(
         [data['shell'], '-c', '%s --help' % config_status],
         stderr=subprocess.STDOUT).splitlines()
@@ -217,17 +214,17 @@ def prepare(srcdir, objdir, shell, args)
         'args': others,
         'shell': shell,
         'srcdir': srcdir,
         'env': environ,
     }
 
     if args.cache_file:
         data['cache-file'] = mozpath.normpath(mozpath.join(os.getcwd(),
-            args.cache_file))
+                                                           args.cache_file))
     else:
         data['cache-file'] = mozpath.join(objdir, 'config.cache')
 
     if previous_args is not None:
         data['previous-args'] = previous_args
 
     try:
         os.makedirs(objdir)
@@ -247,17 +244,17 @@ def execute_and_prefix(*args, **kwargs):
     prefix = kwargs['prefix']
     del kwargs['prefix']
     proc = subprocess.Popen(*args, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, **kwargs)
     while True:
         line = proc.stdout.readline()
         if not line:
             break
-        print prefix_lines(line.rstrip(), prefix)
+        print(prefix_lines(line.rstrip(), prefix))
         sys.stdout.flush()
     return proc.wait()
 
 
 def run(objdir):
     ret = 0
 
     with open(os.path.join(objdir, CONFIGURE_DATA), 'rb') as f:
@@ -325,18 +322,18 @@ def run(objdir):
                 command += ['--%s=%s' % (kind, data[kind])]
         command += data['args']
         command += ['--cache-file=%s' % cache_file]
 
         # Pass --no-create to configure so that it doesn't run config.status.
         # We're going to run it ourselves.
         command += ['--no-create']
 
-        print prefix_lines('configuring', relobjdir)
-        print prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir)
+        print(prefix_lines('configuring', relobjdir))
+        print(prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir))
         sys.stdout.flush()
         returncode = execute_and_prefix(command, cwd=objdir, env=data['env'],
                                         prefix=relobjdir)
         if returncode:
             return returncode
 
         # Leave config.status with a new timestamp if configure is newer than
         # its original mtime.
@@ -363,35 +360,35 @@ def run(objdir):
         config_files, command_files = get_config_files(data)
         for f, t in config_files:
             if not os.path.exists(t) or \
                     os.path.getmtime(f) < os.path.getmtime(t):
                 skip_config_status = False
 
     if not skip_config_status:
         if skip_configure:
-            print prefix_lines('running config.status', relobjdir)
+            print(prefix_lines('running config.status', relobjdir))
             sys.stdout.flush()
         ret = execute_and_prefix([data['shell'], '-c', './config.status'],
                                  cwd=objdir, env=data['env'], prefix=relobjdir)
 
         for f in contents:
             f.update_time()
 
     return ret
 
 
 def subconfigure(args):
     parser = argparse.ArgumentParser()
     parser.add_argument('--list', type=str,
-        help='File containing a list of subconfigures to run')
+                        help='File containing a list of subconfigures to run')
     parser.add_argument('--skip', type=str,
-        help='File containing a list of Subconfigures to skip')
+                        help='File containing a list of Subconfigures to skip')
     parser.add_argument('subconfigures', type=str, nargs='*',
-        help='Subconfigures to run if no list file is given')
+                        help='Subconfigures to run if no list file is given')
     args, others = parser.parse_known_args(args)
     subconfigures = args.subconfigures
     if args.list:
         subconfigures.extend(open(args.list, 'rb').read().splitlines())
     if args.skip:
         skips = set(open(args.skip, 'rb').read().splitlines())
         subconfigures = [s for s in subconfigures if s not in skips]
 
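
subconfigure.py converts every `print` statement to call syntax. With a single parenthesized argument the call form is also a valid Python 2 print statement with identical output, so the file stays runnable on Python 2 without a `from __future__ import print_function` (which the patch indeed does not add). A sketch of the pattern:

    # Print-statement conversion; prefix_lines mirrors the helper used in
    # subconfigure.py but this body is illustrative.
    def prefix_lines(text, prefix):
        return '%s> %s' % (prefix, text)

    # Before: print prefix_lines('configuring', relobjdir)
    # After:
    print(prefix_lines('configuring', 'js/src'))
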
--- a/build/submit_telemetry_data.py
+++ b/build/submit_telemetry_data.py
@@ -56,17 +56,17 @@ def submit_telemetry_data(statedir):
                   os.path.join(submitted, filename))
 
     session.close()
 
     # Discard submitted data that is >= 30 days old
     now = time.time()
     for filename in os.listdir(submitted):
         ctime = os.stat(os.path.join(submitted, filename)).st_ctime
-        if now - ctime >= 60*60*24*30:
+        if now - ctime >= 60 * 60 * 24 * 30:
             os.remove(os.path.join(submitted, filename))
 
     return 0
 
 
 if __name__ == '__main__':
     if len(sys.argv) != 2:
         print('usage: python submit_telemetry_data.py <statedir>')
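
The `60*60*24*30` expression gains spaces around its operators. pycodestyle's E226 (missing whitespace around arithmetic operator) is actually off by default, so whether the tree's flake8 configuration enforces it is an assumption; either way the spaced form is the convention the patch standardizes on:

    # Thirty days in seconds, written with spaced operators.
    THIRTY_DAYS = 60 * 60 * 24 * 30
    print(THIRTY_DAYS)  # 2592000
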
--- a/build/upload.py
+++ b/build/upload.py
@@ -26,130 +26,139 @@
 #                  UPLOAD_PATH and the full paths of all files uploaded will
 #                  be appended to the commandline.
 #
 # All files to be uploaded should be passed as commandline arguments to this
 # script. The script takes one other parameter, --base-path, which you can use
 # to indicate that files should be uploaded including their paths relative
 # to the base path.
 
-import sys, os
+import sys
+import os
 import re
 import json
 import errno
 import hashlib
 import shutil
 from optparse import OptionParser
 from subprocess import (
     check_call,
     check_output,
     STDOUT,
     CalledProcessError,
 )
 import concurrent.futures as futures
 import redo
 
+
 def OptionalEnvironmentVariable(v):
     """Return the value of the environment variable named v, or None
     if it's unset (or empty)."""
     if v in os.environ and os.environ[v] != "":
         return os.environ[v]
     return None
 
+
 def FixupMsysPath(path):
     """MSYS helpfully translates absolute pathnames in environment variables
     and commandline arguments into Windows native paths. This sucks if you're
     trying to pass an absolute path on a remote server. This function attempts
     to un-mangle such paths."""
     if 'OSTYPE' in os.environ and os.environ['OSTYPE'] == 'msys':
         # sort of awful, find out where our shell is (should be in msys/bin)
         # and strip the first part of that path out of the other path
         if 'SHELL' in os.environ:
             sh = os.environ['SHELL']
             msys = sh[:sh.find('/bin')]
             if path.startswith(msys):
                 path = path[len(msys):]
     return path
 
+
 def WindowsPathToMsysPath(path):
     """Translate a Windows pathname to an MSYS pathname.
     Necessary because we call out to ssh/scp, which are MSYS binaries
     and expect MSYS paths."""
     # If we're not on Windows, or if we already have an MSYS path (starting
     # with '/' instead of 'c:' or something), then just return.
     if sys.platform != 'win32' or path.startswith('/'):
         return path
     (drive, path) = os.path.splitdrive(os.path.abspath(path))
-    return "/" + drive[0] + path.replace('\\','/')
+    return "/" + drive[0] + path.replace('\\', '/')
+
 
 def AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key):
     """Given optional port and ssh key values, append valid OpenSSH
     commandline arguments to the list cmdline if the values are not None."""
     if port is not None:
         cmdline.append("-P%d" % port)
     if ssh_key is not None:
         # Don't interpret ~ paths - ssh can handle that on its own
         if not ssh_key.startswith('~'):
             ssh_key = WindowsPathToMsysPath(ssh_key)
         cmdline.extend(["-o", "IdentityFile=%s" % ssh_key])
     # In case of an issue here we don't want to hang on a password prompt.
     cmdline.extend(["-o", "BatchMode=yes"])
 
+
 def DoSSHCommand(command, user, host, port=None, ssh_key=None):
     """Execute command on user@host using ssh. Optionally use
     port and ssh_key, if provided."""
     cmdline = ["ssh"]
     AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key)
     cmdline.extend(["%s@%s" % (user, host), command])
 
     with redo.retrying(check_output, sleeptime=10) as f:
         try:
             output = f(cmdline, stderr=STDOUT).strip()
         except CalledProcessError as e:
-            print "failed ssh command output:"
-            print '=' * 20
-            print e.output
-            print '=' * 20
+            print("failed ssh command output:")
+            print('=' * 20)
+            print(e.output)
+            print('=' * 20)
             raise
         return output
 
     raise Exception("Command %s returned non-zero exit code" % cmdline)
 
+
 def DoSCPFile(file, remote_path, user, host, port=None, ssh_key=None,
               log=False):
     """Upload file to user@host:remote_path using scp. Optionally use
     port and ssh_key, if provided."""
     if log:
-        print 'Uploading %s' % file
+        print('Uploading %s' % file)
     cmdline = ["scp"]
     AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key)
     cmdline.extend([WindowsPathToMsysPath(file),
                     "%s@%s:%s" % (user, host, remote_path)])
     with redo.retrying(check_call, sleeptime=10) as f:
         f(cmdline)
         return
 
     raise Exception("Command %s returned non-zero exit code" % cmdline)
 
+
 def GetBaseRelativePath(path, local_file, base_path):
     """Given a remote path to upload to, a full path to a local file, and an
     optional full path that is a base path of the local file, construct the
     full remote path to place the file in. If base_path is not None, include
     the relative path from base_path to file."""
     if base_path is None or not local_file.startswith(base_path):
         # Hack to work around OSX uploading the i386 SDK from i386/dist. Both
         # the i386 SDK and x86-64 SDK end up in the same directory this way.
         if base_path.endswith('/x86_64/dist'):
             return GetBaseRelativePath(path, local_file, base_path.replace('/x86_64/', '/i386/'))
         return path
     dir = os.path.dirname(local_file)
     # strip base_path + extra slash and make it unixy
-    dir = dir[len(base_path)+1:].replace('\\','/')
+    dir = dir[len(base_path) + 1:].replace('\\', '/')
     return path + dir
 
+
 def GetFileHashAndSize(filename):
     sha512Hash = 'UNKNOWN'
     size = 'UNKNOWN'
 
     try:
         # open in binary mode to make sure we get consistent results
         # across all platforms
         with open(filename, "rb") as f:
@@ -157,81 +166,87 @@ def GetFileHashAndSize(filename):
             sha512Hash = shaObj.hexdigest()
 
         size = os.path.getsize(filename)
     except:
         raise Exception("Unable to get filesize/hash from file: %s" % filename)
 
     return (sha512Hash, size)
 
+
 def GetMarProperties(filename):
     if not os.path.exists(filename):
         return {}
     (mar_hash, mar_size) = GetFileHashAndSize(filename)
     return {
         'completeMarFilename': os.path.basename(filename),
         'completeMarSize': mar_size,
         'completeMarHash': mar_hash,
     }
 
+
 def GetUrlProperties(output, package):
     # let's create a switch case using name-spaces/dict
     # rather than a long if/else with duplicate code
     property_conditions = [
         # key: property name, value: condition
         ('symbolsUrl', lambda m: m.endswith('crashreporter-symbols.zip') or
-                       m.endswith('crashreporter-symbols-full.zip')),
+         m.endswith('crashreporter-symbols-full.zip')),
         ('testsUrl', lambda m: m.endswith(('tests.tar.bz2', 'tests.zip'))),
         ('robocopApkUrl', lambda m: m.endswith('apk') and 'robocop' in m),
         ('jsshellUrl', lambda m: 'jsshell-' in m and m.endswith('.zip')),
         ('completeMarUrl', lambda m: m.endswith('.complete.mar')),
         ('partialMarUrl', lambda m: m.endswith('.mar') and '.partial.' in m),
         ('codeCoverageURL', lambda m: m.endswith('code-coverage-gcno.zip')),
         ('sdkUrl', lambda m: m.endswith(('sdk.tar.bz2', 'sdk.zip'))),
         ('testPackagesUrl', lambda m: m.endswith('test_packages.json')),
         ('packageUrl', lambda m: m.endswith(package)),
     ]
-    url_re = re.compile(r'''^(https?://.*?\.(?:tar\.bz2|dmg|zip|apk|rpm|mar|tar\.gz|json))$''')
+    url_re = re.compile(
+        r'''^(https?://.*?\.(?:tar\.bz2|dmg|zip|apk|rpm|mar|tar\.gz|json))$''')
     properties = {}
 
     try:
         for line in output.splitlines():
             m = url_re.match(line.strip())
             if m:
                 m = m.group(1)
                 for prop, condition in property_conditions:
                     if condition(m):
                         properties.update({prop: m})
                         break
     except IOError as e:
         if e.errno != errno.ENOENT:
             raise
-        properties = {prop: 'UNKNOWN' for prop, condition in property_conditions}
+        properties = {prop: 'UNKNOWN' for prop, condition
+                      in property_conditions}
     return properties
 
-def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None, base_path=None, upload_to_temp_dir=False, post_upload_command=None, package=None):
+
+def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None, base_path=None,
+                upload_to_temp_dir=False, post_upload_command=None, package=None):
     """Upload each file in the list files to user@host:path. Optionally pass
     port and ssh_key to the ssh commands. If base_path is not None, upload
     files including their path relative to base_path. If upload_to_temp_dir is
     True files will be uploaded to a temporary directory on the remote server.
     Generally, you should have a post upload command specified in these cases
     that can move them around to their correct location(s).
     If post_upload_command is not None, execute that command on the remote host
     after uploading all files, passing it the upload path, and the full paths to
     all files uploaded.
     If verbose is True, print status updates while working."""
     if not host or not user:
         return {}
     if (not path and not upload_to_temp_dir) or (path and upload_to_temp_dir):
-        print "One (and only one of UPLOAD_PATH or UPLOAD_TO_TEMP must be " + \
-                "defined."
+        print("One (and only one of UPLOAD_PATH or UPLOAD_TO_TEMP must be defined.")
         sys.exit(1)
 
     if upload_to_temp_dir:
-        path = DoSSHCommand("mktemp -d", user, host, port=port, ssh_key=ssh_key)
+        path = DoSSHCommand("mktemp -d", user, host,
+                            port=port, ssh_key=ssh_key)
     if not path.endswith("/"):
         path += "/"
     if base_path is not None:
         base_path = os.path.abspath(base_path)
     remote_files = []
     properties = {}
 
     def get_remote_path(p):
@@ -245,17 +260,18 @@ def UploadFiles(user, host, path, files,
             if not os.path.isfile(file):
                 raise IOError("File not found: %s" % file)
 
             remote_paths.add(get_remote_path(file))
 
         # If we wanted to, we could reduce the remote paths if they are a parent
         # of any entry.
         for p in sorted(remote_paths):
-            DoSSHCommand("mkdir -p " + p, user, host, port=port, ssh_key=ssh_key)
+            DoSSHCommand("mkdir -p " + p, user, host,
+                         port=port, ssh_key=ssh_key)
 
         with futures.ThreadPoolExecutor(4) as e:
             fs = []
             # Since we're uploading in parallel, the largest file should take
             # the longest to upload. So start it first.
             for file in sorted(files, key=os.path.getsize, reverse=True):
                 remote_path = get_remote_path(file)
                 fs.append(e.submit(DoSCPFile, file, remote_path, user, host,
@@ -264,59 +280,63 @@ def UploadFiles(user, host, path, files,
 
             # We need to call result() on the future otherwise exceptions could
             # get swallowed.
             for f in futures.as_completed(fs):
                 f.result()
 
         if post_upload_command is not None:
             if verbose:
-                print "Running post-upload command: " + post_upload_command
+                print("Running post-upload command: " + post_upload_command)
             file_list = '"' + '" "'.join(remote_files) + '"'
-            output = DoSSHCommand('%s "%s" %s' % (post_upload_command, path, file_list), user, host, port=port, ssh_key=ssh_key)
+            output = DoSSHCommand('%s "%s" %s' % (
+                post_upload_command, path, file_list), user, host, port=port, ssh_key=ssh_key)
             # We print since mozharness may parse URLs from the output stream.
-            print output
+            print(output)
             properties = GetUrlProperties(output, package)
     finally:
         if upload_to_temp_dir:
             DoSSHCommand("rm -rf %s" % path, user, host, port=port,
                          ssh_key=ssh_key)
     if verbose:
-        print "Upload complete"
+        print("Upload complete")
     return properties
 
+
 def CopyFilesLocally(path, files, verbose=False, base_path=None, package=None):
     """Copy each file in the list of files to `path`.  The `base_path` argument is treated
     as it is by UploadFiles."""
     if not path.endswith("/"):
         path += "/"
     if base_path is not None:
         base_path = os.path.abspath(base_path)
     for file in files:
         file = os.path.abspath(file)
         if not os.path.isfile(file):
             raise IOError("File not found: %s" % file)
         # first ensure that path exists remotely
         target_path = GetBaseRelativePath(path, file, base_path)
         if not os.path.exists(target_path):
             os.makedirs(target_path)
         if verbose:
-            print "Copying " + file + " to " + target_path
+            print("Copying " + file + " to " + target_path)
         shutil.copy(file, target_path)
 
+
 def WriteProperties(files, properties_file, url_properties, package):
     properties = url_properties
     for file in files:
         if file.endswith('.complete.mar'):
             properties.update(GetMarProperties(file))
     with open(properties_file, 'w') as outfile:
         properties['packageFilename'] = package
         properties['uploadFiles'] = [os.path.abspath(f) for f in files]
         json.dump(properties, outfile, indent=4)
 
+
 if __name__ == '__main__':
     host = OptionalEnvironmentVariable('UPLOAD_HOST')
     user = OptionalEnvironmentVariable('UPLOAD_USER')
     path = OptionalEnvironmentVariable('UPLOAD_PATH')
     upload_to_temp_dir = OptionalEnvironmentVariable('UPLOAD_TO_TEMP')
     port = OptionalEnvironmentVariable('UPLOAD_PORT')
     if port is not None:
         port = int(port)
@@ -327,49 +347,51 @@ if __name__ == '__main__':
         if path is not None:
             path = FixupMsysPath(path)
         if post_upload_command is not None:
             post_upload_command = FixupMsysPath(post_upload_command)
 
     parser = OptionParser(usage="usage: %prog [options] <files>")
     parser.add_option("-b", "--base-path",
                       action="store",
-                      help="Preserve file paths relative to this path when uploading. If unset, all files will be uploaded directly to UPLOAD_PATH.")
+                      help="Preserve file paths relative to this path when uploading. "
+                      "If unset, all files will be uploaded directly to UPLOAD_PATH.")
     parser.add_option("--properties-file",
                       action="store",
                       help="Path to the properties file to store the upload properties.")
     parser.add_option("--package",
                       action="store",
                       help="Name of the main package.")
     (options, args) = parser.parse_args()
     if len(args) < 1:
-        print "You must specify at least one file to upload"
+        print("You must specify at least one file to upload")
         sys.exit(1)
     if not options.properties_file:
-        print "You must specify a --properties-file"
+        print("You must specify a --properties-file")
         sys.exit(1)
 
     if host == "localhost":
         if upload_to_temp_dir:
-            print "Cannot use UPLOAD_TO_TEMP with UPLOAD_HOST=localhost"
+            print("Cannot use UPLOAD_TO_TEMP with UPLOAD_HOST=localhost")
             sys.exit(1)
         if post_upload_command:
             # POST_UPLOAD_COMMAND is difficult to extract from the mozharness
             # scripts, so just ignore it until it's no longer used anywhere
-            print "Ignoring POST_UPLOAD_COMMAND with UPLOAD_HOST=localhost"
+            print("Ignoring POST_UPLOAD_COMMAND with UPLOAD_HOST=localhost")
 
     try:
         if host == "localhost":
             CopyFilesLocally(path, args, base_path=options.base_path,
                              package=options.package,
                              verbose=True)
         else:
 
             url_properties = UploadFiles(user, host, path, args,
                                          base_path=options.base_path, port=port, ssh_key=key,
                                          upload_to_temp_dir=upload_to_temp_dir,
                                          post_upload_command=post_upload_command,
                                          package=options.package, verbose=True)
 
-            WriteProperties(args, options.properties_file, url_properties, options.package)
-    except IOError, (strerror):
-        print strerror
+            WriteProperties(args, options.properties_file,
+                            url_properties, options.package)
+    except IOError as strerror:
+        print(strerror)
         sys.exit(1)
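
upload.py combines most of the patterns above and adds one more E501 case: a `def` whose parameter list overflows the limit. The fix continues the parameters on the next line, indented to the opening parenthesis, exactly as UploadFiles is rewrapped. A sketch with a placeholder body:

    # Wrapping a long signature at the opening parenthesis; parameters
    # mirror UploadFiles, the body is a placeholder.
    def upload_files(user, host, path, files, verbose=False, port=None,
                     ssh_key=None, base_path=None, upload_to_temp_dir=False,
                     post_upload_command=None, package=None):
        return {}

    print(upload_files('user', 'host', '/pub/', []))
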
--- a/build/upload_generated_sources.py
+++ b/build/upload_generated_sources.py
@@ -1,17 +1,16 @@
 #!/usr/bin/env/python
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import argparse
-from concurrent.futures import ThreadPoolExecutor
 from contextlib import contextmanager
 import gzip
 import io
 import logging
 from mozbuild.base import MozbuildObject
 from mozbuild.generated_sources import (
     get_filename_with_digest,
     get_s3_region_and_bucket,
@@ -32,16 +31,17 @@ log.setLevel(logging.INFO)
 
 @contextmanager
 def timed():
     '''
     Yield a function that provides the elapsed time in seconds since this
     function was called.
     '''
     start = time.time()
+
     def elapsed():
         return time.time() - start
     yield elapsed
 
 
 def gzip_compress(data):
     '''
     Apply gzip compression to `data` and return the result as a `BytesIO`.
@@ -71,49 +71,54 @@ def upload_worker(queue, event, bucket, 
                 return
             (name, contents) = queue.get()
             pathname = get_filename_with_digest(name, contents)
             compressed = gzip_compress(contents)
             extra_args = {
                 'ContentEncoding': 'gzip',
                 'ContentType': 'text/plain',
             }
-            log.info('Uploading "{}" ({} bytes)'.format(pathname, len(compressed.getvalue())))
+            log.info('Uploading "{}" ({} bytes)'.format(
+                pathname, len(compressed.getvalue())))
             with timed() as elapsed:
-                s3.upload_fileobj(compressed, bucket, pathname, ExtraArgs=extra_args)
-                log.info('Finished uploading "{}" in {:0.3f}s'.format(pathname, elapsed()))
+                s3.upload_fileobj(compressed, bucket,
+                                  pathname, ExtraArgs=extra_args)
+                log.info('Finished uploading "{}" in {:0.3f}s'.format(
+                    pathname, elapsed()))
             queue.task_done()
     except Exception:
         log.exception('Thread encountered exception:')
         event.set()
 
 
 def do_work(artifact, region, bucket):
     session_args = {'region_name': region}
     session = requests.Session()
     if 'TASK_ID' in os.environ:
         level = os.environ.get('MOZ_SCM_LEVEL', '1')
-        secrets_url = 'http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload'.format(level)
-        log.info('Using AWS credentials from the secrets service: "{}"'.format(secrets_url))
+        secrets_url = 'http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload'.format( # noqa
+            level)
+        log.info(
+            'Using AWS credentials from the secrets service: "{}"'.format(secrets_url))
         res = session.get(secrets_url)
         res.raise_for_status()
         secret = res.json()
         session_args.update(
             aws_access_key_id=secret['secret']['AWS_ACCESS_KEY_ID'],
             aws_secret_access_key=secret['secret']['AWS_SECRET_ACCESS_KEY'],
         )
     else:
         log.info('Trying to use your AWS credentials..')
 
-
     # First, fetch the artifact containing the sources.
     log.info('Fetching generated sources artifact: "{}"'.format(artifact))
     with timed() as elapsed:
         res = session.get(artifact)
-        log.info('Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s'.format(res.status_code, len(res.content), elapsed()))
+        log.info('Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s'.format(
+            res.status_code, len(res.content), elapsed()))
     res.raise_for_status()
     # Create a queue and worker threads for uploading.
     q = Queue()
     event = Event()
     log.info('Creating {} worker threads'.format(NUM_WORKER_THREADS))
     for i in range(NUM_WORKER_THREADS):
         t = Thread(target=upload_worker, args=(q, event, bucket, session_args))
         t.daemon = True
@@ -131,17 +136,17 @@ def do_work(artifact, region, bucket):
         if event.wait(0.1):
             log.error('Worker thread encountered exception, exiting...')
             break
 
 
 def main(argv):
     logging.basicConfig(format='%(levelname)s - %(threadName)s - %(message)s')
     parser = argparse.ArgumentParser(
-    description='Upload generated source files in ARTIFACT to BUCKET in S3.')
+        description='Upload generated source files in ARTIFACT to BUCKET in S3.')
     parser.add_argument('artifact',
                         help='generated-sources artifact from build task')
     args = parser.parse_args(argv)
     region, bucket = get_s3_region_and_bucket()
 
     config = MozbuildObject.from_environment()
     config._activate_virtualenv()
     config.virtualenv_manager.install_pip_package('boto3==1.4.4')
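
upload_generated_sources.py, subconfigure.py, buildconfig.py, and configure.py each drop an import that nothing references. flake8 (via pyflakes) reports these as F401, "imported but unused", and the fix is deletion rather than a `# noqa`. Sketch:

    # F401: delete the unused import instead of silencing it.
    import os            # kept: referenced below
    # import itertools   # would be flagged F401: imported but unused

    print(os.path.basename('/builds/upload.py'))
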
--- a/build/variables.py
+++ b/build/variables.py
@@ -6,16 +6,17 @@ from __future__ import print_function, u
 
 import os
 import subprocess
 import sys
 from datetime import datetime
 
 SOURCESTAMP_FILENAME = 'sourcestamp.txt'
 
+
 def buildid_header(output):
     buildid = os.environ.get('MOZ_BUILD_DATE')
     if buildid and len(buildid) != 14:
         print('Ignoring invalid MOZ_BUILD_DATE: %s' % buildid, file=sys.stderr)
         buildid = None
     if not buildid:
         buildid = datetime.now().strftime('%Y%m%d%H%M%S')
     output.write("#define MOZ_BUILDID %s\n" % buildid)
@@ -40,16 +41,17 @@ def get_hg_info(workdir):
     changeset = get_hg_changeset(workdir)
 
     return repo, changeset
 
 
 def get_hg_changeset(path):
     return get_program_output('hg', '-R', path, 'parent', '--template={node}')
 
+
 def get_info_from_sourcestamp(sourcestamp_path):
     """Read the repository and changelog information from the sourcestamp
     file. This assumes that the file exists and returns the results as a list
     (either strings or None in case of error).
     """
 
     # Load the content of the file.
     lines = None
@@ -61,26 +63,28 @@ def get_info_from_sourcestamp(sourcestam
     # URL.
     if len(lines) != 2 or not lines[1].startswith('http'):
         # Just return if the file doesn't contain what we expect.
         return None, None
 
     # Return the repo and the changeset.
     return lines[1].split('/rev/')
 
+
 def source_repo_header(output):
     # We allow the source repo and changeset to be specified via the
     # environment (see configure)
     import buildconfig
     repo = buildconfig.substs.get('MOZ_SOURCE_REPO')
     changeset = buildconfig.substs.get('MOZ_SOURCE_CHANGESET')
     source = ''
 
     if not repo:
-        sourcestamp_path = os.path.join(buildconfig.topsrcdir, SOURCESTAMP_FILENAME)
+        sourcestamp_path = os.path.join(
+            buildconfig.topsrcdir, SOURCESTAMP_FILENAME)
         if os.path.exists(os.path.join(buildconfig.topsrcdir, '.hg')):
             repo, changeset = get_hg_info(buildconfig.topsrcdir)
         elif os.path.exists(sourcestamp_path):
             repo, changeset = get_info_from_sourcestamp(sourcestamp_path)
     elif not changeset:
         changeset = get_hg_changeset(buildconfig.topsrcdir)
         if not changeset:
             raise Exception('could not resolve changeset; '
--- a/build/windows_toolchain.py
+++ b/build/windows_toolchain.py
@@ -193,17 +193,17 @@ def format_manifest(manifest):
     # Trailing newline.
     sha256_lines.append(b'')
 
     return b'\n'.join(sha256_lines)
 
 
 def write_zip(zip_path, prefix=None):
     """Write toolchain data to a zip file."""
-    if isinstance(prefix, unicode):
+    if isinstance(prefix, unicode): # noqa Special case for Python 2
         prefix = prefix.encode('utf-8')
 
     with JarWriter(file=zip_path, optimize=False, compress=5) as zip:
         manifest = {}
         for p, data, mode in resolve_files_and_hash(manifest):
             print(p)
             if prefix:
                 p = mozpath.join(prefix, p)
--- a/configure.py
+++ b/configure.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import print_function, unicode_literals
 
 import codecs
 import itertools
 import os
-import subprocess
 import sys
 import textwrap
 
 
 base_dir = os.path.abspath(os.path.dirname(__file__))
 sys.path.insert(0, os.path.join(base_dir, 'python', 'mozbuild'))
 from mozbuild.configure import ConfigureSandbox
 from mozbuild.makeutil import Makefile
--- a/tools/lint/flake8.yml
+++ b/tools/lint/flake8.yml
@@ -1,13 +1,15 @@
 ---
 flake8:
     description: Python linter
     include:
         - build/moz.configure/*.configure
+        - build/*.py
+        - configure.py
         - config/check_macroassembler_style.py
         - config/mozunit.py
         - layout/tools/reftest
         - python/mach
         - python/mach_commands.py
         - python/mozlint
         - python/mozversioncontrol
         - security/manager
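
With `build/*.py` and `configure.py` added to the include list, these files are now linted alongside the existing entries; in a mozilla-central checkout that is typically invoked with something like `./mach lint -l flake8 build/` (flag spelling assumed). flake8 3.x also exposes a small programmatic API that can check a file much as the harness does; a hedged sketch, where the 99-column limit is inferred from how this patch wraps lines rather than read from the config:

    # Hedged sketch using flake8's legacy API (flake8 3.x).
    from flake8.api import legacy as flake8

    style_guide = flake8.get_style_guide(max_line_length=99)
    report = style_guide.check_files(['build/checksums.py'])
    print('violations found:', report.total_errors)
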