Bug 1542963 - run './mach lint ... --fix' on mozbuild/mozbuild, undoes some black changes. r=#build
author Justin Wood <Callek@gmail.com>
date Wed, 24 Apr 2019 22:12:09 -0400
changeset 532992 2715bac40d2f2e90a60cb86a28308bfeb4fc34de
parent 532991 8f69c7eeb6fd147b0066756389b95a734be8ae86
child 532993 11a714f491d56eb86b131505b12b9db240249179
push id 11276
push user rgurzau@mozilla.com
push date Mon, 20 May 2019 13:11:24 +0000
treeherder mozilla-beta@847755a7c325
bugs 1542963
milestone 68.0a1
Bug 1542963 - run './mach lint ... --fix' on mozbuild/mozbuild, undoes some black changes. r=#build

Lint python/mozbuild/{mozbuild,mozpack}. Ran
'./mach lint -l py2 -l flake8 -l shellcheck -l codespell -l yaml python/mozbuild/{mozbuild,mozpack}/ --fix'
in order to undo some black changes and get closer to making this folder
able to be validated on every lint run.

Differential Revision: https://phabricator.services.mozilla.com/D26640
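The mechanical changes in the diffs below are the pycodestyle-driven fixes that flake8 reports and the autopep8-style '--fix' pass applies: two blank lines around top-level definitions (E302/E305), four-space indentation in place of the older two-space style (E111), and continuation lines aligned under their opening delimiter (E127/E128). As a minimal sketch of the transformation (hypothetical code, not taken from this changeset), the fixer rewrites

    def addEntry(entries, e):
      # two-space indent trips E111
      if e not in entries:
        entries.append(e)

as

    def addEntry(entries, e):
        # four-space indent; blank lines and continuation alignment
        # are adjusted the same way throughout the hunks below
        if e not in entries:
            entries.append(e)

with behavior unchanged: only whitespace and blank-line placement move, which is why the patch touches many files without altering any logic.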
python/mozbuild/mozbuild/action/buildlist.py
python/mozbuild/mozbuild/action/check_binary.py
python/mozbuild/mozbuild/action/dumpsymbols.py
python/mozbuild/mozbuild/action/exe_7z_archive.py
python/mozbuild/mozbuild/action/exe_7z_extract.py
python/mozbuild/mozbuild/action/file_generate.py
python/mozbuild/mozbuild/action/generate_searchjson.py
python/mozbuild/mozbuild/action/generate_suggestedsites.py
python/mozbuild/mozbuild/action/make_unzip.py
python/mozbuild/mozbuild/action/make_zip.py
python/mozbuild/mozbuild/action/output_searchplugins_list.py
python/mozbuild/mozbuild/action/package_fennec_apk.py
python/mozbuild/mozbuild/action/package_generated_sources.py
python/mozbuild/mozbuild/action/preprocessor.py
python/mozbuild/mozbuild/action/process_define_files.py
python/mozbuild/mozbuild/action/process_install_manifest.py
python/mozbuild/mozbuild/action/symbols_archive.py
python/mozbuild/mozbuild/action/test_archive.py
python/mozbuild/mozbuild/action/tooltool.py
python/mozbuild/mozbuild/action/wrap_rustc.py
python/mozbuild/mozbuild/action/xpccheck.py
python/mozbuild/mozbuild/action/xpidl-process.py
python/mozbuild/mozbuild/analyze/graph.py
python/mozbuild/mozbuild/analyze/hg.py
python/mozbuild/mozbuild/android_version_code.py
python/mozbuild/mozbuild/artifacts.py
python/mozbuild/mozbuild/backend/common.py
python/mozbuild/mozbuild/backend/configenvironment.py
python/mozbuild/mozbuild/backend/cpp_eclipse.py
python/mozbuild/mozbuild/backend/fastermake.py
python/mozbuild/mozbuild/backend/mach_commands.py
python/mozbuild/mozbuild/backend/recursivemake.py
python/mozbuild/mozbuild/backend/tup.py
python/mozbuild/mozbuild/backend/visualstudio.py
python/mozbuild/mozbuild/base.py
python/mozbuild/mozbuild/chunkify.py
python/mozbuild/mozbuild/codecoverage/chrome_map.py
python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
python/mozbuild/mozbuild/codecoverage/packager.py
python/mozbuild/mozbuild/compilation/codecomplete.py
python/mozbuild/mozbuild/compilation/database.py
python/mozbuild/mozbuild/compilation/util.py
python/mozbuild/mozbuild/compilation/warnings.py
python/mozbuild/mozbuild/config_status.py
python/mozbuild/mozbuild/configure/__init__.py
python/mozbuild/mozbuild/configure/check_debug_ranges.py
python/mozbuild/mozbuild/configure/libstdcxx.py
python/mozbuild/mozbuild/configure/lint.py
python/mozbuild/mozbuild/configure/lint_util.py
python/mozbuild/mozbuild/configure/options.py
python/mozbuild/mozbuild/configure/util.py
python/mozbuild/mozbuild/controller/building.py
python/mozbuild/mozbuild/controller/clobber.py
python/mozbuild/mozbuild/doctor.py
python/mozbuild/mozbuild/dotproperties.py
python/mozbuild/mozbuild/faster_daemon.py
python/mozbuild/mozbuild/frontend/context.py
python/mozbuild/mozbuild/frontend/data.py
python/mozbuild/mozbuild/frontend/emitter.py
python/mozbuild/mozbuild/frontend/gyp_reader.py
python/mozbuild/mozbuild/frontend/mach_commands.py
python/mozbuild/mozbuild/frontend/reader.py
python/mozbuild/mozbuild/frontend/sandbox.py
python/mozbuild/mozbuild/gn_processor.py
python/mozbuild/mozbuild/jar.py
python/mozbuild/mozbuild/mach_commands.py
python/mozbuild/mozbuild/makeutil.py
python/mozbuild/mozbuild/moz_yaml.py
python/mozbuild/mozbuild/mozconfig.py
python/mozbuild/mozbuild/mozinfo.py
python/mozbuild/mozbuild/preprocessor.py
python/mozbuild/mozbuild/repackaging/dmg.py
python/mozbuild/mozbuild/repackaging/msi.py
python/mozbuild/mozbuild/shellutil.py
python/mozbuild/mozbuild/sphinx.py
python/mozbuild/mozbuild/telemetry.py
python/mozbuild/mozbuild/test/action/test_buildlist.py
python/mozbuild/mozbuild/test/action/test_node.py
python/mozbuild/mozbuild/test/action/test_process_install_manifest.py
python/mozbuild/mozbuild/test/analyze/test_graph.py
python/mozbuild/mozbuild/test/backend/test_build.py
python/mozbuild/mozbuild/test/backend/test_configenvironment.py
python/mozbuild/mozbuild/test/backend/test_fastermake.py
python/mozbuild/mozbuild/test/backend/test_gn_processor.py
python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py
python/mozbuild/mozbuild/test/backend/test_recursivemake.py
python/mozbuild/mozbuild/test/backend/test_visualstudio.py
python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py
python/mozbuild/mozbuild/test/common.py
python/mozbuild/mozbuild/test/compilation/test_warnings.py
python/mozbuild/mozbuild/test/configure/common.py
python/mozbuild/mozbuild/test/configure/test_checks_configure.py
python/mozbuild/mozbuild/test/configure/test_compile_checks.py
python/mozbuild/mozbuild/test/configure/test_configure.py
python/mozbuild/mozbuild/test/configure/test_options.py
python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
python/mozbuild/mozbuild/test/configure/test_util.py
python/mozbuild/mozbuild/test/controller/test_ccachestats.py
python/mozbuild/mozbuild/test/controller/test_clobber.py
python/mozbuild/mozbuild/test/frontend/test_context.py
python/mozbuild/mozbuild/test/frontend/test_emitter.py
python/mozbuild/mozbuild/test/frontend/test_namespaces.py
python/mozbuild/mozbuild/test/frontend/test_reader.py
python/mozbuild/mozbuild/test/frontend/test_sandbox.py
python/mozbuild/mozbuild/test/test_android_version_code.py
python/mozbuild/mozbuild/test/test_artifact_cache.py
python/mozbuild/mozbuild/test/test_base.py
python/mozbuild/mozbuild/test/test_containers.py
python/mozbuild/mozbuild/test/test_dotproperties.py
python/mozbuild/mozbuild/test/test_expression.py
python/mozbuild/mozbuild/test/test_jarmaker.py
python/mozbuild/mozbuild/test/test_licenses.py
python/mozbuild/mozbuild/test/test_line_endings.py
python/mozbuild/mozbuild/test/test_makeutil.py
python/mozbuild/mozbuild/test/test_mozconfig.py
python/mozbuild/mozbuild/test/test_mozinfo.py
python/mozbuild/mozbuild/test/test_util.py
python/mozbuild/mozbuild/testing.py
python/mozbuild/mozbuild/util.py
python/mozbuild/mozbuild/vendor_aom.py
python/mozbuild/mozbuild/vendor_dav1d.py
python/mozbuild/mozbuild/vendor_python.py
python/mozbuild/mozbuild/vendor_rust.py
python/mozbuild/mozbuild/virtualenv.py
--- a/python/mozbuild/mozbuild/action/buildlist.py
+++ b/python/mozbuild/mozbuild/action/buildlist.py
@@ -12,41 +12,42 @@ from __future__ import absolute_import, 
 import sys
 import os
 
 from mozbuild.util import (
     ensureParentDir,
     lock_file,
 )
 
+
 def addEntriesToListFile(listFile, entries):
-  """Given a file |listFile| containing one entry per line,
-  add each entry in |entries| to the file, unless it is already
-  present."""
-  ensureParentDir(listFile)
-  lock = lock_file(listFile + ".lck")
-  try:
-    if os.path.exists(listFile):
-      f = open(listFile)
-      existing = set(x.strip() for x in f.readlines())
-      f.close()
-    else:
-      existing = set()
-    for e in entries:
-      if e not in existing:
-        existing.add(e)
-    with open(listFile, 'wb') as f:
-      f.write("\n".join(sorted(existing))+"\n")
-  finally:
-    lock = None
+    """Given a file |listFile| containing one entry per line,
+    add each entry in |entries| to the file, unless it is already
+    present."""
+    ensureParentDir(listFile)
+    lock = lock_file(listFile + ".lck")
+    try:
+        if os.path.exists(listFile):
+            f = open(listFile)
+            existing = set(x.strip() for x in f.readlines())
+            f.close()
+        else:
+            existing = set()
+        for e in entries:
+            if e not in existing:
+                existing.add(e)
+        with open(listFile, 'wb') as f:
+            f.write("\n".join(sorted(existing))+"\n")
+    finally:
+        lock = None
 
 
 def main(args):
     if len(args) < 2:
         print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
-            file=sys.stderr)
+              file=sys.stderr)
         return 1
 
     return addEntriesToListFile(args[0], args[1:])
 
 
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/check_binary.py
+++ b/python/mozbuild/mozbuild/action/check_binary.py
@@ -275,23 +275,24 @@ def check_networking(binary):
     except Empty:
         raise RuntimeError('Could not parse llvm-objdump output?')
 
     basename = os.path.basename(binary)
     if bad_occurences_names:
         s = 'TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} ' + \
             'networking function(s) being imported in the rust static library ({})'
         print(s.format(basename, len(bad_occurences_names),
-            ",".join(sorted(bad_occurences_names))),
-            file=sys.stderr)
+                       ",".join(sorted(bad_occurences_names))),
+              file=sys.stderr)
         retcode = 1
     elif buildconfig.substs.get('MOZ_AUTOMATION'):
         print('TEST-PASS | check_networking | {}'.format(basename))
     return retcode
 
+
 def checks(target, binary):
     # The clang-plugin is built as target but is really a host binary.
     # Cheat and pretend we were passed the right argument.
     if 'clang-plugin' in binary:
         target = HOST
     checks = []
     if target['MOZ_LIBSTDCXX_VERSION']:
         checks.append(check_stdcxx)
@@ -340,17 +341,17 @@ def main(args):
 
     if options.host == options.target:
         print('Exactly one of --host or --target must be given',
               file=sys.stderr)
         return 1
 
     if options.networking and options.host:
         print('--networking is only valid with --target',
-               file=sys.stderr)
+              file=sys.stderr)
         return 1
 
     if options.networking:
         return check_networking(options.binary)
     elif options.host:
         return checks(HOST, options.binary)
     elif options.target:
         return checks(TARGET, options.binary)
--- a/python/mozbuild/mozbuild/action/dumpsymbols.py
+++ b/python/mozbuild/mozbuild/action/dumpsymbols.py
@@ -6,16 +6,17 @@ from __future__ import absolute_import, 
 
 import argparse
 import buildconfig
 import subprocess
 import shutil
 import sys
 import os
 
+
 def dump_symbols(target, tracking_file, count_ctors=False):
     # Our tracking file, if present, will contain path(s) to the previously generated
     # symbols. Remove them in this case so we don't simply accumulate old symbols
     # during incremental builds.
     if os.path.isfile(os.path.normpath(tracking_file)):
         with open(tracking_file, 'r') as fh:
             files = fh.read().splitlines()
         dirs = set(os.path.dirname(f) for f in files)
@@ -54,30 +55,31 @@ def dump_symbols(target, tracking_file, 
                                                         os.path.join(buildconfig.topobjdir,
                                                                      'dist',
                                                                      'include')))
     objcopy = buildconfig.substs.get('OBJCOPY')
     if objcopy:
         os.environ['OBJCOPY'] = objcopy
 
     args = ([buildconfig.substs['PYTHON'], os.path.join(buildconfig.topsrcdir, 'toolkit',
-                                                       'crashreporter', 'tools', 'symbolstore.py')] +
+                                                        'crashreporter', 'tools', 'symbolstore.py')] +
             sym_store_args +
             ['-s', buildconfig.topsrcdir, dump_syms_bin, os.path.join(buildconfig.topobjdir,
                                                                       'dist',
                                                                       'crashreporter-symbols'),
              os.path.abspath(target)])
     if count_ctors:
         args.append('--count-ctors')
     print('Running: %s' % ' '.join(args))
     out_files = subprocess.check_output(args)
     with open(tracking_file, 'w') as fh:
         fh.write(out_files)
         fh.flush()
 
+
 def main(argv):
     parser = argparse.ArgumentParser(
         usage="Usage: dumpsymbols.py <library or program> <tracking file>")
     parser.add_argument("--count-ctors",
                         action="store_true", default=False,
                         help="Count static initializers")
     parser.add_argument("library_or_program",
                         help="Path to library or program")
--- a/python/mozbuild/mozbuild/action/exe_7z_archive.py
+++ b/python/mozbuild/mozbuild/action/exe_7z_archive.py
@@ -8,16 +8,17 @@ import os
 import shutil
 import sys
 import subprocess
 import tempfile
 import mozpack.path as mozpath
 import buildconfig
 from mozbuild.base import BuildEnvironmentNotFoundException
 
+
 def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
     tmpdir = tempfile.mkdtemp(prefix='tmp')
     try:
         if pkg_dir:
             shutil.move(pkg_dir, 'core')
 
         if use_upx:
             final_sfx = mozpath.join(tmpdir, '7zSD.sfx')
@@ -25,30 +26,33 @@ def archive_exe(pkg_dir, tagfile, sfx_pa
         else:
             final_sfx = sfx_package
 
         try:
             sevenz = buildconfig.config.substs['7Z']
         except BuildEnvironmentNotFoundException:
             # configure hasn't been run, just use the default
             sevenz = '7z'
-        subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
+        subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
+                               '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
 
         with open(package, 'wb') as o:
             for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:
                 shutil.copyfileobj(open(i, 'rb'), o)
         os.chmod(package, 0o0755)
     finally:
         if pkg_dir:
             shutil.move('core', pkg_dir)
         shutil.rmtree(tmpdir)
 
+
 def main(args):
     if len(args) != 4:
         print('Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>',
               file=sys.stderr)
         return 1
     else:
         archive_exe(args[0], args[1], args[2], args[3], args[4])
         return 0
 
+
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/exe_7z_extract.py
+++ b/python/mozbuild/mozbuild/action/exe_7z_extract.py
@@ -3,23 +3,26 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import print_function
 
 import shutil
 import sys
 import subprocess
 
+
 def extract_exe(package, target):
     subprocess.check_call(['7z', 'x', package, 'core'])
     shutil.move('core', target)
 
+
 def main(args):
     if len(args) != 2:
         print('Usage: exe_7z_extract.py <package> <target>',
               file=sys.stderr)
         return 1
     else:
         extract_exe(args[0], args[1])
         return 0
 
+
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/file_generate.py
+++ b/python/mozbuild/mozbuild/action/file_generate.py
@@ -111,10 +111,11 @@ def main(argv):
                 output.avoid_writing_to_file()
 
     except IOError as e:
         print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
         traceback.print_exc()
         return 1
     return ret
 
+
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/generate_searchjson.py
+++ b/python/mozbuild/mozbuild/action/generate_searchjson.py
@@ -9,59 +9,62 @@ import copy
 engines = []
 
 locale = sys.argv[2]
 output_file = sys.argv[3]
 
 output = open(output_file, 'w')
 
 with open(sys.argv[1]) as f:
-  searchinfo = json.load(f)
+    searchinfo = json.load(f)
 
 # If we have a locale, use it, otherwise use the default
 if locale in searchinfo["locales"]:
-  localeSearchInfo = searchinfo["locales"][locale]
+    localeSearchInfo = searchinfo["locales"][locale]
 else:
-  localeSearchInfo = {}
-  localeSearchInfo["default"] = searchinfo["default"]
+    localeSearchInfo = {}
+    localeSearchInfo["default"] = searchinfo["default"]
+
 
 def validateDefault(key):
-  if (not key in searchinfo["default"]):
-    print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
-    sys.exit(1)
+    if (not key in searchinfo["default"]):
+        print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
+        sys.exit(1)
 
-validateDefault("searchDefault");
-validateDefault("visibleDefaultEngines");
+
+validateDefault("searchDefault")
+validateDefault("visibleDefaultEngines")
 
 # If the selected locale doesn't have a searchDefault,
 # use the global one.
 if not "searchDefault" in localeSearchInfo["default"]:
-  localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]
+    localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]
 
 # If the selected locale doesn't have a searchOrder,
 # use the global one if present.
 # searchOrder is NOT required.
 if not "searchOrder" in localeSearchInfo["default"] and "searchOrder" in searchinfo["default"]:
     localeSearchInfo["default"]["searchOrder"] = searchinfo["default"]["searchOrder"]
 
 # If we have region overrides, enumerate through them
 # and add the additional regions to the locale information.
 if "regionOverrides" in searchinfo:
-  regionOverrides = searchinfo["regionOverrides"]
+    regionOverrides = searchinfo["regionOverrides"]
 
-  for region in regionOverrides:
-    # Only add a new engine list if there is an engine that is overridden
-    enginesToOverride = set(regionOverrides[region].keys())
-    if region in localeSearchInfo and "visibleDefaultEngines" in localeSearchInfo[region]:
-       visibleDefaultEngines = localeSearchInfo[region]["visibleDefaultEngines"]
-    else:
-       visibleDefaultEngines = localeSearchInfo["default"]["visibleDefaultEngines"]
-    if set(visibleDefaultEngines) & enginesToOverride:
-      if region not in localeSearchInfo:
-        localeSearchInfo[region] = {}
-      localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(visibleDefaultEngines)
-      for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
-        if engine in regionOverrides[region]:
-          localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]
+    for region in regionOverrides:
+        # Only add a new engine list if there is an engine that is overridden
+        enginesToOverride = set(regionOverrides[region].keys())
+        if region in localeSearchInfo and "visibleDefaultEngines" in localeSearchInfo[region]:
+            visibleDefaultEngines = localeSearchInfo[region]["visibleDefaultEngines"]
+        else:
+            visibleDefaultEngines = localeSearchInfo["default"]["visibleDefaultEngines"]
+        if set(visibleDefaultEngines) & enginesToOverride:
+            if region not in localeSearchInfo:
+                localeSearchInfo[region] = {}
+            localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(
+                visibleDefaultEngines)
+            for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
+                if engine in regionOverrides[region]:
+                    localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]
 
 output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))
 
-output.close();
+output.close()
--- a/python/mozbuild/mozbuild/action/generate_suggestedsites.py
+++ b/python/mozbuild/mozbuild/action/generate_suggestedsites.py
@@ -97,49 +97,52 @@ def main(output, *args, **kwargs):
 
     # Load properties corresponding to each site name and define their
     # respective image URL.
     sites = []
 
     def add_names(names, defaults={}):
         for name in names:
             site = copy.deepcopy(defaults)
-            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
+            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(
+                name=name), required_keys=('title', 'url', 'bgcolor')))
             site['imageurl'] = image_url_template.format(name=name)
             sites.append(site)
 
             # Now check for existence of an appropriately named drawable.  If none
             # exists, throw.  This stops a locale discovering, at runtime, that the
             # corresponding drawable was not added to en-US.
             if not opts.resources:
                 continue
             resources = os.path.abspath(opts.resources)
             finder = FileFinder(resources)
             matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
             if not matches:
                 raise Exception("Could not find drawable in '{resources}' for '{name}'"
-                    .format(resources=resources, name=name))
+                                .format(resources=resources, name=name))
             else:
                 if opts.verbose:
                     print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                           .format(len=len(matches), resources=resources, name=name, matches=matches))
 
     # We want the lists to be ordered for reproducibility.  Each list has a
     # "default" JSON list item which will be extended by the properties read.
     lists = [
         ('browser.suggestedsites.list', {}),
         ('browser.suggestedsites.restricted.list', {'restricted': True}),
     ]
     if opts.verbose:
-        print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))
+        print('Reading {len} suggested site lists: {lists}'.format(
+            len=len(lists), lists=[list_name for list_name, _ in lists]))
 
     for (list_name, list_item_defaults) in lists:
         names = properties.get_list(list_name)
         if opts.verbose:
-            print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
+            print('Reading {len} suggested sites from {list}: {names}'.format(
+                len=len(names), list=list_name, names=names))
         add_names(names, list_item_defaults)
 
     # We must define at least one site -- that's what the fallback is for.
     if not sites:
         print('No sites defined: searched in {}!'.format(sources))
         return 1
 
     json.dump(sites, output)
--- a/python/mozbuild/mozbuild/action/make_unzip.py
+++ b/python/mozbuild/mozbuild/action/make_unzip.py
@@ -2,22 +2,25 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import print_function
 
 import sys
 import subprocess
 
+
 def make_unzip(package):
     subprocess.check_call(['unzip', package])
 
+
 def main(args):
     if len(args) != 1:
         print('Usage: make_unzip.py <package>',
               file=sys.stderr)
         return 1
     else:
         make_unzip(args[0])
         return 0
 
+
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/make_zip.py
+++ b/python/mozbuild/mozbuild/action/make_zip.py
@@ -2,22 +2,25 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import print_function
 
 import sys
 import subprocess
 
+
 def make_zip(source, package):
     subprocess.check_call(['zip', '-r9D', package, source, '-x', '\*/.mkdir.done'])
 
+
 def main(args):
     if len(args) != 2:
         print('Usage: make_zip.py <source> <package>',
               file=sys.stderr)
         return 1
     else:
         make_zip(args[0], args[1])
         return 0
 
+
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/output_searchplugins_list.py
+++ b/python/mozbuild/mozbuild/action/output_searchplugins_list.py
@@ -5,29 +5,29 @@
 import sys
 import json
 
 engines = []
 
 locale = sys.argv[2]
 
 with open(sys.argv[1]) as f:
-  searchinfo = json.load(f)
+    searchinfo = json.load(f)
 
 # Get a list of the engines from the locale or the default
 engines = set()
 if locale in searchinfo["locales"]:
-  for region, table in searchinfo["locales"][locale].iteritems():
-    if "visibleDefaultEngines" in table:
-      engines.update(table["visibleDefaultEngines"])
+    for region, table in searchinfo["locales"][locale].iteritems():
+        if "visibleDefaultEngines" in table:
+            engines.update(table["visibleDefaultEngines"])
 
 if not engines:
-  engines.update(searchinfo["default"]["visibleDefaultEngines"])
+    engines.update(searchinfo["default"]["visibleDefaultEngines"])
 
 # Get additional engines from regionOverrides
 for region, overrides in searchinfo["regionOverrides"].iteritems():
-  for originalengine, replacement in overrides.iteritems():
-    if originalengine in engines:
-      # We add the engine because we still need the original
-      engines.add(replacement)
+    for originalengine, replacement in overrides.iteritems():
+        if originalengine in engines:
+            # We add the engine because we still need the original
+            engines.add(replacement)
 
 # join() will take an iterable, not just a list.
 print('\n'.join(engines))
--- a/python/mozbuild/mozbuild/action/package_fennec_apk.py
+++ b/python/mozbuild/mozbuild/action/package_fennec_apk.py
@@ -65,17 +65,17 @@ def package_fennec_apk(inputs=[], omni_j
                 jarrer.remove(path)
             jarrer.add(path, DeflatedFile(file), compress=file.compressed)
 
     def add(path, file, compress=None):
         abspath = os.path.abspath(file.path)
         if verbose:
             print('Packaging %s from %s' % (path, file.path))
         if not os.path.exists(abspath):
-            raise ValueError('File %s not found (looked for %s)' % \
+            raise ValueError('File %s not found (looked for %s)' %
                              (file.path, abspath))
         if jarrer.contains(path):
             jarrer.remove(path)
         jarrer.add(path, file, compress=compress)
 
     for features_dir in features_dirs:
         finder = FileFinder(features_dir)
         for p, f in finder.find('**'):
--- a/python/mozbuild/mozbuild/action/package_generated_sources.py
+++ b/python/mozbuild/mozbuild/action/package_generated_sources.py
@@ -18,16 +18,17 @@ from mozbuild.generated_sources import g
 
 def main(argv):
     parser = argparse.ArgumentParser(
         description='Produce archive of generated sources')
     parser.add_argument('outputfile', help='File to write output to')
     args = parser.parse_args(argv)
 
     objdir_abspath = mozpath.abspath(buildconfig.topobjdir)
+
     def is_valid_entry(entry):
         if isinstance(entry[1], BaseFile):
             entry_abspath = mozpath.abspath(entry[1].path)
         else:
             entry_abspath = mozpath.abspath(entry[1])
         if not entry_abspath.startswith(objdir_abspath):
             print("Warning: omitting generated source [%s] from archive" % entry_abspath, file=sys.stderr)
             return False
--- a/python/mozbuild/mozbuild/action/preprocessor.py
+++ b/python/mozbuild/mozbuild/action/preprocessor.py
@@ -10,15 +10,16 @@ from mozbuild.preprocessor import Prepro
 
 
 def generate(output, *args):
     pp = Preprocessor()
     pp.out = output
     pp.handleCommandLine(list(args), True)
     return set(pp.includes)
 
+
 def main(args):
     pp = Preprocessor()
     pp.handleCommandLine(args, True)
 
 
 if __name__ == "__main__":
-  main(sys.argv[1:])
+    main(sys.argv[1:])
--- a/python/mozbuild/mozbuild/action/process_define_files.py
+++ b/python/mozbuild/mozbuild/action/process_define_files.py
@@ -50,16 +50,17 @@ def process_define_file(output, input):
                         if cmd == 'define':
                             raise Exception(
                                 '`#define ALLDEFINES` is not allowed in a '
                                 'CONFIGURE_DEFINE_FILE')
                         # WebRTC files like to define WINVER and _WIN32_WINNT
                         # via the command line, which raises a mass of macro
                         # redefinition warnings.  Just handle those macros
                         # specially here.
+
                         def define_for_name(name, val):
                             define = "#define {name} {val}".format(name=name, val=val)
                             if name in ('WINVER', '_WIN32_WINNT'):
                                 return '#if !defined({name})\n{define}\n#endif' \
                                     .format(name=name, define=define)
                             return define
                         defines = '\n'.join(sorted(
                             define_for_name(name, val)
--- a/python/mozbuild/mozbuild/action/process_install_manifest.py
+++ b/python/mozbuild/mozbuild/action/process_install_manifest.py
@@ -24,90 +24,91 @@ from mozbuild.util import DefinesAction
 
 
 COMPLETE = 'Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; ' \
     'Added/updated {updated}; ' \
     'Removed {rm_files} files and {rm_dirs} directories.'
 
 
 def process_manifest(destdir, paths, track,
-        no_symlinks=False,
-        defines={}):
+                     no_symlinks=False,
+                     defines={}):
 
     if os.path.exists(track):
         # We use the same format as install manifests for the tracking
         # data.
         manifest = InstallManifest(path=track)
         remove_unaccounted = FileRegistry()
         dummy_file = BaseFile()
 
         finder = FileFinder(destdir, find_dotfiles=True)
         for dest in manifest._dests:
             for p, f in finder.find(dest):
                 remove_unaccounted.add(p, dummy_file)
 
-        remove_empty_directories=True
-        remove_all_directory_symlinks=True
+        remove_empty_directories = True
+        remove_all_directory_symlinks = True
 
     else:
         # If tracking is enabled and there is no file, we don't want to
         # be removing anything.
         remove_unaccounted = False
-        remove_empty_directories=False
-        remove_all_directory_symlinks=False
+        remove_empty_directories = False
+        remove_all_directory_symlinks = False
 
     manifest = InstallManifest()
     for path in paths:
         manifest |= InstallManifest(path=path)
 
     copier = FileCopier()
     link_policy = "copy" if no_symlinks else "symlink"
     manifest.populate_registry(
         copier, defines_override=defines, link_policy=link_policy
     )
     result = copier.copy(destdir,
-        remove_unaccounted=remove_unaccounted,
-        remove_all_directory_symlinks=remove_all_directory_symlinks,
-        remove_empty_directories=remove_empty_directories)
+                         remove_unaccounted=remove_unaccounted,
+                         remove_all_directory_symlinks=remove_all_directory_symlinks,
+                         remove_empty_directories=remove_empty_directories)
 
     if track:
         # We should record files that we actually copied.
         # It is too late to expand wildcards when the track file is read.
         manifest.write(path=track, expand_pattern=True)
 
     return result
 
 
 def main(argv):
     parser = argparse.ArgumentParser(
         description='Process install manifest files.')
 
     parser.add_argument('destdir', help='Destination directory.')
     parser.add_argument('manifests', nargs='+', help='Path to manifest file(s).')
     parser.add_argument('--no-symlinks', action='store_true',
-        help='Do not install symbolic links. Always copy files')
+                        help='Do not install symbolic links. Always copy files')
     parser.add_argument('--track', metavar="PATH", required=True,
-        help='Use installed files tracking information from the given path.')
+                        help='Use installed files tracking information from the given path.')
     parser.add_argument('-D', action=DefinesAction,
-        dest='defines', metavar="VAR[=VAL]",
-        help='Define a variable to override what is specified in the manifest')
+                        dest='defines', metavar="VAR[=VAL]",
+                        help='Define a variable to override what is specified in the manifest')
 
     args = parser.parse_args(argv)
 
     start = time.time()
 
     result = process_manifest(args.destdir, args.manifests,
-        track=args.track,
-        no_symlinks=args.no_symlinks,
-        defines=args.defines)
+                              track=args.track,
+                              no_symlinks=args.no_symlinks,
+                              defines=args.defines)
 
     elapsed = time.time() - start
 
     print(COMPLETE.format(
         elapsed=elapsed,
         dest=args.destdir,
         existing=result.existing_files_count,
         updated=result.updated_files_count,
         rm_files=result.removed_files_count,
         rm_dirs=result.removed_directories_count))
 
+
 if __name__ == '__main__':
     main(sys.argv[1:])
--- a/python/mozbuild/mozbuild/action/symbols_archive.py
+++ b/python/mozbuild/mozbuild/action/symbols_archive.py
@@ -7,45 +7,49 @@ from __future__ import absolute_import, 
 import argparse
 import sys
 import os
 
 from mozpack.files import FileFinder
 from mozpack.mozjar import JarWriter
 import mozpack.path as mozpath
 
+
 def make_archive(archive_name, base, exclude, include):
     compress = ['**/*.sym']
     finder = FileFinder(base, ignore=exclude)
     if not include:
         include = ['*']
     archive_basename = os.path.basename(archive_name)
     with open(archive_name, 'wb') as fh:
         with JarWriter(fileobj=fh, compress_level=5) as writer:
             for pat in include:
                 for p, f in finder.find(pat):
                     print('  Adding to "%s":\n\t"%s"' % (archive_basename, p))
                     should_compress = any(mozpath.match(p, pat) for pat in compress)
                     writer.add(p.encode('utf-8'), f, mode=f.mode,
                                compress=should_compress, skip_duplicates=True)
 
+
 def main(argv):
     parser = argparse.ArgumentParser(description='Produce a symbols archive')
     parser.add_argument('archive', help='Which archive to generate')
     parser.add_argument('base', help='Base directory to package')
-    parser.add_argument('--full-archive', action='store_true', help='Generate a full symbol archive')
+    parser.add_argument('--full-archive', action='store_true',
+                        help='Generate a full symbol archive')
 
     args = parser.parse_args(argv)
 
     excludes = []
     includes = []
 
     if args.full_archive:
         # We allow symbols for tests to be included when building on try
         if os.environ.get('MH_BRANCH', 'unknown') != 'try':
             excludes = ['*test*', '*Test*']
     else:
         includes = ['**/*.sym']
 
     make_archive(args.archive, args.base, excludes, includes)
 
+
 if __name__ == '__main__':
     main(sys.argv[1:])
--- a/python/mozbuild/mozbuild/action/test_archive.py
+++ b/python/mozbuild/mozbuild/action/test_archive.py
@@ -637,17 +637,17 @@ if buildconfig.substs.get('commtopsrcdir
 # Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion
 # rule in the "common" archive.
 for k, v in ARCHIVE_FILES.items():
     # Skip mozharness because it isn't staged.
     if k in ('common', 'mozharness'):
         continue
 
     ignores = set(itertools.chain(*(e.get('ignore', [])
-                                  for e in ARCHIVE_FILES['common'])))
+                                    for e in ARCHIVE_FILES['common'])))
 
     if not any(p.startswith('%s/' % k) for p in ignores):
         raise Exception('"common" ignore list probably should contain %s' % k)
 
 
 def find_generated_harness_files():
     # TEST_HARNESS_FILES end up in an install manifest at
     # $topsrcdir/_build_manifests/install/_tests.
--- a/python/mozbuild/mozbuild/action/tooltool.py
+++ b/python/mozbuild/mozbuild/action/tooltool.py
@@ -1295,10 +1295,11 @@ def main(argv, _skip_logging=False):
     if options['algorithm'] != 'sha512':
         parser.error('only --algorithm sha512 is supported')
 
     if len(args) < 1:
         parser.error('You must specify a command')
 
     return 0 if process_command(options, args) else 1
 
+
 if __name__ == "__main__":  # pragma: no cover
     sys.exit(main(sys.argv))
--- a/python/mozbuild/mozbuild/action/wrap_rustc.py
+++ b/python/mozbuild/mozbuild/action/wrap_rustc.py
@@ -4,16 +4,17 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import argparse
 import subprocess
 import sys
 import os
 
+
 def parse_outputs(crate_output, dep_outputs, pass_l_flag):
     env = {}
     args = []
 
     def parse_line(line):
         if line.startswith('cargo:'):
             return line[len('cargo:'):].split('=', 1)
 
@@ -54,24 +55,26 @@ def parse_outputs(crate_output, dep_outp
             elif key:
                 # Todo: Distinguish between direct and transitive
                 # dependencies so we can pass metadata environment
                 # variables correctly.
                 pass
 
     return env, args
 
+
 def wrap_rustc(args):
     parser = argparse.ArgumentParser()
     parser.add_argument('--crate-out', nargs='?')
     parser.add_argument('--deps-out', nargs='*')
     parser.add_argument('--cwd')
     parser.add_argument('--pass-l-flag', action='store_true')
     parser.add_argument('--cmd', nargs=argparse.REMAINDER)
     args = parser.parse_args(args)
 
     new_env, new_args = parse_outputs(args.crate_out, args.deps_out,
                                       args.pass_l_flag)
     os.environ.update(new_env)
     return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()
 
+
 if __name__ == '__main__':
     sys.exit(wrap_rustc(sys.argv[1:]))
--- a/python/mozbuild/mozbuild/action/xpccheck.py
+++ b/python/mozbuild/mozbuild/action/xpccheck.py
@@ -10,74 +10,81 @@ Usage: xpccheck.py <directory> [<directo
 
 from __future__ import absolute_import
 
 import sys
 import os
 from glob import glob
 import manifestparser
 
+
 def getIniTests(testdir):
-  mp = manifestparser.ManifestParser(strict=False)
-  mp.read(os.path.join(testdir, 'xpcshell.ini'))
-  return mp.tests
+    mp = manifestparser.ManifestParser(strict=False)
+    mp.read(os.path.join(testdir, 'xpcshell.ini'))
+    return mp.tests
+
 
 def verifyDirectory(initests, directory):
-  files = glob(os.path.join(os.path.abspath(directory), "test_*"))
-  for f in files:
-    if (not os.path.isfile(f)):
-      continue
+    files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+    for f in files:
+        if (not os.path.isfile(f)):
+            continue
 
-    name = os.path.basename(f)
-    if name.endswith('.in'):
-      name = name[:-3]
+        name = os.path.basename(f)
+        if name.endswith('.in'):
+            name = name[:-3]
+
+        if not name.endswith('.js'):
+            continue
 
-    if not name.endswith('.js'):
-      continue
+        found = False
+        for test in initests:
+            if os.path.join(os.path.abspath(directory), name) == test['path']:
+                found = True
+                break
 
-    found = False
-    for test in initests:
-      if os.path.join(os.path.abspath(directory), name) == test['path']:
-        found = True
-        break
-   
-    if not found:
-      print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
-      sys.exit(1)
+        if not found:
+            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (
+                name, os.path.join(directory, 'xpcshell.ini'))
+            sys.exit(1)
+
 
 def verifyIniFile(initests, directory):
-  files = glob(os.path.join(os.path.abspath(directory), "test_*"))
-  for test in initests:
-    name = test['path'].split('/')[-1]
+    files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+    for test in initests:
+        name = test['path'].split('/')[-1]
 
-    found = False
-    for f in files:
+        found = False
+        for f in files:
 
-      fname = f.split('/')[-1]
-      if fname.endswith('.in'):
-        fname = '.in'.join(fname.split('.in')[:-1])
+            fname = f.split('/')[-1]
+            if fname.endswith('.in'):
+                fname = '.in'.join(fname.split('.in')[:-1])
 
-      if os.path.join(os.path.abspath(directory), fname) == test['path']:
-        found = True
-        break
+            if os.path.join(os.path.abspath(directory), fname) == test['path']:
+                found = True
+                break
 
-    if not found:
-      print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
-      sys.exit(1)
+        if not found:
+            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (
+                name, directory)
+            sys.exit(1)
+
 
 def main(argv):
-  if len(argv) < 2:
-    print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
-    sys.exit(1)
+    if len(argv) < 2:
+        print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
+        sys.exit(1)
 
-  topsrcdir = argv[0]
-  for d in argv[1:]:
-    # xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
-    # we copy all files (including xpcshell.ini from the sibling directory.
-    if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
-      continue
+    topsrcdir = argv[0]
+    for d in argv[1:]:
+        # xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
+        # we copy all files (including xpcshell.ini from the sibling directory.
+        if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
+            continue
 
-    initests = getIniTests(d)
-    verifyDirectory(initests, d)
-    verifyIniFile(initests, d)
+        initests = getIniTests(d)
+        verifyDirectory(initests, d)
+        verifyIniFile(initests, d)
+
 
 if __name__ == '__main__':
     main(sys.argv[1:])
--- a/python/mozbuild/mozbuild/action/xpidl-process.py
+++ b/python/mozbuild/mozbuild/action/xpidl-process.py
@@ -82,37 +82,38 @@ def process(input_dirs, inc_paths, bindi
         deps_path = os.path.join(deps_dir, '%s.pp' % module)
         with FileAvoidWrite(deps_path) as fh:
             mk.dump(fh)
 
 
 def main(argv):
     parser = argparse.ArgumentParser()
     parser.add_argument('--cache-dir',
-        help='Directory in which to find or write cached lexer data.')
+                        help='Directory in which to find or write cached lexer data.')
     parser.add_argument('--depsdir',
-        help='Directory in which to write dependency files.')
+                        help='Directory in which to write dependency files.')
     parser.add_argument('--bindings-conf',
-        help='Path to the WebIDL binding configuration file.')
+                        help='Path to the WebIDL binding configuration file.')
     parser.add_argument('--input-dir', dest='input_dirs',
                         action='append', default=[],
                         help='Directory(ies) in which to find source .idl files.')
     parser.add_argument('headerdir',
-        help='Directory in which to write header files.')
+                        help='Directory in which to write header files.')
     parser.add_argument('xpcrsdir',
-        help='Directory in which to write rust xpcom binding files.')
+                        help='Directory in which to write rust xpcom binding files.')
     parser.add_argument('xptdir',
-        help='Directory in which to write xpt file.')
+                        help='Directory in which to write xpt file.')
     parser.add_argument('module',
-        help='Final module name to use for linked output xpt file.')
+                        help='Final module name to use for linked output xpt file.')
     parser.add_argument('idls', nargs='+',
-        help='Source .idl file(s).')
+                        help='Source .idl file(s).')
     parser.add_argument('-I', dest='incpath', action='append', default=[],
-        help='Extra directories where to look for included .idl files.')
+                        help='Extra directories where to look for included .idl files.')
 
     args = parser.parse_args(argv)
     incpath = [os.path.join(topsrcdir, p) for p in args.incpath]
     process(args.input_dirs, incpath, args.bindings_conf, args.cache_dir,
-        args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
-        args.idls)
+            args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
+            args.idls)
+
 
 if __name__ == '__main__':
     main(sys.argv[1:])
--- a/python/mozbuild/mozbuild/analyze/graph.py
+++ b/python/mozbuild/mozbuild/analyze/graph.py
@@ -1,15 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 import sqlite3 as lite
 
+
 class Node(object):
 
     def __init__(self, graph, node_id):
         sql_result = graph.query_arg('SELECT id, dir, type, mtime, name \
             FROM node WHERE id=?', (node_id,)).fetchone()
         self.id, self.dir, self.type, self.mtime, self.name = sql_result
         children = graph.query_arg('SELECT to_id FROM \
             normal_link WHERE from_id=?', (self.id,)).fetchall()
@@ -34,21 +35,22 @@ class Node(object):
 
     def get_path(self, graph):
         if self.dir == 1:
             return self.name
         parent = graph.get_node(self.dir)
         return os.path.join(parent.get_path(graph), self.name)
 
     def calculate_mtime(self):
-        if self.type == 0: # only files have meaningful costs
+        if self.type == 0:  # only files have meaningful costs
             return sum(x.mtime for x in self.cmds)
         else:
             return None
 
+
 class Graph(object):
 
     def __init__(self, path=None, connect=None):
         self.connect = connect
         if path is not None:
             self.connect = lite.connect(path)
         elif self.connect is None:
             raise Exception
@@ -62,17 +64,17 @@ class Graph(object):
         tables = [x[0] for x in self.query_arg('SELECT name \
             FROM sqlite_master WHERE type=?', ('table',)).fetchall()]
         return ('node' in tables and 'normal_link' in tables)
 
     def close(self):
         self.connect.close()
 
     def query_arg(self, q, arg):
-        assert isinstance(arg, tuple) #execute() requires tuple argument
+        assert isinstance(arg, tuple)  # execute() requires tuple argument
         cursor = self.connect.cursor()
         cursor.execute(q, arg)
         return cursor
 
     def query(self, q):
         cursor = self.connect.cursor()
         cursor.execute(q)
         return cursor
@@ -107,25 +109,24 @@ class Graph(object):
     def file_summaries(self, files):
         for f in files:
             node = self.get_node(self.get_id(f))
             if node is not None:
                 sec = node.cost / 1000.0
                 m, s = sec / 60, sec % 60
                 print ("\n------ Summary for %s ------\
                     \nTotal Build Time (mm:ss) = %d:%d\nNum Downstream Commands = %d"
-                    % (f, m, s, node.num_cmds))
+                       % (f, m, s, node.num_cmds))
 
     def populate(self):
         # make nodes for files with downstream commands
         files = self.query('SELECT id FROM node WHERE type=0 AND id in \
             (SELECT DISTINCT from_id FROM normal_link)').fetchall()
         res = []
         for (i,) in files:
             node = self.get_node(i)
             res.append((node.path, node.cost))
         self.results = res
 
     def get_cost_dict(self):
         if self.results is None:
             self.populate()
-        return {k:v for k,v in self.results if v > 0}
-
+        return {k: v for k, v in self.results if v > 0}
--- a/python/mozbuild/mozbuild/analyze/hg.py
+++ b/python/mozbuild/mozbuild/analyze/hg.py
@@ -12,55 +12,62 @@ from datetime import datetime, timedelta
 from collections import Counter
 
 import mozpack.path as mozpath
 
 PUSHLOG_CHUNK_SIZE = 500
 
 URL = 'https://hg.mozilla.org/mozilla-central/json-pushes?'
 
+
 def unix_epoch(date):
-    return (date - datetime(1970,1,1)).total_seconds()
+    return (date - datetime(1970, 1, 1)).total_seconds()
+
 
 def unix_from_date(n, today):
     return unix_epoch(today - timedelta(days=n))
 
+
 def get_lastpid(session):
     return session.get(URL+'&version=2').json()['lastpushid']
 
+
 def get_pushlog_chunk(session, start, end):
     # returns pushes sorted by date
     res = session.get(URL+'version=1&startID={0}&\
         endID={1}&full=1'.format(start, end)).json()
-    return sorted(res.items(), key = lambda x: x[1]['date'])
+    return sorted(res.items(), key=lambda x: x[1]['date'])
+
 
 def collect_data(session, date):
-    if date < 1206031764: #first push
-        raise Exception ("No pushes exist before March 20, 2008.")
+    if date < 1206031764:  # first push
+        raise Exception("No pushes exist before March 20, 2008.")
     lastpushid = get_lastpid(session)
     data = []
     start_id = lastpushid - PUSHLOG_CHUNK_SIZE
     end_id = lastpushid + 1
     while True:
         res = get_pushlog_chunk(session, start_id, end_id)
-        starting_date = res[0][1]['date'] # date of oldest push in chunk
+        starting_date = res[0][1]['date']  # date of oldest push in chunk
         dates = [x[1]['date'] for x in res]
         if starting_date < date:
             i = bisect.bisect_left(dates, date)
             data.append(res[i:])
             return data
         else:
             data.append(res)
             end_id = start_id + 1
             start_id = start_id - PUSHLOG_CHUNK_SIZE
 
+
 def get_data(epoch):
     session = requests.Session()
     data = collect_data(session, epoch)
-    return {k:v for sublist in data for (k,v) in sublist}
+    return {k: v for sublist in data for (k, v) in sublist}
+
 
 class Pushlog(object):
 
     def __init__(self, days):
         info = get_data(unix_from_date(days, datetime.today()))
         self.pushlog = info
         self.pids = self.get_pids()
         self.pushes = self.make_pushes()
@@ -73,23 +80,25 @@ class Pushlog(object):
         all_pushes = self.pushlog
         return [Push(pid, all_pushes[str(pid)]) for pid in pids]
 
     def get_pids(self):
         keys = self.pushlog.keys()
         keys.sort()
         return keys
 
+
 class Push(object):
 
     def __init__(self, pid, p_dict):
         self.id = pid
         self.date = p_dict['date']
         self.files = [f for x in p_dict['changesets'] for f in x['files']]
 
+
 class Report(object):
 
     def __init__(self, days, path=None, cost_dict=None):
         obj = Pushlog(days)
         self.file_set = obj.file_set
         self.file_count = obj.file_count
         self.name = str(days) + 'day_report'
         self.cost_dict = self.get_cost_dict(path, cost_dict)
@@ -107,17 +116,17 @@ class Report(object):
     def organize_data(self):
         costs = self.cost_dict
         counts = self.file_count
         res = []
         for f in self.file_set:
             cost = costs.get(f)
             count = counts.get(f)
             if cost is not None:
-                res.append((f, cost, count, round(cost*count,3)))
+                res.append((f, cost, count, round(cost*count, 3)))
         return res
 
     def get_sorted_report(self, format):
         res = self.organize_data()
         res.sort(key=(lambda x: x[3]), reverse=True)
 
         def ms_to_mins_secs(ms):
             secs = ms / 1000.0
@@ -138,26 +147,26 @@ class Report(object):
             return lst[:size]
 
     def generate_output(self, format, limit, dst):
         import tablib
         data = tablib.Dataset(headers=['FILE', 'TIME', 'CHANGES', 'TOTAL'])
         res = self.get_sorted_report(format)
         if limit is not None:
             res = self.cut(limit, res)
-        for x in res: data.append(x)
+        for x in res:
+            data.append(x)
         if format == 'pretty':
             print (data)
         else:
             file_name = self.name + '.' + format
             content = None
             data.export(format)
             if format == 'csv':
                 content = data.csv
             elif format == 'json':
                 content = data.json
             else:
                 content = data.html
             file_path = mozpath.join(dst, file_name)
             with open(file_path, 'wb') as f:
                 f.write(content)
             print ("Created report: %s" % file_path)
-
--- a/python/mozbuild/mozbuild/android_version_code.py
+++ b/python/mozbuild/mozbuild/android_version_code.py
@@ -6,17 +6,18 @@ from __future__ import absolute_import, 
 
 import argparse
 import math
 import sys
 import time
 
 # Builds before this build ID use the v0 version scheme.  Builds after this
 # build ID use the v1 version scheme.
-V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
+V1_CUTOFF = 20150801000000  # YYYYmmddHHMMSS
+
 
 def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
     base = int(str(buildid)[:10])
     # None is interpreted as arm.
     if not cpu_arch or cpu_arch == 'armeabi-v7a':
         # Increment by MIN_SDK_VERSION -- this adds 9 to every build ID as a
         # minimum.  Our split APK starts at 15.
         return base + min_sdk + 0
@@ -25,16 +26,17 @@ def android_version_code_v0(buildid, cpu
         # x86 phones that have ARM emulators, beating the 2-point advantage that
         # the v15+ ARMv7 APK has.  If we change our splits in the future, we'll
         # need to do this further still.
         return base + min_sdk + 3
     else:
         raise ValueError("Don't know how to compute android:versionCode "
                          "for CPU arch %s" % cpu_arch)
 
+
 def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
     '''Generate a v1 android:versionCode.
 
     The important consideration is that version codes be monotonically
     increasing (per Android package name) for all published builds.  The input
     build IDs are based on timestamps and hence are always monotonically
     increasing.
 
@@ -129,16 +131,17 @@ def android_version_code_v1(buildid, cpu
         raise ValueError("Don't know how to compute android:versionCode "
                          "for CPU arch %s" % cpu_arch)
 
     # 'g' bit is currently always 1, but may depend on `min_sdk` in the future.
     version |= 1 << 0
 
     return version
 
+
 def android_version_code(buildid, *args, **kwargs):
     base = int(str(buildid))
     if base < V1_CUTOFF:
         return android_version_code_v0(buildid, *args, **kwargs)
     else:
         return android_version_code_v1(buildid, *args, **kwargs)
 
 
@@ -161,17 +164,17 @@ def main(argv):
     parser.add_argument('--with-android-max-sdk-version', dest='max_sdk',
                         type=int, default=0,
                         help='The maximum target SDK')
     parser.add_argument('buildid', type=int,
                         help='The input build ID')
 
     args = parser.parse_args(argv)
     code = android_version_code(args.buildid,
-        cpu_arch=args.cpu_arch,
-        min_sdk=args.min_sdk,
-        max_sdk=args.max_sdk)
+                                cpu_arch=args.cpu_arch,
+                                min_sdk=args.min_sdk,
+                                max_sdk=args.max_sdk)
     print(code)
     return 0
 
 
 if __name__ == '__main__':
     sys.exit(main(sys.argv[1:]))
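
A worked illustration of the v0 arithmetic described in the comments above;
the build ID and min_sdk value are invented, the offsets are the ones
android_version_code_v0() applies:

    buildid = 20150101000000       # YYYYmmddHHMMSS, below V1_CUTOFF, so v0
    base = int(str(buildid)[:10])  # 2015010100
    arm_code = base + 9 + 0        # armeabi-v7a: base + min_sdk
    x86_code = base + 9 + 3        # x86 sorts 3 points above the ARM APK
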
--- a/python/mozbuild/mozbuild/artifacts.py
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -72,17 +72,18 @@ from mozpack.files import (
 )
 from mozpack.mozjar import (
     JarReader,
     JarWriter,
 )
 from mozpack.packager.unpack import UnpackFinder
 import mozpack.path as mozpath
 
-NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50  # Number of candidate pushheads to cache per parent changeset.
+# Number of candidate pushheads to cache per parent changeset.
+NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50
 
 # Number of parent changesets to consider as possible pushheads.
 # There isn't really such a thing as a reasonable default here, because we don't
 # know how many pushheads we'll need to look at to find a build with our artifacts,
 # and we don't know how many changesets will be in each push. For now we assume
 # we'll find a build in the last 50 pushes, assuming each push contains 10 changesets.
 NUM_REVISIONS_TO_QUERY = 500
 
@@ -308,27 +309,28 @@ class AndroidArtifactJob(ArtifactJob):
         # Extract all .so files into the root, which will get copied into dist/bin.
         with JarWriter(file=processed_filename, compress_level=5) as writer:
             for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                 if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                     continue
 
                 dirname, basename = os.path.split(p)
                 self.log(logging.INFO, 'artifact',
-                    {'basename': basename},
-                   'Adding {basename} to processed archive')
+                         {'basename': basename},
+                         'Adding {basename} to processed archive')
 
                 basedir = 'bin'
                 if not basename.endswith('.so'):
                     basedir = mozpath.join('bin', dirname.lstrip('assets/'))
                 basename = mozpath.join(basedir, basename)
                 writer.add(basename.encode('utf-8'), f.open())
 
     def process_symbols_archive(self, filename, processed_filename):
-        ArtifactJob.process_symbols_archive(self, filename, processed_filename, skip_compressed=True)
+        ArtifactJob.process_symbols_archive(
+            self, filename, processed_filename, skip_compressed=True)
 
         if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
             return
 
         import gzip
 
         with JarWriter(file=processed_filename, compress_level=5) as writer:
             reader = JarReader(filename)
@@ -435,18 +437,18 @@ class MacArtifactJob(ArtifactJob):
         root, paths = self._paths_no_keep_path
         return (root, [p.format(product=self.product) for p in paths])
 
     def process_package_artifact(self, filename, processed_filename):
         tempdir = tempfile.mkdtemp()
         oldcwd = os.getcwd()
         try:
             self.log(logging.INFO, 'artifact',
-                {'tempdir': tempdir},
-                'Unpacking DMG into {tempdir}')
+                     {'tempdir': tempdir},
+                     'Unpacking DMG into {tempdir}')
             if self._substs['HOST_OS_ARCH'] == 'Linux':
                 # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                 os.chdir(tempdir)
                 with open(os.devnull, 'wb') as devnull:
                     subprocess.check_call([
                         self._substs['DMG_TOOL'],
                         'extract',
                         filename,
@@ -481,18 +483,18 @@ class MacArtifactJob(ArtifactJob):
             ]
 
             with JarWriter(file=processed_filename, compress_level=5) as writer:
                 root, paths = self.paths_no_keep_path
                 finder = UnpackFinder(mozpath.join(source, root))
                 for path in paths:
                     for p, f in finder.find(path):
                         self.log(logging.INFO, 'artifact',
-                            {'path': p},
-                            'Adding {path} to processed archive')
+                                 {'path': p},
+                                 'Adding {path} to processed archive')
                         destpath = mozpath.join('bin', os.path.basename(p))
                         writer.add(destpath.encode('utf-8'), f, mode=f.mode)
 
                 for root, paths in paths_keep_path:
                     finder = UnpackFinder(mozpath.join(source, root))
                     for path in paths:
                         for p, f in finder.find(path):
                             self.log(logging.INFO, 'artifact',
@@ -502,18 +504,18 @@ class MacArtifactJob(ArtifactJob):
                             writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
 
         finally:
             os.chdir(oldcwd)
             try:
                 shutil.rmtree(tempdir)
             except (OSError, IOError):
                 self.log(logging.WARN, 'artifact',
-                    {'tempdir': tempdir},
-                    'Unable to delete {tempdir}')
+                         {'tempdir': tempdir},
+                         'Unable to delete {tempdir}')
                 pass
 
 
 class WinArtifactJob(ArtifactJob):
     package_re = r'public/build/target\.(zip|tar\.gz)'
     product = 'firefox'
 
     _package_artifact_patterns = {
@@ -556,18 +558,18 @@ class WinArtifactJob(ArtifactJob):
             for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                 if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                     continue
 
                 # strip off the relative "firefox/" bit from the path:
                 basename = mozpath.relpath(p, self.product)
                 basename = mozpath.join('bin', basename)
                 self.log(logging.INFO, 'artifact',
-                    {'basename': basename},
-                    'Adding {basename} to processed archive')
+                         {'basename': basename},
+                         'Adding {basename} to processed archive')
                 writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
                 added_entry = True
 
         if not added_entry:
             raise ValueError('Archive format changed! No pattern from "{patterns}" '
                              'matched an archive path.'.format(
                                  patterns=self.artifact_patterns))
 
@@ -663,65 +665,68 @@ class CacheManager(object):
 
     def log(self, *args, **kwargs):
         if self._log:
             self._log(*args, **kwargs)
 
     def load_cache(self):
         if self._skip_cache:
             self.log(logging.INFO, 'artifact',
-                {},
-                'Skipping cache: ignoring load_cache!')
+                     {},
+                     'Skipping cache: ignoring load_cache!')
             return
 
         try:
             items = pickle.load(open(self._cache_filename, 'rb'))
             for key, value in items:
                 self._cache[key] = value
         except Exception as e:
             # Corrupt cache, perhaps?  Sadly, pickle raises many different
             # exceptions, so it's not worth trying to be fine grained here.
             # We ignore any exception, so the cache is effectively dropped.
             self.log(logging.INFO, 'artifact',
-                {'filename': self._cache_filename, 'exception': repr(e)},
-                'Ignoring exception unpickling cache file {filename}: {exception}')
+                     {'filename': self._cache_filename, 'exception': repr(e)},
+                     'Ignoring exception unpickling cache file {filename}: {exception}')
             pass
 
     def dump_cache(self):
         if self._skip_cache:
             self.log(logging.INFO, 'artifact',
-                {},
-                'Skipping cache: ignoring dump_cache!')
+                     {},
+                     'Skipping cache: ignoring dump_cache!')
             return
 
         ensureParentDir(self._cache_filename)
-        pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
+        pickle.dump(list(reversed(list(self._cache.items()))),
+                    open(self._cache_filename, 'wb'), -1)
 
     def clear_cache(self):
         if self._skip_cache:
             self.log(logging.INFO, 'artifact',
-                {},
-                'Skipping cache: ignoring clear_cache!')
+                     {},
+                     'Skipping cache: ignoring clear_cache!')
             return
 
         with self:
             self._cache.clear()
 
     def __enter__(self):
         self.load_cache()
         return self
 
     def __exit__(self, type, value, traceback):
         self.dump_cache()
 
+
 class PushheadCache(CacheManager):
     '''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''
 
     def __init__(self, cache_dir, log=None, skip_cache=False):
-        CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+        CacheManager.__init__(self, cache_dir, 'pushhead_cache',
+                              MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
 
     @cachedmethod(operator.attrgetter('_cache'))
     def parent_pushhead_id(self, tree, revision):
         cset_url_tmpl = ('https://hg.mozilla.org/{tree}/json-pushes?'
                          'changeset={changeset}&version=2&tipsonly=1')
         req = requests.get(cset_url_tmpl.format(tree=tree, changeset=revision),
                            headers={'Accept': 'application/json'})
         if req.status_code not in range(200, 300):
@@ -738,21 +743,23 @@ class PushheadCache(CacheManager):
         req = requests.get(pushid_url_tmpl.format(tree=tree, start=start,
                                                   end=end),
                            headers={'Accept': 'application/json'})
         result = req.json()
         return [
             p['changesets'][-1] for p in result['pushes'].values()
         ]
 
+
 class TaskCache(CacheManager):
     '''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
 
     def __init__(self, cache_dir, log=None, skip_cache=False):
-        CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+        CacheManager.__init__(self, cache_dir, 'artifact_url',
+                              MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
 
     @cachedmethod(operator.attrgetter('_cache'))
     def artifacts(self, tree, job, artifact_job_class, rev):
         # Grab the second part of the repo name, which is generally how things
         # are indexed. Eg: 'integration/mozilla-inbound' is indexed as
         # 'mozilla-inbound'
         tree = tree.split('/')[1] if '/' in tree else tree
 
@@ -777,17 +784,18 @@ class TaskCache(CacheManager):
         self.log(logging.INFO, 'artifact',
                  {'namespace': namespace},
                  'Searching Taskcluster index with namespace: {namespace}')
         try:
             taskId = find_task_id(namespace)
         except KeyError:
             # Not all revisions correspond to pushes that produce the job we
             # care about; and even those that do may not have completed yet.
-            raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
+            raise ValueError(
+                'Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
 
         return taskId, list_artifacts(taskId)
 
 
 class Artifacts(object):
     '''Maintain state to efficiently fetch build artifacts from a Firefox tree.'''
 
     def __init__(self, tree, substs, defines, job=None, log=None,
@@ -815,23 +823,25 @@ class Artifacts(object):
             cls = job_details[self._job]
             self._artifact_job = cls(log=self._log,
                                      download_tests=download_tests,
                                      download_symbols=download_symbols,
                                      download_host_bins=download_host_bins,
                                      substs=self._substs)
         except KeyError:
             self.log(logging.INFO, 'artifact',
-                {'job': self._job},
-                'Unknown job {job}')
+                     {'job': self._job},
+                     'Unknown job {job}')
             raise KeyError("Unknown job")
 
         self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
-        self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
-        self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+        self._artifact_cache = ArtifactCache(
+            self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+        self._pushhead_cache = PushheadCache(
+            self._cache_dir, log=self._log, skip_cache=self._skip_cache)
 
     def log(self, *args, **kwargs):
         if self._log:
             self._log(*args, **kwargs)
 
     def _guess_artifact_job(self):
         # Add the "-debug" suffix to the guessed artifact job name
         # if MOZ_DEBUG is enabled.
@@ -948,18 +958,18 @@ class Artifacts(object):
         if len(last_revs) == 0:
             raise Exception("""\
 There are no public revisions.
 This can happen if the repository is created from a bundle file and never pulled
 from a remote.  Please run `hg pull` and build again.
 see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")
 
         self.log(logging.INFO, 'artifact',
-            {'len': len(last_revs)},
-            'hg suggested {len} candidate revisions')
+                 {'len': len(last_revs)},
+                 'hg suggested {len} candidate revisions')
 
         def to_pair(line):
             rev, node = line.split(':', 1)
             return (int(rev), node)
 
         pairs = map(to_pair, last_revs)
 
         # Python's tuple sort orders by first component: here, the (local)
@@ -997,17 +1007,18 @@ see https://developer.mozilla.org/en-US/
         if not count:
             raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
                             'Search started with {rev}, which must be known to Mozilla automation.\n\n'
                             'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
                                 rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
 
     def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
         try:
-            taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
+            taskId, artifacts = task_cache.artifacts(
+                tree, job, self._artifact_job.__class__, pushhead)
         except ValueError:
             return None
 
         urls = []
         for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
             # We can easily extract the task ID from the URL.  We can't easily
             # extract the build ID; we use the .ini files embedded in the
             # downloaded artifact for this.
@@ -1018,69 +1029,69 @@ see https://developer.mozilla.org/en-US/
                      {'pushhead': pushhead,
                       'tree': tree},
                      'Installing from remote pushhead {pushhead} on {tree}')
             return urls
         return None
 
     def install_from_file(self, filename, distdir):
         self.log(logging.INFO, 'artifact',
-            {'filename': filename},
-            'Installing from {filename}')
+                 {'filename': filename},
+                 'Installing from {filename}')
 
         # Do we need to post-process?
         processed_filename = filename + PROCESSED_SUFFIX
 
         if self._skip_cache and os.path.exists(processed_filename):
             self.log(logging.INFO, 'artifact',
-                {'path': processed_filename},
-                'Skipping cache: removing cached processed artifact {path}')
+                     {'path': processed_filename},
+                     'Skipping cache: removing cached processed artifact {path}')
             os.remove(processed_filename)
 
         if not os.path.exists(processed_filename):
             self.log(logging.INFO, 'artifact',
-                {'filename': filename},
-                'Processing contents of {filename}')
+                     {'filename': filename},
+                     'Processing contents of {filename}')
             self.log(logging.INFO, 'artifact',
-                {'processed_filename': processed_filename},
-                'Writing processed {processed_filename}')
+                     {'processed_filename': processed_filename},
+                     'Writing processed {processed_filename}')
             self._artifact_job.process_artifact(filename, processed_filename)
 
         self._artifact_cache._persist_limit.register_file(processed_filename)
 
         self.log(logging.INFO, 'artifact',
-            {'processed_filename': processed_filename},
-            'Installing from processed {processed_filename}')
+                 {'processed_filename': processed_filename},
+                 'Installing from processed {processed_filename}')
 
         # Copy all .so files, avoiding modification where possible.
         ensureParentDir(mozpath.join(distdir, '.dummy'))
 
         with zipfile.ZipFile(processed_filename) as zf:
             for info in zf.infolist():
                 if info.filename.endswith('.ini'):
                     continue
                 n = mozpath.join(distdir, info.filename)
                 fh = FileAvoidWrite(n, mode='rb')
                 shutil.copyfileobj(zf.open(info), fh)
                 file_existed, file_updated = fh.close()
                 self.log(logging.INFO, 'artifact',
-                    {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
-                    '{updating} {filename}')
+                         {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
+                         '{updating} {filename}')
                 if not file_existed or file_updated:
                     # Libraries and binaries may need to be marked executable,
                     # depending on platform.
-                    perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
-                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
+                    perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
+                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                     os.chmod(n, perms)
         return 0
 
     def install_from_url(self, url, distdir):
         self.log(logging.INFO, 'artifact',
-            {'url': url},
-            'Installing from {url}')
+                 {'url': url},
+                 'Installing from {url}')
         filename = self._artifact_cache.fetch(url)
         return self.install_from_file(filename, distdir)
 
     def _install_from_hg_pushheads(self, hg_pushheads, distdir):
         """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
         and tree-sets they are known to be in, trying to download and
         install from each.
         """
@@ -1112,17 +1123,17 @@ see https://developer.mozilla.org/en-US/
         hg_pushheads = self._find_pushheads()
         return self._install_from_hg_pushheads(hg_pushheads, distdir)
 
     def install_from_revset(self, revset, distdir):
         revision = None
         try:
             if self._hg:
                 revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
-                                                  '-r', revset], cwd=self._topsrcdir).strip()
+                                                    '-r', revset], cwd=self._topsrcdir).strip()
             elif self._git:
                 revset = subprocess.check_output([
                     self._git, 'rev-parse', '%s^{commit}' % revset],
                     stderr=open(os.devnull, 'w'), cwd=self._topsrcdir).strip()
             else:
                 # Fallback to the exception handling case from both hg and git
                 raise subprocess.CalledProcessError()
         except subprocess.CalledProcessError:
@@ -1160,17 +1171,18 @@ see https://developer.mozilla.org/en-US/
         urls = []
         for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
             # We can easily extract the task ID from the URL.  We can't easily
             # extract the build ID; we use the .ini files embedded in the
             # downloaded artifact for this.
             url = get_artifact_url(taskId, artifact_name)
             urls.append(url)
         if not urls:
-            raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
+            raise ValueError(
+                'Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
         for url in urls:
             if self.install_from_url(url, distdir):
                 return 1
         return 0
 
     def install_from(self, source, distdir):
         """Install artifacts from a ``source`` into the given ``distdir``.
         """
@@ -1189,16 +1201,15 @@ see https://developer.mozilla.org/en-US/
                 'MOZ_ARTIFACT_TASK_%s' % self._job.upper().replace('-', '_'),
                 'MOZ_ARTIFACT_TASK',
             ):
                 if var in os.environ:
                     return self.install_from_task(os.environ[var], distdir)
 
             return self.install_from_recent(distdir)
 
-
     def clear_cache(self):
         self.log(logging.INFO, 'artifact',
-            {},
-            'Deleting cached artifacts and caches.')
+                 {},
+                 'Deleting cached artifacts and caches.')
         self._task_cache.clear_cache()
         self._artifact_cache.clear_cache()
         self._pushhead_cache.clear_cache()
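
The cache classes above all share one context-manager shape: load_cache() on
__enter__, dump_cache() on __exit__, with corrupt pickles treated as empty.
A stripped-down sketch of that shape (DiskCache and its path handling are
illustrative, not the real CacheManager API):

    import pickle

    class DiskCache(object):
        def __init__(self, path):
            self._path = path
            self._cache = {}

        def __enter__(self):
            try:
                with open(self._path, 'rb') as f:
                    for key, value in pickle.load(f):
                        self._cache[key] = value
            except Exception:
                # Missing or corrupt cache files are treated as empty,
                # mirroring load_cache() above.
                pass
            return self

        def __exit__(self, type, value, traceback):
            with open(self._path, 'wb') as f:
                pickle.dump(list(self._cache.items()), f, -1)
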
--- a/python/mozbuild/mozbuild/backend/common.py
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -52,16 +52,17 @@ from mozbuild.jar import (
 from mozbuild.preprocessor import Preprocessor
 from mozpack.chrome.manifest import parse_manifest_line
 
 from mozbuild.util import (
     group_unified_files,
     mkdir,
 )
 
+
 class XPIDLManager(object):
     """Helps manage XPCOM IDLs in the context of the build system."""
 
     class Module(object):
         def __init__(self):
             self.idl_files = set()
             self.directories = set()
             self._stems = set()
@@ -97,23 +98,25 @@ class XPIDLManager(object):
 
     def idl_stems(self):
         """Return an iterator of stems of the managed IDL files.
 
         The stem of an IDL file is the basename of the file with no .idl extension.
         """
         return itertools.chain(*[m.stems() for m in self.modules.itervalues()])
 
+
 class BinariesCollection(object):
     """Tracks state of binaries produced by the build."""
 
     def __init__(self):
         self.shared_libraries = []
         self.programs = []
 
+
 class CommonBackend(BuildBackend):
     """Holds logic common to all build backends."""
 
     def _init(self):
         self._idl_manager = XPIDLManager(self.environment)
         self._binaries = BinariesCollection()
         self._configs = set()
         self._generated_sources = set()
@@ -177,17 +180,18 @@ class CommonBackend(BuildBackend):
         elif isinstance(obj, GeneratedFile):
             if obj.required_for_compile:
                 for f in obj.required_for_compile:
                     fullpath = ObjDirPath(obj._context, '!' + f).full_path
                     self._handle_generated_sources([fullpath])
             return False
 
         elif isinstance(obj, Exports):
-            objdir_files = [f.full_path for path, files in obj.files.walk() for f in files if isinstance(f, ObjDirPath)]
+            objdir_files = [f.full_path for path, files in obj.files.walk()
+                            for f in files if isinstance(f, ObjDirPath)]
             if objdir_files:
                 self._handle_generated_sources(objdir_files)
             return False
 
         elif isinstance(obj, GnProjectData):
             # These are only handled by special purpose build backends,
             # ignore them here.
             return True
@@ -199,17 +203,16 @@ class CommonBackend(BuildBackend):
 
     def consume_finished(self):
         if len(self._idl_manager.modules):
             self._write_rust_xpidl_summary(self._idl_manager)
             self._handle_idl_manager(self._idl_manager)
             self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
                                            for stem in self._idl_manager.idl_stems())
 
-
         for config in self._configs:
             self.backend_input_files.add(config.source)
 
         # Write out a machine-readable file describing binaries.
         topobjdir = self.environment.topobjdir
         with self._write_file(mozpath.join(topobjdir, 'binaries.json')) as fh:
             d = {
                 'shared_libraries': [s.to_dict() for s in self._binaries.shared_libraries],
@@ -289,17 +292,17 @@ class CommonBackend(BuildBackend):
                     seen_libs.add(lib)
                     shared_libs.append(lib)
 
         for lib in input_bin.linked_system_libs:
             if lib not in seen_libs:
                 seen_libs.add(lib)
                 os_libs.append(lib)
 
-        return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs, \
+        return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs,
                 shared_libs, os_libs, static_libs)
 
     def _make_list_file(self, kind, objdir, objs, name):
         if not objs:
             return None
         if kind == 'target':
             list_style = self.environment.substs.get('EXPAND_LIBS_LIST_STYLE')
         else:
@@ -327,17 +330,18 @@ class CommonBackend(BuildBackend):
 
         mkdir(objdir)
         with self._write_file(list_file_path) as fh:
             fh.write(content)
 
         return ref
 
     def _handle_generated_sources(self, files):
-        self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)
+        self._generated_sources.update(mozpath.relpath(
+            f, self.environment.topobjdir) for f in files)
 
     def _handle_webidl_collection(self, webidls):
 
         bindings_dir = mozpath.join(self.environment.topobjdir, 'dom', 'bindings')
 
         all_inputs = set(webidls.all_static_sources())
         for s in webidls.all_non_static_basenames():
             all_inputs.add(mozpath.join(bindings_dir, s))
@@ -406,17 +410,17 @@ class CommonBackend(BuildBackend):
                 'so it cannot be built in unified mode."\n'
                 '#undef PL_ARENA_CONST_ALIGN_MASK\n'
                 '#endif\n'
                 '#ifdef INITGUID\n'
                 '#error "%(cppfile)s defines INITGUID, '
                 'so it cannot be built in unified mode."\n'
                 '#undef INITGUID\n'
                 '#endif')
-            f.write('\n'.join(includeTemplate % { "cppfile": s } for
+            f.write('\n'.join(includeTemplate % {"cppfile": s} for
                               s in source_filenames))
 
     def _write_unified_files(self, unified_source_mapping, output_directory,
                              poison_windows_h=False):
         for unified_file, source_filenames in unified_source_mapping:
             self._write_unified_file(unified_file, source_filenames,
                                      output_directory, poison_windows_h)
 
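
For reference, the join at the end of the last hunk expands the include
template once per unified source; a simplified sketch (the real template also
carries the PL_ARENA_CONST_ALIGN_MASK/INITGUID error guards, and the file
names here are invented):

    includeTemplate = '#include "%(cppfile)s"'
    source_filenames = ['UnifiedFoo.cpp', 'UnifiedBar.cpp']
    print('\n'.join(includeTemplate % {"cppfile": s}
                    for s in source_filenames))
    # #include "UnifiedFoo.cpp"
    # #include "UnifiedBar.cpp"
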
--- a/python/mozbuild/mozbuild/backend/configenvironment.py
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -113,17 +113,17 @@ class ConfigEnvironment(object):
 
     ConfigEnvironment expects a "top_srcdir" subst to be set with the top
     source directory, in msys format on windows. It is used to derive a
     "srcdir" subst when treating config files. It can either be an absolute
     path or a path relative to the topobjdir.
     """
 
     def __init__(self, topsrcdir, topobjdir, defines=None,
-        non_global_defines=None, substs=None, source=None, mozconfig=None):
+                 non_global_defines=None, substs=None, source=None, mozconfig=None):
 
         if not source:
             source = mozpath.join(topobjdir, 'config.status')
         self.source = source
         self.defines = ReadOnlyDict(defines or {})
         self.non_global_defines = non_global_defines or []
         self.substs = dict(substs or {})
         self.topsrcdir = mozpath.abspath(topsrcdir)
@@ -143,30 +143,31 @@ class ConfigEnvironment(object):
             self.import_prefix = self.lib_prefix
             self.import_suffix = '.%s' % self.substs['IMPORT_LIB_SUFFIX']
         else:
             self.import_prefix = self.dll_prefix
             self.import_suffix = self.dll_suffix
         self.bin_suffix = self.substs.get('BIN_SUFFIX', '')
 
         global_defines = [name for name in self.defines
-            if not name in self.non_global_defines]
+                          if not name in self.non_global_defines]
         self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
-            shell_quote(self.defines[name]).replace('$', '$$'))
-            for name in sorted(global_defines)])
+                                                          shell_quote(self.defines[name]).replace('$', '$$'))
+                                             for name in sorted(global_defines)])
+
         def serialize(name, obj):
             if isinstance(obj, StringTypes):
                 return obj
             if isinstance(obj, Iterable):
                 return ' '.join(obj)
             raise Exception('Unhandled type %s for %s', type(obj), str(name))
         self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
-            serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
+                                                                  serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
         self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
-            for name in self.substs if not self.substs[name]]))
+                                                          for name in self.substs if not self.substs[name]]))
 
         self.substs = ReadOnlyDict(self.substs)
 
         self.external_source_dir = None
         external = self.substs.get('EXTERNAL_SOURCE_DIR', '')
         if external:
             external = mozpath.normpath(external)
             if not os.path.isabs(external):
@@ -209,26 +210,27 @@ class ConfigEnvironment(object):
                          if name not in self.non_global_defines)
         return ReadOnlyDict(acdefines)
 
     @staticmethod
     def from_config_status(path):
         config = BuildConfig.from_config_status(path)
 
         return ConfigEnvironment(config.topsrcdir, config.topobjdir,
-            config.defines, config.non_global_defines, config.substs, path)
+                                 config.defines, config.non_global_defines, config.substs, path)
 
 
 class PartialConfigDict(object):
     """Facilitates mapping the config.statusd defines & substs with dict-like access.
 
     This allows a buildconfig client to use buildconfig.defines['FOO'] (and
     similar for substs), where the value of FOO is delay-loaded until it is
     needed.
     """
+
     def __init__(self, config_statusd, typ, environ_override=False):
         self._dict = {}
         self._datadir = mozpath.join(config_statusd, typ)
         self._config_track = mozpath.join(self._datadir, 'config.track')
         self._files = set()
         self._environ_override = environ_override
 
     def _load_config_track(self):
@@ -333,33 +335,34 @@ class PartialConfigEnvironment(object):
         preprocessor command lines. The order in which defines were given
         when creating the ConfigEnvironment is preserved.
 
     and one additional define from all the defines as a dictionary:
       - ALLDEFINES contains all of the global defines as a dictionary. This is
       intended to be used instead of the defines structure from config.status so
       that scripts can depend directly on its value.
     """
+
     def __init__(self, topobjdir):
         config_statusd = mozpath.join(topobjdir, 'config.statusd')
         self.substs = PartialConfigDict(config_statusd, 'substs', environ_override=True)
         self.defines = PartialConfigDict(config_statusd, 'defines')
         self.topobjdir = topobjdir
 
     def write_vars(self, config):
         substs = config['substs'].copy()
         defines = config['defines'].copy()
 
         global_defines = [
             name for name in config['defines']
             if name not in config['non_global_defines']
         ]
         acdefines = ' '.join(['-D%s=%s' % (name,
-            shell_quote(config['defines'][name]).replace('$', '$$'))
-            for name in sorted(global_defines)])
+                                           shell_quote(config['defines'][name]).replace('$', '$$'))
+                              for name in sorted(global_defines)])
         substs['ACDEFINES'] = acdefines
 
         all_defines = OrderedDict()
         for k in global_defines:
             all_defines[k] = config['defines'][k]
         defines['ALLDEFINES'] = all_defines
 
         self.substs._fill_group(substs)
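
The ACDEFINES strings built in both classes above follow the same recipe; a
self-contained approximation (pipes.quote stands in for the shell_quote used
here, and the define values are invented):

    from pipes import quote as shell_quote

    defines = {'DEBUG': '1', 'MOZILLA_CLIENT': '1'}
    non_global_defines = []
    global_defines = [name for name in defines
                      if name not in non_global_defines]
    acdefines = ' '.join(['-D%s=%s' % (name,
                          shell_quote(defines[name]).replace('$', '$$'))
                          for name in sorted(global_defines)])
    # acdefines == '-DDEBUG=1 -DMOZILLA_CLIENT=1'
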
--- a/python/mozbuild/mozbuild/backend/cpp_eclipse.py
+++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
@@ -21,16 +21,17 @@ from ..frontend.data import (
 )
 from mozbuild.base import ExecutionSummary
 
 # TODO Have ./mach eclipse generate the workspace and index it:
 # /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
 # Open eclipse:
 # /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
 
+
 class CppEclipseBackend(CommonBackend):
     """Backend that generates Cpp Eclipse project files.
     """
 
     def __init__(self, environment):
         if os.name == 'nt':
             raise Exception('Eclipse is not supported on Windows. '
                             'Consider using Visual Studio instead.')
@@ -76,17 +77,18 @@ class CppEclipseBackend(CommonBackend):
         return os.path.join(srcdir_parent, workspace_dirname)
 
     def consume_object(self, obj):
         reldir = getattr(obj, 'relsrcdir', None)
 
         # Note that unlike VS, Eclipse's indexer seems to crawl the headers and
         # isn't picky about the local includes.
         if isinstance(obj, ComputedFlags):
-            args = self._args_for_dirs.setdefault('tree/' + reldir, {'includes': [], 'defines': []})
+            args = self._args_for_dirs.setdefault(
+                'tree/' + reldir, {'includes': [], 'defines': []})
             # use the same args for any objdirs we include:
             if reldir == 'dom/bindings':
                 self._args_for_dirs.setdefault('generated-webidl', args)
             if reldir == 'ipc/ipdl':
                 self._args_for_dirs.setdefault('generated-ipdl', args)
 
             includes = args["includes"]
             if "BASE_INCLUDES" in obj.flags and obj.flags["BASE_INCLUDES"]:
@@ -100,17 +102,18 @@ class CppEclipseBackend(CommonBackend):
             if "LIBRARY_DEFINES" in obj.flags and obj.flags["LIBRARY_DEFINES"]:
                 defs += obj.flags["LIBRARY_DEFINES"]
 
         return True
 
     def consume_finished(self):
         settings_dir = os.path.join(self._project_dir, '.settings')
         launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
-        workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
+        workspace_settings_dir = os.path.join(
+            self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
 
         for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, self._workspace_lang_dir]:
             try:
                 os.makedirs(dir_name)
             except OSError as e:
                 if e.errno != errno.EEXIST:
                     raise
 
@@ -124,60 +127,64 @@ class CppEclipseBackend(CommonBackend):
 
         language_path = os.path.join(settings_dir, 'language.settings.xml')
         with open(language_path, 'wb') as fh:
             self._write_language_settings(fh)
 
         workspace_language_path = os.path.join(self._workspace_lang_dir, 'language.settings.xml')
         with open(workspace_language_path, 'wb') as fh:
             workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
-            workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
+            workspace_lang_settings = workspace_lang_settings.replace(
+                "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
             fh.write(workspace_lang_settings)
 
         self._write_launch_files(launch_dir)
 
-        core_resources_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.resources.prefs')
+        core_resources_prefs_path = os.path.join(
+            workspace_settings_dir, 'org.eclipse.core.resources.prefs')
         with open(core_resources_prefs_path, 'wb') as fh:
-            fh.write(STATIC_CORE_RESOURCES_PREFS);
+            fh.write(STATIC_CORE_RESOURCES_PREFS)
 
-        core_runtime_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
+        core_runtime_prefs_path = os.path.join(
+            workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
         with open(core_runtime_prefs_path, 'wb') as fh:
-            fh.write(STATIC_CORE_RUNTIME_PREFS);
+            fh.write(STATIC_CORE_RUNTIME_PREFS)
 
         ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.ui.prefs')
         with open(ui_prefs_path, 'wb') as fh:
-            fh.write(STATIC_UI_PREFS);
+            fh.write(STATIC_UI_PREFS)
 
         cdt_ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.ui.prefs')
         cdt_ui_prefs = STATIC_CDT_UI_PREFS
         # Here we generate the code formatter that will show up in the UI with
         # the name "Mozilla".  The formatter is stored as a single line of XML
         # in the org.eclipse.cdt.ui.formatterprofiles pref.
         cdt_ui_prefs += """org.eclipse.cdt.ui.formatterprofiles=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?>\\n<profiles version\="1">\\n<profile kind\="CodeFormatterProfile" name\="Mozilla" version\="1">\\n"""
         XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
         for line in FORMATTER_SETTINGS.splitlines():
             [pref, val] = line.split("=")
-            cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace("@PREF_VAL@", val)
+            cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@",
+                                                      pref).replace("@PREF_VAL@", val)
         cdt_ui_prefs += "</profile>\\n</profiles>\\n"
         with open(cdt_ui_prefs_path, 'wb') as fh:
-            fh.write(cdt_ui_prefs);
+            fh.write(cdt_ui_prefs)
 
         cdt_core_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.core.prefs')
         with open(cdt_core_prefs_path, 'wb') as fh:
             cdt_core_prefs = STATIC_CDT_CORE_PREFS
             # When we generated the code formatter called "Mozilla" above, we
             # also set it to be the active formatter.  When a formatter is set
             # as the active formatter all its prefs are set in this prefs file,
             # so we need to add those now:
             cdt_core_prefs += FORMATTER_SETTINGS
-            fh.write(cdt_core_prefs);
+            fh.write(cdt_core_prefs)
 
-        editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
+        editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
         with open(editor_prefs_path, 'wb') as fh:
-            fh.write(EDITOR_SETTINGS);
+            fh.write(EDITOR_SETTINGS)
 
         # Now import the project into the workspace
         self._import_project()
 
     def _import_project(self):
         # If the workspace already exists then don't import the project again because
         # eclipse doesn't handle this properly
         if self._overwriting_workspace:
@@ -203,17 +210,17 @@ class CppEclipseBackend(CommonBackend):
             else:
                 raise
         finally:
             self._remove_noindex()
 
     def _write_noindex(self):
         noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
         with open(noindex_path, 'wb') as fh:
-            fh.write(NOINDEX_TEMPLATE);
+            fh.write(NOINDEX_TEMPLATE)
 
     def _remove_noindex(self):
         # Below we remove the config file that temporarily disabled the indexer
         # while we were importing the project. Unfortunately, CDT doesn't
         # notice indexer settings changes in config files when it restarts. To
         # work around that we remove the index database here to force it to:
         for f in glob.glob(os.path.join(self._workspace_lang_dir, "Gecko.*.pdom")):
             os.remove(f)
@@ -252,17 +259,18 @@ class CppEclipseBackend(CommonBackend):
         # we add settings for.  (Fortunately that doesn't appear to have a
         # noticeable impact on the time it takes to open the generated Eclipse
         # project.)  We do that by generating a template here that we can then
         # use for each individual directory in the loop below.
         #
         dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER
 
         # Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
-        dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
+        dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(
+            self.environment.topobjdir, 'dist/include/mozilla-config.h'))
         dirsettings_template += add_define('MOZILLA_CLIENT', '1')
 
         # Add EXTRA_INCLUDES args:
         dirsettings_template += add_objdir_include_path('dist/include')
 
         # Add OS_INCLUDES args:
         # XXX media/webrtc/trunk/webrtc's moz.builds reset this.
         dirsettings_template += add_objdir_include_path('dist/include/nspr')
@@ -299,27 +307,28 @@ class CppEclipseBackend(CommonBackend):
                 dirsettings += add_abs_include_path(i)
             for d in args["defines"]:
                 assert(d[:2] == u"-D" or d[:2] == u"-U")
                 if d[:2] == u"-U":
                     # gfx/harfbuzz/src uses -UDEBUG, at least on Mac
                     # netwerk/sctp/src uses -U__APPLE__ on Mac
                     # XXX We should make this code smart enough to remove existing defines.
                     continue
-                d = d[2:] # get rid of leading "-D"
+                d = d[2:]  # get rid of leading "-D"
                 name_value = d.split("=", 1)
                 name = name_value[0]
                 value = ""
                 if len(name_value) == 2:
                     value = name_value[1]
                 dirsettings += add_define(name, str(value))
             dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
             fh.write(dirsettings)
 
-        fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
+        fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
+            "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
 
     def _write_launch_files(self, launch_dir):
         bin_dir = os.path.join(self.environment.topobjdir, 'dist')
 
         # TODO Improve binary detection
         if self._macbundle:
             exe_path = os.path.join(bin_dir, self._macbundle, 'Contents/MacOS')
         else:
@@ -329,31 +338,35 @@ class CppEclipseBackend(CommonBackend):
 
         main_gecko_launch = os.path.join(launch_dir, 'gecko.launch')
         with open(main_gecko_launch, 'wb') as fh:
             launch = GECKO_LAUNCH_CONFIG_TEMPLATE
             launch = launch.replace('@LAUNCH_PROGRAM@', exe_path)
             launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
             fh.write(launch)
 
-        #TODO Add more launch configs (and delegate calls to mach)
+        # TODO Add more launch configs (and delegate calls to mach)
 
     def _write_project(self, fh):
-        project = PROJECT_TEMPLATE;
+        project = PROJECT_TEMPLATE
 
         project = project.replace('@PROJECT_NAME@', self._project_name)
         project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
-        project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(self.environment.topobjdir, "ipc", "ipdl"))
-        project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(self.environment.topobjdir, "dom", "bindings"))
+        project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(
+            self.environment.topobjdir, "ipc", "ipdl"))
+        project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(
+            self.environment.topobjdir, "dom", "bindings"))
         fh.write(project)
 
     def _write_cproject(self, fh):
         cproject_header = CPROJECT_TEMPLATE_HEADER
-        cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
-        cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
+        cproject_header = cproject_header.replace(
+            '@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
+        cproject_header = cproject_header.replace(
+            '@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
         fh.write(cproject_header)
         fh.write(CPROJECT_TEMPLATE_FOOTER)
 
 
 PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
 <projectDescription>
         <name>@PROJECT_NAME@</name>
         <comment></comment>
@@ -610,31 +623,31 @@ showLeadingSpaces=false
 showLineFeed=false
 showWhitespaceCharacters=true
 spacesForTabs=true
 tabWidth=2
 undoHistorySize=200
 """
 
 
-STATIC_CORE_RESOURCES_PREFS="""eclipse.preferences.version=1
+STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
 refresh.enabled=true
 """
 
-STATIC_CORE_RUNTIME_PREFS="""eclipse.preferences.version=1
+STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
 content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
 content-types/org.eclipse.core.runtime.xml/file-extensions=xul
 content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
 """
 
-STATIC_UI_PREFS="""eclipse.preferences.version=1
+STATIC_UI_PREFS = """eclipse.preferences.version=1
 showIntro=false
 """
 
-STATIC_CDT_CORE_PREFS="""eclipse.preferences.version=1
+STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
 indexer.updatePolicy=0
 """
 
 FORMATTER_SETTINGS = """org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
 org.eclipse.cdt.core.formatter.alignment_for_assignment=16
 org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
 org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
 org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
@@ -792,17 +805,17 @@ org.eclipse.cdt.core.formatter.keep_then
 org.eclipse.cdt.core.formatter.lineSplit=80
 org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
 org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
 org.eclipse.cdt.core.formatter.tabulation.char=space
 org.eclipse.cdt.core.formatter.tabulation.size=2
 org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
 """
 
-STATIC_CDT_UI_PREFS="""eclipse.preferences.version=1
+STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
 buildConsoleLines=10000
 Console.limitConsoleOutput=false
 ensureNewlineAtEOF=false
 formatter_profile=_Mozilla
 formatter_settings_version=1
 org.eclipse.cdt.ui.formatterprofiles.version=1
 removeTrailingWhitespace=true
 removeTrailingWhitespaceEditedLines=true
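
The formatter profile above is spliced into the prefs through plain @TOKEN@
substitution; schematically (template simplified, settings truncated):

    template = '<setting id="@PREF_NAME@" value="@PREF_VAL@"/>'
    settings = ('org.eclipse.cdt.core.formatter.lineSplit=80\n'
                'org.eclipse.cdt.core.formatter.tabulation.size=2')
    xml = ''
    for line in settings.splitlines():
        pref, val = line.split('=')
        xml += template.replace('@PREF_NAME@', pref) \
                       .replace('@PREF_VAL@', val) + '\n'
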
--- a/python/mozbuild/mozbuild/backend/fastermake.py
+++ b/python/mozbuild/mozbuild/backend/fastermake.py
@@ -129,20 +129,22 @@ class FasterMakeBackend(CommonBackend, P
             if obj.path != top_level:
                 entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                         obj.install_target)
                 self._manifest_entries[top_level].add(entry)
             self._manifest_entries[obj.path].add(str(obj.entry))
 
         elif isinstance(obj, GeneratedFile):
             if obj.outputs:
-                first_output = mozpath.relpath(mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir)
+                first_output = mozpath.relpath(mozpath.join(
+                    obj.objdir, obj.outputs[0]), self.environment.topobjdir)
                 for o in obj.outputs[1:]:
                     fullpath = mozpath.join(obj.objdir, o)
-                    self._generated_files_map[mozpath.relpath(fullpath, self.environment.topobjdir)] = first_output
+                    self._generated_files_map[mozpath.relpath(
+                        fullpath, self.environment.topobjdir)] = first_output
             # We don't actually handle GeneratedFiles, we just need to know if
             # we can build multiple of them from a single make invocation in the
             # faster backend.
             return False
 
         elif isinstance(obj, XPIDLModule):
             self._has_xpidl = True
             # We're not actually handling XPIDL files.
@@ -189,31 +191,31 @@ class FasterMakeBackend(CommonBackend, P
         mk.add_statement('INSTALL_MANIFESTS = %s'
                          % ' '.join(self._install_manifests.keys()))
 
         # Add dependencies we inferred:
         for target, deps in self._dependencies.iteritems():
             mk.create_rule([target]).add_dependencies(
                 '$(TOPOBJDIR)/%s' % d for d in deps)
 
-
         # This is not great, but it's better to have some dependencies on these Python files.
         python_deps = [
             '$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',
             '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py',
             '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py',
         ]
         # Add l10n dependencies we inferred:
         for target, deps in self._l10n_dependencies.iteritems():
             mk.create_rule([target]).add_dependencies(
                 '%s' % d[0] for d in deps)
             for (merge, ref_file, l10n_file) in deps:
                 rule = mk.create_rule([merge]).add_dependencies(
                     [ref_file, l10n_file] + python_deps)
-                rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
+                rule.add_commands(
+                    ['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
                 # Add a dummy rule for the l10n file since it might not exist.
                 mk.create_rule([l10n_file])
 
         mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
 
         for base, install_manifest in self._install_manifests.iteritems():
             with self._write_file(
                     mozpath.join(self.environment.topobjdir, 'faster',
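
The l10n-merge rules above are emitted through mozbuild.makeutil; a minimal
sketch of that pattern (the target, dependencies and command string are
invented):

    import sys
    from mozbuild.makeutil import Makefile

    mk = Makefile()
    rule = mk.create_rule(['merged/foo.ftl'])
    rule.add_dependencies(['ref/foo.ftl', 'l10n/foo.ftl'])
    rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge '
                       '--output $@ --ref-file ref/foo.ftl '
                       '--l10n-file l10n/foo.ftl'])
    mk.dump(sys.stdout)
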
--- a/python/mozbuild/mozbuild/backend/mach_commands.py
+++ b/python/mozbuild/mozbuild/backend/mach_commands.py
@@ -15,20 +15,21 @@ from mozbuild.base import (
 )
 
 from mach.decorators import (
     CommandArgument,
     CommandProvider,
     Command,
 )
 
+
 @CommandProvider
 class MachCommands(MachCommandBase):
     @Command('ide', category='devenv',
-        description='Generate a project and launch an IDE.')
+             description='Generate a project and launch an IDE.')
     @CommandArgument('ide', choices=['eclipse', 'visualstudio'])
     @CommandArgument('args', nargs=argparse.REMAINDER)
     def eclipse(self, ide, args):
         if ide == 'eclipse':
             backend = 'CppEclipse'
         elif ide == 'visualstudio':
             backend = 'VisualStudio'
 
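
The decorator stack above is the standard mach registration pattern; a
self-contained sketch (the command name and argument are invented):

    from mach.decorators import CommandArgument, CommandProvider, Command
    from mozbuild.base import MachCommandBase

    @CommandProvider
    class ExampleCommands(MachCommandBase):
        @Command('example', category='devenv',
                 description='Echo the arguments it was given.')
        @CommandArgument('args', nargs='*')
        def example(self, args):
            print(args)
            return 0
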
--- a/python/mozbuild/mozbuild/backend/recursivemake.py
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -121,17 +121,17 @@ MOZBUILD_VARIABLES = [
     b'SHARED_LIBRARY_LIBS',
     b'SHARED_LIBRARY_NAME',
     b'SIMPLE_PROGRAMS',
     b'SONAME',
     b'STATIC_LIBRARY_NAME',
     b'TEST_DIRS',
     b'TOOL_DIRS',
     # XXX config/Makefile.in specifies this in a make invocation
-    #'USE_EXTENSION_MANIFEST',
+    # 'USE_EXTENSION_MANIFEST',
     b'XPCSHELL_TESTS',
     b'XPIDL_MODULE',
 ]
 
 DEPRECATED_VARIABLES = [
     b'ALLOW_COMPILER_WARNINGS',
     b'EXPORT_LIBRARY',
     b'EXTRA_LIBS',
@@ -220,19 +220,19 @@ class BackendMakeFile(object):
 
     def close(self):
         if self.xpt_name:
             # We just recompile all xpidls because it's easier and less error
             # prone.
             self.fh.write('NONRECURSIVE_TARGETS += export\n')
             self.fh.write('NONRECURSIVE_TARGETS_export += xpidl\n')
             self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = '
-                '$(DEPTH)/xpcom/xpidl\n')
+                          '$(DEPTH)/xpcom/xpidl\n')
             self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_TARGETS += '
-                'export\n')
+                          'export\n')
 
         return self.fh.close()
 
     @property
     def diff(self):
         return self.fh.diff
 
 
@@ -243,16 +243,17 @@ class RecursiveMakeTraversal(object):
     from Makefiles.
 
     Each directory may have one or more types of subdirectories:
         - (normal) dirs
         - tests
     """
     SubDirectoryCategories = ['dirs', 'tests']
     SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)
+
     class SubDirectories(SubDirectoriesTuple):
         def __new__(self):
             return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])
 
     def __init__(self):
         self._traversal = {}
         self._attached = set()
 
@@ -385,17 +386,17 @@ class RecursiveMakeBackend(CommonBackend
         self._idl_dirs = set()
 
         self._makefile_in_count = 0
         self._makefile_out_count = 0
 
         self._test_manifests = {}
 
         self.backend_input_files.add(mozpath.join(self.environment.topobjdir,
-            'config', 'autoconf.mk'))
+                                                  'config', 'autoconf.mk'))
 
         self._install_manifests = defaultdict(InstallManifest)
         # The build system relies on some install manifests always existing
         # even if they are empty, because the directories are still filled
         # by the build system itself, and the install manifests are only
         # used for a "magic" rm -rf.
         self._install_manifests['dist_public']
         self._install_manifests['dist_private']
@@ -419,17 +420,17 @@ class RecursiveMakeBackend(CommonBackend
                        makefile_in=self._makefile_in_count,
                        makefile_out=self._makefile_out_count)
         return summary
 
     def _get_backend_file_for(self, obj):
         if obj.objdir not in self._backend_files:
             self._backend_files[obj.objdir] = \
                 BackendMakeFile(obj.srcdir, obj.objdir, obj.config,
-                    obj.topsrcdir, self.environment.topobjdir, self.dry_run)
+                                obj.topsrcdir, self.environment.topobjdir, self.dry_run)
         return self._backend_files[obj.objdir]
 
     def consume_object(self, obj):
         """Write out build files necessary to build with recursive make."""
 
         if not isinstance(obj, ContextDerived):
             return False
 
@@ -604,29 +605,29 @@ class RecursiveMakeBackend(CommonBackend
                 backend_file.write('EXTRA_MDDEPEND_FILES += %s\n' % dep_file)
 
                 backend_file.write("""{stub}: {script}{inputs}{backend}{force}
 \t$(REPORT_BUILD)
 \t$(call py_action,file_generate,{locale}{script} {method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
 \t@$(TOUCH) $@
 
 """.format(stub=stub_file,
-           output=first_output,
-           dep_file=dep_file,
-           inputs=' ' + ' '.join(inputs) if inputs else '',
-           flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
-           backend=' backend.mk' if obj.flags else '',
-           # Locale repacks repack multiple locales from a single configured objdir,
-           # so standard mtime dependencies won't work properly when the build is re-run
-           # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
-           # in this situation, so simply force the generation to run in that case.
-           force=force,
-           locale='--locale=$(AB_CD) ' if obj.localized else '',
-           script=obj.script,
-           method=obj.method))
+                    output=first_output,
+                    dep_file=dep_file,
+                    inputs=' ' + ' '.join(inputs) if inputs else '',
+                    flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
+                    backend=' backend.mk' if obj.flags else '',
+                    # Locale repacks repack multiple locales from a single configured objdir,
+                    # so standard mtime dependencies won't work properly when the build is re-run
+                    # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
+                    # in this situation, so simply force the generation to run in that case.
+                    force=force,
+                    locale='--locale=$(AB_CD) ' if obj.localized else '',
+                    script=obj.script,
+                    method=obj.method))
 
         elif isinstance(obj, JARManifest):
             self._no_skip['libs'].add(backend_file.relobjdir)
             backend_file.write('JAR_MANIFEST := %s\n' % obj.path.full_path)
 
         elif isinstance(obj, RustProgram):
             self._process_rust_program(obj, backend_file)
             self._rust_dirs.add(obj.relobjdir)
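
The generated-file rule above is produced by substituting named fields into
one multi-line template, which keeps the emitted make syntax readable in
the Python source. A stripped-down sketch of the technique (the file names
and rule body here are invented for illustration):

    template = ('{stub}: {script}{inputs}\n'
                '\t$(call py_action,file_generate,{script} {output})\n'
                '\t@$(TOUCH) $@\n')

    inputs = ['foo.in', 'bar.in']
    rule = template.format(
        stub='foo.stub',
        script='generate_foo.py',
        # Leading space only when the list is non-empty, matching the
        # conditional expressions above.
        inputs=' ' + ' '.join(inputs) if inputs else '',
        output='foo.h',
    )
    print(rule)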
@@ -779,29 +780,29 @@ class RecursiveMakeBackend(CommonBackend
 
         root_deps_mk = Makefile()
 
         # Fill the dependencies for traversal of each tier.
         for tier, filter in filters:
             main, all_deps = \
                 self._traversal.compute_dependencies(filter)
             for dir, deps in all_deps.items():
-                if deps is not None or (dir in self._idl_dirs \
+                if deps is not None or (dir in self._idl_dirs
                                         and tier == 'export'):
                     rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
                 if deps:
                     rule.add_dependencies('%s/%s' % (d, tier) for d in deps if d)
                 if dir in self._idl_dirs and tier == 'export':
                     rule.add_dependencies(['xpcom/xpidl/%s' % tier])
             rule = root_deps_mk.create_rule(['recurse_%s' % tier])
             if main:
                 rule.add_dependencies('%s/%s' % (d, tier) for d in main)
 
-        all_compile_deps = reduce(lambda x,y: x|y,
-            self._compile_graph.values()) if self._compile_graph else set()
+        all_compile_deps = reduce(lambda x, y: x | y,
+                                  self._compile_graph.values()) if self._compile_graph else set()
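
The reduce above simply unions every dependency set in the compile graph
into one set; in isolation, with invented node names:

    from functools import reduce  # works on Python 2 and 3

    compile_graph = {
        'gfx/compile': {'mozglue/build/compile'},
        'dom/compile': {'xpcom/compile', 'mozglue/build/compile'},
    }

    all_compile_deps = (reduce(lambda x, y: x | y, compile_graph.values())
                        if compile_graph else set())
    assert all_compile_deps == {'mozglue/build/compile', 'xpcom/compile'}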
         # Include the following as dependencies of the top recursion target for
         # compilation:
         # - nodes that are not depended upon by anything. Typically, this
         #   would include programs that need to be recursed, but that nothing
         #   depends on.
         # - nodes that have no dependencies of their own. Technically, this is
         #   not necessary, because other things have dependencies on them, and
         #   they all end up rooting to nodes from the above category. But the
@@ -897,18 +898,18 @@ class RecursiveMakeBackend(CommonBackend
         makefile.add_statement(explanation)
 
         all_sources = ' '.join(source for source, _ in unified_source_mapping)
         makefile.add_statement('%s := %s' % (unified_files_makefile_variable,
                                              all_sources))
 
         if include_curdir_build_rules:
             makefile.add_statement('\n'
-                '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
-                '# Help it out by explicitly specifiying dependencies.')
+                                   '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
+                                   '# Help it out by explicitly specifying dependencies.')
             makefile.add_statement('all_absolute_unified_files := \\\n'
                                    '  $(addprefix $(CURDIR)/,$(%s))'
                                    % unified_files_makefile_variable)
             rule = makefile.create_rule(['$(all_absolute_unified_files)'])
             rule.add_dependencies(['$(CURDIR)/%: %'])
 
     def _check_blacklisted_variables(self, makefile_in, makefile_content):
         if b'EXTERNALLY_MANAGED_MAKE_FILE' in makefile_content:
@@ -944,58 +945,58 @@ class RecursiveMakeBackend(CommonBackend
                 makefile_in = mozpath.join(srcdir, 'Makefile.in')
                 makefile = mozpath.join(objdir, 'Makefile')
 
                 # If Makefile.in exists, use it as a template. Otherwise,
                 # create a stub.
                 stub = not os.path.exists(makefile_in)
                 if not stub:
                     self.log(logging.DEBUG, 'substitute_makefile',
-                        {'path': makefile}, 'Substituting makefile: {path}')
+                             {'path': makefile}, 'Substituting makefile: {path}')
                     self._makefile_in_count += 1
 
                     # In the export and libs tiers, we don't skip directories
                     # containing a Makefile.in.
                     # topobjdir is handled separately, don't do anything for
                     # it.
                     if bf.relobjdir:
                         for tier in ('export', 'libs',):
                             self._no_skip[tier].add(bf.relobjdir)
                 else:
                     self.log(logging.DEBUG, 'stub_makefile',
-                        {'path': makefile}, 'Creating stub Makefile: {path}')
+                             {'path': makefile}, 'Creating stub Makefile: {path}')
 
                 obj = self.Substitution()
                 obj.output_path = makefile
                 obj.input_path = makefile_in
                 obj.topsrcdir = backend_file.topsrcdir
                 obj.topobjdir = bf.environment.topobjdir
                 obj.config = bf.environment
                 self._create_makefile(obj, stub=stub)
                 with open(obj.output_path) as fh:
                     content = fh.read()
                     # Directories with a Makefile containing a tools target, or
                     # XPI_PKGNAME or INSTALL_EXTENSION_ID can't be skipped and
                     # must run during the 'tools' tier.
                     for t in (b'XPI_PKGNAME', b'INSTALL_EXTENSION_ID',
-                            b'tools'):
+                              b'tools'):
                         if t not in content:
                             continue
                         if t == b'tools' and not re.search('(?:^|\s)tools.*::', content, re.M):
                             continue
                         if objdir == self.environment.topobjdir:
                             continue
                         self._no_skip['tools'].add(mozpath.relpath(objdir,
-                            self.environment.topobjdir))
+                                                                   self.environment.topobjdir))
 
                     # Directories with a Makefile containing a check target
                     # can't be skipped and must run during the 'check' tier.
                     if re.search('(?:^|\s)check.*::', content, re.M):
                         self._no_skip['check'].add(mozpath.relpath(objdir,
-                            self.environment.topobjdir))
+                                                                   self.environment.topobjdir))
 
                     # Detect any Makefile.ins that contain variables on the
                     # moz.build-only list
                     self._check_blacklisted_variables(makefile_in, content)
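
The tools/check detection above hinges on one regex: a target counts only
if the word starts at the beginning of a line or after whitespace and is
declared with make's double-colon syntax. Self-contained:

    import re

    content = """
    libs:: foo
    tools:: register-extension
    """

    # re.M makes ^ match at each line, as in the checks above.
    assert re.search(r'(?:^|\s)tools.*::', content, re.M)
    assert not re.search(r'(?:^|\s)check.*::', content, re.M)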
 
         self._fill_root_mk()
 
         # Make the master test manifest files.
@@ -1065,17 +1066,17 @@ class RecursiveMakeBackend(CommonBackend
 
         def relativize(base, dirs):
             return (mozpath.relpath(d.translated, base) for d in dirs)
 
         if obj.dirs:
             fh.write('DIRS := %s\n' % ' '.join(
                 relativize(backend_file.objdir, obj.dirs)))
             self._traversal.add(backend_file.relobjdir,
-                dirs=relativize(self.environment.topobjdir, obj.dirs))
+                                dirs=relativize(self.environment.topobjdir, obj.dirs))
 
         # The directory needs to be registered whether subdirectories have been
         # registered or not.
         self._traversal.add(backend_file.relobjdir)
 
     def _process_defines(self, obj, backend_file, which='DEFINES'):
         """Output the DEFINES rules to the given backend file."""
         defines = list(obj.get_defines())
@@ -1089,33 +1090,34 @@ class RecursiveMakeBackend(CommonBackend
         # much as possible here to avoid breaking things.
         if obj.xpiname:
             backend_file.write('XPI_NAME = %s\n' % (obj.xpiname))
         if obj.subdir:
             backend_file.write('DIST_SUBDIR = %s\n' % (obj.subdir))
         if obj.target and not obj.is_custom():
             backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
         else:
-            backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
+            backend_file.write(
+                'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
 
         if not obj.enabled:
             backend_file.write('NO_DIST_INSTALL := 1\n')
 
     def _handle_idl_manager(self, manager):
         build_files = self._install_manifests['xpidl']
 
         for p in ('Makefile', 'backend.mk', '.deps/.mkdir.done'):
             build_files.add_optional_exists(p)
 
         for stem in manager.idl_stems():
             self._install_manifests['dist_include'].add_optional_exists('%s.h' % stem)
 
         for module in manager.modules:
             build_files.add_optional_exists(mozpath.join('.deps',
-                '%s.pp' % module))
+                                                         '%s.pp' % module))
 
         modules = manager.modules
         xpt_modules = sorted(modules.keys())
 
         mk = Makefile()
         all_directories = set()
 
         for module_name in xpt_modules:
@@ -1145,19 +1147,19 @@ class RecursiveMakeBackend(CommonBackend
 
         # Create dependency for output header so we force regeneration if the
         # header was deleted. This ideally should not be necessary. However,
         # some processes (such as PGO at the time this was implemented) wipe
         # out dist/include without regard to our install manifests.
 
         obj = self.Substitution()
         obj.output_path = mozpath.join(self.environment.topobjdir, 'config',
-            'makefiles', 'xpidl', 'Makefile')
+                                       'makefiles', 'xpidl', 'Makefile')
         obj.input_path = mozpath.join(self.environment.topsrcdir, 'config',
-            'makefiles', 'xpidl', 'Makefile.in')
+                                      'makefiles', 'xpidl', 'Makefile.in')
         obj.topsrcdir = self.environment.topsrcdir
         obj.topobjdir = self.environment.topobjdir
         obj.config = self.environment
         self._create_makefile(obj, extra=dict(
             xpidl_rules=rules.getvalue(),
             xpidl_modules=' '.join(xpt_modules),
         ))
 
@@ -1231,45 +1233,45 @@ class RecursiveMakeBackend(CommonBackend
         self._install_manifests['_tests'].add_optional_exists(
             mozpath.join(obj.install_target[len('_tests') + 1:],
                          dest_basename))
 
     def _process_test_manifest(self, obj, backend_file):
         # Much of the logic in this function could be moved to CommonBackend.
         for source in obj.source_relpaths:
             self.backend_input_files.add(mozpath.join(obj.topsrcdir,
-                source))
+                                                      source))
 
         # Don't allow files to be defined multiple times unless explicitly allowed.
         # We currently allow duplicates for non-test files or test files if
         # the manifest is listed as a duplicate.
         for source, (dest, is_test) in obj.installs.items():
             try:
                 self._install_manifests['_test_files'].add_link(source, dest)
             except ValueError:
                 if not obj.dupe_manifest and is_test:
                     raise
 
         for base, pattern, dest in obj.pattern_installs:
             try:
                 self._install_manifests['_test_files'].add_pattern_link(base,
-                    pattern, dest)
+                                                                        pattern, dest)
             except ValueError:
                 if not obj.dupe_manifest:
                     raise
 
         for dest in obj.external_installs:
             try:
                 self._install_manifests['_test_files'].add_optional_exists(dest)
             except ValueError:
                 if not obj.dupe_manifest:
                     raise
 
         m = self._test_manifests.setdefault(obj.flavor,
-            (obj.install_prefix, set()))
+                                            (obj.install_prefix, set()))
         m[1].add(obj.manifest_obj_relpath)
 
         try:
             from reftest import ReftestManifest
 
             if isinstance(obj.manifest, ReftestManifest):
                 # Mark included files as part of the build backend so changes
                 # result in re-config.
@@ -1290,17 +1292,18 @@ class RecursiveMakeBackend(CommonBackend
         if quoted_path != path:
             path = quoted_path[0] + d + quoted_path[1:]
         else:
             path = d + path
         backend_file.write('LOCAL_INCLUDES += -I%s\n' % path)
 
     def _process_per_source_flag(self, per_source_flag, backend_file):
         for flag in per_source_flag.flags:
-            backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))
+            backend_file.write('%s_FLAGS += %s\n' %
+                               (mozpath.basename(per_source_flag.file_name), flag))
 
     def _process_computed_flags(self, computed_flags, backend_file):
         for var, flags in computed_flags.get_flags():
             backend_file.write('COMPUTED_%s += %s\n' % (var,
                                                         ' '.join(make_quote(shell_quote(f)) for f in flags)))
 
     def _process_non_default_target(self, libdef, target_name, backend_file):
         backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
@@ -1352,17 +1355,17 @@ class RecursiveMakeBackend(CommonBackend
         backend_file.write('HOST_SHARED_LIBRARY = %s\n' % libdef.lib_name)
 
     def _build_target_for_obj(self, obj):
         if hasattr(obj, 'output_category') and obj.output_category:
             target_name = obj.output_category
         else:
             target_name = obj.KIND
         return '%s/%s' % (mozpath.relpath(obj.objdir,
-            self.environment.topobjdir), target_name)
+                                          self.environment.topobjdir), target_name)
 
     def _process_linked_libraries(self, obj, backend_file):
         def pretty_relpath(lib, name):
             return os.path.normpath(mozpath.join(mozpath.relpath(lib.objdir, obj.objdir),
                                                  name))
 
         topobjdir = mozpath.normsep(obj.topobjdir)
         # This will create the node even if there aren't any linked libraries.
@@ -1409,17 +1412,17 @@ class RecursiveMakeBackend(CommonBackend
         pgo_objs_ref = ' \\\n    '.join(os.path.relpath(o, obj.objdir)
                                         for o in profile_gen_objs)
         # Don't bother with a list file if we're only linking objects built
         # in this directory or building a real static library. This
         # accommodates clang-plugin, where we would otherwise pass an
         # incorrect list file format to the host compiler as well as when
         # creating an archive with AR, which doesn't understand list files.
         if (objs == obj.objs and not isinstance(obj, (HostLibrary, StaticLibrary)) or
-          isinstance(obj, StaticLibrary) and obj.no_expand_lib):
+            isinstance(obj, StaticLibrary) and obj.no_expand_lib):
             backend_file.write_once('%s_OBJS := %s\n' % (obj.name,
                                                          objs_ref))
             if profile_gen_objs:
                 backend_file.write_once('%s_PGO_OBJS := %s\n' % (obj.name,
                                                                  pgo_objs_ref))
             write_obj_deps(obj_target, objs_ref, pgo_objs_ref)
         elif not isinstance(obj, (HostLibrary, StaticLibrary)):
             list_file_path = '%s.list' % obj.name.replace('.', '_')
@@ -1646,17 +1649,17 @@ class RecursiveMakeBackend(CommonBackend
         ]
         rule.add_commands(['$(call py_action,buildlist,%s)' % ' '.join(args)])
         fragment.dump(backend_file.fh, removal_guard=False)
 
         self._no_skip['misc'].add(obj.relsrcdir)
 
     def _write_manifests(self, dest, manifests):
         man_dir = mozpath.join(self.environment.topobjdir, '_build_manifests',
-            dest)
+                               dest)
 
         for k, manifest in manifests.items():
             with self._write_file(mozpath.join(man_dir, k)) as fh:
                 manifest.write(fileobj=fh)
 
     def _write_master_test_manifest(self, path, manifests):
         with self._write_file(path) as master:
             master.write(
@@ -1683,17 +1686,17 @@ class RecursiveMakeBackend(CommonBackend
         When the stub argument is True, no source file is used, and a stub
         makefile with the default header and footer only is created.
         '''
         with self._get_preprocessor(obj) as pp:
             if extra:
                 pp.context.update(extra)
             if not pp.context.get('autoconfmk', ''):
                 pp.context['autoconfmk'] = 'autoconf.mk'
-            pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
+            pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
             pp.handleLine(b'DEPTH := @DEPTH@\n')
             pp.handleLine(b'topobjdir := @topobjdir@\n')
             pp.handleLine(b'topsrcdir := @top_srcdir@\n')
             pp.handleLine(b'srcdir := @srcdir@\n')
             pp.handleLine(b'srcdir_rel := @srcdir_rel@\n')
             pp.handleLine(b'VPATH := @srcdir@\n')
             pp.handleLine(b'relativesrcdir := @relativesrcdir@\n')
             pp.handleLine(b'include $(DEPTH)/config/@autoconfmk@\n')
@@ -1727,37 +1730,37 @@ class RecursiveMakeBackend(CommonBackend
         for source in sorted_nonstatic_ipdl_sources:
             basename = os.path.basename(source)
             sorted_nonstatic_ipdl_basenames.append(basename)
             rule = mk.create_rule([basename])
             rule.add_dependencies([source])
             rule.add_commands([
                 '$(RM) $@',
                 '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
-                    '$< -o $@)'
+                '$< -o $@)'
             ])
 
         mk.add_statement('ALL_IPDLSRCS := %s %s' % (' '.join(sorted_nonstatic_ipdl_basenames),
-                         ' '.join(sorted_static_ipdl_sources)))
+                                                    ' '.join(sorted_static_ipdl_sources)))
 
         self._add_unified_build_rules(mk, unified_ipdl_cppsrcs_mapping,
                                       unified_files_makefile_variable='CPPSRCS')
 
         # Preprocessed ipdl files are generated in ipdl_dir.
         mk.add_statement('IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
-            for p in sorted_static_ipdl_sources)))))
+                                                                              for p in sorted_static_ipdl_sources)))))
 
         with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
             mk.dump(ipdls, removal_guard=False)
 
     def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
                              webidls, expected_build_output_files,
                              global_define_files):
         include_dir = mozpath.join(self.environment.topobjdir, 'dist',
-            'include')
+                                   'include')
         for f in expected_build_output_files:
             if f.startswith(include_dir):
                 self._install_manifests['dist_include'].add_optional_exists(
                     mozpath.relpath(f, include_dir))
 
         # We pass WebIDL info to make via a completely generated make file.
         mk = Makefile()
         mk.add_statement('nonstatic_webidl_files := %s' % ' '.join(
@@ -1778,18 +1781,18 @@ class RecursiveMakeBackend(CommonBackend
             rule = mk.create_rule([basename])
             rule.add_dependencies([source, '$(GLOBAL_DEPS)'])
             rule.add_commands([
                 # Remove the file before writing so bindings that go from
                 # static to preprocessed don't end up writing to a symlink,
                 # which would modify content in the source directory.
                 '$(RM) $@',
                 '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
-                    '$< -o $@)'
+                '$< -o $@)'
             ])
 
         self._add_unified_build_rules(mk,
-            unified_source_mapping,
-            unified_files_makefile_variable='unified_binding_cpp_files')
+                                      unified_source_mapping,
+                                      unified_files_makefile_variable='unified_binding_cpp_files')
 
         webidls_mk = mozpath.join(bindings_dir, 'webidlsrcs.mk')
         with self._write_file(webidls_mk) as fh:
             mk.dump(fh, removal_guard=False)
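
The '$(RM) $@' preceding the preprocessor call matters: if the output file
is currently a symlink into the srcdir, opening it for writing would follow
the link and clobber the source, while unlinking first breaks the link. A
small POSIX-only demonstration with invented paths:

    import os
    import tempfile

    d = tempfile.mkdtemp()
    src = os.path.join(d, 'Binding.webidl')    # stand-in for a srcdir file
    out = os.path.join(d, 'Binding.out')

    with open(src, 'w') as f:
        f.write('original\n')
    os.symlink(src, out)                       # a previous static install

    with open(out, 'w') as f:                  # writing through the link...
        f.write('generated\n')
    assert open(src).read() == 'generated\n'   # ...modified the source

    with open(src, 'w') as f:
        f.write('original\n')
    os.remove(out)                             # what $(RM) $@ does
    with open(out, 'w') as f:
        f.write('generated\n')
    assert open(src).read() == 'original\n'    # source left intact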
--- a/python/mozbuild/mozbuild/backend/tup.py
+++ b/python/mozbuild/mozbuild/backend/tup.py
@@ -327,19 +327,19 @@ class TupBackend(CommonBackend):
             args += ['-j%d' % jobs]
         else:
             args += ['-j%d' % multiprocessing.cpu_count()]
 
         tiers = output.monitor.tiers
         tiers.set_tiers(('tup',))
         tiers.begin_tier('tup')
         status = config.run_process(args=args,
-                                  line_handler=output.on_line,
-                                  ensure_exit_code=False,
-                                  append_env=self._get_mozconfig_env(config))
+                                    line_handler=output.on_line,
+                                    ensure_exit_code=False,
+                                    append_env=self._get_mozconfig_env(config))
         tiers.finish_tier('tup')
         if not status and self.environment.substs.get('MOZ_AUTOMATION'):
             config.log_manager.enable_unstructured()
             config._activate_virtualenv()
             config.virtualenv_manager.install_pip_package('tablib==0.12.1')
             src = mozpath.join(self.environment.topsrcdir, '.tup')
             dst = os.environ['UPLOAD_PATH']
             if self.environment.substs.get('UPLOAD_TUP_DB'):
@@ -470,17 +470,16 @@ class TupBackend(CommonBackend):
         static_libs = self._lib_paths(backend_file.objdir, static_libs)
         shared_libs = self._lib_paths(backend_file.objdir, shared_libs)
 
         # Linking some programs will access libraries installed to dist/bin,
         # so depend on the installed libraries here. This can be made more
         # accurate once we start building libraries in their final locations.
         inputs = objs + static_libs + shared_libs + [self._shlibs]
 
-
         rust_linked = [l for l in prog.linked_libraries
                        if isinstance(l, RustLibrary)]
 
         extra_inputs = []
         if rust_linked:
             extra_inputs = [self._rust_output_group(rust_linked[0].output_category) or
                             self._rust_libs]
             static_libs += self._lib_paths(backend_file.objdir, rust_linked)
@@ -508,22 +507,20 @@ class TupBackend(CommonBackend):
         backend_file.rule(
             cmd=cmd,
             inputs=inputs,
             extra_inputs=extra_inputs,
             outputs=outputs,
             display='LINK %o'
         )
 
-
     def _gen_host_programs(self, backend_file):
         for p in backend_file.host_programs:
             self._gen_host_program(backend_file, p)
 
-
     def _gen_host_program(self, backend_file, prog):
         _, _, _, _, extra_libs, _ = self._expand_libs(prog)
         objs = prog.objs
 
         if isinstance(prog, HostSimpleProgram):
             outputs = [prog.name]
         else:
             outputs = [mozpath.relpath(prog.output_path.full_path,
@@ -554,17 +551,16 @@ class TupBackend(CommonBackend):
         backend_file.rule(
             cmd=cmd,
             inputs=inputs,
             extra_inputs=extra_inputs,
             outputs=outputs,
             display='LINK %o'
         )
 
-
     def _gen_static_library(self, backend_file):
         ar = [
             backend_file.environment.substs['AR'],
             backend_file.environment.substs['AR_FLAGS'].replace('$@', '%o')
         ]
 
         objs, _, _, shared_libs, _, static_libs = self._expand_libs(backend_file.static_lib)
         static_libs = self._lib_paths(backend_file.objdir, static_libs)
@@ -579,17 +575,16 @@ class TupBackend(CommonBackend):
 
         backend_file.rule(
             cmd=cmd,
             inputs=inputs,
             outputs=[backend_file.static_lib.name],
             display='AR %o'
         )
 
-
     def consume_object(self, obj):
         """Write out build files necessary to build with tup."""
 
         if not isinstance(obj, ContextDerived):
             return False
 
         consumed = CommonBackend.consume_object(self, obj)
         if consumed:
@@ -690,17 +685,17 @@ class TupBackend(CommonBackend):
                 self._process_generated_file(backend_file, obj)
             for path, output, output_group in backend_file.delayed_installed_files:
                 backend_file.symlink_rule(path, output=output, output_group=output_group)
             with self._write_file(fh=backend_file):
                 pass
 
         with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
             acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
-                for (name, value) in sorted(self.environment.acdefines.iteritems())])
+                                        for (name, value) in sorted(self.environment.acdefines.iteritems())])
             # TODO: AB_CD only exists in Makefiles at the moment.
             acdefines_flags += ' -DAB_CD=en-US'
 
             # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in
             # toolkit/content/buildconfig.html
             acdefines_flags += ' -DBUILD_FASTER=1'
 
             fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
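
The acdefines_flags expression above flattens the configure defines into
preprocessor arguments, shell-quoting each value. Roughly, with pipes.quote
standing in for mozbuild's shell_quote:

    from pipes import quote  # shlex.quote on modern Python 3

    acdefines = {'MOZILLA_OFFICIAL': 1, 'MOZ_APP_NAME': 'firefox'}

    acdefines_flags = ' '.join('-D%s=%s' % (name, quote(str(value)))
                               for (name, value) in sorted(acdefines.items()))
    acdefines_flags += ' -DAB_CD=en-US'  # AB_CD only exists in Makefiles
    print(acdefines_flags)
    # -DMOZILLA_OFFICIAL=1 -DMOZ_APP_NAME=firefox -DAB_CD=en-US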
@@ -724,22 +719,23 @@ class TupBackend(CommonBackend):
                 print("Found old tup root at '%s', removing..." %
                       mozpath.join(self.environment.topsrcdir, '.tup'))
                 shutil.rmtree(mozpath.join(self.environment.topsrcdir, '.tup'))
         if not os.path.isdir(mozpath.join(tup_base_dir, '.tup')):
             if tup_base_dir != self.environment.topsrcdir:
                 # Ask the user to figure out where to run 'tup init' before
                 # continuing.
                 raise Exception("Please run `tup init --no-sync` in a common "
-                    "ancestor directory of your objdir and srcdir, possibly "
-                    "%s. To reduce file scanning overhead, this directory "
-                    "should contain the fewest files possible that are not "
-                    "necessary for this build." % tup_base_dir)
+                                "ancestor directory of your objdir and srcdir, possibly "
+                                "%s. To reduce file scanning overhead, this directory "
+                                "should contain the fewest files possible that are not "
+                                "necessary for this build." % tup_base_dir)
             tup = self.environment.substs.get('TUP', 'tup')
-            self._cmd.run_process(cwd=tup_base_dir, log_name='tup', args=[tup, 'init', '--no-sync'])
+            self._cmd.run_process(cwd=tup_base_dir, log_name='tup',
+                                  args=[tup, 'init', '--no-sync'])
 
     def _get_cargo_flags(self, obj):
 
         def output_flags(obj):
             if isinstance(obj, RustLibrary):
                 return ['--lib']
             if isinstance(obj, RustProgram):
                 return ['--bin', obj.name]
@@ -979,26 +975,25 @@ class TupBackend(CommonBackend):
                     if invocation['target_kind'][0] == 'bin' and link in outputs:
                         # Additionally link the program to its final target.
                         rust_backend_file.symlink_rule(link,
                                                        mozpath.join(self.environment.topobjdir,
                                                                     obj.install_target,
                                                                     obj.name),
                                                        output_group)
 
-
         for val in enumerate(invocations):
             _process(*val)
 
-
     def _gen_rust_rules(self, obj, backend_file):
         cargo_flags = self._get_cargo_flags(obj)
         cargo_env = self._get_cargo_env(obj, backend_file)
 
         output_lines = []
+
         def accumulate_output(line):
             output_lines.append(line)
 
         cargo_status = self._cmd.run_process(
             [self.environment.substs['CARGO'], 'build'] + cargo_flags,
             line_handler=accumulate_output,
             ensure_exit_code=False,
             explicit_env=cargo_env)
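
run_process with a line_handler lets the backend capture cargo's stdout
line by line rather than streaming it to the terminal; the accumulated
lines are parsed as JSON just below. The same pattern with plain
subprocess (a generic sketch, not the mozbuild API):

    import json
    import subprocess

    output_lines = []

    def accumulate_output(line):
        output_lines.append(line)

    # 'cargo metadata' also emits a single JSON document on stdout.
    proc = subprocess.Popen(['cargo', 'metadata', '--format-version', '1'],
                            stdout=subprocess.PIPE)
    for line in proc.stdout:
        accumulate_output(line.decode('utf-8'))
    proc.wait()

    plan = json.loads(''.join(output_lines))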
@@ -1009,29 +1004,28 @@ class TupBackend(CommonBackend):
         cargo_plan = json.loads(''.join(output_lines))
 
         output_group = self._rust_libs
         if isinstance(obj, RustLibrary) and obj.output_category:
             output_group = self._rust_output_group(obj.output_category)
         self._gen_cargo_rules(obj, cargo_plan, cargo_env, output_group)
         self.backend_input_files |= set(cargo_plan['inputs'])
 
-
     def _process_generated_file(self, backend_file, obj):
         if obj.script and obj.method:
             backend_file.export_shell()
             cmd = self._py_action('file_generate')
             if obj.localized:
                 cmd.append('--locale=en-US')
             cmd.extend([
                 obj.script,
                 obj.method,
                 obj.outputs[0],
-                '%s.pp' % obj.outputs[0], # deps file required
-                'unused', # deps target is required
+                '%s.pp' % obj.outputs[0],  # deps file required
+                'unused',  # deps target is required
             ])
             full_inputs = [f.full_path for f in obj.inputs]
             cmd.extend(full_inputs)
             cmd.extend(shell_quote(f) for f in obj.flags)
 
             outputs = []
             outputs.extend(obj.outputs)
             outputs.append('%s.pp' % obj.outputs[0])
@@ -1158,40 +1152,42 @@ class TupBackend(CommonBackend):
                                 install_dir = prefix[len(obj.srcdir) + 1:]
                                 output = p
                                 if f.target_basename and '*' not in f.target_basename:
                                     output = mozpath.join(f.target_basename, output)
                                 backend_file.symlink_rule(mozpath.join(prefix, p),
                                                           output=mozpath.join(output_dir, output),
                                                           output_group=output_group)
                     else:
-                        backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
+                        backend_file.symlink_rule(
+                            f.full_path, output=f.target_basename, output_group=output_group)
                 else:
                     if (self.environment.is_artifact_build and
                         any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
                         # If we have an artifact build we never would have generated this file,
                         # so do not attempt to install it.
                         continue
 
                     output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                           f.target_basename)
                     gen_backend_file = self._get_backend_file(f.context.relobjdir)
                     if gen_backend_file.requires_delay([f]):
-                        gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
+                        gen_backend_file.delayed_installed_files.append(
+                            (f.full_path, output, output_group))
                     else:
                         gen_backend_file.symlink_rule(f.full_path, output=output,
                                                       output_group=output_group)
 
-
     def _process_final_target_pp_files(self, obj, backend_file):
         for i, (path, files) in enumerate(obj.files.walk()):
             self._add_features(obj.install_target, path)
             for f in files:
                 self._preprocess(backend_file, f.full_path,
-                                 destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path),
+                                 destdir=mozpath.join(self.environment.topobjdir,
+                                                      obj.install_target, path),
                                  target=f.target_basename)
 
     def _process_computed_flags(self, obj, backend_file):
         for var, flags in obj.get_flags():
             backend_file.local_flags[var] = flags
 
     def _process_unified_sources(self, obj):
         backend_file = self._get_backend_file_for(obj)
@@ -1310,17 +1306,18 @@ class TupBackend(CommonBackend):
             '--msg-metadata=%s/message-metadata.ini' % srcdir,
             '--outheaders-dir=%s' % outheaderdir,
             '--outcpp-dir=.',
         ]
         ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources))
         cmd.extend(['-I%s' % d for d in ipdldirs])
         cmd.extend(sorted_ipdl_sources)
 
-        outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
+        outputs = ['IPCMessageTypeName.cpp', mozpath.join(
+            outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
 
         for filename in sorted_ipdl_sources:
             filepath, ext = os.path.splitext(filename)
             dirname, basename = os.path.split(filepath)
             dirname = mozpath.relpath(dirname, self.environment.topsrcdir)
 
             extensions = ['']
             if ext == '.ipdl':
@@ -1374,9 +1371,10 @@ class TupBackend(CommonBackend):
             outputs=outputs,
             output_group=self._installed_files,
             check_unchanged=True,
         )
         backend_file.sources['.cpp'].extend(u[0] for u in unified_source_mapping)
         backend_file.sources['.cpp'].extend(sorted(global_define_files))
 
         test_backend_file = self._get_backend_file('dom/bindings/test')
-        test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
+        test_backend_file.sources['.cpp'].extend(
+            sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
--- a/python/mozbuild/mozbuild/backend/visualstudio.py
+++ b/python/mozbuild/mozbuild/backend/visualstudio.py
@@ -29,31 +29,35 @@ from ..frontend.data import (
     Sources,
     UnifiedSources,
 )
 from mozbuild.base import ExecutionSummary
 
 
 MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
 
+
 def get_id(name):
     return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
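
Because uuid5 is name-based rather than random, regenerating the projects
yields the same GUID for the same project name, so Visual Studio does not
treat every backend run as producing brand-new projects:

    import uuid

    def get_id(name):
        return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()

    assert get_id('target_full') == get_id('target_full')  # deterministic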
 
+
 def visual_studio_product_to_solution_version(version):
     if version == '2017':
         return '12.00', '15'
     else:
         raise Exception('Unknown version seen: %s' % version)
 
+
 def visual_studio_product_to_platform_toolset_version(version):
     if version == '2017':
         return 'v141'
     else:
         raise Exception('Unknown version seen: %s' % version)
 
+
 class VisualStudioBackend(CommonBackend):
     """Generate Visual Studio project files.
 
     This backend is used to produce Visual Studio projects and a solution
     to foster developing Firefox with Visual Studio.
 
     This backend is currently considered experimental. There are many things
     not optimal about how it works.
@@ -95,17 +99,17 @@ class VisualStudioBackend(CommonBackend)
             self._add_sources(reldir, obj)
 
         elif isinstance(obj, GeneratedSources):
             self._add_sources(reldir, obj)
 
         elif isinstance(obj, UnifiedSources):
             # XXX we should be letting CommonBackend.consume_object call this
             # for us instead.
-            self._process_unified_sources(obj);
+            self._process_unified_sources(obj)
 
         elif isinstance(obj, Library):
             self._libs_to_paths[obj.basename] = reldir
 
         elif isinstance(obj, Program) or isinstance(obj, HostProgram):
             self._progs_to_paths[obj.program] = reldir
 
         elif isinstance(obj, Defines):
@@ -128,37 +132,37 @@ class VisualStudioBackend(CommonBackend)
         s = self._paths_to_sources.setdefault(reldir, set())
         s.update(obj.files)
 
     def consume_finished(self):
         out_dir = self._out_dir
         out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
 
         projects = self._write_projects_for_sources(self._libs_to_paths,
-            "library", out_proj_dir)
+                                                    "library", out_proj_dir)
         projects.update(self._write_projects_for_sources(self._progs_to_paths,
-            "binary", out_proj_dir))
+                                                         "binary", out_proj_dir))
 
         # Generate projects that can be used to build common targets.
         for target in ('export', 'binaries', 'tools', 'full'):
             basename = 'target_%s' % target
             command = '$(SolutionDir)\\mach.bat build'
             if target != 'full':
                 command += ' %s' % target
 
             project_id = self._write_vs_project(out_proj_dir, basename, target,
-                build_command=command,
-                clean_command='$(SolutionDir)\\mach.bat build clean')
+                                                build_command=command,
+                                                clean_command='$(SolutionDir)\\mach.bat build clean')
 
             projects[basename] = (project_id, basename, target)
 
         # A project that can be used to regenerate the visual studio projects.
         basename = 'target_vs'
         project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
-            build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
+                                            build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
         projects[basename] = (project_id, basename, 'visual-studio')
 
         # Write out a shared property file with common variables.
         props_path = os.path.join(out_proj_dir, 'mozilla.props')
         with self._write_file(props_path, mode='rb') as fh:
             self._write_props(fh)
 
         # Generate some wrapper scripts that allow us to invoke mach inside
@@ -185,28 +189,28 @@ class VisualStudioBackend(CommonBackend)
             sources = self._paths_to_sources.get(path, set())
             sources = set(os.path.join('$(TopSrcDir)', path, s) for s in sources)
             sources = set(os.path.normpath(s) for s in sources)
 
             finder = FileFinder(os.path.join(self.environment.topsrcdir, path))
 
             headers = [t[0] for t in finder.find('*.h')]
             headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
-                path, f)) for f in headers]
+                                                     path, f)) for f in headers]
 
             includes = [
                 os.path.join('$(TopSrcDir)', path),
                 os.path.join('$(TopObjDir)', path),
             ]
             includes.extend(self._paths_to_includes.get(path, []))
             includes.append('$(TopObjDir)\\dist\\include\\nss')
             includes.append('$(TopObjDir)\\dist\\include')
 
             for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
-                    'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
+                      'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
                 if not config:
                     break
 
                 args = config.substs.get(v, [])
 
                 for i, arg in enumerate(args):
                     if arg.startswith('-I'):
                         includes.append(os.path.normpath(arg[2:]))
@@ -218,35 +222,36 @@ class VisualStudioBackend(CommonBackend)
 
             defines = []
             for k, v in self._paths_to_defines.get(path, {}).items():
                 if v is True:
                     defines.append(k)
                 else:
                     defines.append('%s=%s' % (k, v))
 
-            debugger=None
+            debugger = None
             if prefix == 'binary':
                 if item.startswith(self.environment.substs['MOZ_APP_NAME']):
                     app_args = '-no-remote -profile $(TopObjDir)\\tmp\\profile-default'
                     if self.environment.substs.get('MOZ_LAUNCHER_PROCESS', False):
                         app_args += ' -wait-for-browser'
                     debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, app_args)
                 else:
                     debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '')
 
             basename = '%s_%s' % (prefix, item)
 
             project_id = self._write_vs_project(out_dir, basename, item,
-                includes=includes,
-                forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
-                defines=defines,
-                headers=headers,
-                sources=sources,
-                debugger=debugger)
+                                                includes=includes,
+                                                forced_includes=[
+                                                    '$(TopObjDir)\\dist\\include\\mozilla-config.h'],
+                                                defines=defines,
+                                                headers=headers,
+                                                sources=sources,
+                                                debugger=debugger)
 
             projects[basename] = (project_id, basename, item)
 
         return projects
 
     def _write_solution(self, fh, projects):
         # Visual Studio appears to write out its current version in the
         # solution file. Instead of trying to figure out what version it will
@@ -411,28 +416,28 @@ class VisualStudioBackend(CommonBackend)
         yield 'TOPSRCDIR', self.environment.topsrcdir
         yield 'TOPOBJDIR', self.environment.topobjdir
 
     def _write_mach_powershell(self, fh):
         for k, v in self._relevant_environment_variables():
             fh.write(b'$env:%s = "%s"\r\n' % (k, v))
 
         relpath = os.path.relpath(self.environment.topsrcdir,
-            self.environment.topobjdir).replace('\\', '/')
+                                  self.environment.topobjdir).replace('\\', '/')
 
         fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
         fh.write(b'$bashargs = $bashargs + $args\r\n')
 
         fh.write(b"$expanded = $bashargs -join ' '\r\n")
         fh.write(b'$procargs = "-c", $expanded\r\n')
 
         fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
-            b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
-            b'-ArgumentList $procargs '
-            b'-Wait -NoNewWindow\r\n')
+                 b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
+                 b'-ArgumentList $procargs '
+                 b'-Wait -NoNewWindow\r\n')
 
     def _write_mach_batch(self, fh):
         """Write out a batch script that builds the tree.
 
         The script "bootstraps" into the MozillaBuild environment by setting
         the environment variables that are active in the current MozillaBuild
         environment. Then, it builds the tree.
         """
@@ -440,44 +445,44 @@ class VisualStudioBackend(CommonBackend)
             fh.write(b'SET "%s=%s"\r\n' % (k, v))
 
         fh.write(b'cd %TOPOBJDIR%\r\n')
 
         # We need to convert Windows-native paths to msys paths. Easiest way is
         # relative paths, since munging c:\ to /c/ is slightly more
         # complicated.
         relpath = os.path.relpath(self.environment.topsrcdir,
-            self.environment.topobjdir).replace('\\', '/')
+                                  self.environment.topobjdir).replace('\\', '/')
 
         # We go through mach because it has the logic for choosing the most
         # appropriate build tool.
         fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
-            b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
+                 b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
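
Using a relative path sidesteps the c:\ to /c/ munging entirely: the
relative path is computed with Windows separators and then flipped to
forward slashes, which msys bash accepts. Illustrated with ntpath so it
runs on any platform (example paths invented):

    import ntpath  # Windows path semantics, importable everywhere

    topsrcdir = 'C:\\mozilla-source\\mozilla-central'
    topobjdir = 'C:\\mozilla-source\\obj-ff'

    relpath = ntpath.relpath(topsrcdir, topobjdir).replace('\\', '/')
    assert relpath == '../mozilla-central'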
 
     def _write_vs_project(self, out_dir, basename, name, **kwargs):
         root = '%s.vcxproj' % basename
         project_id = get_id(basename.encode('utf-8'))
 
         with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
             project_id, name = VisualStudioBackend.write_vs_project(fh,
-                self._version, project_id, name, **kwargs)
+                                                                    self._version, project_id, name, **kwargs)
 
         with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
             fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
             fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
-                MSBUILD_NAMESPACE)
+                     MSBUILD_NAMESPACE)
             fh.write('</Project>\r\n')
 
         return project_id
 
     @staticmethod
     def write_vs_project(fh, version, project_id, name, includes=[],
-        forced_includes=[], defines=[],
-        build_command=None, clean_command=None,
-        debugger=None, headers=[], sources=[]):
+                         forced_includes=[], defines=[],
+                         build_command=None, clean_command=None,
+                         debugger=None, headers=[], sources=[]):
 
         impl = getDOMImplementation()
         doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
 
         project = doc.documentElement
         project.setAttribute('DefaultTargets', 'Build')
         project.setAttribute('ToolsVersion', '4.0')
         project.setAttribute('xmlns', MSBUILD_NAMESPACE)
@@ -505,17 +510,18 @@ class VisualStudioBackend(CommonBackend)
 
         g = pg.appendChild(doc.createElement('ProjectGuid'))
         g.appendChild(doc.createTextNode('{%s}' % project_id))
 
         rn = pg.appendChild(doc.createElement('RootNamespace'))
         rn.appendChild(doc.createTextNode('mozilla'))
 
         pts = pg.appendChild(doc.createElement('PlatformToolset'))
-        pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
+        pts.appendChild(doc.createTextNode(
+            visual_studio_product_to_platform_toolset_version(version)))
 
         i = project.appendChild(doc.createElement('Import'))
         i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
 
         ig = project.appendChild(doc.createElement('ImportGroup'))
         ig.setAttribute('Label', 'ExtensionTargets')
 
         ig = project.appendChild(doc.createElement('ImportGroup'))
--- a/python/mozbuild/mozbuild/base.py
+++ b/python/mozbuild/mozbuild/base.py
@@ -48,48 +48,52 @@ def ancestors(path):
     """Emit the parent directories of a path."""
     while path:
         yield path
         newpath = os.path.dirname(path)
         if newpath == path:
             break
         path = newpath
 
+
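
For a POSIX-style path, ancestors() yields the path itself and then each
parent up to the root, stopping once dirname() stops making progress:

    import os

    def ancestors(path):  # as defined above
        while path:
            yield path
            newpath = os.path.dirname(path)
            if newpath == path:
                break
            path = newpath

    assert list(ancestors('/mozilla/python/mozbuild')) == [
        '/mozilla/python/mozbuild', '/mozilla/python', '/mozilla', '/']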
 def samepath(path1, path2):
     if hasattr(os.path, 'samefile'):
         return os.path.samefile(path1, path2)
     return os.path.normcase(os.path.realpath(path1)) == \
         os.path.normcase(os.path.realpath(path2))
 
+
 class BadEnvironmentException(Exception):
     """Base class for errors raised when the build environment is not sane."""
 
 
 class BuildEnvironmentNotFoundException(BadEnvironmentException):
     """Raised when we could not find a build environment."""
 
 
 class ObjdirMismatchException(BadEnvironmentException):
     """Raised when the current dir is an objdir and doesn't match the mozconfig."""
+
     def __init__(self, objdir1, objdir2):
         self.objdir1 = objdir1
         self.objdir2 = objdir2
 
     def __str__(self):
         return "Objdir mismatch: %s != %s" % (self.objdir1, self.objdir2)
 
 
 class MozbuildObject(ProcessExecutionMixin):
     """Base class providing basic functionality useful to many modules.
 
     Modules in this package typically require common functionality such as
     accessing the current config, getting the location of the source directory,
     running processes, etc. This class provides that functionality. Other
     modules can inherit from this class to obtain this functionality easily.
     """
+
     def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
                  mozconfig=MozconfigLoader.AUTODETECT):
         """Create a new Mozbuild object instance.
 
         Instances are bound to a source directory, a ConfigSettings instance,
         and a LogManager instance. The topobjdir may be passed in as well. If
         it isn't, it will be calculated from the active mozconfig.
         """
@@ -171,32 +175,32 @@ class MozbuildObject(ProcessExecutionMix
                 'Could not find Mozilla source tree or build environment.')
 
         topsrcdir = mozpath.normsep(topsrcdir)
         if topobjdir:
             topobjdir = mozpath.normsep(os.path.normpath(topobjdir))
 
             if topsrcdir == topobjdir:
                 raise BadEnvironmentException('The object directory appears '
-                    'to be the same as your source directory (%s). This build '
-                    'configuration is not supported.' % topsrcdir)
+                                              'to be the same as your source directory (%s). This build '
+                                              'configuration is not supported.' % topsrcdir)
 
         # If we can't resolve topobjdir, oh well. We'll figure out when we need
         # one.
         return cls(topsrcdir, None, None, topobjdir=topobjdir,
                    mozconfig=mozconfig)
 
     def resolve_mozconfig_topobjdir(self, default=None):
         topobjdir = self.mozconfig['topobjdir'] or default
         if not topobjdir:
             return None
 
         if '@CONFIG_GUESS@' in topobjdir:
             topobjdir = topobjdir.replace('@CONFIG_GUESS@',
-                self.resolve_config_guess())
+                                          self.resolve_config_guess())
 
         if not os.path.isabs(topobjdir):
             topobjdir = os.path.abspath(os.path.join(self.topsrcdir, topobjdir))
 
         return mozpath.normsep(os.path.normpath(topobjdir))
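
The placeholder substitution performed by resolve_mozconfig_topobjdir, in
isolation with invented values:

    import os

    topobjdir = 'obj-@CONFIG_GUESS@'
    config_guess = 'x86_64-pc-linux-gnu'  # e.g. from resolve_config_guess()

    if '@CONFIG_GUESS@' in topobjdir:
        topobjdir = topobjdir.replace('@CONFIG_GUESS@', config_guess)

    if not os.path.isabs(topobjdir):
        topobjdir = os.path.join('/src/mozilla-central', topobjdir)

    print(topobjdir)  # /src/mozilla-central/obj-x86_64-pc-linux-gnu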
 
     def build_out_of_date(self, output, dep_file):
         if not os.path.isfile(output):
@@ -248,19 +252,20 @@ class MozbuildObject(ProcessExecutionMix
                 default='obj-@CONFIG_GUESS@')
 
         return self._topobjdir
 
     @property
     def virtualenv_manager(self):
         if self._virtualenv_manager is None:
             self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
-                self.topobjdir, os.path.join(self.topobjdir, '_virtualenvs', 'init'),
-                sys.stdout, os.path.join(self.topsrcdir, 'build',
-                'virtualenv_packages.txt'))
+                                                         self.topobjdir, os.path.join(
+                                                             self.topobjdir, '_virtualenvs', 'init'),
+                                                         sys.stdout, os.path.join(self.topsrcdir, 'build',
+                                                                                  'virtualenv_packages.txt'))
 
         return self._virtualenv_manager
 
     @staticmethod
     @memoize
     def get_mozconfig_and_target(topsrcdir, path, env_mozconfig):
         # env_mozconfig is only useful for unittests, which change the value of
         # the environment variable, which has an impact on autodetection (when
@@ -485,17 +490,16 @@ class MozbuildObject(ProcessExecutionMix
                         raise Exception('working directory is not clean; '
                                         'refusing to use a VCS-based finder')
 
             finder = MercurialRevisionFinder(self.topsrcdir, rev=vcs_revision,
                                              recognize_repo_paths=True)
 
         return BuildReader(config, finder=finder)
 
-
     @memoized_property
     def python3(self):
         """Obtain info about a Python 3 executable.
 
         Returns a tuple of an executable path and its version (as a tuple).
         Either both entries will have a value or both will be None.
         """
         # Search configured build info first. Then fall back to system.
@@ -537,20 +541,20 @@ class MozbuildObject(ProcessExecutionMix
         substs = self.substs
 
         stem = self.distdir
         if where == 'staged-package':
             stem = os.path.join(stem, substs['MOZ_APP_NAME'])
 
         if substs['OS_ARCH'] == 'Darwin':
             if substs['MOZ_BUILD_APP'] == 'xulrunner':
-                stem = os.path.join(stem, 'XUL.framework');
+                stem = os.path.join(stem, 'XUL.framework')
             else:
                 stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
-                    'MacOS')
+                                    'MacOS')
         elif where == 'default':
             stem = os.path.join(stem, 'bin')
 
         leaf = None
 
         leaf = (substs['MOZ_APP_NAME'] if what == 'app' else what) + substs['BIN_SUFFIX']
         path = os.path.join(stem, leaf)
 
@@ -573,23 +577,24 @@ class MozbuildObject(ProcessExecutionMix
             return
 
         try:
             if sys.platform.startswith('darwin'):
                 try:
                     notifier = which.which('terminal-notifier')
                 except which.WhichError:
                     raise Exception('Install terminal-notifier to get '
-                        'a notification when the build finishes.')
+                                    'a notification when the build finishes.')
                 self.run_process([notifier, '-title',
-                    'Mozilla Build System', '-group', 'mozbuild',
-                    '-message', msg], ensure_exit_code=False)
+                                  'Mozilla Build System', '-group', 'mozbuild',
+                                  '-message', msg], ensure_exit_code=False)
             elif sys.platform.startswith('win'):
                 from ctypes import Structure, windll, POINTER, sizeof
                 from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT
+
                 class FLASHWINDOW(Structure):
                     _fields_ = [("cbSize", UINT),
                                 ("hwnd", HANDLE),
                                 ("dwFlags", DWORD),
                                 ("uCount", UINT),
                                 ("dwTimeout", DWORD)]
                 FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
                 FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
@@ -599,31 +604,31 @@ class MozbuildObject(ProcessExecutionMix
 
                 # GetConsoleWindows returns NULL if no console is attached. We
                 # can't flash nothing.
                 console = windll.kernel32.GetConsoleWindow()
                 if not console:
                     return
 
                 params = FLASHWINDOW(sizeof(FLASHWINDOW),
-                                    console,
-                                    FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
+                                     console,
+                                     FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
                 FlashWindowEx(params)
             else:
                 try:
                     notifier = which.which('notify-send')
                 except which.WhichError:
                     raise Exception('Install notify-send (usually part of '
-                        'the libnotify package) to get a notification when '
-                        'the build finishes.')
+                                    'the libnotify package) to get a notification when '
+                                    'the build finishes.')
                 self.run_process([notifier, '--app-name=Mozilla Build System',
-                    'Mozilla Build System', msg], ensure_exit_code=False)
+                                  'Mozilla Build System', msg], ensure_exit_code=False)
         except Exception as e:
             self.log(logging.WARNING, 'notifier-failed', {'error':
-                e.message}, 'Notification center failed: {error}')
+                                                          e.message}, 'Notification center failed: {error}')
 
     def _ensure_objdir_exists(self):
         if os.path.isdir(self.statedir):
             return
 
         os.makedirs(self.statedir)
 
     def _ensure_state_subdir_exists(self, subdir):
@@ -641,20 +646,20 @@ class MozbuildObject(ProcessExecutionMix
             path = os.path.join(path, subdir)
 
         return os.path.join(path, filename)
 
     def _wrap_path_argument(self, arg):
         return PathArgument(arg, self.topsrcdir, self.topobjdir)
 
     def _run_make(self, directory=None, filename=None, target=None, log=True,
-            srcdir=False, allow_parallel=True, line_handler=None,
-            append_env=None, explicit_env=None, ignore_errors=False,
-            ensure_exit_code=0, silent=True, print_directory=True,
-            pass_thru=False, num_jobs=0, keep_going=False):
+                  srcdir=False, allow_parallel=True, line_handler=None,
+                  append_env=None, explicit_env=None, ignore_errors=False,
+                  ensure_exit_code=0, silent=True, print_directory=True,
+                  pass_thru=False, num_jobs=0, keep_going=False):
         """Invoke make.
 
         directory -- Relative directory to look for Makefile in.
         filename -- Explicit makefile to run.
         target -- Makefile target(s) to make. Can be a string or iterable of
             strings.
         srcdir -- If True, invoke make from the source directory tree.
             Otherwise, make will be invoked from the object directory.
@@ -786,21 +791,21 @@ class MozbuildObject(ProcessExecutionMix
             result, xcode_lisense_error_tmp = validate_make(make)
             if result:
                 return [make]
             if xcode_lisense_error_tmp:
                 xcode_lisense_error = True
 
         if xcode_lisense_error:
             raise Exception('Xcode requires accepting the license agreement.\n'
-                'Please run Xcode and accept the license agreement.')
+                            'Please run Xcode and accept the license agreement.')
 
         if self._is_windows():
             raise Exception('Could not find a suitable make implementation.\n'
-                'Please use MozillaBuild 1.9 or newer')
+                            'Please use MozillaBuild 1.9 or newer')
         else:
             raise Exception('Could not find a suitable make implementation.')
 
     def _run_command_in_srcdir(self, **args):
         return self.run_process(cwd=self.topsrcdir, **args)
 
     def _run_command_in_objdir(self, **args):
         return self.run_process(cwd=self.topobjdir, **args)
@@ -815,32 +820,32 @@ class MozbuildObject(ProcessExecutionMix
         """Create a new MozbuildObject-derived class instance from ourselves.
 
         This is used as a convenience method to create other
         MozbuildObject-derived class instances. It can only be used on
         classes that have the same constructor arguments as us.
         """
 
         return cls(self.topsrcdir, self.settings, self.log_manager,
-            topobjdir=self.topobjdir)
+                   topobjdir=self.topobjdir)
 
     def _activate_virtualenv(self):
         self.virtualenv_manager.ensure()
         self.virtualenv_manager.activate()
 
-
     def _set_log_level(self, verbose):
         self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
 
     def ensure_pipenv(self):
         self._activate_virtualenv()
         pipenv = os.path.join(self.virtualenv_manager.bin_path, 'pipenv')
         if not os.path.exists(pipenv):
             for package in ['certifi', 'pipenv', 'six', 'virtualenv', 'virtualenv-clone']:
-                path = os.path.normpath(os.path.join(self.topsrcdir, 'third_party/python', package))
+                path = os.path.normpath(os.path.join(
+                    self.topsrcdir, 'third_party/python', package))
                 self.virtualenv_manager.install_pip_package(path, vendored=True)
         return pipenv
 
     def activate_pipenv(self, pipfile=None, populate=False, python=None):
         if pipfile is not None and not os.path.exists(pipfile):
             raise Exception('Pipfile not found: %s.' % pipfile)
         self.ensure_pipenv()
         self.virtualenv_manager.activate_pipenv(pipfile, populate, python)
@@ -856,59 +861,59 @@ class MachCommandBase(MozbuildObject):
     def __init__(self, context):
         # Attempt to discover topobjdir through environment detection, as it is
         # more reliable than mozconfig when cwd is inside an objdir.
         topsrcdir = context.topdir
         topobjdir = None
         detect_virtualenv_mozinfo = True
         if hasattr(context, 'detect_virtualenv_mozinfo'):
             detect_virtualenv_mozinfo = getattr(context,
-                'detect_virtualenv_mozinfo')
+                                                'detect_virtualenv_mozinfo')
         try:
             dummy = MozbuildObject.from_environment(cwd=context.cwd,
-                detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
+                                                    detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
             topsrcdir = dummy.topsrcdir
             topobjdir = dummy._topobjdir
             if topobjdir:
                 # If we're inside an objdir and the found mozconfig resolves to
                 # another objdir, we abort. The reasoning here is that if you
                 # are inside an objdir you probably want to perform actions on
                 # that objdir, not another one. This prevents accidental usage
                 # of the wrong objdir when the current objdir is ambiguous.
                 config_topobjdir = dummy.resolve_mozconfig_topobjdir()
 
                 if config_topobjdir and not samepath(topobjdir, config_topobjdir):
                     raise ObjdirMismatchException(topobjdir, config_topobjdir)
         except BuildEnvironmentNotFoundException:
             pass
         except ObjdirMismatchException as e:
             print('Ambiguous object directory detected. We detected that '
-                'both %s and %s could be object directories. This is '
-                'typically caused by having a mozconfig pointing to a '
-                'different object directory from the current working '
-                'directory. To solve this problem, ensure you do not have a '
-                'default mozconfig in searched paths.' % (e.objdir1,
-                    e.objdir2))
+                  'both %s and %s could be object directories. This is '
+                  'typically caused by having a mozconfig pointing to a '
+                  'different object directory from the current working '
+                  'directory. To solve this problem, ensure you do not have a '
+                  'default mozconfig in searched paths.' % (e.objdir1,
+                                                            e.objdir2))
             sys.exit(1)
 
         except MozconfigLoadException as e:
             print('Error loading mozconfig: ' + e.path)
             print('')
             print(e.message)
             if e.output:
                 print('')
                 print('mozconfig output:')
                 print('')
                 for line in e.output:
                     print(line)
 
             sys.exit(1)
 
         MozbuildObject.__init__(self, topsrcdir, context.settings,
-            context.log_manager, topobjdir=topobjdir)
+                                context.log_manager, topobjdir=topobjdir)
 
         self._mach_context = context
 
         # Incur mozconfig processing so we have unified error handling for
         # errors. Otherwise, the exceptions could bubble back to mach's error
         # handler.
         try:
             self.mozconfig
--- a/python/mozbuild/mozbuild/chunkify.py
+++ b/python/mozbuild/mozbuild/chunkify.py
@@ -49,9 +49,8 @@ def chunkify(things, this_chunk, chunks)
     dist = split_evenly(len(things), chunks)
     start = sum(dist[:this_chunk-1])
     end = start + dist[this_chunk-1]
 
     try:
         return things[start:end]
     except TypeError:
         return islice(things, start, end)
-
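
# [Editor's sketch, not part of the diff] A self-contained illustration of the
# chunking logic above. split_evenly_sketch is a hypothetical stand-in for the
# real split_evenly helper, assumed to distribute len(things) across `chunks`
# near-even bins.
from itertools import islice

def split_evenly_sketch(n, chunks):
    base, rem = divmod(n, chunks)
    return [base + (1 if i < rem else 0) for i in range(chunks)]

def chunkify_sketch(things, this_chunk, chunks):
    dist = split_evenly_sketch(len(things), chunks)
    start = sum(dist[:this_chunk - 1])      # chunks are 1-indexed
    end = start + dist[this_chunk - 1]
    try:
        return things[start:end]            # sliceable sequences
    except TypeError:
        return islice(things, start, end)   # arbitrary iterables

assert chunkify_sketch(list(range(10)), 1, 3) == [0, 1, 2, 3]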
--- a/python/mozbuild/mozbuild/codecoverage/chrome_map.py
+++ b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
@@ -21,16 +21,18 @@ from mozpack.copier import FileRegistry
 from mozpack.files import PreprocessedFile
 from mozpack.manifests import InstallManifest
 import mozpack.path as mozpath
 
 from manifest_handler import ChromeManifestHandler
 
 
 _line_comment_re = re.compile('^//@line (\d+) "(.+)"$')
+
+
 def generate_pp_info(path, topsrcdir):
     with open(path) as fh:
         # (start, end) -> (included_source, start)
         section_info = dict()
 
         this_section = None
 
         def finish_section(pp_end):
@@ -52,16 +54,18 @@ def generate_pp_info(path, topsrcdir):
 
         if this_section:
             finish_section(count + 2)
 
         return section_info
 
 # This build backend assumes the build has already happened, as it parses
 # built preprocessed files to generate data to map them to the original sources.
+
+
 class ChromeMapBackend(CommonBackend):
     def _init(self):
         CommonBackend._init(self)
 
         log_manager = LoggingManager()
         self._cmd = MozbuildObject(self.environment.topsrcdir, ConfigSettings(),
                                    log_manager, self.environment.topobjdir)
         self._install_mapping = {}
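
# [Editor's sketch, not part of the diff] How the //@line marker regex from
# generate_pp_info above is used; the sample line is illustrative only.
import re

line_comment_re = re.compile(r'^//@line (\d+) "(.+)"$')
m = line_comment_re.match('//@line 42 "/src/widget/foo.js"')
assert m is not None and m.groups() == ('42', '/src/widget/foo.js')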
--- a/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
+++ b/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
@@ -12,29 +12,31 @@ except ImportError:
     import urllib.parse as urlparse
 
 from six import viewitems
 
 from mozpack.chrome.manifest import parse_manifest
 import mozpack.path as mozpath
 from manifest_handler import ChromeManifestHandler
 
+
 class LcovRecord(object):
     __slots__ = ("test_name",
                  "source_file",
                  "functions",
                  "function_exec_counts",
                  "function_count",
                  "covered_function_count",
                  "branches",
                  "branch_count",
                  "covered_branch_count",
                  "lines",
                  "line_count",
                  "covered_line_count")
+
     def __init__(self):
         self.functions = {}
         self.function_exec_counts = {}
         self.branches = {}
         self.lines = {}
 
     def __iadd__(self, other):
 
@@ -67,16 +69,17 @@ class LcovRecord(object):
         self.function_exec_counts = {fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
                                      if fn_name in self.functions.values()}
         self.covered_function_count = len([c for c in self.function_exec_counts.values() if c])
         self.line_count = len(self.lines)
         self.covered_line_count = len([c for c, _ in self.lines.values() if c])
         self.branch_count = len(self.branches)
         self.covered_branch_count = len([c for c in self.branches.values() if c])
 
+
 class RecordRewriter(object):
     # Helper class for rewriting/splitting individual lcov records according
     # to what the preprocessor did.
     def __init__(self):
         self._ranges = None
 
     def _get_range(self, line):
         for start, end in self._ranges:
@@ -159,30 +162,32 @@ class RecordRewriter(object):
                 continue
             rewritten_branches[(new_ln, block_number, branch_number)] = taken
 
         record.branches = rewritten_branches
 
     def rewrite_record(self, record, pp_info):
         # Rewrite the lines in the given record according to preprocessor info
         # and split to additional records when pp_info has included file info.
-        self._current_pp_info = dict([(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
+        self._current_pp_info = dict(
+            [(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
         self._ranges = sorted(self._current_pp_info.keys())
         self._additions = {}
         self._rewrite_lines(record)
         self._rewrite_functions(record)
         self._rewrite_branches(record)
 
         record.resummarize()
 
         generated_records = self._additions.values()
         for r in generated_records:
             r.resummarize()
         return generated_records
 
+
 class LcovFile(object):
     # Simple parser/pretty-printer for lcov format.
     # lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php
 
     # TN:<test name>
     # SF:<absolute path to the source file>
     # FN:<line number of function start>,<function name>
     # FNDA:<execution count>,<function name>
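
# [Editor's sketch, not part of the diff] Splitting one lcov record line into
# its prefix and payload, per the format listed above (sample data invented):
line = 'FNDA:12,initialize'
prefix, _, payload = line.partition(':')
count, name = payload.split(',', 1)
assert (prefix, int(count), name) == ('FNDA', 12, 'initialize')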
@@ -399,16 +404,17 @@ class LcovFile(object):
 
     def parse_LF(self, line_count):
         self.current_record.line_count = line_count
 
 
 class UrlFinderError(Exception):
     pass
 
+
 class UrlFinder(object):
     # Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
     # and install manifests to find a path to the source file and the corresponding
     # (potentially pre-processed) file in the objdir.
     def __init__(self, chrome_map_path, appdir, gredir, extra_chrome_manifests):
         # Cached entries
         self._final_mapping = {}
 
@@ -575,43 +581,46 @@ class UrlFinder(object):
                     # e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js
                     parts = url_obj.path.split('.xpi!', 1)
                 else:
                     # We don't know how to handle this jar: path, so return it to the
                     # caller to make it print a warning.
                     return url_obj.path, None
 
                 dir_parts = parts[0].rsplit(app_name + '/', 1)
-                url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
+                url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
+                                                    'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
             elif '.xpi!' in url:
                 # This matching mechanism is quite brittle and based on examples seen in the wild.
                 # There's no rule to match the XPI name to the path in dist/xpi-stage.
                 parts = url_obj.path.split('.xpi!', 1)
                 addon_name = os.path.basename(parts[0])
                 if '-test@mozilla.org' in addon_name:
                     addon_name = addon_name[:-len('-test@mozilla.org')]
                 elif addon_name.endswith('@mozilla.org'):
                     addon_name = addon_name[:-len('@mozilla.org')]
-                url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'xpi-stage', addon_name, parts[1].lstrip('/')))
+                url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
+                                                    'xpi-stage', addon_name, parts[1].lstrip('/')))
         elif url_obj.scheme == 'file' and os.path.isabs(url_obj.path):
             path = url_obj.path
             if not os.path.isfile(path):
                 # This may have been in a profile directory that no
                 # longer exists.
                 return None
             if not path.startswith(self.topobjdir):
                 return path, None
             url = url_obj.path
         elif url_obj.scheme in ('http', 'https', 'javascript', 'data', 'about'):
             return None
 
         result = self.find_files(url)
         self._final_mapping[url] = result
         return result
 
+
 class LcovFileRewriter(object):
     # Class for partial parses of LCOV format and rewriting to resolve urls
     # and preprocessed file lines.
     def __init__(self, chrome_map_path, appdir='dist/bin/browser/', gredir='dist/bin/', extra_chrome_manifests=[]):
         self.url_finder = UrlFinder(chrome_map_path, appdir, gredir, extra_chrome_manifests)
         self.pp_rewriter = RecordRewriter()
 
     def rewrite_files(self, in_paths, output_file, output_suffix):
@@ -689,10 +698,11 @@ def main():
     for f in args.files:
         if os.path.isdir(f):
             files += [os.path.join(f, e) for e in os.listdir(f)]
         else:
             files.append(f)
 
     rewriter.rewrite_files(files, args.output_file, args.output_suffix)
 
+
 if __name__ == '__main__':
     main()
--- a/python/mozbuild/mozbuild/codecoverage/packager.py
+++ b/python/mozbuild/mozbuild/codecoverage/packager.py
@@ -12,16 +12,17 @@ import buildconfig
 from mozpack.copier import Jarrer, FileRegistry
 from mozpack.files import FileFinder, GeneratedFile
 from mozpack.manifests import (
     InstallManifest,
     UnreadableInstallManifest,
 )
 import mozpack.path as mozpath
 
+
 def describe_install_manifest(manifest, dest_dir):
     try:
         manifest = InstallManifest(manifest)
     except UnreadableInstallManifest:
         raise IOError(errno.EINVAL, 'Error parsing manifest file', manifest)
 
     reg = FileRegistry()
 
@@ -70,10 +71,11 @@ def cli(args=sys.argv[1:]):
     args = parser.parse_args(args)
 
     if not args.root:
         from buildconfig import topobjdir
         args.root = topobjdir
 
     return package_coverage_data(args.root, args.output_file)
 
+
 if __name__ == '__main__':
     sys.exit(cli())
--- a/python/mozbuild/mozbuild/compilation/codecomplete.py
+++ b/python/mozbuild/mozbuild/compilation/codecomplete.py
@@ -21,30 +21,30 @@ from mozbuild.shellutil import (
 )
 
 
 @CommandProvider
 class Introspection(MachCommandBase):
     """Instropection commands."""
 
     @Command('compileflags', category='devenv',
-        description='Display the compilation flags for a given source file')
+             description='Display the compilation flags for a given source file')
     @CommandArgument('what', default=None,
-        help='Source file to display compilation flags for')
+                     help='Source file to display compilation flags for')
     def compileflags(self, what):
         from mozbuild.util import resolve_target_to_make
         from mozbuild.compilation import util
 
         if not util.check_top_objdir(self.topobjdir):
             return 1
 
         path_arg = self._wrap_path_argument(what)
 
         make_dir, make_target = resolve_target_to_make(self.topobjdir,
-            path_arg.relpath())
+                                                       path_arg.relpath())
 
         if make_dir is None and make_target is None:
             return 1
 
         build_vars = util.get_build_vars(make_dir, self)
 
         if what.endswith('.c'):
             cc = 'CC'
--- a/python/mozbuild/mozbuild/compilation/database.py
+++ b/python/mozbuild/mozbuild/compilation/database.py
@@ -171,18 +171,18 @@ class CompileDBBackend(CommonBackend):
         '.mm': 'CXXFLAGS',
     }
 
     def _build_db_line(self, objdir, reldir, cenv, filename,
                        canonical_suffix, unified=None):
         if canonical_suffix not in self.COMPILERS:
             return
         db = self._db.setdefault((objdir, filename, unified),
-            cenv.substs[self.COMPILERS[canonical_suffix]].split() +
-            ['-o', '/dev/null', '-c'])
+                                 cenv.substs[self.COMPILERS[canonical_suffix]].split() +
+                                 ['-o', '/dev/null', '-c'])
         reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)
 
         def append_var(name):
             value = cenv.substs.get(name)
             if not value:
                 return
             if isinstance(value, types.StringTypes):
                 value = value.split()
--- a/python/mozbuild/mozbuild/compilation/util.py
+++ b/python/mozbuild/mozbuild/compilation/util.py
@@ -1,44 +1,47 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 from mozbuild import shellutil
 
+
 def check_top_objdir(topobjdir):
     top_make = os.path.join(topobjdir, 'Makefile')
     if not os.path.exists(top_make):
         print('Your tree has not been built yet. Please run '
-            '|mach build| with no arguments.')
+              '|mach build| with no arguments.')
         return False
     return True
 
+
 def get_build_vars(directory, cmd):
     build_vars = {}
 
     def on_line(line):
         elements = [s.strip() for s in line.split('=', 1)]
 
         if len(elements) != 2:
             return
 
         build_vars[elements[0]] = elements[1]
 
     try:
         old_logger = cmd.log_manager.replace_terminal_handler(None)
         cmd._run_make(directory=directory, target='showbuild', log=False,
-                print_directory=False, allow_parallel=False, silent=True,
-                line_handler=on_line)
+                      print_directory=False, allow_parallel=False, silent=True,
+                      line_handler=on_line)
     finally:
         cmd.log_manager.replace_terminal_handler(old_logger)
 
     return build_vars
 
+
 def sanitize_cflags(flags):
     # We filter out -Xclang arguments as clang based tools typically choke on
     # passing these flags down to the clang driver.  -Xclang tells the clang
     # driver to pass whatever comes after it down to clang cc1, which is
     # why we skip -Xclang and the argument immediately after it.  Here is an
     # example: the following two invocations pass |-foo -bar -baz| to cc1:
     # clang -cc1 -foo -bar -baz
     # clang -Xclang -foo -Xclang -bar -Xclang -baz
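
# [Editor's sketch, not part of the diff] A minimal implementation of the
# -Xclang filtering described above; the real sanitize_cflags body follows
# this comment in the source.
def sanitize_cflags_sketch(flags):
    sanitized = []
    skip_next = False
    for flag in flags:
        if skip_next:
            skip_next = False    # drop the cc1 argument that followed -Xclang
        elif flag == '-Xclang':
            skip_next = True     # drop -Xclang itself; remember to drop the next arg
        else:
            sanitized.append(flag)
    return sanitized

assert sanitize_cflags_sketch(['-O2', '-Xclang', '-foo', '-bar']) == ['-O2', '-bar']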
--- a/python/mozbuild/mozbuild/compilation/warnings.py
+++ b/python/mozbuild/mozbuild/compilation/warnings.py
@@ -82,32 +82,32 @@ class CompilerWarning(dict):
 
     def _compare(self, other, func):
         if not isinstance(other, CompilerWarning):
             return NotImplemented
 
         return func(self._cmpkey(), other._cmpkey())
 
     def __eq__(self, other):
-        return self._compare(other, lambda s,o: s == o)
+        return self._compare(other, lambda s, o: s == o)
 
     def __neq__(self, other):
-        return self._compare(other, lambda s,o: s != o)
+        return self._compare(other, lambda s, o: s != o)
 
     def __lt__(self, other):
-        return self._compare(other, lambda s,o: s < o)
+        return self._compare(other, lambda s, o: s < o)
 
     def __le__(self, other):
-        return self._compare(other, lambda s,o: s <= o)
+        return self._compare(other, lambda s, o: s <= o)
 
     def __gt__(self, other):
-        return self._compare(other, lambda s,o: s > o)
+        return self._compare(other, lambda s, o: s > o)
 
     def __ge__(self, other):
-        return self._compare(other, lambda s,o: s >= o)
+        return self._compare(other, lambda s, o: s >= o)
 
     def __hash__(self):
         """Define so this can exist inside a set, etc."""
         return hash(tuple(sorted(self.items())))
 
 
 class WarningsDatabase(object):
     """Holds a collection of warnings.
@@ -127,16 +127,17 @@ class WarningsDatabase(object):
     The WarningsDatabase handles this by storing the hash of a file a warning
     occurred in. At warning insert time, if the hash of the file does not match
     what is stored in the database, the existing warnings for that file are
     purged from the database.
 
     Callers should periodically prune old, invalid warnings from the database
     by calling prune(). A good time to do this is at the end of a build.
     """
+
     def __init__(self):
         """Create an empty database."""
         self._files = {}
 
     def __len__(self):
         i = 0
         for value in self._files.values():
             i += len(value['warnings'])
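
# [Editor's sketch, not part of the diff] The hash-based invalidation the
# WarningsDatabase docstring above describes, reduced to its core; the store
# layout ({'hash': ..., 'warnings': [...]}) is assumed, not taken from source.
import hashlib

def insert_warning_sketch(files, path, source_bytes, warning):
    digest = hashlib.sha1(source_bytes).hexdigest()
    entry = files.setdefault(path, {'hash': digest, 'warnings': []})
    if entry['hash'] != digest:     # file changed since its warnings were recorded
        entry['warnings'] = []      # purge the stale warnings for that file
        entry['hash'] = digest
    entry['warnings'].append(warning)

store = {}
insert_warning_sketch(store, 'foo.cpp', b'int x;', {'flag': '-Wunused'})
assert len(store['foo.cpp']['warnings']) == 1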
@@ -299,16 +300,17 @@ class WarningsCollector(object):
 
     Instances of this class receive data (usually the output of compiler
     invocations) and parse it into warnings.
 
     The collector works by incrementally receiving data, usually line-by-line
     output from the compiler. Therefore, it can maintain state to parse
     multi-line warning messages.
     """
+
     def __init__(self, cb, objdir=None):
         """Initialize a new collector.
 
         ``cb`` is a callable that is called with a ``CompilerWarning``
         instance whenever a new warning is parsed.
 
          ``objdir`` is the object directory. Used for normalizing paths.
          """
--- a/python/mozbuild/mozbuild/config_status.py
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -75,24 +75,24 @@ def config_status(topobjdir='.', topsrcd
     The options to this function are passed when creating the
     ConfigEnvironment. These lists, as well as the actual wrapper script
     around this function, are meant to be generated by configure.
     See build/autoconf/config.status.m4.
     '''
 
     if 'CONFIG_FILES' in os.environ:
         raise Exception('Using the CONFIG_FILES environment variable is not '
-            'supported.')
+                        'supported.')
     if 'CONFIG_HEADERS' in os.environ:
         raise Exception('Using the CONFIG_HEADERS environment variable is not '
-            'supported.')
+                        'supported.')
 
     if not os.path.isabs(topsrcdir):
         raise Exception('topsrcdir must be defined as an absolute directory: '
-            '%s' % topsrcdir)
+                        '%s' % topsrcdir)
 
     default_backends = ['RecursiveMake']
     default_backends = (substs or {}).get('BUILD_BACKENDS', ['RecursiveMake'])
 
     parser = ArgumentParser()
     parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                         help='display verbose output')
     parser.add_argument('-n', dest='not_topobjdir', action='store_true',
@@ -107,18 +107,18 @@ def config_status(topobjdir='.', topsrcd
                         help='do everything except writing files out.')
     options = parser.parse_args(args)
 
     # Without -n, the current directory is meant to be the top object directory
     if not options.not_topobjdir:
         topobjdir = os.path.abspath('.')
 
     env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
-            non_global_defines=non_global_defines, substs=substs,
-            source=source, mozconfig=mozconfig)
+                            non_global_defines=non_global_defines, substs=substs,
+                            source=source, mozconfig=mozconfig)
 
     with FileAvoidWrite(os.path.join(topobjdir, 'mozinfo.json')) as f:
         write_mozinfo(f, env, os.environ)
 
     cpu_start = time.clock()
     time_start = time.time()
 
     # Make appropriate backend instances, defaulting to RecursiveMakeBackend,
--- a/python/mozbuild/mozbuild/configure/__init__.py
+++ b/python/mozbuild/mozbuild/configure/__init__.py
@@ -44,16 +44,17 @@ TRACE = 5
 
 
 class ConfigureError(Exception):
     pass
 
 
 class SandboxDependsFunction(object):
     '''Sandbox-visible representation of @depends functions.'''
+
     def __init__(self, unsandboxed):
         self._or = unsandboxed.__or__
         self._and = unsandboxed.__and__
         self._getattr = unsandboxed.__getattr__
 
     def __call__(self, *arg, **kwargs):
         raise ConfigureError('The `%s` function may not be called'
                              % self.__name__)
@@ -228,16 +229,17 @@ class CombinedDependsFunction(DependsFun
     def __eq__(self, other):
         return (isinstance(other, self.__class__) and
                 self._func is other._func and
                 set(self.dependencies) == set(other.dependencies))
 
     def __ne__(self, other):
         return not self == other
 
+
 class SandboxedGlobal(dict):
     '''Identifiable dict type for use as function global'''
 
 
 def forbidden_import(*args, **kwargs):
     raise ImportError('Importing modules is forbidden')
 
 
@@ -352,20 +354,22 @@ class ConfigureSandbox(dict):
                 yield
 
         self._logger = logger
 
         # Some callers will manage to log a bytestring with characters in it
         # that can't be converted to ascii. Make our log methods robust to this
         # by detecting the encoding that a producer is likely to have used.
         encoding = getpreferredencoding()
+
         def wrapped_log_method(logger, key):
             method = getattr(logger, key)
             if not encoding:
                 return method
+
             def wrapped(*args, **kwargs):
                 out_args = [
                     arg.decode(encoding) if isinstance(arg, str) else arg
                     for arg in args
                 ]
                 return method(*out_args, **kwargs)
             return wrapped
 
@@ -656,17 +660,17 @@ class ConfigureSandbox(dict):
         passed). In most cases, the result of this function is not expected to
         be used.
         Command line argument/environment variable parsing for this Option is
         handled here.
         '''
         when = self._normalize_when(kwargs.get('when'), 'option')
         args = [self._resolve(arg) for arg in args]
         kwargs = {k: self._resolve(v) for k, v in kwargs.iteritems()
-                                      if k != 'when'}
+                  if k != 'when'}
         option = Option(*args, **kwargs)
         if when:
             self._conditions[option] = when
         if option.name in self._options:
             raise ConfigureError('Option `%s` already defined' % option.option)
         if option.env in self._options:
             raise ConfigureError('Option `%s` already defined' % option.env)
         if option.name:
--- a/python/mozbuild/mozbuild/configure/check_debug_ranges.py
+++ b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
@@ -7,16 +7,17 @@
 # versions of GNU ld.
 
 from __future__ import absolute_import
 
 import subprocess
 import sys
 import re
 
+
 def get_range_for(compilation_unit, debug_info):
     '''Returns the range offset for a given compilation unit
        in a given debug_info.'''
     name = ranges = ''
     search_cu = False
     for nfo in debug_info.splitlines():
         if 'DW_TAG_compile_unit' in nfo:
             search_cu = True
@@ -27,28 +28,30 @@ def get_range_for(compilation_unit, debu
             search_cu = False
         if search_cu:
             if 'DW_AT_name' in nfo:
                 name = nfo.rsplit(None, 1)[1]
             elif 'DW_AT_ranges' in nfo:
                 ranges = nfo.rsplit(None, 1)[1]
     return None
 
+
 def get_range_length(range, debug_ranges):
     '''Returns the number of items in the range starting at the
        given offset.'''
     length = 0
     for line in debug_ranges.splitlines():
         m = re.match('\s*([0-9a-fA-F]+)\s+([0-9a-fA-F]+)\s+([0-9a-fA-F]+)', line)
         if m and int(m.group(1), 16) == range:
             length += 1
     return length
 
+
 def main(bin, compilation_unit):
-    p = subprocess.Popen(['objdump', '-W', bin], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
+    p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     (out, err) = p.communicate()
     sections = re.split('\n(Contents of the|The section) ', out)
     debug_info = [s for s in sections if s.startswith('.debug_info')]
     debug_ranges = [s for s in sections if s.startswith('.debug_ranges')]
     if not debug_ranges or not debug_info:
         return 0
 
     range = get_range_for(compilation_unit, debug_info[0])
--- a/python/mozbuild/mozbuild/configure/libstdcxx.py
+++ b/python/mozbuild/mozbuild/configure/libstdcxx.py
@@ -16,55 +16,61 @@
 from __future__ import absolute_import
 
 import os
 import subprocess
 import re
 
 re_for_ld = re.compile('.*\((.*)\).*')
 
+
 def parse_readelf_line(x):
     """Return the version from a readelf line that looks like:
     0x00ec: Rev: 1  Flags: none  Index: 8  Cnt: 2  Name: GLIBCXX_3.4.6
     """
     return x.split(':')[-1].split('_')[-1].strip()
 
+
 def parse_ld_line(x):
     """Parse a line from the output of ld -t. The output of gold is just
     the full path, GNU ld prints "-lstdc++ (path)".
     """
     t = re_for_ld.match(x)
     if t:
         return t.groups()[0].strip()
     return x.strip()
 
+
 def split_ver(v):
     """Covert the string '1.2.3' into the list [1,2,3]
     """
     return [int(x) for x in v.split('.')]
 
+
 def cmp_ver(a, b):
     """Compare versions in the form 'a.b.c'
     """
     for (i, j) in zip(split_ver(a), split_ver(b)):
         if i != j:
             return i - j
     return 0
 
+
 def encode_ver(v):
     """Encode the version as a single number.
     """
     t = split_ver(v)
     return t[0] << 16 | t[1] << 8 | t[2]
 
+
 def find_version(args):
     """Given a base command line for a compiler, find the version of the
     libstdc++ it uses.
     """
-    args +=  ['-shared', '-Wl,-t']
+    args += ['-shared', '-Wl,-t']
     p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
     candidates = [x for x in p.stdout if 'libstdc++.so' in x]
     candidates = [x for x in candidates if 'skipping incompatible' not in x]
     if not candidates:
         raise Exception('''Couldn't find libstdc++ candidates!
 command line: %s''' % args)
     if len(candidates) != 1:
         raise Exception('''Too many libstdc++ candidates!
@@ -72,19 +78,20 @@ command line: %s
 candidates:
 %s''' % (args, '\n'.join(candidates)))
 
     libstdcxx = parse_ld_line(candidates[-1])
 
     p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)
     versions = [parse_readelf_line(x)
                 for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]
-    last_version = sorted(versions, cmp = cmp_ver)[-1]
+    last_version = sorted(versions, cmp=cmp_ver)[-1]
     return (last_version, encode_ver(last_version))
 
+
 if __name__ == '__main__':
     """Given the value of environment variable CXX or HOST_CXX, find the
     version of the libstdc++ it uses.
     """
     cxx_env = os.environ['CXX']
     print('MOZ_LIBSTDCXX_TARGET_VERSION=%s' % find_version(cxx_env.split())[1])
     host_cxx_env = os.environ.get('HOST_CXX', cxx_env)
     print('MOZ_LIBSTDCXX_HOST_VERSION=%s' % find_version(host_cxx_env.split())[1])
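
# [Editor's sketch, not part of the diff] Exercising the version helpers
# defined above; the _sketch functions restate them so this runs standalone,
# and the values are examples only.
def split_ver_sketch(v):
    return [int(x) for x in v.split('.')]

def encode_ver_sketch(v):
    t = split_ver_sketch(v)
    return t[0] << 16 | t[1] << 8 | t[2]

assert split_ver_sketch('4.2.1') == [4, 2, 1]
assert encode_ver_sketch('4.2.1') == 0x040201  # one number per version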
--- a/python/mozbuild/mozbuild/configure/lint.py
+++ b/python/mozbuild/mozbuild/configure/lint.py
@@ -232,17 +232,16 @@ class LintSandbox(ConfigureSandbox):
                     frame = frame.f_back
                 e = ConfigureError('{} should be used instead of '
                                    '{} with default={}'.format(
                                        name.replace('--{}-'.format(prefix),
                                                     '--{}-'.format(replacement)),
                                        name, default))
                 self._raise_from(e, frame.f_back if frame else None)
 
-
     def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
         default = kwargs['default']
 
         if not isinstance(default, SandboxDependsFunction):
             return
 
         if not option.prefix:
             return
@@ -280,16 +279,17 @@ class LintSandbox(ConfigureSandbox):
     def wraps(self, func):
         def do_wraps(wrapper):
             self._wrapped[wrapper] = func
             return wraps(func)(wrapper)
         return do_wraps
 
     def imports_impl(self, _import, _from=None, _as=None):
         wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)
+
         def decorator(func):
             self._has_imports.add(func)
             return wrapper(func)
         return decorator
 
     def _prepare_function(self, func, update_globals=None):
         wrapped = super(LintSandbox, self)._prepare_function(func, update_globals)
         _, glob = self.unwrap(wrapped)
--- a/python/mozbuild/mozbuild/configure/lint_util.py
+++ b/python/mozbuild/mozbuild/configure/lint_util.py
@@ -41,17 +41,17 @@ def disassemble_as_iter(co):
     next_byte_line = lnotab.next()
     while i < n:
         while next_byte_line and i >= next_byte_line[0]:
             line = next_byte_line[1]
             next_byte_line = lnotab.next()
         c = code[i]
         op = ord(c)
         opname = dis.opname[op]
-        i += 1;
+        i += 1
         if op >= dis.HAVE_ARGUMENT:
             arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
             extended_arg = 0
             i += 2
             if op == dis.EXTENDED_ARG:
                 extended_arg = arg * 65536
                 continue
             if op in dis.hasconst:
--- a/python/mozbuild/mozbuild/configure/options.py
+++ b/python/mozbuild/mozbuild/configure/options.py
@@ -101,16 +101,17 @@ class OptionValue(tuple):
                             % type(value).__name__)
 
 
 class PositiveOptionValue(OptionValue):
     '''Represents the value for a positive option (--enable/--with/--foo)
     in the form of a tuple for when values are given to the option (in the form
     --option=value[,value2...]).
     '''
+
     def __nonzero__(self):
         return True
 
 
 class NegativeOptionValue(OptionValue):
     '''Represents the value for a negative option (--disable/--without)
 
     This is effectively an empty tuple with an `origin` attribute.
@@ -419,16 +420,17 @@ class CommandLineHelper(object):
     If multiple variants are given, the command line is preferred over the
     environment, and if different values are given on the command line, the
     last one wins. (This mimics the behavior of autoconf, avoiding breaking
     existing mozconfigs using valid options in weird ways)
 
     Extra options can be added afterwards through API calls. For those,
     conflicting values will raise an exception.
     '''
+
     def __init__(self, environ=os.environ, argv=sys.argv):
         self._environ = dict(environ)
         self._args = OrderedDict()
         self._extra_args = OrderedDict()
         self._origins = {}
         self._last = 0
 
         assert(argv and not argv[0].startswith('--'))
--- a/python/mozbuild/mozbuild/configure/util.py
+++ b/python/mozbuild/mozbuild/configure/util.py
@@ -9,49 +9,52 @@ import itertools
 import locale
 import logging
 import os
 import sys
 from collections import deque
 from contextlib import contextmanager
 from distutils.version import LooseVersion
 
+
 def getpreferredencoding():
     # locale._parse_localename makes locale.getpreferredencoding
     # return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
     # 'ANSI_X3.4-1968' when it uses nl_langinfo.
     encoding = None
     try:
         encoding = locale.getpreferredencoding()
     except ValueError:
         # On English OSX, LC_ALL is UTF-8 (not en-US.UTF-8), and
         # that throws off locale._parse_localename, which ends up
         # being used on e.g. homebrew python.
         if os.environ.get('LC_ALL', '').upper() == 'UTF-8':
             encoding = 'utf-8'
     return encoding
 
+
 class Version(LooseVersion):
     '''A simple subclass of distutils.version.LooseVersion.
     Adds attributes for `major`, `minor`, `patch` for the first three
     version components so users can easily pull out major/minor
     versions, like:
 
     v = Version('1.2b')
     v.major == 1
     v.minor == 2
     v.patch == 0
     '''
+
     def __init__(self, version):
         # Can't use super, LooseVersion's base class is not a new-style class.
         LooseVersion.__init__(self, version)
         # Take the first three integer components, stopping at the first
         # non-integer and padding the rest with zeroes.
         (self.major, self.minor, self.patch) = list(itertools.chain(
-            itertools.takewhile(lambda x:isinstance(x, int), self.version),
+            itertools.takewhile(lambda x: isinstance(x, int), self.version),
             (0, 0, 0)))[:3]
 
     def __cmp__(self, other):
         # LooseVersion checks isinstance(StringType), so work around it.
         if isinstance(other, unicode):
             other = other.encode('ascii')
         return LooseVersion.__cmp__(self, other)
 
@@ -66,16 +69,17 @@ class ConfigureOutputHandler(logging.Han
 
     Only messages at log level INFO and above are logged.
 
     Messages below that level can be kept until an ERROR message is received,
     at which point the last `maxlen` accumulated messages below INFO are
     printed out. This feature is only enabled under the `queue_debug` context
     manager.
     '''
+
     def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
         super(ConfigureOutputHandler, self).__init__()
 
         # Python has this feature where it sets the encoding of pipes to
         # ascii, which blatantly fails when trying to print out non-ascii.
         def fix_encoding(fh):
             try:
                 isatty = fh.isatty()
@@ -188,16 +192,17 @@ class ConfigureOutputHandler(logging.Han
                 break
         self._keep_if_debug = self.KEEP
 
 
 class LineIO(object):
     '''File-like class that sends each line of the written data to a callback
     (without carriage returns).
     '''
+
     def __init__(self, callback, errors='strict'):
         self._callback = callback
         self._buf = ''
         self._encoding = getpreferredencoding()
         self._errors = errors
 
     def write(self, buf):
         if self._encoding and isinstance(buf, str):
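
# [Editor's sketch, not part of the diff] The queue_debug behaviour the
# ConfigureOutputHandler docstring above describes, in miniature; the class
# name and threshold handling here are simplified, not taken from the source.
import logging
from collections import deque

class QueueDebugSketch(object):
    def __init__(self, maxlen=20):
        self._queue = deque(maxlen=maxlen)   # keeps only the last maxlen sub-INFO records

    def emit(self, level, msg):
        if level >= logging.INFO:
            print(msg)
            if level >= logging.ERROR:       # replay the buffered debug context
                for queued in self._queue:
                    print(queued)
                self._queue.clear()
        else:
            self._queue.append(msg)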
--- a/python/mozbuild/mozbuild/controller/building.py
+++ b/python/mozbuild/mozbuild/controller/building.py
@@ -74,17 +74,17 @@ slower.
 Consider adding ".noindex" to the end of your object directory name to have
 Finder ignore it. Or, add an indexing exclusion through the Spotlight System
 Preferences.
 ===================
 '''.strip()
 
 
 INSTALL_TESTS_CLOBBER = ''.join([TextWrapper().fill(line) + '\n' for line in
-'''
+                                 '''
 The build system was unable to install tests because the CLOBBER file has \
 been updated. This means if you edited any test files, your changes may not \
 be picked up until a full/clobber build is performed.
 
 The easiest and fastest way to perform a clobber build is to run:
 
  $ mach clobber
  $ mach build
@@ -102,17 +102,17 @@ required to succeed, but we weren't expe
 
 Please consider filing a bug for this failure if you have reason to believe
 this is a clobber bug and not due to local changes.
 ===================
 '''.strip()
 
 
 BuildOutputResult = namedtuple('BuildOutputResult',
-    ('warning', 'state_changed', 'message'))
+                               ('warning', 'state_changed', 'message'))
 
 
 class TierStatus(object):
     """Represents the state and progress of tier traversal.
 
     The build system is organized into linear phases called tiers. Each tier
     executes in the order it was defined, one at a time.
     """
@@ -168,19 +168,19 @@ class TierStatus(object):
 
             o.append(t_entry)
 
         return o
 
     def add_resources_to_dict(self, entry, start=None, end=None, phase=None):
         """Helper function to append resource information to a dict."""
         cpu_percent = self.resources.aggregate_cpu_percent(start=start,
-            end=end, phase=phase, per_cpu=False)
+                                                           end=end, phase=phase, per_cpu=False)
         cpu_times = self.resources.aggregate_cpu_times(start=start, end=end,
-            phase=phase, per_cpu=False)
+                                                       phase=phase, per_cpu=False)
         io = self.resources.aggregate_io(start=start, end=end, phase=phase)
 
         if cpu_percent is None:
             return entry
 
         entry['cpu_percent'] = cpu_percent
         entry['cpu_times'] = list(cpu_times)
         entry['io'] = list(io)
@@ -334,18 +334,18 @@ class BuildMonitor(MozbuildObject):
             if not usage:
                 return
 
             self.log_resource_usage(usage)
             with open(self._get_state_filename('build_resources.json'), 'w') as fh:
                 json.dump(self.resources.as_dict(), fh, indent=2)
         except Exception as e:
             self.log(logging.WARNING, 'build_resources_error',
-                {'msg': str(e)},
-                'Exception when writing resource usage file: {msg}')
+                     {'msg': str(e)},
+                     'Exception when writing resource usage file: {msg}')
 
     def _get_finder_cpu_usage(self):
         """Obtain the CPU usage of the Finder app on OS X.
 
         This is used to detect high CPU usage.
         """
         if not sys.platform.startswith('darwin'):
             return None
@@ -433,19 +433,19 @@ class BuildMonitor(MozbuildObject):
         subsequent analysis.
 
         If no resource usage is available, None is returned.
         """
         if not self.have_resource_usage:
             return None
 
         cpu_percent = self.resources.aggregate_cpu_percent(phase=None,
-            per_cpu=False)
+                                                           per_cpu=False)
         cpu_times = self.resources.aggregate_cpu_times(phase=None,
-            per_cpu=False)
+                                                       per_cpu=False)
         io = self.resources.aggregate_io(phase=None)
 
         o = dict(
             version=3,
             argv=sys.argv,
             start=self.start_time,
             end=self.end_time,
             duration=self.end_time - self.start_time,
@@ -457,33 +457,32 @@ class BuildMonitor(MozbuildObject):
         )
 
         o['tiers'] = self.tiers.tiered_resource_usage()
 
         self.tiers.add_resource_fields_to_dict(o)
 
         for usage in self.resources.range_usage():
             cpu_percent = self.resources.aggregate_cpu_percent(usage.start,
-                usage.end, per_cpu=False)
+                                                               usage.end, per_cpu=False)
             cpu_times = self.resources.aggregate_cpu_times(usage.start,
-                usage.end, per_cpu=False)
+                                                           usage.end, per_cpu=False)
 
             entry = dict(
                 start=usage.start,
                 end=usage.end,
                 virt=list(usage.virt),
                 swap=list(usage.swap),
             )
 
             self.tiers.add_resources_to_dict(entry, start=usage.start,
-                    end=usage.end)
+                                             end=usage.end)
 
             o['resources'].append(entry)
 
-
         # If the imports for this file ran before the in-tree virtualenv
         # was bootstrapped (for instance, for a clobber build in automation),
         # psutil might not be available.
         #
         # Treat psutil as optional to avoid an outright failure to log resources
         # TODO: it would be nice to collect data on the storage device as well
         # in this case.
         o['system'] = {}
@@ -519,18 +518,18 @@ class BuildMonitor(MozbuildObject):
 
         self.log(logging.WARNING, 'resource_usage', params, message)
 
         excessive, sin, sout = self.have_excessive_swapping()
         if excessive is not None and (sin or sout):
             sin /= 1048576
             sout /= 1048576
             self.log(logging.WARNING, 'swap_activity',
-                {'sin': sin, 'sout': sout},
-                'Swap in/out (MB): {sin}/{sout}')
+                     {'sin': sin, 'sout': sout},
+                     'Swap in/out (MB): {sin}/{sout}')
 
     def ccache_stats(self):
         ccache_stats = None
 
         try:
             ccache = which.which('ccache')
             output = subprocess.check_output([ccache, '-s'])
             ccache_stats = CCacheStats(output)
@@ -543,16 +542,17 @@ class BuildMonitor(MozbuildObject):
 
 
 class TerminalLoggingHandler(logging.Handler):
     """Custom logging handler that works with terminal window dressing.
 
     This class should probably live elsewhere, like the mach core. Consider
     this a proving ground for its usefulness.
     """
+
     def __init__(self):
         logging.Handler.__init__(self)
 
         self.fh = sys.stdout
         self.footer = None
 
     def flush(self):
         self.acquire()
@@ -678,17 +678,16 @@ class BuildOutputManager(OutputManager):
     def __exit__(self, exc_type, exc_value, traceback):
         OutputManager.__exit__(self, exc_type, exc_value, traceback)
 
         # Ensure the resource monitor is stopped because leaving it running
         # could result in the process hanging on exit because the resource
         # collection child process hasn't been told to stop.
         self.monitor.stop_resource_recording()
 
-
     def on_line(self, line):
         warning, state_changed, message = self.monitor.on_line(line)
 
         if message:
             self.log(logging.INFO, 'build_output', {'line': message}, '{line}')
         elif state_changed:
             have_handler = hasattr(self, 'handler')
             if have_handler:
@@ -739,17 +738,17 @@ class StaticAnalysisOutputManager(Output
 
     def on_line(self, line):
         warning, relevant = self.monitor.on_line(line)
         if relevant:
             self.raw += line + '\n'
 
         if warning:
             self.log(logging.INFO, 'compiler_warning', warning,
-                'Warning: {flag} in {filename}: {message}')
+                     'Warning: {flag} in {filename}: {message}')
 
         if relevant:
             self.log(logging.INFO, 'build_output', {'line': line}, '{line}')
         else:
             have_handler = hasattr(self, 'handler')
             if have_handler:
                 self.handler.acquire()
             try:
@@ -781,17 +780,17 @@ class CCacheStats(object):
     Instances can be subtracted from each other to obtain differences.
     print() or str() the object to show a ``ccache -s`` like output
     of the captured stats.
 
     """
     STATS_KEYS = [
         # (key, description)
         # Refer to stats.c in ccache project for all the descriptions.
-        ('stats_zeroed', 'stats zero time'), # Old name prior to ccache 3.4
+        ('stats_zeroed', 'stats zero time'),  # Old name prior to ccache 3.4
         ('stats_zeroed', 'stats zeroed'),
         ('stats_updated', 'stats updated'),
         ('cache_hit_direct', 'cache hit (direct)'),
         ('cache_hit_preprocessed', 'cache hit (preprocessed)'),
         ('cache_hit_rate', 'cache hit rate'),
         ('cache_miss', 'cache miss'),
         ('link', 'called for link'),
         ('preprocessing', 'called for preprocessing'),
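
# [Editor's sketch, not part of the diff] Matching one line of `ccache -s`
# output against the (key, description) table above; the sample line and the
# whitespace handling are illustrative only.
stats_keys = [('cache_hit_direct', 'cache hit (direct)')]
stats_line = 'cache hit (direct)                  1234'
for key, description in stats_keys:
    if stats_line.startswith(description):
        value = int(stats_line[len(description):].strip())
        assert (key, value) == ('cache_hit_direct', 1234)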
@@ -1000,21 +999,21 @@ class BuildDriver(MozbuildObject):
         # down builds.
         mkdir(self.topobjdir, not_indexed=True)
 
         with BuildOutputManager(self.log_manager, monitor, footer) as output:
             monitor.start()
 
             if directory is not None and not what:
                 print('Can only use -C/--directory with an explicit target '
-                    'name.')
+                      'name.')
                 return 1
 
             if directory is not None:
-                disable_extra_make_dependencies=True
+                disable_extra_make_dependencies = True
                 directory = mozpath.normsep(directory)
                 if directory.startswith('/'):
                     directory = directory[1:]
 
             monitor.start_resource_recording()
 
             self.mach_context.command_attrs['clobber'] = False
             config = None
@@ -1099,17 +1098,17 @@ class BuildDriver(MozbuildObject):
                     path_arg = self._wrap_path_argument(target)
 
                     if directory is not None:
                         make_dir = os.path.join(self.topobjdir, directory)
                         make_target = target
                     else:
                         make_dir, make_target = \
                             resolve_target_to_make(self.topobjdir,
-                                path_arg.relpath())
+                                                   path_arg.relpath())
 
                     if make_dir is None and make_target is None:
                         return 1
 
                     # See bug 886162 - we don't want to "accidentally" build
                     # the entire tree (if that's really the intent, it's
                     # unlikely they would have specified a directory.)
                     if not make_dir and not make_target:
@@ -1141,20 +1140,21 @@ class BuildDriver(MozbuildObject):
                 # Build target pairs.
                 for make_dir, make_target in target_pairs:
                     # We don't display build status messages during partial
                     # tree builds because they aren't reliable there. This
                     # could potentially be fixed if the build monitor were more
                     # intelligent about encountering undefined state.
                     no_build_status = b'1' if make_dir is not None else b''
                     status = self._run_make(directory=make_dir, target=make_target,
-                        line_handler=output.on_line, log=False, print_directory=False,
-                        ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
-                        append_env={b'NO_BUILDSTATUS_MESSAGES': no_build_status},
-                        keep_going=keep_going)
+                                            line_handler=output.on_line, log=False, print_directory=False,
+                                            ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
+                                            append_env={
+                                                b'NO_BUILDSTATUS_MESSAGES': no_build_status},
+                                            keep_going=keep_going)
 
                     if status != 0:
                         break
 
             elif status is None:
                 # If the backend doesn't specify a build() method, then just
                 # call client.mk directly.
                 status = self._run_client_mk(line_handler=output.on_line,
@@ -1199,18 +1199,18 @@ class BuildDriver(MozbuildObject):
         # whatever code we warned about.
         if not status:
             # Suppress warnings for 3rd party projects in local builds
             # until we suppress them for real.
             # TODO remove entries/feature once we stop generating warnings
             # in these directories.
             pathToThirdparty = os.path.join(self.topsrcdir,
                                             "tools",
-                                           "rewriting",
-                                           "ThirdPartyPaths.txt")
+                                            "rewriting",
+                                            "ThirdPartyPaths.txt")
 
             if os.path.exists(pathToThirdparty):
                 with open(pathToThirdparty) as f:
                     # Normalize the path (no trailing /)
                     LOCAL_SUPPRESS_DIRS = tuple(d.rstrip('/') for d in f.read().splitlines())
             else:
                # For applications based on Gecko, like Thunderbird
                 LOCAL_SUPPRESS_DIRS = ()
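
A minimal sketch of how a tuple like LOCAL_SUPPRESS_DIRS can gate warnings by path prefix; the actual consumer is outside this excerpt, and the helper name and values below are illustrative:

    def is_suppressed(warning_path, suppress_dirs):
        # str.startswith accepts a tuple of prefixes.
        return warning_path.startswith(suppress_dirs)

    LOCAL_SUPPRESS_DIRS = ('third_party/rust', 'gfx/skia')  # illustrative
    is_suppressed('gfx/skia/src/core/SkPath.cpp', LOCAL_SUPPRESS_DIRS)  # True
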
@@ -1279,32 +1279,33 @@ class BuildDriver(MozbuildObject):
             #    print(EXCESSIVE_SWAP_MESSAGE)
 
             print('To view resource usage of the build, run |mach '
                   'resource-usage|.')
 
         long_build = monitor.elapsed > 600
 
         if long_build:
-            output.on_line('We know it took a while, but your build finally finished successfully!')
+            output.on_line(
+                'We know it took a while, but your build finally finished successfully!')
         else:
             output.on_line('Your build was successful!')
 
         # Only for full builds because incremental builders likely don't
         # need to be burdened with this.
         if not what:
             try:
                 # Fennec doesn't have useful output from just building. We should
                 # arguably make the build action useful for Fennec. Another day...
                 if self.substs['MOZ_BUILD_APP'] != 'mobile/android':
                     print('To take your build for a test drive, run: |mach run|')
                 app = self.substs['MOZ_BUILD_APP']
                 if app in ('browser', 'mobile/android'):
                     print('For more information on what to do now, see '
-                        'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
+                          'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
             except Exception:
                 # Ignore Exceptions in case we can't find config.status (such
                 # as when doing OSX Universal builds)
                 pass
 
         return status
 
     def configure(self, options=None, buildstatus_messages=False,
@@ -1320,24 +1321,24 @@ class BuildDriver(MozbuildObject):
         line_handler = line_handler or on_line
 
         options = ' '.join(shell_quote(o) for o in options or ())
         append_env = {b'CONFIGURE_ARGS': options.encode('utf-8')}
 
         # Only print build status messages when we have an active
         # monitor.
         if not buildstatus_messages:
-            append_env[b'NO_BUILDSTATUS_MESSAGES'] =  b'1'
+            append_env[b'NO_BUILDSTATUS_MESSAGES'] = b'1'
         status = self._run_client_mk(target='configure',
                                      line_handler=line_handler,
                                      append_env=append_env)
 
         if not status:
             print('Configure complete!')
-            print('Be sure to run |mach build| to pick up any changes');
+            print('Be sure to run |mach build| to pick up any changes')
 
         return status
 
     def install_tests(self, test_objs):
         """Install test files."""
 
         if self.is_clobber_needed():
             print(INSTALL_TESTS_CLOBBER.format(
--- a/python/mozbuild/mozbuild/controller/clobber.py
+++ b/python/mozbuild/mozbuild/controller/clobber.py
@@ -11,17 +11,17 @@ import os
 import subprocess
 import sys
 
 from mozfile.mozfile import remove as mozfileremove
 from textwrap import TextWrapper
 
 
 CLOBBER_MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
-'''
+                           '''
 The CLOBBER file has been updated, indicating that an incremental build since \
 your last build will probably not work. A full/clobber build is required.
 
 The reason for the clobber is:
 
 {clobber_reason}
 
 Clobbering can be performed automatically. However, we didn't automatically \
@@ -34,16 +34,17 @@ The easiest and fastest way to clobber i
  $ mach clobber
 
 If you know this clobber doesn't apply to you or you're feeling lucky -- \
 Well, are ya? -- you can ignore this clobber requirement by running:
 
  $ touch {clobber_file}
 '''.splitlines()])
 
+
 class Clobberer(object):
     def __init__(self, topsrcdir, topobjdir):
         """Create a new object to manage clobbering the tree.
 
         It is bound to a top source directory and to a specific object
         directory.
         """
         assert os.path.isabs(topsrcdir)
@@ -64,17 +65,17 @@ class Clobberer(object):
         """Returns a bool indicating whether a tree clobber is required."""
 
         # No object directory clobber file means we're good.
         if not os.path.exists(self.obj_clobber):
             return False
 
         # Object directory clobber older than current is fine.
         if os.path.getmtime(self.src_clobber) <= \
-            os.path.getmtime(self.obj_clobber):
+                os.path.getmtime(self.obj_clobber):
 
             return False
 
         return True
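
The two early returns above encode the entire staleness rule: a clobber is required only when the objdir CLOBBER file exists and is older than the srcdir one. The same check as a standalone sketch, with hypothetical paths:

    import os

    def clobber_required(src_clobber, obj_clobber):
        # No objdir CLOBBER file means there is nothing to invalidate.
        if not os.path.exists(obj_clobber):
            return False
        # Stale only if the srcdir CLOBBER is strictly newer.
        return os.path.getmtime(src_clobber) > os.path.getmtime(obj_clobber)
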
 
     def clobber_cause(self):
         """Obtain the cause why a clobber is required.
 
@@ -177,18 +178,18 @@ class Clobberer(object):
         # current directory is not under the object directory. The latter is
        # because operating systems, filesystems, and shells can throw fits
         # if the current working directory is deleted from under you. While it
         # can work in some scenarios, we take the conservative approach and
         # never try.
         if not allow_auto:
             return True, False, \
                self._message('Automatic clobbering is not enabled\n'
-                              '  (add "mk_add_options AUTOCLOBBER=1" to your '
-                              'mozconfig).')
+                             '  (add "mk_add_options AUTOCLOBBER=1" to your '
+                             'mozconfig).')
 
         if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
             return True, False, self._message(
                 'Cannot clobber while the shell is inside the object directory.')
 
         objdir = self.topobjdir.encode('utf-8', 'replace')
         print('Automatically clobbering %s' % objdir, file=fh)
         try:
@@ -199,9 +200,9 @@ class Clobberer(object):
         except (IOError) as error:
             return True, False, self._message(
                 'Error when automatically clobbering: ' + str(error))
 
     def _message(self, reason):
         lines = [' ' + line for line in self.clobber_cause()]
 
         return CLOBBER_MESSAGE.format(clobber_reason='\n'.join(lines),
-            no_reason='  ' + reason, clobber_file=self.obj_clobber)
+                                      no_reason='  ' + reason, clobber_file=self.obj_clobber)
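
CLOBBER_MESSAGE is an ordinary str.format template; _message above supplies clobber_reason, no_reason, and clobber_file (str.format silently ignores keyword arguments the template does not use). A hedged usage sketch with illustrative values:

    reason = ['  Bug 123456 changed the build config.']  # illustrative
    print(CLOBBER_MESSAGE.format(
        clobber_reason='\n'.join(reason),
        no_reason='  Automatic clobbering is not enabled',
        clobber_file='/obj/CLOBBER'))
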
--- a/python/mozbuild/mozbuild/doctor.py
+++ b/python/mozbuild/mozbuild/doctor.py
@@ -29,16 +29,17 @@ LATEST_MOZILLABUILD_VERSION = '1.11.0'
 DISABLE_LASTACCESS_WIN = '''
 Disable the last access time feature?
 This improves the speed of file and
 directory access by deferring Last Access Time modification on disk by up to an
 hour. Backup programs that rely on this feature may be affected.
 https://technet.microsoft.com/en-us/library/cc785435.aspx
 '''
 
+
 class Doctor(object):
     def __init__(self, srcdir, objdir, fix):
         self.srcdir = mozpath.normpath(srcdir)
         self.objdir = mozpath.normpath(objdir)
         self.srcdir_mount = self.getmount(self.srcdir)
         self.objdir_mount = self.getmount(self.objdir)
         self.path_mounts = [
             ('srcdir', self.srcdir, self.srcdir_mount),
@@ -64,17 +65,17 @@ class Doctor(object):
             if result.get('status') != 'GOOD':
                 good = False
             if result.get('fixable', False):
                 fixable = True
             if result.get('denied', False):
                 denied = True
         if denied:
             print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
-        elif False: # elif fixable:
+        elif False:  # elif fixable:
             print('run "mach doctor --fix" as admin to attempt fixing your system')
         return int(not good)
 
     def getmount(self, path):
         while path != '/' and not os.path.ismount(path):
             path = mozpath.abspath(mozpath.join(path, os.pardir))
         return path
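
getmount walks up the directory tree until os.path.ismount succeeds, so the srcdir and objdir can be compared by the filesystem they live on. A hedged usage sketch with illustrative paths:

    doctor = Doctor('/home/user/mozilla-central', '/home/user/obj-ff', fix=False)
    # Each mount is the nearest ancestor that is a mount point.
    print(doctor.srcdir_mount, doctor.objdir_mount)
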
 
@@ -197,17 +198,17 @@ class Doctor(object):
             except subprocess.CalledProcessError:
                 disablelastaccess = -1
                 status = 'UNSURE'
                 desc = 'unable to check lastaccess behavior'
             if disablelastaccess == 1:
                 status = 'GOOD'
                 desc = 'lastaccess disabled systemwide'
             elif disablelastaccess == 0:
-                if False: # if self.fix:
+                if False:  # if self.fix:
                     choice = self.prompt_bool(DISABLE_LASTACCESS_WIN)
                     if not choice:
                         return {'status': 'BAD, NOT FIXED',
                                 'desc': 'lastaccess enabled systemwide'}
                     try:
                         command = 'fsutil behavior set disablelastaccess 1'.split(' ')
                         fsutil_output = subprocess.check_output(command)
                         status = 'GOOD, FIXED'
--- a/python/mozbuild/mozbuild/dotproperties.py
+++ b/python/mozbuild/mozbuild/dotproperties.py
@@ -11,16 +11,17 @@ import codecs
 import re
 import sys
 
 if sys.version_info[0] == 3:
     str_type = str
 else:
     str_type = basestring
 
+
 class DotProperties:
     r'''A thin representation of a key=value .properties file.'''
 
     def __init__(self, file=None):
         self._properties = {}
         if file:
             self.update(file)
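
The .properties format DotProperties wraps is one key=value pair per line; a simplified standalone sketch of that format (not the class's internals, which are elided here, and ignoring comments and line continuations):

    def parse_properties(text):
        props = {}
        for line in text.splitlines():
            line = line.strip()
            if line and not line.startswith('#') and '=' in line:
                key, _, value = line.partition('=')
                props[key.strip()] = value.strip()
        return props

    parse_properties('browser.search.order.1=Google')
    # -> {'browser.search.order.1': 'Google'}
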
 
--- a/python/mozbuild/mozbuild/faster_daemon.py
+++ b/python/mozbuild/mozbuild/faster_daemon.py
@@ -103,18 +103,18 @@ class Daemon(object):
                 'allof',
                 ['type', 'f'],
                 ['not',
                  ['anyof',
                   ['dirname', '.hg'],
                   ['name', '.hg', 'wholename'],
                   ['dirname', '.git'],
                   ['name', '.git', 'wholename'],
+                  ],
                  ],
-                ],
             ],
             'fields': ['name'],
         }
         watch = self.client.query('watch-project', dir_to_watch)
         if 'warning' in watch:
             print('WARNING: ', watch['warning'], file=sys.stderr)
 
         root = watch['watch']
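
The re-indentation in this hunk is about which expression list each closing bracket belongs to. In watchman's expression-term syntax, allof/anyof/not combine [term, args...] lists; spelled out standalone, the query matches regular files that are not Mercurial or Git metadata:

    dir_to_watch = {
        'expression': [
            'allof',
            ['type', 'f'],            # regular files only...
            ['not',                   # ...excluding VCS metadata:
             ['anyof',
              ['dirname', '.hg'],
              ['name', '.hg', 'wholename'],
              ['dirname', '.git'],
              ['name', '.git', 'wholename'],
              ],
             ],
        ],
        'fields': ['name'],
    }
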
--- a/python/mozbuild/mozbuild/frontend/context.py
+++ b/python/mozbuild/mozbuild/frontend/context.py
@@ -84,16 +84,17 @@ class Context(KeyedDefaultDict):
     this context instance. Keys in this dict are the strings representing keys
     in this context which are valid. Values are tuples of stored type,
     assigned type, default value, a docstring describing the purpose of the
     variable, and a tier indicator (see comment above the VARIABLES declaration
     in this module).
 
     config is the ConfigEnvironment for this context.
     """
+
     def __init__(self, allowed_variables={}, config=None, finder=None):
         self._allowed_variables = allowed_variables
         self.main_path = None
         self.current_path = None
         # There aren't going to be enough paths for the performance of scanning
         # a list to be a problem.
         self._all_paths = []
         self.config = config
@@ -158,17 +159,17 @@ class Context(KeyedDefaultDict):
 
     @memoized_property
     def objdir(self):
         return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip('/')
 
     @memoize
     def _srcdir(self, path):
         return mozpath.join(self.config.topsrcdir,
-            self._relsrcdir(path)).rstrip('/')
+                            self._relsrcdir(path)).rstrip('/')
 
     @property
     def srcdir(self):
         return self._srcdir(self.current_path or self.main_path)
 
     @memoize
     def _relsrcdir(self, path):
         return mozpath.relpath(mozpath.dirname(path), self.config.topsrcdir)
@@ -177,17 +178,17 @@ class Context(KeyedDefaultDict):
     def relsrcdir(self):
         assert self.main_path
         return self._relsrcdir(self.current_path or self.main_path)
 
     @memoized_property
     def relobjdir(self):
         assert self.main_path
         return mozpath.relpath(mozpath.dirname(self.main_path),
-            self.config.topsrcdir)
+                               self.config.topsrcdir)
 
     def _factory(self, key):
         """Function called when requesting a missing key."""
         defaults = self._allowed_variables.get(key)
         if not defaults:
             raise KeyError('global_ns', 'get_unknown', key)
 
         # If the default is specifically a lambda (or, rather, any function
@@ -264,16 +265,17 @@ class TemplateContext(Context):
 class SubContext(Context, ContextDerivedValue):
     """A Context derived from another Context.
 
     Sub-contexts are intended to be used as context managers.
 
     Sub-contexts inherit paths and other relevant state from the parent
     context.
     """
+
     def __init__(self, parent):
         assert isinstance(parent, Context)
 
         Context.__init__(self, allowed_variables=self.VARIABLES,
                          config=parent.config)
 
         # Copy state from parent.
         for p in parent.source_stack:
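
Per the docstring, sub-contexts are meant to be used as context managers; a hedged sketch, assuming parent is a fully initialized Context and that SubContext implements __enter__/__exit__ (not shown in this excerpt):

    sub = SubContext(parent)
    with sub:
        # Work done here is attributed to the sub-context, which
        # inherits paths and config from parent.
        pass
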
@@ -565,16 +567,17 @@ class PathMeta(type):
             if value.startswith('!'):
                 cls = ObjDirPath
             elif value.startswith('%'):
                 cls = AbsolutePath
             else:
                 cls = SourcePath
         return super(PathMeta, cls).__call__(context, value)
 
+
 class Path(ContextDerivedValue, unicode):
     """Stores and resolves a source path relative to a given context
 
     This class is used as a backing type for some of the sandbox variables.
     It expresses paths relative to a context. Supported paths are:
       - '/topsrcdir/relative/paths'
       - 'srcdir/relative/paths'
       - '!/topobjdir/relative/paths'
@@ -631,16 +634,17 @@ class Path(ContextDerivedValue, unicode)
 
     @memoized_property
     def target_basename(self):
         return mozpath.basename(self.full_path)
 
 
 class SourcePath(Path):
     """Like Path, but limited to paths in the source directory."""
+
     def __init__(self, context, value):
         if value.startswith('!'):
             raise ValueError('Object directory paths are not allowed')
         if value.startswith('%'):
             raise ValueError('Filesystem absolute paths are not allowed')
         super(SourcePath, self).__init__(context, value)
 
         if value.startswith('/'):
@@ -671,69 +675,74 @@ class SourcePath(Path):
 class RenamedSourcePath(SourcePath):
     """Like SourcePath, but with a different base name when installed.
 
     The constructor takes a tuple of (source, target_basename).
 
     This class is not meant to be exposed to moz.build sandboxes as of now,
     and is not supported by the RecursiveMake backend.
     """
+
     def __init__(self, context, value):
         assert isinstance(value, tuple)
         source, self._target_basename = value
         super(RenamedSourcePath, self).__init__(context, source)
 
     @property
     def target_basename(self):
         return self._target_basename
 
 
 class ObjDirPath(Path):
     """Like Path, but limited to paths in the object directory."""
+
     def __init__(self, context, value=None):
         if not value.startswith('!'):
             raise ValueError('Object directory paths must start with ! prefix')
         super(ObjDirPath, self).__init__(context, value)
 
         if value.startswith('!/'):
-            path = mozpath.join(context.config.topobjdir,value[2:])
+            path = mozpath.join(context.config.topobjdir, value[2:])
         else:
             path = mozpath.join(context.objdir, value[1:])
         self.full_path = mozpath.normpath(path)
 
 
 class AbsolutePath(Path):
     """Like Path, but allows arbitrary paths outside the source and object directories."""
+
     def __init__(self, context, value=None):
         if not value.startswith('%'):
             raise ValueError('Absolute paths must start with % prefix')
         if not os.path.isabs(value[1:]):
             raise ValueError('Path \'%s\' is not absolute' % value[1:])
         super(AbsolutePath, self).__init__(context, value)
 
         self.full_path = mozpath.normpath(value[1:])
 
 
 @memoize
 def ContextDerivedTypedList(klass, base_class=List):
     """Specialized TypedList for use with ContextDerivedValue types.
     """
     assert issubclass(klass, ContextDerivedValue)
+
     class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
         def __init__(self, context, iterable=[], **kwargs):
             self.context = context
             super(_TypedList, self).__init__(iterable, **kwargs)
 
         def normalize(self, e):
             if not isinstance(e, klass):
                 e = klass(self.context, e)
             return e
 
     return _TypedList
 
+
 @memoize
 def ContextDerivedTypedListWithItems(type, base_class=List):
     """Specialized TypedList for use with ContextDerivedValue types.
     """
     class _TypedListWithItems(ContextDerivedTypedList(type, base_class)):
         def __getitem__(self, name):
             name = self.normalize(name)
             return super(_TypedListWithItems, self).__getitem__(name)
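
PathMeta picks the concrete class from the value's prefix at construction time; given a Context instance ctx (hypothetical here), the dispatch works out to:

    Path(ctx, 'foo/bar.cpp')    # srcdir-relative    -> SourcePath
    Path(ctx, '/dom/base.h')    # topsrcdir-relative -> SourcePath
    Path(ctx, '!generated.h')   # objdir-relative    -> ObjDirPath
    Path(ctx, '!/dist/bin/x')   # topobjdir-relative -> ObjDirPath
    Path(ctx, '%/usr/include')  # absolute           -> AbsolutePath
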
@@ -857,60 +866,63 @@ def ContextDerivedTypedHierarchicalStrin
             child = self._children.get(name)
             if not child:
                 child = self._children[name] = _TypedListWithItems(
                     self._context)
             return child
 
     return _TypedListWithItems
 
+
 def OrderedPathListWithAction(action):
     """Returns a class which behaves as a StrictOrderingOnAppendList, but
     invokes the given callable with each input and a context as it is
     read, storing a tuple including the result and the original item.
 
    This is used to extend moz.build reading to make more data available in
    filesystem-reading mode.
     """
     class _OrderedListWithAction(ContextDerivedTypedList(SourcePath,
-                                 StrictOrderingOnAppendListWithAction)):
+                                                         StrictOrderingOnAppendListWithAction)):
         def __init__(self, context, *args):
             def _action(item):
                 return item, action(context, item)
             super(_OrderedListWithAction, self).__init__(context, action=_action, *args)
 
     return _OrderedListWithAction
 
+
 def TypedListWithAction(typ, action):
     """Returns a class which behaves as a TypedList with the provided type, but
    invokes the given callable with each input and a context as it is
     read, storing a tuple including the result and the original item.
 
    This is used to extend moz.build reading to make more data available in
    filesystem-reading mode.
     """
     class _TypedListWithAction(ContextDerivedValue, TypedList(typ), ListWithAction):
         def __init__(self, context, *args):
             def _action(item):
                 return item, action(context, item)
             super(_TypedListWithAction, self).__init__(action=_action, *args)
     return _TypedListWithAction
 
+
 ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
 ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
 
 OrderedSourceList = ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList)
 OrderedTestFlavorList = TypedList(Enum(*all_test_flavors()),
                                   StrictOrderingOnAppendList)
 OrderedStringList = TypedList(unicode, StrictOrderingOnAppendList)
 DependentTestsEntry = ContextDerivedTypedRecord(('files', OrderedSourceList),
                                                 ('tags', OrderedStringList),
                                                 ('flavors', OrderedTestFlavorList))
 BugzillaComponent = TypedNamedTuple('BugzillaComponent',
-                        [('product', unicode), ('component', unicode)])
+                                    [('product', unicode), ('component', unicode)])
 SchedulingComponents = ContextDerivedTypedRecord(
         ('inclusive', TypedList(unicode, StrictOrderingOnAppendList)),
         ('exclusive', TypedList(unicode, StrictOrderingOnAppendList)))
 
 GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory({
     'script': unicode,
     'inputs': list,
     'force': bool,
@@ -960,38 +972,38 @@ class Files(SubContext):
     The difference in behavior between ``*`` and ``**`` is only evident if
     a pattern follows the ``*`` or ``**``. A pattern ending with ``*`` is
     greedy. ``**`` is needed when you need an additional pattern after the
     wildcard. e.g. ``**/foo``.
     """
 
     VARIABLES = {
         'BUG_COMPONENT': (BugzillaComponent, tuple,
-            """The bug component that tracks changes to these files.
+                          """The bug component that tracks changes to these files.
 
             Values are a 2-tuple of unicode describing the Bugzilla product and
             component. e.g. ``('Firefox Build System', 'General')``.
             """),
 
         'FINAL': (bool, bool,
-            """Mark variable assignments as finalized.
+                  """Mark variable assignments as finalized.
 
             During normal processing, values from newer Files contexts
             overwrite previously set values. Last write wins. This behavior is
             not always desired. ``FINAL`` provides a mechanism to prevent
             further updates to a variable.
 
             When ``FINAL`` is set, the value of all variables defined in this
             context are marked as frozen and all subsequent writes to them
             are ignored during metadata reading.
 
             See :ref:`mozbuild_files_metadata_finalizing` for more info.
             """),
         'IMPACTED_TESTS': (DependentTestsEntry, list,
-            """File patterns, tags, and flavors for tests relevant to these files.
+                           """File patterns, tags, and flavors for tests relevant to these files.
 
             Maps source files to the tests potentially impacted by those files.
             Tests can be specified by file pattern, tag, or flavor.
 
             For example:
 
             with Files('runtests.py'):
                IMPACTED_TESTS.files += [
@@ -1029,17 +1041,17 @@ class Files(SubContext):
                 IMPACTED_TESTS.flavors += [
                     'mochitest',
                 ]
 
             Would suggest that nsGlobalWindow.cpp is potentially relevant to
             any plain mochitest.
             """),
         'SCHEDULES': (Schedules, list,
-            """Maps source files to the CI tasks that should be scheduled when
+                      """Maps source files to the CI tasks that should be scheduled when
             they change.  The tasks are grouped by named components, and those
             names appear again in the taskgraph configuration
            (`$topsrcdir/taskgraph/`).
 
             Some components are "inclusive", meaning that changes to most files
             do not schedule them, aside from those described in a Files
             subcontext.  For example, py-lint tasks need not be scheduled for
             most changes, but should be scheduled when any Python file changes.
@@ -1185,76 +1197,76 @@ SUBCONTEXTS = {cls.__name__: cls for cls
 # This defines the set of mutable global variables.
 #
 # Each variable is a tuple of:
 #
 #   (storage_type, input_types, docs)
 
 VARIABLES = {
     'SOURCES': (ContextDerivedTypedListWithItems(Path, StrictOrderingOnAppendListWithFlagsFactory({'no_pgo': bool, 'flags': List, 'pgo_generate_only': bool})), list,
-        """Source code files.
+                """Source code files.
 
         This variable contains a list of source code files to compile.
         Accepts assembler, C, C++, Objective C/C++.
         """),
 
     'FILES_PER_UNIFIED_FILE': (int, int,
-        """The number of source files to compile into each unified source file.
+                               """The number of source files to compile into each unified source file.
 
         """),
 
     'IS_RUST_LIBRARY': (bool, bool,
-        """Whether the current library defined by this moz.build is built by Rust.
+                        """Whether the current library defined by this moz.build is built by Rust.
 
         The library defined by this moz.build should have a build definition in
         a Cargo.toml file that exists in this moz.build's directory.
         """),
 
     'RUST_LIBRARY_FEATURES': (List, list,
-        """Cargo features to activate for this library.
+                              """Cargo features to activate for this library.
 
         This variable should not be used directly; you should be using the
         RustLibrary template instead.
         """),
 
     'RUST_LIBRARY_TARGET_DIR': (unicode, unicode,
-        """Where CARGO_TARGET_DIR should point when compiling this library.  If
+                                """Where CARGO_TARGET_DIR should point when compiling this library.  If
         not set, it defaults to the current objdir.  It should be a relative path
         to the current objdir; absolute paths should not be used.
 
         This variable should not be used directly; you should be using the
         RustLibrary template instead.
         """),
 
     'HOST_RUST_LIBRARY_FEATURES': (List, list,
-        """Cargo features to activate for this host library.
+                                   """Cargo features to activate for this host library.
 
         This variable should not be used directly; you should be using the
         HostRustLibrary template instead.
         """),
 
     'RUST_TESTS': (TypedList(unicode), list,
-        """Names of Rust tests to build and run via `cargo test`.
+                   """Names of Rust tests to build and run via `cargo test`.
         """),
 
     'RUST_TEST_FEATURES': (TypedList(unicode), list,
-        """Cargo features to activate for RUST_TESTS.
+                           """Cargo features to activate for RUST_TESTS.
         """),
 
     'UNIFIED_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Source code files that can be compiled together.
+                        """Source code files that can be compiled together.
 
         This variable contains a list of source code files to compile,
         that can be concatenated all together and built as a single source
         file. This can help make the build faster and reduce the debug info
         size.
         """),
 
     'GENERATED_FILES': (GeneratedFilesList, list,
-        """Generic generated files.
+                        """Generic generated files.
 
         This variable contains a list of files for the build system to
         generate at export time. The generation method may be declared
         with optional ``script``, ``inputs``, ``flags``, and ``force``
         attributes on individual entries.
         If the optional ``script`` attribute is not present on an entry, it
         is assumed that rules for generating the file are present in
         the associated Makefile.in.
@@ -1295,17 +1307,17 @@ VARIABLES = {
 
         When the ``force`` attribute is present, the file is generated every
         build, regardless of whether it is stale.  This is special to the
         RecursiveMake backend and intended for special situations only (e.g.,
         localization).  Please consult a build peer before using ``force``.
         """),
 
     'DEFINES': (InitializedDefines, dict,
-        """Dictionary of compiler defines to declare.
+                """Dictionary of compiler defines to declare.
 
         These are passed in to the compiler as ``-Dkey='value'`` for string
         values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
         value is True. Note that for string values, the outer-level of
         single-quotes will be consumed by the shell. If you want to have
         a string-literal in the program, the value needs to have
         double-quotes.
 
@@ -1323,73 +1335,73 @@ VARIABLES = {
            DEFINES.update({
                'NS_NO_XPCOM': True,
                'MOZ_EXTENSIONS_DB_SCHEMA': 15,
                'DLL_SUFFIX': '".so"',
            })
         """),
 
     'DELAYLOAD_DLLS': (List, list,
-        """Delay-loaded DLLs.
+                       """Delay-loaded DLLs.
 
         This variable contains a list of DLL files which the module being linked
         should load lazily.  This only has an effect when building with MSVC.
         """),
 
     'DIRS': (ContextDerivedTypedList(SourcePath), list,
-        """Child directories to descend into looking for build frontend files.
+             """Child directories to descend into looking for build frontend files.
 
         This works similarly to the ``DIRS`` variable in make files. Each str
         value in the list is the name of a child directory. When this file is
         done parsing, the build reader will descend into each listed directory
         and read the frontend file there. If there is no frontend file, an error
         is raised.
 
         Values are relative paths. They can be multiple directory levels
         above or below. Use ``..`` for parent directories and ``/`` for path
         delimiters.
         """),
 
     'HAS_MISC_RULE': (bool, bool,
-        """Whether this directory should be traversed in the ``misc`` tier.
+                      """Whether this directory should be traversed in the ``misc`` tier.
 
         Many ``libs`` rules still exist in Makefile.in files. We highly prefer
         that these rules exist in the ``misc`` tier/target so that they can be
         executed concurrently during tier traversal (the ``misc`` tier is
         fully concurrent).
 
         Presence of this variable indicates that this directory should be
         traversed by the ``misc`` tier.
 
         Please note that converting ``libs`` rules to the ``misc`` tier must
         be done with care, as there are many implicit dependencies that can
         break the build in subtle ways.
         """),
 
     'FINAL_TARGET_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """List of files to be installed into the application directory.
+                           """List of files to be installed into the application directory.
 
         ``FINAL_TARGET_FILES`` will copy (or symlink, if the platform supports it)
         the contents of its files to the directory specified by
         ``FINAL_TARGET`` (typically ``dist/bin``). Files that are destined for a
         subdirectory can be specified by accessing a field, or as a dict access.
         For example, to export ``foo.png`` to the top-level directory and
         ``bar.svg`` to the directory ``images/do-not-use``, append to
         ``FINAL_TARGET_FILES`` like so::
 
            FINAL_TARGET_FILES += ['foo.png']
            FINAL_TARGET_FILES.images['do-not-use'] += ['bar.svg']
         """),
 
     'FINAL_TARGET_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """Like ``FINAL_TARGET_FILES``, with preprocessing.
+                              """Like ``FINAL_TARGET_FILES``, with preprocessing.
         """),
 
     'LOCALIZED_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """List of locale-dependent files to be installed into the application
+                        """List of locale-dependent files to be installed into the application
         directory.
 
         This functions similarly to ``FINAL_TARGET_FILES``, but the files are
         sourced from the locale directory and will vary per localization.
         For an en-US build, this is functionally equivalent to
         ``FINAL_TARGET_FILES``. For a build with ``--enable-ui-locale``,
         the file will be taken from ``$LOCALE_SRCDIR``, with the leading
         ``en-US`` removed. For a l10n repack of an en-US build, the file
@@ -1421,24 +1433,24 @@ VARIABLES = {
         ``toolkit/locales/en-US/foo.js`` and
         ``toolkit/locales/en-US/things/*.ini`` to ``$(DIST)/bin/foo`` in an
         en-US build, and in a build of a different locale (or a repack),
         it would copy ``$(LOCALE_SRCDIR)/toolkit/foo.js`` and
         ``$(LOCALE_SRCDIR)/toolkit/things/*.ini``.
         """),
 
     'LOCALIZED_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """Like ``LOCALIZED_FILES``, with preprocessing.
+                           """Like ``LOCALIZED_FILES``, with preprocessing.
 
         Note that the ``AB_CD`` define is available and expands to the current
         locale being packaged, as with preprocessed entries in jar manifests.
         """),
 
     'LOCALIZED_GENERATED_FILES': (GeneratedFilesList, list,
-        """Like ``GENERATED_FILES``, but for files whose content varies based on the locale in use.
+                                  """Like ``GENERATED_FILES``, but for files whose content varies based on the locale in use.
 
         For simple cases of text substitution, prefer ``LOCALIZED_PP_FILES``.
 
         Refer to the documentation of ``GENERATED_FILES``; for the most part things work the same.
         The two major differences are:
         1. The function in the Python script will be passed an additional keyword argument `locale`
            which provides the locale in use, i.e. ``en-US``.
         2. The ``inputs`` list may contain paths to files that will be taken from the locale
@@ -1452,248 +1464,248 @@ VARIABLES = {
         In addition, ``LOCALIZED_GENERATED_FILES`` can use the special substitutions ``{AB_CD}``
         and ``{AB_rCD}`` in their output paths.  ``{AB_CD}`` expands to the current locale during
         multi-locale builds and single-locale repacks and ``{AB_rCD}`` expands to an
         Android-specific encoding of the current locale.  Both expand to the empty string when the
         current locale is ``en-US``.
         """),
 
     'OBJDIR_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """List of files to be installed anywhere in the objdir. Use sparingly.
+                     """List of files to be installed anywhere in the objdir. Use sparingly.
 
         ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying
         anywhere in the object directory. This is intended for various one-off
         cases, not for general use. If you wish to add entries to OBJDIR_FILES,
         please consult a build peer.
         """),
 
     'OBJDIR_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
+                        """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
         """),
 
     'FINAL_LIBRARY': (unicode, unicode,
-        """Library in which the objects of the current directory will be linked.
+                      """Library in which the objects of the current directory will be linked.
 
         This variable contains the name of a library, defined elsewhere with
         ``LIBRARY_NAME``, in which the objects of the current directory will be
         linked.
         """),
 
     'CPP_UNIT_TESTS': (StrictOrderingOnAppendList, list,
-        """Compile a list of C++ unit test names.
+                       """Compile a list of C++ unit test names.
 
         Each name in this variable corresponds to an executable built from the
         corresponding source file with the same base name.
 
         If the configuration token ``BIN_SUFFIX`` is set, its value will be
         automatically appended to each name. If a name already ends with
         ``BIN_SUFFIX``, the name will remain unchanged.
         """),
 
     'FORCE_SHARED_LIB': (bool, bool,
-        """Whether the library in this directory is a shared library.
+                         """Whether the library in this directory is a shared library.
         """),
 
     'FORCE_STATIC_LIB': (bool, bool,
-        """Whether the library in this directory is a static library.
+                         """Whether the library in this directory is a static library.
         """),
 
     'USE_STATIC_LIBS': (bool, bool,
-        """Whether the code in this directory is a built against the static
+                        """Whether the code in this directory is built against the static
         runtime library.
 
         This variable only has an effect when building with MSVC.
         """),
 
     'HOST_SOURCES': (ContextDerivedTypedList(Path, StrictOrderingOnAppendList), list,
-        """Source code files to compile with the host compiler.
+                     """Source code files to compile with the host compiler.
 
         This variable contains a list of source code files to compile
         with the host compiler.
         """),
 
     'HOST_LIBRARY_NAME': (unicode, unicode,
-        """Name of target library generated when cross compiling.
+                          """Name of target library generated when cross compiling.
         """),
 
     'LIBRARY_DEFINES': (OrderedDict, dict,
-        """Dictionary of compiler defines to declare for the entire library.
+                        """Dictionary of compiler defines to declare for the entire library.
 
         This variable works like DEFINES, except that declarations apply to all
         libraries that link into this library via FINAL_LIBRARY.
         """),
 
     'LIBRARY_NAME': (unicode, unicode,
-        """The code name of the library generated for a directory.
+                     """The code name of the library generated for a directory.
 
         By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
         In ``example/components/moz.build``,::
 
            LIBRARY_NAME = 'xpcomsample'
 
         would generate ``example/components/libxpcomsample.so`` on Linux, or
         ``example/components/xpcomsample.lib`` on Windows.
         """),
 
     'SHARED_LIBRARY_NAME': (unicode, unicode,
-        """The name of the static library generated for a directory, if it needs to
+                            """The name of the shared library generated for a directory, if it needs to
         differ from the library code name.
 
         Implies FORCE_SHARED_LIB.
         """),
 
     'SHARED_LIBRARY_OUTPUT_CATEGORY': (unicode, unicode,
-        """The output category for this context's shared library. If set this will
+                                       """The output category for this context's shared library. If set this will
         correspond to the build command that will build this shared library, and
         the library will not be built as part of the default build.
         """),
 
     'RUST_LIBRARY_OUTPUT_CATEGORY': (unicode, unicode,
-        """The output category for this context's rust library. If set this will
+                                     """The output category for this context's rust library. If set this will
         correspond to the build command that will build this rust library, and
         the library will not be built as part of the default build.
         """),
 
     'IS_FRAMEWORK': (bool, bool,
-        """Whether the library to build should be built as a framework on OSX.
+                     """Whether the library to build should be built as a framework on OSX.
 
         This implies the name of the library won't be prefixed nor suffixed.
         Implies FORCE_SHARED_LIB.
         """),
 
     'STATIC_LIBRARY_NAME': (unicode, unicode,
-        """The name of the static library generated for a directory, if it needs to
+                            """The name of the static library generated for a directory, if it needs to
         differ from the library code name.
 
         Implies FORCE_STATIC_LIB.
         """),
 
     'USE_LIBS': (StrictOrderingOnAppendList, list,
-        """List of libraries to link to programs and libraries.
+                 """List of libraries to link to programs and libraries.
         """),
 
     'HOST_USE_LIBS': (StrictOrderingOnAppendList, list,
-        """List of libraries to link to host programs and libraries.
+                      """List of libraries to link to host programs and libraries.
         """),
 
     'HOST_OS_LIBS': (List, list,
-        """List of system libraries for host programs and libraries.
+                     """List of system libraries for host programs and libraries.
         """),
 
     'LOCAL_INCLUDES': (ContextDerivedTypedList(Path, StrictOrderingOnAppendList), list,
-        """Additional directories to be searched for include files by the compiler.
+                       """Additional directories to be searched for include files by the compiler.
         """),
 
     'NO_PGO': (bool, bool,
-        """Whether profile-guided optimization is disable in this directory.
+               """Whether profile-guided optimization is disabled in this directory.
         """),
 
     'OS_LIBS': (List, list,
-        """System link libraries.
+                """System link libraries.
 
         This variable contains a list of system libraries to link against.
         """),
     'RCFILE': (unicode, unicode,
-        """The program .rc file.
+               """The program .rc file.
 
         This variable can only be used on Windows.
         """),
 
     'RESFILE': (unicode, unicode,
-        """The program .res file.
+                """The program .res file.
 
         This variable can only be used on Windows.
         """),
 
     'RCINCLUDE': (unicode, unicode,
-        """The resource script file to be included in the default .res file.
+                  """The resource script file to be included in the default .res file.
 
         This variable can only be used on Windows.
         """),
 
     'DEFFILE': (Path, unicode,
-        """The program .def (module definition) file.
+                """The program .def (module definition) file.
 
         This variable can only be used on Windows.
         """),
 
     'SYMBOLS_FILE': (Path, unicode,
-        """A file containing a list of symbols to export from a shared library.
+                     """A file containing a list of symbols to export from a shared library.
 
         The given file contains a list of symbols to be exported, and is
         preprocessed.
         A special marker "@DATA@" must be added after a symbol name if it
         points to data instead of code, so that the Windows linker can treat
         them correctly.
         """),
 
     'SIMPLE_PROGRAMS': (StrictOrderingOnAppendList, list,
-        """Compile a list of executable names.
+                        """Compile a list of executable names.
 
         Each name in this variable corresponds to an executable built from the
         corresponding source file with the same base name.
 
         If the configuration token ``BIN_SUFFIX`` is set, its value will be
         automatically appended to each name. If a name already ends with
         ``BIN_SUFFIX``, the name will remain unchanged.
         """),
 
     'SONAME': (unicode, unicode,
-        """The soname of the shared object currently being linked
+               """The soname of the shared object currently being linked
 
         soname is the "logical name" of a shared object, often used to provide
         version backwards compatibility. This variable makes sense only for
         shared objects, and is supported only on some unix platforms.
         """),
 
     'HOST_SIMPLE_PROGRAMS': (StrictOrderingOnAppendList, list,
-        """Compile a list of host executable names.
+                             """Compile a list of host executable names.
 
         Each name in this variable corresponds to a host executable built
         from the corresponding source file with the same base name.
 
         If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will
         be automatically appended to each name. If a name already ends with
         ``HOST_BIN_SUFFIX``, the name will remain unchanged.
         """),
 
     'RUST_PROGRAMS': (StrictOrderingOnAppendList, list,
-        """Compile a list of Rust host executable names.
+                      """Compile a list of Rust executable names.
 
         Each name in this variable corresponds to an executable built from
         the Cargo.toml in the same directory.
         """),
 
     'HOST_RUST_PROGRAMS': (StrictOrderingOnAppendList, list,
-        """Compile a list of Rust executable names.
+                           """Compile a list of Rust host executable names.
 
         Each name in this variable corresponds to an executable built from
         the Cargo.toml in the same directory.
         """),
 
     'CONFIGURE_SUBST_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Output files that will be generated using configure-like substitution.
+                              """Output files that will be generated using configure-like substitution.
 
         This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this
         list, we will search for a file in the srcdir having the name
         ``{path}.in``. The contents of this file will be read and variable
         patterns like ``@foo@`` will be substituted with the values of the
         ``AC_SUBST`` variables declared during configure.
         """),
 
     'CONFIGURE_DEFINE_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Output files generated from configure/config.status.
+                               """Output files generated from configure/config.status.
 
         This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
         similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
         into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
         """),
 
     'EXPORTS': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """List of files to be exported, and in which subdirectories.
+                """List of files to be exported, and in which subdirectories.
 
         ``EXPORTS`` is generally used to list the include files to be exported to
         ``dist/include``, but it can be used for other files as well. This variable
         behaves as a list when appending filenames for export in the top-level
         directory. Files can also be appended to a field to indicate which
         subdirectory they should be exported to. For example, to export
         ``foo.h`` to the top-level directory, and ``bar.h`` to ``mozilla/dom/``,
         append to ``EXPORTS`` like so::
@@ -1701,247 +1713,247 @@ VARIABLES = {
            EXPORTS += ['foo.h']
            EXPORTS.mozilla.dom += ['bar.h']
 
         Entries in ``EXPORTS`` are paths, so objdir paths may be used, but
         any files listed from the objdir must also be listed in
         ``GENERATED_FILES``.
         """),
 
-    'PROGRAM' : (unicode, unicode,
-        """Compiled executable name.
+    'PROGRAM': (unicode, unicode,
+                """Compiled executable name.
 
         If the configuration token ``BIN_SUFFIX`` is set, its value will be
         automatically appended to ``PROGRAM``. If ``PROGRAM`` already ends with
         ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
         """),
 
-    'HOST_PROGRAM' : (unicode, unicode,
-        """Compiled host executable name.
+    'HOST_PROGRAM': (unicode, unicode,
+                     """Compiled host executable name.
 
         If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
         automatically appended to ``HOST_PROGRAM``. If ``HOST_PROGRAM`` already
         ends with ``HOST_BIN_SUFFIX``, ``HOST_PROGRAM`` will remain unchanged.
         """),
 
     'DIST_INSTALL': (Enum(None, False, True), bool,
-        """Whether to install certain files into the dist directory.
+                     """Whether to install certain files into the dist directory.
 
         By default, some file types are installed in the dist directory, and
         some aren't. Set this variable to True to force the installation of
         some files that wouldn't be installed by default. Set this variable to
         False to prevent the installation of some files that would be
         installed by default.
 
         This is confusing for historical reasons, but eventually, the behavior
         will be made explicit.
         """),
 
     'JAR_MANIFESTS': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """JAR manifest files that should be processed as part of the build.
+                      """JAR manifest files that should be processed as part of the build.
 
         JAR manifests are files in the tree that define how to package files
         into JARs and how chrome registration is performed. For more info,
         see :ref:`jar_manifests`.
         """),
 
     # IDL Generation.
     'XPIDL_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """XPCOM Interface Definition Files (xpidl).
+                      """XPCOM Interface Definition Files (xpidl).
 
         This is a list of files that define XPCOM interface definitions.
         Entries must be files that exist. Entries are almost certainly ``.idl``
         files.
         """),
 
     'XPIDL_MODULE': (unicode, unicode,
-        """XPCOM Interface Definition Module Name.
+                     """XPCOM Interface Definition Module Name.
 
         This is the name of the ``.xpt`` file that is created by linking
         ``XPIDL_SOURCES`` together. If unspecified, it defaults to be the same
         as ``MODULE``.
         """),
 
     'XPCOM_MANIFESTS': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """XPCOM Component Manifest Files.
+                        """XPCOM Component Manifest Files.
 
         This is a list of files that define XPCOM components to be added
         to the component registry.
         """),
 
     'PREPROCESSED_IPDL_SOURCES': (StrictOrderingOnAppendList, list,
-        """Preprocessed IPDL source files.
+                                  """Preprocessed IPDL source files.
 
         These files will be preprocessed, then parsed and converted to
         ``.cpp`` files.
         """),
 
     'IPDL_SOURCES': (StrictOrderingOnAppendList, list,
-        """IPDL source files.
+                     """IPDL source files.
 
         These are ``.ipdl`` files that will be parsed and converted to
         ``.cpp`` files.
         """),
 
     'WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-        """WebIDL source files.
+                     """WebIDL source files.
 
         These will be parsed and converted to ``.cpp`` and ``.h`` files.
         """),
 
     'GENERATED_EVENTS_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-        """WebIDL source files for generated events.
+                                      """WebIDL source files for generated events.
 
         These will be parsed and converted to ``.cpp`` and ``.h`` files.
         """),
 
     'TEST_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-         """Test WebIDL source files.
+                          """Test WebIDL source files.
 
          These will be parsed and converted to ``.cpp`` and ``.h`` files
          if tests are enabled.
          """),
 
     'GENERATED_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-         """Generated WebIDL source files.
+                               """Generated WebIDL source files.
 
          These will be generated from some other files.
          """),
 
     'PREPROCESSED_TEST_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-         """Preprocessed test WebIDL source files.
+                                       """Preprocessed test WebIDL source files.
 
          These will be preprocessed, then parsed and converted to .cpp
          and ``.h`` files if tests are enabled.
          """),
 
     'PREPROCESSED_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
-         """Preprocessed WebIDL source files.
+                                  """Preprocessed WebIDL source files.
 
          These will be preprocessed before being parsed and converted.
          """),
 
     'WEBIDL_EXAMPLE_INTERFACES': (StrictOrderingOnAppendList, list,
-        """Names of example WebIDL interfaces to build as part of the build.
+                                  """Names of example WebIDL interfaces to build as part of the build.
 
         Names in this list correspond to WebIDL interface names defined in
         WebIDL files included in the build from one of the \*WEBIDL_FILES
         variables.
         """),
 
     # Test declaration.
     'A11Y_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining a11y tests.
+                       """List of manifest files defining a11y tests.
         """),
 
     'BROWSER_CHROME_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining browser chrome tests.
+                                 """List of manifest files defining browser chrome tests.
         """),
 
     'ANDROID_INSTRUMENTATION_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining Android instrumentation tests.
+                                          """List of manifest files defining Android instrumentation tests.
         """),
 
     'FIREFOX_UI_FUNCTIONAL_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining firefox-ui-functional tests.
+                                        """List of manifest files defining firefox-ui-functional tests.
         """),
 
     'FIREFOX_UI_UPDATE_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining firefox-ui-update tests.
+                                    """List of manifest files defining firefox-ui-update tests.
         """),
 
     'PUPPETEER_FIREFOX_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining puppeteer unit tests for Firefox.
+                                    """List of manifest files defining puppeteer unit tests for Firefox.
         """),
 
     'MARIONETTE_LAYOUT_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining marionette-layout tests.
+                                    """List of manifest files defining marionette-layout tests.
         """),
 
     'MARIONETTE_GPU_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining marionette-gpu tests.
+                                 """List of manifest files defining marionette-gpu tests.
         """),
 
     'MARIONETTE_UNIT_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining marionette-unit tests.
+                                  """List of manifest files defining marionette-unit tests.
         """),
 
     'METRO_CHROME_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining metro browser chrome tests.
+                               """List of manifest files defining metro browser chrome tests.
         """),
 
     'MOCHITEST_CHROME_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining mochitest chrome tests.
+                                   """List of manifest files defining mochitest chrome tests.
         """),
 
     'MARIONETTE_DOM_MEDIA_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining marionette-media tests.
+                                       """List of manifest files defining marionette-media tests.
         """),
 
     'MOCHITEST_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining mochitest tests.
+                            """List of manifest files defining mochitest tests.
         """),
 
     'REFTEST_MANIFESTS': (ReftestManifestList, list,
-        """List of manifest files defining reftests.
+                          """List of manifest files defining reftests.
 
         These are commonly named reftest.list.
         """),
 
     'CRASHTEST_MANIFESTS': (ReftestManifestList, list,
-        """List of manifest files defining crashtests.
+                            """List of manifest files defining crashtests.
 
         These are commonly named crashtests.list.
         """),
 
     'WEBRTC_SIGNALLING_TEST_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining WebRTC signalling tests.
+                                         """List of manifest files defining WebRTC signalling tests.
         """),
 
     'XPCSHELL_TESTS_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining xpcshell tests.
+                                 """List of manifest files defining xpcshell tests.
         """),
 
     'PYTHON_UNITTEST_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining python unit tests.
+                                  """List of manifest files defining python unit tests.
         """),
 
     'CRAMTEST_MANIFESTS': (ManifestparserManifestList, list,
-        """List of manifest files defining cram unit tests.
+                           """List of manifest files defining cram unit tests.
         """),
 
 
     # The following variables are used to control the target of installed files.
     'XPI_NAME': (unicode, unicode,
-        """The name of an extension XPI to generate.
+                 """The name of an extension XPI to generate.
 
         When this variable is present, the results of this directory will end up
         being packaged into an extension instead of the main dist/bin results.
         """),
 
     'DIST_SUBDIR': (unicode, unicode,
-        """The name of an alternate directory to install files to.
+                    """The name of an alternate directory to install files to.
 
         When this variable is present, the results of this directory will end up
         being placed in the $(DIST_SUBDIR) subdirectory of where it would
         otherwise be placed.
         """),
 
     'FINAL_TARGET': (FinalTargetValue, unicode,
-        """The name of the directory to install targets to.
+                     """The name of the directory to install targets to.
 
         The directory is relative to the top of the object directory. The
         default value is dependent on the values of XPI_NAME and DIST_SUBDIR. If
         neither are present, the result is dist/bin. If XPI_NAME is present, the
         result is dist/xpi-stage/$(XPI_NAME). If DIST_SUBDIR is present, then
         the $(DIST_SUBDIR) directory of the otherwise default value is used.
         """),
 
     'USE_EXTENSION_MANIFEST': (bool, bool,
-        """Controls the name of the manifest for JAR files.
+                               """Controls the name of the manifest for JAR files.
 
         By default, the name of the manifest is ${JAR_MANIFEST}.manifest.
         Setting this variable to ``True`` changes the name of the manifest to
         chrome.manifest.
         """),
 
     'GYP_DIRS': (StrictOrderingOnAppendListWithFlagsFactory({
             'variables': dict,
@@ -2001,204 +2013,204 @@ VARIABLES = {
               the current moz.build, that should be excluded from source file
               unification.
             - mozilla_flags, a set of flags that if present in the gn config
               will be mirrored to the resulting mozbuild configuration.
             - gn_target, the name of the target to build.
         """),
 
     'SPHINX_TREES': (dict, dict,
-        """Describes what the Sphinx documentation tree will look like.
+                     """Describes what the Sphinx documentation tree will look like.
 
         Keys are relative directories inside the final Sphinx documentation
         tree to install files into. Values are directories (relative to this
         file) whose content to copy into the Sphinx documentation tree.
         """),
 
     'SPHINX_PYTHON_PACKAGE_DIRS': (StrictOrderingOnAppendList, list,
-        """Directories containing Python packages that Sphinx documents.
+                                   """Directories containing Python packages that Sphinx documents.
         """),
 
     'COMPILE_FLAGS': (CompileFlags, dict,
-        """Recipe for compile flags for this context. Not to be manipulated
+                      """Recipe for compile flags for this context. Not to be manipulated
         directly.
         """),
 
     'LINK_FLAGS': (LinkFlags, dict,
-        """Recipe for linker flags for this context. Not to be manipulated
+                   """Recipe for linker flags for this context. Not to be manipulated
         directly.
         """),
 
     'ASM_FLAGS': (AsmFlags, dict,
-        """Recipe for linker flags for this context. Not to be manipulated
+                  """Recipe for assembler flags for this context. Not to be manipulated
         directly.
         """),
 
     'CFLAGS': (List, list,
-        """Flags passed to the C compiler for all of the C source files
+               """Flags passed to the C compiler for all of the C source files
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'CXXFLAGS': (List, list,
-        """Flags passed to the C++ compiler for all of the C++ source files
+                 """Flags passed to the C++ compiler for all of the C++ source files
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'HOST_COMPILE_FLAGS': (HostCompileFlags, dict,
-        """Recipe for host compile flags for this context. Not to be manipulated
+                           """Recipe for host compile flags for this context. Not to be manipulated
         directly.
         """),
 
     'HOST_DEFINES': (InitializedDefines, dict,
-        """Dictionary of compiler defines to declare for host compilation.
+                     """Dictionary of compiler defines to declare for host compilation.
         See ``DEFINES`` for specifics.
         """),
 
     'CMFLAGS': (List, list,
-        """Flags passed to the Objective-C compiler for all of the Objective-C
+                """Flags passed to the Objective-C compiler for all of the Objective-C
            source files declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'CMMFLAGS': (List, list,
-        """Flags passed to the Objective-C++ compiler for all of the
+                 """Flags passed to the Objective-C++ compiler for all of the
            Objective-C++ source files declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'ASFLAGS': (List, list,
-        """Flags passed to the assembler for all of the assembly source files
+                """Flags passed to the assembler for all of the assembly source files
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the assembler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'HOST_CFLAGS': (List, list,
-        """Flags passed to the host C compiler for all of the C source files
+                    """Flags passed to the host C compiler for all of the C source files
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'HOST_CXXFLAGS': (List, list,
-        """Flags passed to the host C++ compiler for all of the C++ source files
+                      """Flags passed to the host C++ compiler for all of the C++ source files
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the compiler's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'LDFLAGS': (List, list,
-        """Flags passed to the linker when linking all of the libraries and
+                """Flags passed to the linker when linking all of the libraries and
            executables declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the linker's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'EXTRA_DSO_LDOPTS': (List, list,
-        """Flags passed to the linker when linking a shared library.
+                         """Flags passed to the linker when linking a shared library.
 
            Note that the ordering of flags matters here; these flags will be
            added to the linker's command line in the same order as they
            appear in the moz.build file.
         """),
 
     'WIN32_EXE_LDFLAGS': (List, list,
-        """Flags passed to the linker when linking a Windows .exe executable
+                          """Flags passed to the linker when linking a Windows .exe executable
            declared in this directory.
 
            Note that the ordering of flags matters here; these flags will be
            added to the linker's command line in the same order as they
            appear in the moz.build file.
 
            This variable only has an effect on Windows.
         """),
 
     'TEST_HARNESS_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
-        """List of files to be installed for test harnesses.
+                           """List of files to be installed for test harnesses.
 
         ``TEST_HARNESS_FILES`` can be used to install files to any directory
         under $objdir/_tests. Files can be appended to a field to indicate
         which subdirectory they should be exported to. For example,
         to export ``foo.py`` to ``_tests/foo``, append to
         ``TEST_HARNESS_FILES`` like so::
 
            TEST_HARNESS_FILES.foo += ['foo.py']
 
         Files from topsrcdir and the objdir can also be installed by prefixing
         the path(s) with a '/' character and a '!' character, respectively::
 
            TEST_HARNESS_FILES.path += ['/build/bar.py', '!quux.py']
         """),
 
     'NO_EXPAND_LIBS': (bool, bool,
-        """Forces to build a real static library, and no corresponding fake
+                       """Forces building a real static library, with no corresponding fake
            library.
         """),
 
     'NO_COMPONENTS_MANIFEST': (bool, bool,
-        """Do not create a binary-component manifest entry for the
+                               """Do not create a binary-component manifest entry for the
         corresponding XPCOMBinaryComponent.
         """),
 
     'USE_NASM': (bool, bool,
-        """Use the nasm assembler to assemble assembly files from SOURCES.
+                 """Use the nasm assembler to assemble assembly files from SOURCES.
 
         By default, the build will use the toolchain assembler, $(AS), to
         assemble source files in assembly language (.s or .asm files). Setting
         this value to ``True`` will cause it to use nasm instead.
 
         If nasm is not available on this system, or does not support the
         current target architecture, an error will be raised.
         """),
 
     'USE_YASM': (bool, bool,
-        """Use the yasm assembler to assemble assembly files from SOURCES.
+                 """Use the yasm assembler to assemble assembly files from SOURCES.
 
         By default, the build will use the toolchain assembler, $(AS), to
         assemble source files in assembly language (.s or .asm files). Setting
         this value to ``True`` will cause it to use yasm instead.
 
         If yasm is not available on this system, or does not support the
         current target architecture, an error will be raised.
         """),
 
     'USE_INTEGRATED_CLANGCL_AS': (bool, bool,
-        """Use the integrated clang-cl assembler to assemble assembly files from SOURCES.
+                                  """Use the integrated clang-cl assembler to assemble assembly files from SOURCES.
 
         This allows using clang-cl to assemble assembly files, which is useful
         on platforms like aarch64 where the alternative is to have to run a
         pre-processor to generate files with suitable syntax.
         """),
 }
 
 # Sanity check: we don't want any variable above to have a list as storage type.
 for name, (storage_type, input_types, docs) in VARIABLES.items():
     if storage_type == list:
         raise RuntimeError('%s has a "list" storage type. Use "List" instead.'
-            % name)
+                           % name)
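
   A hedged sketch of what the check above rejects; the entry is hypothetical:

       # 'BAD_VAR': (list, list, """Docs."""),   # storage type is the builtin list
       # -> RuntimeError: BAD_VAR has a "list" storage type. Use "List" instead.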
 
 # Set of variables that are only allowed in templates:
 TEMPLATE_VARIABLES = {
     'CPP_UNIT_TESTS',
     'FORCE_SHARED_LIB',
     'HOST_PROGRAM',
     'HOST_LIBRARY_NAME',
     'HOST_SIMPLE_PROGRAMS',
@@ -2207,34 +2219,34 @@ TEMPLATE_VARIABLES = {
     'PROGRAM',
     'SIMPLE_PROGRAMS',
 }
 
 # Add a note to template variable documentation.
 for name in TEMPLATE_VARIABLES:
     if name not in VARIABLES:
         raise RuntimeError('%s is in TEMPLATE_VARIABLES but not in VARIABLES.'
-            % name)
+                           % name)
     storage_type, input_types, docs = VARIABLES[name]
     docs += 'This variable is only available in templates.\n'
     VARIABLES[name] = (storage_type, input_types, docs)
 
 
 # The set of functions exposed to the sandbox.
 #
 # Each entry is a tuple of:
 #
 #  (function returning the corresponding function from a given sandbox,
 #   (argument types), docs)
 #
 # The first element is an attribute on Sandbox that should be a function type.
 #
 FUNCTIONS = {
     'include': (lambda self: self._include, (SourcePath,),
-        """Include another mozbuild file in the context of this one.
+                """Include another mozbuild file in the context of this one.
 
         This is similar to a ``#include`` in C languages. The filename passed to
         the function will be read and its contents will be evaluated within the
         context of the calling file.
 
         If a relative path is given, it is evaluated as relative to the file
         currently being processed. If there is a chain of multiple include(),
         the relative path computation is from the most recent/active file.
@@ -2251,17 +2263,17 @@ FUNCTIONS = {
            include('sibling.build')
 
         Include ``foo.build`` from a path within the top source directory::
 
            include('/elsewhere/foo.build')
         """),
 
     'export': (lambda self: self._export, (str,),
-        """Make the specified variable available to all child directories.
+               """Make the specified variable available to all child directories.
 
         The variable specified by the argument string is added to the
         environment of all directories specified in the DIRS and TEST_DIRS
         variables. If those directories themselves have child directories,
         the variable will be exported to all of them.
 
         The value used for the variable is the final value at the end of the
         moz.build file, so it is possible (but not recommended style) to place
@@ -2278,31 +2290,31 @@ FUNCTIONS = {
 
         To make all children directories install as the given extension::
 
           XPI_NAME = 'cool-extension'
           export('XPI_NAME')
         """),
 
     'warning': (lambda self: self._warning, (str,),
-        """Issue a warning.
+                """Issue a warning.
 
         Warnings are string messages that are printed during execution.
 
         Unlike errors, warnings do not abort execution.
         """),
 
     'error': (lambda self: self._error, (str,),
-        """Issue a fatal error.
+              """Issue a fatal error.
 
         If this function is called, processing is aborted immediately.
         """),
 
     'template': (lambda self: self._template_decorator, (FunctionType,),
-        """Decorator for template declarations.
+                 """Decorator for template declarations.
 
         Templates are a special kind of functions that can be declared in
         mozbuild files. Uppercase variables assigned in the function scope
         are considered to be the result of the template.
 
         Contrary to traditional Python functions:
            - return values from template functions are ignored,
            - template functions don't have access to the global scope.
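
   A minimal sketch of declaring and invoking a template; the names below are
   hypothetical:

       @template
       def HypotheticalProgram(name):
           PROGRAM = name   # uppercase assignments become the template's result

       HypotheticalProgram('foo')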
@@ -2351,48 +2363,48 @@ TestDirsPlaceHolder = List()
 # Special variables. These complement VARIABLES.
 #
 # Each entry is a tuple of:
 #
 #  (function returning the corresponding value from a given context, type, docs)
 #
 SPECIAL_VARIABLES = {
     'TOPSRCDIR': (lambda context: context.config.topsrcdir, str,
-        """Constant defining the top source directory.
+                  """Constant defining the top source directory.
 
         The top source directory is the parent directory containing the source
         code and all build files. It is typically the root directory of a
         cloned repository.
         """),
 
     'TOPOBJDIR': (lambda context: context.config.topobjdir, str,
-        """Constant defining the top object directory.
+                  """Constant defining the top object directory.
 
         The top object directory is the parent directory which will contain
         the output of the build. This is commonly referred to as "the object
         directory."
         """),
 
     'RELATIVEDIR': (lambda context: context.relsrcdir, str,
-        """Constant defining the relative path of this file.
+                    """Constant defining the relative path of this file.
 
         The relative path is from ``TOPSRCDIR``. This is defined as relative
         to the main file being executed, regardless of whether additional
         files have been included using ``include()``.
         """),
 
     'SRCDIR': (lambda context: context.srcdir, str,
-        """Constant defining the source directory of this file.
+               """Constant defining the source directory of this file.
 
         This is the path inside ``TOPSRCDIR`` where this file is located. It
         is the same as ``TOPSRCDIR + RELATIVEDIR``.
         """),
 
     'OBJDIR': (lambda context: context.objdir, str,
-        """The path to the object directory for this file.
+               """The path to the object directory for this file.
 
         It is the same as ``TOPOBJDIR + RELATIVEDIR``.
         """),
 
     'CONFIG': (lambda context: ReadOnlyKeyedDefaultDict(
             lambda key: context.config.substs_unicode.get(key)), dict,
         """Dictionary containing the current configuration variables.
 
@@ -2401,92 +2413,92 @@ SPECIAL_VARIABLES = {
 
         Values in this container are read-only. Attempts at changing values
         will result in a run-time error.
 
         Access to an unknown variable will return None.
         """),
 
     'EXTRA_COMPONENTS': (lambda context: context['FINAL_TARGET_FILES'].components._strings, list,
-        """Additional component files to distribute.
+                         """Additional component files to distribute.
 
        This variable contains a list of files to copy into
        ``$(FINAL_TARGET)/components/``.
         """),
 
     'EXTRA_PP_COMPONENTS': (lambda context: context['FINAL_TARGET_PP_FILES'].components._strings, list,
-        """Javascript XPCOM files.
+                            """JavaScript XPCOM files.
 
        This variable contains a list of files to preprocess.  Generated
        files will be installed in the ``/components`` directory of the distribution.
         """),
 
     'JS_PREFERENCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings, list,
-        """Exported JavaScript files.
+                            """Exported JavaScript files.
 
         A list of files copied into the dist directory for packaging and installation.
         The path is the GRE or application prefs directory, depending on what is being built.
         """),
 
     'JS_PREFERENCE_PP_FILES': (lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings, list,
-        """Like JS_PREFERENCE_FILES, preprocessed..
+                               """Like JS_PREFERENCE_FILES, but preprocessed.
         """),
 
     'RESOURCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].res, list,
-        """List of resources to be exported, and in which subdirectories.
+                       """List of resources to be exported, and in which subdirectories.
 
         ``RESOURCE_FILES`` is used to list the resource files to be exported to
         ``dist/bin/res``, but it can be used for other files as well. This variable
         behaves as a list when appending filenames for resources in the top-level
         directory. Files can also be appended to a field to indicate which
         subdirectory they should be exported to. For example, to export
         ``foo.res`` to the top-level directory, and ``bar.res`` to ``fonts/``,
         append to ``RESOURCE_FILES`` like so::
 
            RESOURCE_FILES += ['foo.res']
            RESOURCE_FILES.fonts += ['bar.res']
         """),
 
     'CONTENT_ACCESSIBLE_FILES': (lambda context: context['FINAL_TARGET_FILES'].contentaccessible, list,
-        """List of files which can be accessed by web content through resource:// URIs.
+                                 """List of files which can be accessed by web content through resource:// URIs.
 
         ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
         to ``dist/bin/contentaccessible``. Files can also be appended to a
         field to indicate which subdirectory they should be exported to.
         """),
 
     'EXTRA_JS_MODULES': (lambda context: context['FINAL_TARGET_FILES'].modules, list,
-        """Additional JavaScript files to distribute.
+                         """Additional JavaScript files to distribute.
 
         This variable contains a list of files to copy into
         ``$(FINAL_TARGET)/modules``.
         """),
 
     'EXTRA_PP_JS_MODULES': (lambda context: context['FINAL_TARGET_PP_FILES'].modules, list,
-        """Additional JavaScript files to distribute.
+                            """Additional JavaScript files to distribute.
 
         This variable contains a list of files to copy into
         ``$(FINAL_TARGET)/modules``, after preprocessing.
         """),
 
     'TESTING_JS_MODULES': (lambda context: context['TEST_HARNESS_FILES'].modules, list,
-        """JavaScript modules to install in the test-only destination.
+                           """JavaScript modules to install in the test-only destination.
 
         Some JavaScript modules (JSMs) are test-only and not distributed
         with Firefox. This variable defines them.
 
         To install modules in a subdirectory, use properties of this
         variable to control the final destination. e.g.
 
         ``TESTING_JS_MODULES.foo += ['module.jsm']``.
         """),
 
     'TEST_DIRS': (lambda context: context['DIRS'] if context.config.substs.get('ENABLE_TESTS')
-                                  else TestDirsPlaceHolder, list,
-        """Like DIRS but only for directories that contain test-only code.
+                  else TestDirsPlaceHolder, list,
+                  """Like DIRS but only for directories that contain test-only code.
 
         If tests are not enabled, this variable will be ignored.
 
         This variable may go away once the transition away from Makefiles is
         complete.
         """),
 
 }
--- a/python/mozbuild/mozbuild/frontend/data.py
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -187,31 +187,33 @@ class ComputedFlags(ContextDerived):
         flags = defaultdict(list)
         for key, _, dest_vars in self.flags.flag_variables:
             value = self.flags.get(key)
             if value:
                 for dest_var in dest_vars:
                     flags[dest_var].extend(value)
         return flags.items()
 
+
 class XPIDLModule(ContextDerived):
     """Describes an XPIDL module to be compiled."""
 
     __slots__ = (
         'name',
         'idl_files',
     )
 
     def __init__(self, context, name, idl_files):
         ContextDerived.__init__(self, context)
 
         assert all(isinstance(idl, SourcePath) for idl in idl_files)
         self.name = name
         self.idl_files = idl_files
 
+
 class BaseDefines(ContextDerived):
     """Context derived container object for DEFINES/HOST_DEFINES,
     which are OrderedDicts.
     """
     __slots__ = ('defines')
 
     def __init__(self, context, defines):
         ContextDerived.__init__(self, context)
@@ -227,22 +229,25 @@ class BaseDefines(ContextDerived):
                 yield('-D%s=%s' % (define, value))
 
     def update(self, more_defines):
         if isinstance(more_defines, Defines):
             self.defines.update(more_defines.defines)
         else:
             self.defines.update(more_defines)
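
   A hedged sketch of the serialization visible above; the define values are
   hypothetical, and bare True/False defines are handled by branches elided
   from this hunk:

       # defines = OrderedDict([('MOZ_APP_NAME', '"firefox"'), ('VERSION', 3)])
       # get_defines() yields: -DMOZ_APP_NAME="firefox"  -DVERSION=3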
 
+
 class Defines(BaseDefines):
     pass
 
+
 class HostDefines(BaseDefines):
     pass
 
+
 class WebIDLCollection(ContextDerived):
     """Collects WebIDL info referenced during the build."""
 
     def __init__(self, context):
         ContextDerived.__init__(self, context)
         self.sources = set()
         self.generated_sources = set()
         self.generated_events_sources = set()
@@ -671,17 +676,17 @@ class Library(BaseLibrary):
 class StaticLibrary(Library):
     """Context derived container object for a static library"""
     __slots__ = (
         'link_into',
         'no_expand_lib',
     )
 
     def __init__(self, context, basename, real_name=None,
-        link_into=None, no_expand_lib=False):
+                 link_into=None, no_expand_lib=False):
         Library.__init__(self, context, basename, real_name)
         self.link_into = link_into
         self.no_expand_lib = no_expand_lib
 
 
 class RustLibrary(StaticLibrary):
     """Context derived container object for a static library"""
     __slots__ = (
@@ -703,18 +708,18 @@ class RustLibrary(StaticLibrary):
         self.cargo_file = cargo_file
         self.crate_type = crate_type
         # We need to adjust our naming here because cargo replaces '-' in
         # package names defined in Cargo.toml with underscores in actual
         # filenames. But we need to keep the basename consistent because
         # many other things in the build system depend on that.
         assert self.crate_type == 'staticlib'
         self.lib_name = '%s%s%s' % (context.config.rust_lib_prefix,
-                                     basename.replace('-', '_'),
-                                     context.config.rust_lib_suffix)
+                                    basename.replace('-', '_'),
+                                    context.config.rust_lib_suffix)
         self.dependencies = dependencies
         self.features = features
         self.target_dir = target_dir
         self.output_category = context.get('RUST_LIBRARY_OUTPUT_CATEGORY')
         # Skip setting properties below which depend on cargo
         # when we don't have a compile environment. The required
         # config keys won't be available, but the instance variables
         # that we don't set should never be accessed by the actual
@@ -887,18 +892,18 @@ class TestManifest(ContextDerived):
         'source_relpaths',
 
         # If this manifest is a duplicate of another one, this is the
         # manifestparser.TestManifest of the other one.
         'dupe_manifest',
     )
 
     def __init__(self, context, path, manifest, flavor=None,
-            install_prefix=None, relpath=None, sources=(),
-            dupe_manifest=False):
+                 install_prefix=None, relpath=None, sources=(),
+                 dupe_manifest=False):
         ContextDerived.__init__(self, context)
 
         assert flavor in all_test_flavors()
 
         self.path = path
         self.directory = mozpath.dirname(path)
         self.manifest = manifest
         self.flavor = flavor
@@ -1031,17 +1036,17 @@ class UnifiedSources(BaseSources):
             # On Windows, path names have a maximum length of 255 characters,
             # so avoid creating extremely long path names.
             unified_prefix = context.relsrcdir
             if len(unified_prefix) > 20:
                 unified_prefix = unified_prefix[-20:].split('/', 1)[-1]
             unified_prefix = unified_prefix.replace('/', '_')
 
             suffix = self.canonical_suffix[1:]
-            unified_prefix='Unified_%s_%s' % (suffix, unified_prefix)
+            unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
             self.unified_source_mapping = list(group_unified_files(source_files,
                                                                    unified_prefix=unified_prefix,
                                                                    unified_suffix=suffix,
                                                                    files_per_unified_file=files_per_unified_file))
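
   A worked example of the prefix shortening above (path hypothetical):

       # relsrcdir = 'dom/media/webaudio/blink'   # 24 chars, longer than 20
       # relsrcdir[-20:]      -> 'media/webaudio/blink'
       # .split('/', 1)[-1]   -> 'webaudio/blink'
       # .replace('/', '_')   -> 'webaudio_blink'
       # unified_prefix       -> 'Unified_cpp_webaudio_blink'  (for .cpp sources)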
 
 
 class InstallationTarget(ContextDerived):
     """Describes the rules that affect where files get installed to."""
@@ -1095,16 +1100,17 @@ class FinalTargetPreprocessedFiles(Conte
     FINAL_TARGET_PP_FILES.
     """
     __slots__ = ('files')
 
     def __init__(self, sandbox, files):
         ContextDerived.__init__(self, sandbox)
         self.files = files
 
+
 class LocalizedFiles(FinalTargetFiles):
     """Sandbox container object for LOCALIZED_FILES, which is a
     HierarchicalStringList.
     """
     pass
 
 
 class LocalizedPreprocessedFiles(FinalTargetPreprocessedFiles):
@@ -1182,22 +1188,24 @@ class GeneratedFile(ContextDerived):
         suffixes = (
             '.asm',
             '.c',
             '.cpp',
             '.h',
             '.inc',
             '.py',
             '.rs',
-            'node.stub', # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
-            'android_apks', # We need to compile Java to generate JNI wrappers for native code compilation to consume.
+            'node.stub',  # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
+            # We need to compile Java to generate JNI wrappers for native code compilation to consume.
+            'android_apks',
             '.profdata',
             '.webidl'
         )
-        self.required_for_compile = [f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
+        self.required_for_compile = [
+            f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
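
   A hedged sketch of the filter above, with hypothetical output names:

       # outputs = ('bindings.h', 'Foo.webidl', 'data.json', 'stl_wrappers/new')
       # required_for_compile -> ['bindings.h', 'Foo.webidl', 'stl_wrappers/new']
       # ('data.json' matches no suffix and is not under stl_wrappers/)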
 
 
 class ChromeManifestEntry(ContextDerived):
     """Represents a chrome.manifest entry."""
 
     __slots__ = (
         'path',
         'entry',
--- a/python/mozbuild/mozbuild/frontend/emitter.py
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -180,28 +180,30 @@ class TreeMetadataEmitter(LoggingMixin):
                 # Keep all contexts around, we will need them later.
                 contexts[out.objdir] = out
 
                 start = time.time()
                 # We need to expand the generator for the timings to work.
                 objs = list(emitfn(out))
                 self._emitter_time += time.time() - start
 
-                for o in emit_objs(objs): yield o
+                for o in emit_objs(objs):
+                    yield o
 
             else:
                 raise Exception('Unhandled output type: %s' % type(out))
 
         # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
         if self.config.substs.get('COMPILE_ENVIRONMENT'):
             start = time.time()
             objs = list(self._emit_libs_derived(contexts))
             self._emitter_time += time.time() - start
 
-            for o in emit_objs(objs): yield o
+            for o in emit_objs(objs):
+                yield o
 
     def _emit_libs_derived(self, contexts):
 
         # First aggregate idl sources.
         webidl_attrs = [
             ('GENERATED_EVENTS_WEBIDL_FILES', lambda c: c.generated_events_sources),
             ('GENERATED_WEBIDL_FILES', lambda c: c.generated_sources),
             ('PREPROCESSED_TEST_WEBIDL_FILES', lambda c: c.preprocessed_test_sources),
@@ -229,45 +231,45 @@ class TreeMetadataEmitter(LoggingMixin):
                 collection = cls(contexts[root])
                 for var, src_getter in attrs:
                     src_getter(collection).update(self._idls[var])
 
                 idl_sources[root] = collection.all_source_files()
                 if isinstance(collection, WebIDLCollection):
                     # Test webidl sources are added here as a somewhat special
                     # case.
-                    idl_sources[mozpath.join(root, 'test')] = [s for s in collection.all_test_cpp_basenames()]
+                    idl_sources[mozpath.join(root, 'test')] = [
+                        s for s in collection.all_test_cpp_basenames()]
 
                 yield collection
 
-
         # Next do FINAL_LIBRARY linkage.
         for lib in (l for libs in self._libs.values() for l in libs):
             if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
                 continue
             if lib.link_into not in self._libs:
                 raise SandboxValidationError(
                     'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
                     % lib.link_into, contexts[lib.objdir])
             candidates = self._libs[lib.link_into]
 
             # When there are multiple candidates, but all are in the same
             # directory and have a different type, we want all of them to
             # have the library linked. The typical usecase is when building
             # both a static and a shared library in a directory, and having
             # that as a FINAL_LIBRARY.
             if len(set(type(l) for l in candidates)) == len(candidates) and \
-                   len(set(l.objdir for l in candidates)) == 1:
+                    len(set(l.objdir for l in candidates)) == 1:
                 for c in candidates:
                     c.link_library(lib)
             else:
                 raise SandboxValidationError(
                     'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
                     'multiple places:\n    %s' % (lib.link_into,
-                    '\n    '.join(l.objdir for l in candidates)),
+                                                  '\n    '.join(l.objdir for l in candidates)),
                     contexts[lib.objdir])
 
         # ...and USE_LIBS linkage.
         for context, obj, variable in self._linkage:
             self._link_libraries(context, obj, variable, idl_sources)
 
         def recurse_refs(lib):
             for o in lib.refs:
@@ -276,23 +278,23 @@ class TreeMetadataEmitter(LoggingMixin):
                     for q in recurse_refs(o):
                         yield q
 
         # Check that all static libraries referring to shared libraries in
         # USE_LIBS are linked into a shared library or program.
         for lib in self._static_linking_shared:
             if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
                 shared_libs = sorted(l.basename for l in lib.linked_libraries
-                    if isinstance(l, SharedLibrary))
+                                     if isinstance(l, SharedLibrary))
                 raise SandboxValidationError(
                     'The static "%s" library is not used in a shared library '
                     'or a program, but USE_LIBS contains the following shared '
                     'library names:\n    %s\n\nMaybe you can remove the '
                     'static "%s" library?' % (lib.basename,
-                    '\n    '.join(shared_libs), lib.basename),
+                                              '\n    '.join(shared_libs), lib.basename),
                     contexts[lib.objdir])
 
         # Propagate LIBRARY_DEFINES to all child libraries recursively.
         def propagate_defines(outerlib, defines):
             outerlib.lib_defines.update(defines)
             for lib in outerlib.linked_libraries:
                 # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
                 # paths.
@@ -300,17 +302,16 @@ class TreeMetadataEmitter(LoggingMixin):
                         lib.link_into == outerlib.basename):
                     propagate_defines(lib, defines)
 
         for lib in (l for libs in self._libs.values() for l in libs):
             if isinstance(lib, Library):
                 propagate_defines(lib, lib.lib_defines)
             yield lib
 
-
         for lib in (l for libs in self._libs.values() for l in libs):
             lib_defines = list(lib.lib_defines.get_defines())
             if lib_defines:
                 objdir_flags = self._compile_flags[lib.objdir]
                 objdir_flags.resolve_flags('LIBRARY_DEFINES', lib_defines)
 
                 objdir_flags = self._compile_as_flags.get(lib.objdir)
                 if objdir_flags:
@@ -320,17 +321,16 @@ class TreeMetadataEmitter(LoggingMixin):
             yield flags_obj
 
         for flags_obj in self._compile_as_flags.values():
             yield flags_obj
 
         for obj in self._binaries.values():
             yield obj
 
-
     LIBRARY_NAME_VAR = {
         'host': 'HOST_LIBRARY_NAME',
         'target': 'LIBRARY_NAME',
     }
 
     LIBSTDCXX_VAR = {
         'host': 'MOZ_LIBSTDCXX_HOST_VERSION',
         'target': 'MOZ_LIBSTDCXX_TARGET_VERSION',
@@ -388,24 +388,24 @@ class TreeMetadataEmitter(LoggingMixin):
             candidates = [l for l in candidates if l.relobjdir == dir]
             if not candidates:
                 # If the given directory is under one of the external
                 # (third party) paths, use a fake library reference to
                 # there.
                 for d in self._external_paths:
                     if dir.startswith('%s/' % d):
                         candidates = [self._get_external_library(dir, name,
-                            force_static)]
+                                                                 force_static)]
                         break
 
             if not candidates:
                 raise SandboxValidationError(
                     '%s contains "%s", but there is no "%s" %s in %s.'
                     % (variable, path, name,
-                    self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
+                       self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
 
         if len(candidates) > 1:
             # If there's more than one remaining candidate, it could be
             # that there are instances for the same library, in static and
             # shared form.
             libs = {}
             for l in candidates:
                 key = mozpath.join(l.relobjdir, l.basename)
@@ -418,47 +418,47 @@ class TreeMetadataEmitter(LoggingMixin):
                     if key not in libs:
                         libs[key] = l
             candidates = libs.values()
             if force_static and not candidates:
                 if dir:
                     raise SandboxValidationError(
                         '%s contains "static:%s", but there is no static '
                         '"%s" %s in %s.' % (variable, path, name,
-                        self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
+                                            self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
                 raise SandboxValidationError(
                     '%s contains "static:%s", but there is no static "%s" '
                     '%s in the tree' % (variable, name, name,
-                    self.LIBRARY_NAME_VAR[obj.KIND]), context)
+                                        self.LIBRARY_NAME_VAR[obj.KIND]), context)
 
         if not candidates:
             raise SandboxValidationError(
                 '%s contains "%s", which does not match any %s in the tree.'
                 % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
                 context)
 
         elif len(candidates) > 1:
             paths = (mozpath.join(l.relsrcdir, 'moz.build')
-                for l in candidates)
+                     for l in candidates)
             raise SandboxValidationError(
                 '%s contains "%s", which matches a %s defined in multiple '
                 'places:\n    %s' % (variable, path,
-                self.LIBRARY_NAME_VAR[obj.KIND],
-                '\n    '.join(paths)), context)
+                                     self.LIBRARY_NAME_VAR[obj.KIND],
+                                     '\n    '.join(paths)), context)
 
         elif force_static and not isinstance(candidates[0], StaticLibrary):
             raise SandboxValidationError(
                 '%s contains "static:%s", but there is only a shared "%s" '
                 'in %s. You may want to add FORCE_STATIC_LIB=True in '
                 '%s/moz.build, or remove "static:".' % (variable, path,
-                name, candidates[0].relobjdir, candidates[0].relobjdir),
+                                                        name, candidates[0].relobjdir, candidates[0].relobjdir),
                 context)
 
         elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
-                SharedLibrary):
+                                                           SharedLibrary):
             self._static_linking_shared.add(obj)
         obj.link_library(candidates[0])
 
     @memoize
     def _get_external_library(self, dir, name, force_static):
         # Create ExternalStaticLibrary or ExternalSharedLibrary object with a
         # context more or less truthful about where the external library is.
         context = Context(config=self.config)
@@ -480,35 +480,39 @@ class TreeMetadataEmitter(LoggingMixin):
             return pytoml.load(f), cargo_file
 
     def _verify_deps(self, context, crate_dir, crate_name, dependencies, description='Dependency'):
         """Verify that a crate's dependencies all specify local paths."""
         for dep_crate_name, values in dependencies.iteritems():
             # A simple version number.
             if isinstance(values, (str, unicode)):
                 raise SandboxValidationError(
-                    '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
+                    '%s %s of crate %s does not list a path' % (
+                        description, dep_crate_name, crate_name),
                     context)
 
             dep_path = values.get('path', None)
             if not dep_path:
                 raise SandboxValidationError(
-                    '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
+                    '%s %s of crate %s does not list a path' % (
+                        description, dep_crate_name, crate_name),
                     context)
 
             # Try to catch the case where somebody listed a
             # local path for development.
             if os.path.isabs(dep_path):
                 raise SandboxValidationError(
-                    '%s %s of crate %s has a non-relative path' % (description, dep_crate_name, crate_name),
+                    '%s %s of crate %s has a non-relative path' % (
+                        description, dep_crate_name, crate_name),
                     context)
 
             if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
                 raise SandboxValidationError(
-                    '%s %s of crate %s refers to a non-existent path' % (description, dep_crate_name, crate_name),
+                    '%s %s of crate %s refers to a non-existent path' % (
+                        description, dep_crate_name, crate_name),
                     context)
 
     def _rust_library(self, context, libname, static_args, cls=RustLibrary):
         # We need to note any Rust library for linking purposes.
         config, cargo_file = self._parse_cargo_file(context)
         crate_name = config['package']['name']
 
         if crate_name != libname:
@@ -544,17 +548,16 @@ class TreeMetadataEmitter(LoggingMixin):
         if len(features) != len(unique_features):
             raise SandboxValidationError(
                 'features for %s should not contain duplicates: %s' % (libname, features),
                 context)
 
         return cls(context, libname, cargo_file, crate_type, dependencies,
                    features, cargo_target_dir, **static_args)
 
-
     def _handle_gn_dirs(self, context):
         for target_dir in context.get('GN_DIRS', []):
             context['DIRS'] += [target_dir]
             gn_dir = context['GN_DIRS'][target_dir]
             for v in ('variables',):
                 if not getattr(gn_dir, v):
                     raise SandboxValidationError('Missing value for '
                                                  'GN_DIRS["%s"].%s' % (target_dir, v), context)
@@ -564,53 +567,53 @@ class TreeMetadataEmitter(LoggingMixin):
                 source = SourcePath(context, s)
                 if not os.path.exists(source.full_path):
                     raise SandboxValidationError('Cannot find %s.' % source,
                                                  context)
                 non_unified_sources.add(mozpath.join(context.relsrcdir, s))
 
             yield GnProjectData(context, target_dir, gn_dir, non_unified_sources)
 
-
     def _handle_linkables(self, context, passthru, generated_files):
         linkables = []
         host_linkables = []
+
         def add_program(prog, var):
             if var.startswith('HOST_'):
                 host_linkables.append(prog)
             else:
                 linkables.append(prog)
 
         def check_unique_binary(program, kind):
             if program in self._binaries:
                 raise SandboxValidationError(
                     'Cannot use "%s" as %s name, '
                     'because it is already used in %s' % (program, kind,
-                    self._binaries[program].relsrcdir), context)
+                                                          self._binaries[program].relsrcdir), context)
         for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
             program = context.get(kind)
             if program:
                 check_unique_binary(program, kind)
                 self._binaries[program] = cls(context, program)
                 self._linkage.append((context, self._binaries[program],
-                    kind.replace('PROGRAM', 'USE_LIBS')))
+                                      kind.replace('PROGRAM', 'USE_LIBS')))
                 add_program(self._binaries[program], kind)
 
         all_rust_programs = []
         for kind, cls in [('RUST_PROGRAMS', RustProgram),
                           ('HOST_RUST_PROGRAMS', HostRustProgram)]:
             programs = context[kind]
             if not programs:
                 continue
 
             all_rust_programs.append((programs, kind, cls))
 
         # Verify Rust program definitions.
         if all_rust_programs:
-            config, cargo_file = self._parse_cargo_file(context);
+            config, cargo_file = self._parse_cargo_file(context)
             bin_section = config.get('bin', None)
             if not bin_section:
                 raise SandboxValidationError(
                     'Cargo.toml in %s has no [bin] section' % context.srcdir,
                     context)
 
             defined_binaries = {b['name'] for b in bin_section}
 
@@ -629,31 +632,31 @@ class TreeMetadataEmitter(LoggingMixin):
                 ('SIMPLE_PROGRAMS', SimpleProgram),
                 ('CPP_UNIT_TESTS', SimpleProgram),
                 ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
             for program in context[kind]:
                 if program in self._binaries:
                     raise SandboxValidationError(
                         'Cannot use "%s" in %s, '
                         'because it is already used in %s' % (program, kind,
-                        self._binaries[program].relsrcdir), context)
+                                                              self._binaries[program].relsrcdir), context)
                 self._binaries[program] = cls(context, program,
-                    is_unit_test=kind == 'CPP_UNIT_TESTS')
+                                              is_unit_test=kind == 'CPP_UNIT_TESTS')
                 self._linkage.append((context, self._binaries[program],
-                    'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
-                    else 'USE_LIBS'))
+                                      'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
+                                      else 'USE_LIBS'))
                 add_program(self._binaries[program], kind)
 
         host_libname = context.get('HOST_LIBRARY_NAME')
         libname = context.get('LIBRARY_NAME')
 
         if host_libname:
             if host_libname == libname:
                 raise SandboxValidationError('LIBRARY_NAME and '
-                    'HOST_LIBRARY_NAME must have a different value', context)
+                                             'HOST_LIBRARY_NAME must have a different value', context)
 
             is_rust_library = context.get('IS_RUST_LIBRARY')
             if is_rust_library:
                 lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary)
             elif context.get('FORCE_SHARED_LIB'):
                 lib = HostSharedLibrary(context, host_libname)
             else:
                 lib = HostLibrary(context, host_libname)
@@ -774,17 +777,17 @@ class TreeMetadataEmitter(LoggingMixin):
                     raise SandboxValidationError(
                         'SYMBOLS_FILE cannot be used along DEFFILE.',
                         context)
                 if isinstance(symbols_file, SourcePath):
                     if not os.path.exists(symbols_file.full_path):
                         raise SandboxValidationError(
                             'Path specified in SYMBOLS_FILE does not exist: %s '
                             '(resolved to %s)' % (symbols_file,
-                            symbols_file.full_path), context)
+                                                  symbols_file.full_path), context)
                     shared_args['symbols_file'] = True
                 else:
                     if symbols_file.target_basename not in generated_files:
                         raise SandboxValidationError(
                             ('Objdir file specified in SYMBOLS_FILE not in ' +
                              'GENERATED_FILES: %s') % (symbols_file,), context)
                     shared_args['symbols_file'] = symbols_file.target_basename
 
@@ -798,32 +801,32 @@ class TreeMetadataEmitter(LoggingMixin):
                 if symbols_file and isinstance(symbols_file, SourcePath):
                     script = mozpath.join(
                         mozpath.dirname(mozpath.dirname(__file__)),
                         'action', 'generate_symbols_file.py')
                     defines = ()
                     if lib.defines:
                         defines = lib.defines.get_defines()
                     yield GeneratedFile(context, script,
-                        'generate_symbols_file', lib.symbols_file,
-                        [symbols_file], defines)
+                                        'generate_symbols_file', lib.symbols_file,
+                                        [symbols_file], defines)
             if static_lib:
                 is_rust_library = context.get('IS_RUST_LIBRARY')
                 if is_rust_library:
                     lib = self._rust_library(context, libname, static_args)
                 else:
                     lib = StaticLibrary(context, libname, **static_args)
                 self._libs[libname].append(lib)
                 self._linkage.append((context, lib, 'USE_LIBS'))
                 linkables.append(lib)
 
             if lib_defines:
                 if not libname:
                     raise SandboxValidationError('LIBRARY_DEFINES needs a '
-                        'LIBRARY_NAME to take effect', context)
+                                                 'LIBRARY_NAME to take effect', context)
                 lib.lib_defines.update(lib_defines)
 
         # Only emit sources if we have linkables defined in the same context.
         # Note the linkables are not emitted in this function, but much later,
         # after aggregation (because of e.g. USE_LIBS processing).
         if not (linkables or host_linkables):
             return
 
@@ -844,59 +847,59 @@ class TreeMetadataEmitter(LoggingMixin):
         for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
             srcs = sources[symbol]
             gen_srcs = gen_sources[symbol]
             context_srcs = context.get(symbol, [])
             seen_sources = set()
             for f in context_srcs:
                 if f in seen_sources:
                     raise SandboxValidationError('Source file should only '
-                        'be added to %s once: %s' % (symbol, f), context)
+                                                 'be added to %s once: %s' % (symbol, f), context)
                 seen_sources.add(f)
                 full_path = f.full_path
                 if isinstance(f, SourcePath):
                     srcs.append(full_path)
                 else:
                     assert isinstance(f, Path)
                     gen_srcs.append(full_path)
                 if symbol == 'SOURCES':
                     flags = context_srcs[f]
                     if flags:
                         all_flags[full_path] = flags
                     # Files for the generation phase of PGO are unusual, so
                     # it's not unreasonable to require them to be special.
                     if flags.pgo_generate_only:
                         if not isinstance(f, Path):
                             raise SandboxValidationError('pgo_generate_only file'
-                                'must not be a generated file: %s' % f, context)
+                                                         ' must not be a generated file: %s' % f, context)
                         if mozpath.splitext(f)[1] != '.cpp':
                             raise SandboxValidationError('pgo_generate_only file'
-                                'must be a .cpp file: %s' % f, context)
+                                                         ' must be a .cpp file: %s' % f, context)
                         if flags.no_pgo:
                             raise SandboxValidationError('pgo_generate_only files'
-                                'cannot be marked no_pgo: %s' % f, context)
+                                                         ' cannot be marked no_pgo: %s' % f, context)
                         pgo_generate_only.add(f)
 
                 if isinstance(f, SourcePath) and not os.path.exists(full_path):
                     raise SandboxValidationError('File listed in %s does not '
-                        'exist: \'%s\'' % (symbol, full_path), context)
+                                                 'exist: \'%s\'' % (symbol, full_path), context)
 
         # UNIFIED_SOURCES only take SourcePaths, so there should be no
         # generated source in here
         assert not gen_sources['UNIFIED_SOURCES']
 
         no_pgo = context.get('NO_PGO')
         no_pgo_sources = [f for f, flags in all_flags.iteritems()
                           if flags.no_pgo]
         pgo_gen_only_sources = set(f for f, flags in all_flags.iteritems()
                                    if flags.pgo_generate_only)
         if no_pgo:
             if no_pgo_sources:
                 raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
-                    'cannot be set at the same time', context)
+                                             'cannot be set at the same time', context)
             passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
         if no_pgo_sources:
             passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources
 
         # A map from "canonical suffixes" for a particular source file
         # language to the range of suffixes associated with that language.
         #
         # We deliberately don't list the canonical suffix in the suffix list
@@ -998,17 +1001,16 @@ class TreeMetadataEmitter(LoggingMixin):
         for vars, linkable_items in ((('SOURCES', 'UNIFIED_SOURCES'), linkables),
                                      (('HOST_SOURCES',), host_linkables)):
             for var in vars:
                 if cxx_sources[var]:
                     for l in linkable_items:
                         l.cxx_link = True
                     break
 
-
     def emit_from_context(self, context):
         """Convert a Context to tree metadata objects.
 
         This is a generator of mozbuild.frontend.data.ContextDerived instances.
         """
 
         # We only want to emit an InstallationTarget if one of the consulted
         # variables is defined. Later on, we look up FINAL_TARGET, which has
@@ -1017,17 +1019,18 @@ class TreeMetadataEmitter(LoggingMixin):
         if any(k in context for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')):
             yield InstallationTarget(context)
 
         for obj in self._handle_gn_dirs(context):
             yield obj
 
         # We always emit a directory traversal descriptor. This is needed by
         # the recursive make backend.
-        for o in self._emit_directory_traversal_from_context(context): yield o
+        for o in self._emit_directory_traversal_from_context(context):
+            yield o
 
         for obj in self._process_xpidl(context):
             yield obj
 
         computed_flags = ComputedFlags(context, context['COMPILE_FLAGS'])
         computed_link_flags = ComputedFlags(context, context['LINK_FLAGS'])
         computed_host_flags = ComputedFlags(context, context['HOST_COMPILE_FLAGS'])
         computed_as_flags = ComputedFlags(context, context['ASM_FLAGS'])
@@ -1047,17 +1050,17 @@ class TreeMetadataEmitter(LoggingMixin):
         ]
         for v in varlist:
             if v in context and context[v]:
                 passthru.variables[v] = context[v]
 
         if context.config.substs.get('OS_TARGET') == 'WINNT' and \
                 context['DELAYLOAD_DLLS']:
             context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
-                for dll in context['DELAYLOAD_DLLS']])
+                                       for dll in context['DELAYLOAD_DLLS']])
             context['OS_LIBS'].append('delayimp')
 
         for v in ['CMFLAGS', 'CMMFLAGS']:
             if v in context and context[v]:
                 passthru.variables['MOZBUILD_' + v] = context[v]
 
         for v in ['CXXFLAGS', 'CFLAGS']:
             if v in context and context[v]:
@@ -1072,17 +1075,17 @@ class TreeMetadataEmitter(LoggingMixin):
 
         deffile = context.get('DEFFILE')
         if deffile and context.config.substs.get('OS_TARGET') == 'WINNT':
             if isinstance(deffile, SourcePath):
                 if not os.path.exists(deffile.full_path):
                     raise SandboxValidationError(
                         'Path specified in DEFFILE does not exist: %s '
                         '(resolved to %s)' % (deffile,
-                        deffile.full_path), context)
+                                              deffile.full_path), context)
                 path = mozpath.relpath(deffile.full_path, context.objdir)
             else:
                 path = deffile.target_basename
 
             # We don't have any better way to indicate that the def file
             # is a dependency to whatever we're building beyond stuffing
             # it into EXTRA_DEPS.
             passthru.variables['EXTRA_DEPS'] = [path]
@@ -1133,17 +1136,17 @@ class TreeMetadataEmitter(LoggingMixin):
             for f in obj.outputs:
                 generated_files.add(f)
                 if obj.localized:
                     localized_generated_files.add(f)
             yield obj
 
         for path in context['CONFIGURE_SUBST_FILES']:
             sub = self._create_substitution(ConfigFileSubstitution, context,
-                path)
+                                            path)
             generated_files.add(str(sub.relpath))
             yield sub
 
         for defines_var, cls, backend_flags in (('DEFINES', Defines, (computed_flags, computed_as_flags)),
                                                 ('HOST_DEFINES', HostDefines, (computed_host_flags,))):
             defines = context.get(defines_var)
             if defines:
                 defines_obj = cls(context, defines)
@@ -1180,39 +1183,40 @@ class TreeMetadataEmitter(LoggingMixin):
             self._idls['WEBIDL_EXAMPLE_INTERFACES'].add(name)
 
         local_includes = []
         for local_include in context.get('LOCAL_INCLUDES', []):
             full_path = local_include.full_path
             if not isinstance(local_include, ObjDirPath):
                 if not os.path.exists(full_path):
                     raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                        'does not exist: %s (resolved to %s)' % (local_include,
-                        full_path), context)
+                                                 'does not exist: %s (resolved to %s)' % (local_include,
+                                                                                          full_path), context)
                 if not os.path.isdir(full_path):
                     raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                        'is a filename, but a directory is required: %s '
-                        '(resolved to %s)' % (local_include, full_path), context)
+                                                 'is a filename, but a directory is required: %s '
+                                                 '(resolved to %s)' % (local_include, full_path), context)
             if (full_path == context.config.topsrcdir or
                     full_path == context.config.topobjdir):
                 raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                    '(%s) resolves to the topsrcdir or topobjdir (%s), which is '
-                    'not allowed' % (local_include, full_path), context)
+                                             '(%s) resolves to the topsrcdir or topobjdir (%s), which is '
+                                             'not allowed' % (local_include, full_path), context)
             include_obj = LocalInclude(context, local_include)
             local_includes.append(include_obj.path.full_path)
             yield include_obj
 
         computed_flags.resolve_flags('LOCAL_INCLUDES', ['-I%s' % p for p in local_includes])
         computed_as_flags.resolve_flags('LOCAL_INCLUDES', ['-I%s' % p for p in local_includes])
         computed_host_flags.resolve_flags('LOCAL_INCLUDES', ['-I%s' % p for p in local_includes])
 
         for obj in self._handle_linkables(context, passthru, generated_files):
             yield obj
 
-        generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])
+        generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
+                                for k in self._binaries.keys()])
 
         components = []
         for var, cls in (
             ('EXPORTS', Exports),
             ('FINAL_TARGET_FILES', FinalTargetFiles),
             ('FINAL_TARGET_PP_FILES', FinalTargetPreprocessedFiles),
             ('LOCALIZED_FILES', LocalizedFiles),
             ('LOCALIZED_PP_FILES', LocalizedPreprocessedFiles),
@@ -1371,17 +1375,16 @@ class TreeMetadataEmitter(LoggingMixin):
             yield computed_link_flags
 
         if context.objdir in self._asm_compile_dirs:
             self._compile_as_flags[context.objdir] = computed_as_flags
 
         if context.objdir in self._host_compile_dirs:
             yield computed_host_flags
 
-
     def _create_substitution(self, cls, context, path):
         sub = cls(context)
         sub.input_path = '%s.in' % path.full_path
         sub.output_path = path.translated
         sub.relpath = path
 
         return sub
 
@@ -1390,32 +1393,32 @@ class TreeMetadataEmitter(LoggingMixin):
         # If there are multiple XPIDL files in a directory, they get linked
         # together into a final .xpt, which has the name defined by
         # XPIDL_MODULE.
         xpidl_module = context['XPIDL_MODULE']
 
         if not xpidl_module:
             if context['XPIDL_SOURCES']:
                 raise SandboxValidationError('XPIDL_MODULE must be defined if '
-                    'XPIDL_SOURCES is defined.', context)
+                                             'XPIDL_SOURCES is defined.', context)
             return
 
         if not context['XPIDL_SOURCES']:
             raise SandboxValidationError('XPIDL_MODULE cannot be defined '
-                'unless there are XPIDL_SOURCES', context)
+                                         'unless there are XPIDL_SOURCES', context)
 
         if context['DIST_INSTALL'] is False:
             self.log(logging.WARN, 'mozbuild_warning', dict(
                 path=context.main_path),
                 '{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.')
 
         for idl in context['XPIDL_SOURCES']:
             if not os.path.exists(idl.full_path):
                 raise SandboxValidationError('File %s from XPIDL_SOURCES '
-                    'does not exist' % idl.full_path, context)
+                                             'does not exist' % idl.full_path, context)
 
         yield XPIDLModule(context, xpidl_module, context['XPIDL_SOURCES'])
 
     def _process_generated_files(self, context):
         for path in context['CONFIGURE_DEFINE_FILES']:
             script = mozpath.join(mozpath.dirname(mozpath.dirname(__file__)),
                                   'action', 'process_define_files.py')
             yield GeneratedFile(context, script, 'process_define_file',
@@ -1477,40 +1480,40 @@ class TreeMetadataEmitter(LoggingMixin):
                     yield obj
 
     def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
         flavor, install_root, install_subdir, package_tests = info
 
         path = manifest_path.full_path
         manifest_dir = mozpath.dirname(path)
         manifest_reldir = mozpath.dirname(mozpath.relpath(path,
-            context.config.topsrcdir))
+                                                          context.config.topsrcdir))
         manifest_sources = [mozpath.relpath(pth, context.config.topsrcdir)
                             for pth in mpmanifest.source_files]
         install_prefix = mozpath.join(install_root, install_subdir)
 
         try:
             if not mpmanifest.tests:
                 raise SandboxValidationError('Empty test manifest: %s'
-                    % path, context)
+                                             % path, context)
 
             defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
             obj = TestManifest(context, path, mpmanifest, flavor=flavor,
-                install_prefix=install_prefix,
-                relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
-                sources=manifest_sources,
-                dupe_manifest='dupe-manifest' in defaults)
+                               install_prefix=install_prefix,
+                               relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
+                               sources=manifest_sources,
+                               dupe_manifest='dupe-manifest' in defaults)
 
             filtered = mpmanifest.tests
 
             missing = [t['name'] for t in filtered if not os.path.exists(t['path'])]
             if missing:
                 raise SandboxValidationError('Test manifest (%s) lists '
-                    'test that does not exist: %s' % (
-                    path, ', '.join(missing)), context)
+                                             'tests that do not exist: %s' % (
+                                                 path, ', '.join(missing)), context)
 
             out_dir = mozpath.join(install_prefix, manifest_reldir)
             if 'install-to-subdir' in defaults:
                 # This is terrible, but what are you going to do?
                 out_dir = mozpath.join(out_dir, defaults['install-to-subdir'])
                 obj.manifest_obj_relpath = mozpath.join(manifest_reldir,
                                                         defaults['install-to-subdir'],
                                                         mozpath.basename(path))
@@ -1524,29 +1527,29 @@ class TreeMetadataEmitter(LoggingMixin):
                     obj.installs[source] = (dest, False)
                 obj.external_installs |= install_info.external_installs
                 for install_path in install_info.deferred_installs:
                     if all(['*' not in install_path,
                             not os.path.isfile(mozpath.join(context.config.topsrcdir,
                                                             install_path[2:])),
                             install_path not in install_info.external_installs]):
                         raise SandboxValidationError('Error processing test '
-                           'manifest %s: entry in support-files not present '
-                           'in the srcdir: %s' % (path, install_path), context)
+                                                     'manifest %s: entry in support-files not present '
+                                                     'in the srcdir: %s' % (path, install_path), context)
 
                 obj.deferred_installs |= install_info.deferred_installs
 
             for test in filtered:
                 obj.tests.append(test)
 
                 # Some test files are compiled and should not be copied into the
                 # test package. They function as identifiers rather than files.
                 if package_tests:
                     manifest_relpath = mozpath.relpath(test['path'],
-                        mozpath.dirname(test['manifest']))
+                                                       mozpath.dirname(test['manifest']))
                     obj.installs[mozpath.normpath(test['path'])] = \
                         ((mozpath.join(out_dir, manifest_relpath)), True)
 
                 process_support_files(test)
 
             for path, m_defaults in mpmanifest.manifest_defaults.items():
                 process_support_files(m_defaults)
 
@@ -1563,38 +1566,38 @@ class TreeMetadataEmitter(LoggingMixin):
             # FUTURE we should be able to detect autogenerated files from
             # other build metadata. Once we do that, we can get rid of this.
             for f in defaults.get('generated-files', '').split():
                 # We re-raise otherwise the stack trace isn't informative.
                 try:
                     del obj.installs[mozpath.join(manifest_dir, f)]
                 except KeyError:
                     raise SandboxValidationError('Error processing test '
-                        'manifest %s: entry in generated-files not present '
-                        'elsewhere in manifest: %s' % (path, f), context)
+                                                 'manifest %s: entry in generated-files not present '
+                                                 'elsewhere in manifest: %s' % (path, f), context)
 
             yield obj
         except (AssertionError, Exception):
             raise SandboxValidationError('Error processing test '
-                'manifest file %s: %s' % (path,
-                    '\n'.join(traceback.format_exception(*sys.exc_info()))),
-                context)
+                                         'manifest file %s: %s' % (path,
+                                                                   '\n'.join(traceback.format_exception(*sys.exc_info()))),
+                                         context)
 
     def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
         manifest_full_path = manifest_path.full_path
         manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
-            context.config.topsrcdir))
+                                                          context.config.topsrcdir))
 
         # reftest manifests don't come from manifest parser. But they are
         # similar enough that we can use the same emitted objects. Note
         # that we don't perform any installs for reftests.
         obj = TestManifest(context, manifest_full_path, manifest,
-                flavor=flavor, install_prefix='%s/' % flavor,
-                relpath=mozpath.join(manifest_reldir,
-                    mozpath.basename(manifest_path)))
+                           flavor=flavor, install_prefix='%s/' % flavor,
+                           relpath=mozpath.join(manifest_reldir,
+                                                mozpath.basename(manifest_path)))
 
         for test, source_manifest in sorted(manifest.tests):
             obj.tests.append({
                 'path': test,
                 'here': mozpath.dirname(test),
                 'manifest': source_manifest,
                 'name': mozpath.basename(test),
                 'head': '',
@@ -1603,33 +1606,33 @@ class TreeMetadataEmitter(LoggingMixin):
             })
 
         yield obj
 
     def _process_jar_manifests(self, context):
         jar_manifests = context.get('JAR_MANIFESTS', [])
         if len(jar_manifests) > 1:
             raise SandboxValidationError('While JAR_MANIFESTS is a list, '
-                'it is currently limited to one value.', context)
+                                         'it is currently limited to one value.', context)
 
         for path in jar_manifests:
             yield JARManifest(context, path)
 
         # Temporary test to look for jar.mn files that creep in without using
         # the new declaration. Before, we didn't require jar.mn files to be
         # declared anywhere (they were discovered). This will detect people
         # relying on the old behavior.
         if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
             if 'jar.mn' not in jar_manifests:
                 raise SandboxValidationError('A jar.mn exists but it '
-                    'is not referenced in the moz.build file. '
-                    'Please define JAR_MANIFESTS.', context)
+                                             'is not referenced in the moz.build file. '
+                                             'Please define JAR_MANIFESTS.', context)
 
     def _emit_directory_traversal_from_context(self, context):
         o = DirectoryTraversal(context)
         o.dirs = context.get('DIRS', [])
 
         # Some paths have a subconfigure, yet also have a moz.build. Those
         # shouldn't end up in self._external_paths.
         if o.objdir:
-            self._external_paths -= { o.relobjdir }
+            self._external_paths -= {o.relobjdir}
 
         yield o
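
(Illustrative sketch, not part of the patch: most hunks in this file realign
the continuation lines of multi-line SandboxValidationError() calls. The old
style indents the continuation a fixed four spaces past the statement, which
flake8 flags as E128, continuation line under-indented for visual indent; the
new style aligns it under the opening parenthesis. A minimal standalone
example using a plain ValueError; note that adjacent string literals
concatenate with no separator, so each fragment must carry its own spacing.)

    def validate_old(symbol, f):
        # Continuation indented from the statement: flake8 E128.
        raise ValueError('Source file should only '
            'be added to %s once: %s' % (symbol, f))

    def validate_new(symbol, f):
        # Continuation aligned under the opening parenthesis, as this
        # patch reformats it.
        raise ValueError('Source file should only '
                         'be added to %s once: %s' % (symbol, f))
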
--- a/python/mozbuild/mozbuild/frontend/gyp_reader.py
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -35,17 +35,17 @@ sys.modules['gyp.generator.mozbuild'] = 
 # build/gyp_chromium does this:
 #   script_dir = os.path.dirname(os.path.realpath(__file__))
 #   chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
 #   sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
 # We're not importing gyp_chromium, but we want both script_dir and
 # chrome_src for the default includes, so go backwards from the pylib
 # directory, which is the parent directory of gyp module.
 chrome_src = mozpath.abspath(mozpath.join(mozpath.dirname(gyp.__file__),
-    '../../../../..'))
+                                          '../../../../..'))
 script_dir = mozpath.join(chrome_src, 'build')
 
 
 def encode(value):
     if isinstance(value, unicode):
         return value.encode('utf-8')
     return value
 
@@ -69,47 +69,51 @@ for unused in ['RULE_INPUT_PATH', 'RULE_
 
 class GypContext(TemplateContext):
     """Specialized Context for use with data extracted from Gyp.
 
     config is the ConfigEnvironment for this context.
     relobjdir is the object directory that will be used for this context,
     relative to the topobjdir defined in the ConfigEnvironment.
     """
+
     def __init__(self, config, relobjdir):
         self._relobjdir = relobjdir
         TemplateContext.__init__(self, template='Gyp',
-            allowed_variables=VARIABLES, config=config)
+                                 allowed_variables=VARIABLES, config=config)
 
 
 def handle_actions(actions, context, action_overrides):
     idir = '$INTERMEDIATE_DIR/'
     for action in actions:
         name = action['action_name']
         if name not in action_overrides:
             raise RuntimeError('GYP action %s not listed in action_overrides' % name)
         outputs = action['outputs']
         if len(outputs) > 1:
-            raise NotImplementedError('GYP actions with more than one output not supported: %s' % name)
+            raise NotImplementedError(
+                'GYP actions with more than one output not supported: %s' % name)
         output = outputs[0]
         if not output.startswith(idir):
-            raise NotImplementedError('GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
+            raise NotImplementedError(
+                'GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
         output = output[len(idir):]
         context['GENERATED_FILES'] += [output]
         g = context['GENERATED_FILES'][output]
         g.script = action_overrides[name]
         g.inputs = action['inputs']
 
 
 def handle_copies(copies, context):
     dist = '$PRODUCT_DIR/dist/'
     for copy in copies:
         dest = copy['destination']
         if not dest.startswith(dist):
-            raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
+            raise NotImplementedError(
+                'GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
         dest_paths = dest[len(dist):].split('/')
         exports = context['EXPORTS']
         while dest_paths:
             exports = getattr(exports, dest_paths.pop(0))
         exports += sorted(copy['files'], key=lambda x: x.lower())
 
 
 def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
@@ -125,18 +129,18 @@ def process_gyp_result(gyp_result, gyp_d
         build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
 
         # Each target is given its own objdir. The base of that objdir
         # is derived from the relative path from the root gyp file path
         # to the current build_file, placed under the given output
         # directory. Since several targets can be in a given build_file,
         # separate them in subdirectories using the build_file basename
         # and the target_name.
-        reldir  = mozpath.relpath(mozpath.dirname(build_file),
-                                  mozpath.dirname(path))
+        reldir = mozpath.relpath(mozpath.dirname(build_file),
+                                 mozpath.dirname(path))
         subdir = '%s_%s' % (
             mozpath.splitext(mozpath.basename(build_file))[0],
             target_name,
         )
         # Emit a context for each target.
         context = GypContext(config, mozpath.relpath(
             mozpath.join(output, reldir, subdir), config.topobjdir))
         context.add_source(mozpath.abspath(build_file))
@@ -150,54 +154,55 @@ def process_gyp_result(gyp_result, gyp_d
         # Derive which gyp configuration to use based on MOZ_DEBUG.
         c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release'
         if c not in spec['configurations']:
             raise RuntimeError('Missing %s gyp configuration for target %s '
                                'in %s' % (c, target_name, build_file))
         target_conf = spec['configurations'][c]
 
         if 'actions' in spec:
-          handle_actions(spec['actions'], context, action_overrides)
+            handle_actions(spec['actions'], context, action_overrides)
         if 'copies' in spec:
-          handle_copies(spec['copies'], context)
+            handle_copies(spec['copies'], context)
 
         use_libs = []
         libs = []
+
         def add_deps(s):
             for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                 ty = targets[t]['type']
                 if ty in ('static_library', 'shared_library'):
                     use_libs.append(targets[t]['target_name'])
                 # Manually expand out transitive dependencies--
                 # gyp won't do this for static libs or none targets.
                 if ty in ('static_library', 'none'):
                     add_deps(targets[t])
             libs.extend(spec.get('libraries', []))
-        #XXX: this sucks, but webrtc breaks with this right now because
+        # XXX: this sucks, but webrtc breaks with this right now because
         # it builds a library called 'gtest' and we just get lucky
         # that it isn't in USE_LIBS by that name anywhere.
         if no_chromium:
             add_deps(spec)
 
         os_libs = []
         for l in libs:
-          if l.startswith('-'):
-              os_libs.append(l)
-          elif l.endswith('.lib'):
-              os_libs.append(l[:-4])
-          elif l:
-            # For library names passed in from moz.build.
-            use_libs.append(os.path.basename(l))
+            if l.startswith('-'):
+                os_libs.append(l)
+            elif l.endswith('.lib'):
+                os_libs.append(l[:-4])
+            elif l:
+                # For library names passed in from moz.build.
+                use_libs.append(os.path.basename(l))
 
         if spec['type'] == 'none':
-          if not ('actions' in spec or 'copies' in spec):
-            continue
+            if not ('actions' in spec or 'copies' in spec):
+                continue
         elif spec['type'] in ('static_library', 'shared_library', 'executable'):
-            # Remove leading 'lib' from the target_name if any, and use as
-            # library name.
+            # Remove leading 'lib' from the target_name if any, and use as
+            # library name.
             name = spec['target_name']
             if spec['type'] in ('static_library', 'shared_library'):
                 if name.startswith('lib'):
                     name = name[3:]
                 # The context expects a unicode string.
                 context['LIBRARY_NAME'] = name.decode('utf-8')
             else:
                 context['PROGRAM'] = name.decode('utf-8')
@@ -217,19 +222,19 @@ def process_gyp_result(gyp_result, gyp_d
             sources = []
             unified_sources = []
             extensions = set()
             use_defines_in_asflags = False
             for f in spec.get('sources', []):
                 ext = mozpath.splitext(f)[-1]
                 extensions.add(ext)
                 if f.startswith('$INTERMEDIATE_DIR/'):
-                  s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
+                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                 else:
-                  s = SourcePath(context, f)
+                    s = SourcePath(context, f)
                 if ext == '.h':
                     continue
                 if ext == '.def':
                     context['SYMBOLS_FILE'] = s
                 elif ext != '.S' and not no_unified and s not in non_unified_sources:
                     unified_sources.append(s)
                 else:
                     sources.append(s)
@@ -277,17 +282,18 @@ def process_gyp_result(gyp_result, gyp_d
                     # filtered out by trying to find them in topsrcdir.
                     #
                     # We do allow !- and %-prefixed paths, assuming they come
                     # from moz.build and will be handled the same way as if they
                     # were given to LOCAL_INCLUDES in moz.build.
                     if include.startswith('/'):
                         resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                     elif not include.startswith(('!', '%')):
-                        resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
+                        resolved = mozpath.abspath(mozpath.join(
+                            mozpath.dirname(build_file), include))
                     if not include.startswith(('!', '%')) and not os.path.exists(resolved):
                         continue
                 context['LOCAL_INCLUDES'] += [include]
 
             context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
             if use_defines_in_asflags and defines:
                 context['ASFLAGS'] += ['-D' + d for d in defines]
             if config.substs['OS_TARGET'] == 'SunOS':
@@ -320,27 +326,27 @@ def process_gyp_result(gyp_result, gyp_d
                             context[var].extend(f)
         else:
             # Ignore other types because we don't have
             # anything using them, and we're not testing them. They can be
             # added when that becomes necessary.
             raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])
 
         if not no_chromium:
-          # Add some features to all contexts. Put here in case LOCAL_INCLUDES
-          # order matters.
-          context['LOCAL_INCLUDES'] += [
-              '!/ipc/ipdl/_ipdlheaders',
-              '/ipc/chromium/src',
-              '/ipc/glue',
-          ]
-          # These get set via VC project file settings for normal GYP builds.
-          if config.substs['OS_TARGET'] == 'WINNT':
-              context['DEFINES']['UNICODE'] = True
-              context['DEFINES']['_UNICODE'] = True
+            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
+            # order matters.
+            context['LOCAL_INCLUDES'] += [
+                '!/ipc/ipdl/_ipdlheaders',
+                '/ipc/chromium/src',
+                '/ipc/glue',
+            ]
+            # These get set via VC project file settings for normal GYP builds.
+            if config.substs['OS_TARGET'] == 'WINNT':
+                context['DEFINES']['UNICODE'] = True
+                context['DEFINES']['_UNICODE'] = True
         context['COMPILE_FLAGS']['OS_INCLUDES'] = []
 
         for key, value in gyp_dir_attrs.sandbox_vars.items():
             if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                 # populated here we use the value from sandbox_vars as our
                 # basis rather than overriding outright.
                 context[key] = value + context[key]
@@ -363,16 +369,17 @@ class GypProcessor(object):
     """Reads a gyp configuration in the background using the given executor and
     emits GypContexts for the backend to process.
 
     config is a ConfigEnvironment, path is the path to a root gyp configuration
     file, and output is the base path under which the objdir for the various
     gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
     from moz.build.
     """
+
     def __init__(self, config, gyp_dir_attrs, path, output, executor,
                  action_overrides, non_unified_sources):
         self._path = path
         self._config = config
         self._output = output
         self._non_unified_sources = non_unified_sources
         self._gyp_dir_attrs = gyp_dir_attrs
         self._action_overrides = action_overrides
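
(Illustrative sketch, not part of the patch: the add_deps() helper in the
hunks above walks gyp dependencies by hand, because gyp does not expand
transitive dependencies for static_library or none targets. A simplified,
self-contained version with invented target data:)

    targets = {
        'a': {'type': 'static_library', 'dependencies': ['b']},
        'b': {'type': 'static_library', 'dependencies': ['c']},
        'c': {'type': 'shared_library', 'dependencies': []},
    }

    def expand(name, seen=None):
        # Collect transitive deps, recursing only through the target
        # types that gyp does not expand for us.
        seen = set() if seen is None else seen
        for dep in targets[name]['dependencies']:
            if dep in seen:
                continue
            seen.add(dep)
            if targets[dep]['type'] in ('static_library', 'none'):
                expand(dep, seen)
        return seen

    print(sorted(expand('a')))  # ['b', 'c']
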
--- a/python/mozbuild/mozbuild/frontend/mach_commands.py
+++ b/python/mozbuild/mozbuild/frontend/mach_commands.py
@@ -16,28 +16,29 @@ from mach.decorators import (
     SubCommand,
 )
 
 from mozbuild.base import MachCommandBase
 import mozpack.path as mozpath
 
 TOPSRCDIR = os.path.abspath(os.path.join(__file__, '../../../../../'))
 
+
 class InvalidPathException(Exception):
     """Represents an error due to an invalid path."""
 
 
 @CommandProvider
 class MozbuildFileCommands(MachCommandBase):
     @Command('mozbuild-reference', category='build-dev',
-        description='View reference documentation on mozbuild files.')
+             description='View reference documentation on mozbuild files.')
     @CommandArgument('symbol', default=None, nargs='*',
-        help='Symbol to view help on. If not specified, all will be shown.')
+                     help='Symbol to view help on. If not specified, all will be shown.')
     @CommandArgument('--name-only', '-n', default=False, action='store_true',
-        help='Print symbol names only.')
+                     help='Print symbol names only.')
     def reference(self, symbol, name_only=False):
         # mozbuild.sphinx imports some Sphinx modules, so we need to be sure
         # the optional Sphinx package is installed.
         self._activate_virtualenv()
         self.virtualenv_manager.install_pip_package('Sphinx==1.1.3')
 
         from mozbuild.sphinx import (
             format_module,
@@ -279,17 +280,16 @@ class MozbuildFileCommands(MachCommandBa
                     print('\tRelevant flavors:')
                     for p in m.test_flavors:
                         print('\t\t%s' % p)
 
         except InvalidPathException as e:
             print(e.message)
             return 1
 
-
     def _get_files_info(self, paths, rev=None):
         reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)
 
         # Normalize to relative from topsrcdir.
         relpaths = []
         for p in paths:
             a = mozpath.abspath(p)
             if not mozpath.basedir(a, [self.topsrcdir]):
@@ -323,17 +323,16 @@ class MozbuildFileCommands(MachCommandBa
             for path, f in reader.finder.find(search):
                 path = path[len(self.topsrcdir):]
                 if path not in all_paths_set:
                     all_paths_set.add(path)
                     allpaths.append(path)
 
         return reader.files_info(allpaths)
 
-
     @SubCommand('file-info', 'schedules',
                 'Show the combined SCHEDULES for the files listed.')
     @CommandArgument('paths', nargs='+',
                      help='Paths whose data to query')
     def file_info_schedules(self, paths):
         """Show what is scheduled by the given files.
 
         Given a requested set of files (which can be specified using
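
(Illustrative sketch, not part of the patch: several hunks in this file and
the ones that follow touch only blank lines. pycodestyle expects exactly one
blank line before a method, E301, which is why a blank line is inserted
between a class docstring and the first def, and rejects extra ones, E303,
which is why the double blank lines before methods above are collapsed. In
miniature:)

    class Example(object):
        """One blank line separates this docstring from the first method."""

        def method(self):
            return None
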
--- a/python/mozbuild/mozbuild/frontend/reader.py
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -75,17 +75,16 @@ from .context import (
     SubContext,
     TemplateContext,
 )
 
 from mozbuild.base import ExecutionSummary
 from concurrent.futures.process import ProcessPoolExecutor
 
 
-
 if sys.version_info.major == 2:
     text_type = unicode
     type_type = types.TypeType
 else:
     text_type = str
     type_type = type
 
 
@@ -101,16 +100,17 @@ class EmptyConfig(object):
     bootstrapped from a top source directory path.
     """
     class PopulateOnGetDict(ReadOnlyDefaultDict):
         """A variation on ReadOnlyDefaultDict that populates during .get().
 
         This variation is needed because CONFIG uses .get() to access members.
         Without it, None (instead of our EmptyValue types) would be returned.
         """
+
         def get(self, key, default=None):
             return self[key]
 
     default_substs = {
         # These 2 variables are used semi-frequently and it isn't worth
         # changing all the instances.
         b'MOZ_APP_NAME': b'empty',
         b'MOZ_CHILD_PROCESS_NAME': b'empty',
@@ -177,16 +177,17 @@ class MozbuildSandbox(Sandbox):
     We expose a few useful functions and expose the set of variables defining
     Mozilla's build system.
 
     context is a Context instance.
 
     metadata is a dict of metadata that can be used during the sandbox
     evaluation.
     """
+
     def __init__(self, context, metadata={}, finder=default_finder):
         assert isinstance(context, Context)
 
         Sandbox.__init__(self, context, finder=finder)
 
         self._log = logging.getLogger(__name__)
 
         self.metadata = dict(metadata)
@@ -236,17 +237,17 @@ class MozbuildSandbox(Sandbox):
         Paths will be rejected if they do not fall under topsrcdir or one of
         the external roots.
         """
 
         # realpath() is needed for true security. But, this isn't for security
         # protection, so it is omitted.
         if not is_read_allowed(path, self._context.config):
             raise SandboxLoadError(self._context.source_stack,
-                sys.exc_info()[2], illegal_path=path)
+                                   sys.exc_info()[2], illegal_path=path)
 
         Sandbox.exec_file(self, path)
 
     def _export(self, varname):
         """Export the variable to all subdirectories of the current path."""
 
         exports = self.metadata.setdefault('exports', dict())
         if varname in exports:
@@ -285,24 +286,24 @@ class MozbuildSandbox(Sandbox):
         else:
             self._warning(message)
 
     def _template_decorator(self, func):
         """Registers a template function."""
 
         if not inspect.isfunction(func):
             raise Exception('`template` is a function decorator. You must '
-                'use it as `@template` preceding a function declaration.')
+                            'use it as `@template` preceding a function declaration.')
 
         name = func.func_name
 
         if name in self.templates:
             raise KeyError(
                 'A template named "%s" was already declared in %s.' % (name,
-                self.templates[name].path))
+                                                                       self.templates[name].path))
 
         if name.islower() or name.isupper() or name[0].islower():
             raise NameError('Template function names must be CamelCase.')
 
         self.templates[name] = TemplateFunction(func, self)
 
     @memoize
     def _create_subcontext(self, cls):
@@ -315,16 +316,17 @@ class MozbuildSandbox(Sandbox):
     @memoize
     def _create_function(self, function_def):
         """Returns a function object for use within the sandbox for the given
         function definition.
 
         The wrapper function does type coercion on the function arguments
         """
         func, args_def, doc = function_def
+
         def function(*args):
             def coerce(arg, type):
                 if not isinstance(arg, type):
                     if issubclass(type, ContextDerivedValue):
                         arg = type(self._context, arg)
                     else:
                         arg = type(arg)
                 return arg
@@ -411,17 +413,17 @@ class TemplateFunction(object):
         func_ast.body[0].decorator_list = []
         # Adjust line numbers accordingly
         ast.increment_lineno(func_ast, firstlineno - 1)
 
         # When using a custom dictionary for function globals/locals, CPython
         # actually never calls __getitem__ and __setitem__, so we need to
         # modify the AST so that accesses to globals are properly directed
         # to a dict.
-        self._global_name = b'_data' # AST wants str for this, not unicode
+        self._global_name = b'_data'  # AST wants str for this, not unicode
         # In case '_data' is a name used for a variable in the function code,
         # prepend more underscores until we find an unused name.
         while (self._global_name in code.co_names or
                 self._global_name in code.co_varnames):
             self._global_name += '_'
         func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast)
 
         # Execute the rewritten code. That code now looks like:
@@ -458,16 +460,17 @@ class TemplateFunction(object):
         )
         sandbox.exec_function(func, args, kwargs, self.path,
                               becomes_current_path=False)
 
     class RewriteName(ast.NodeTransformer):
         """AST Node Transformer to rewrite variable accesses to go through
         a dict.
         """
+
         def __init__(self, sandbox, global_name):
             self._sandbox = sandbox
             self._global_name = global_name
 
         def visit_Str(self, node):
             # String nodes we got from the AST parser are str, but we want
             # unicode literals everywhere, so transform them.
             node.s = unicode(node.s)
@@ -486,16 +489,17 @@ class TemplateFunction(object):
                 value=c(ast.Name(id=self._global_name, ctx=ast.Load())),
                 slice=c(ast.Index(value=c(ast.Str(s=node.id)))),
                 ctx=node.ctx
             ))
 
 
 class SandboxValidationError(Exception):
     """Represents an error encountered when validating sandbox results."""
+
     def __init__(self, message, context):
         Exception.__init__(self, message)
         self.context = context
 
     def __str__(self):
         s = StringIO()
 
         delim = '=' * 30
@@ -527,19 +531,20 @@ class BuildReaderError(Exception):
       - Where they failed
       - What can be done to prevent the error
 
     A lot of the code in this class should arguably be inside sandbox.py.
     However, extraction is somewhat difficult given the additions
     MozbuildSandbox has over Sandbox (e.g. the concept of included files -
     which affect error messages, of course).
     """
+
     def __init__(self, file_stack, trace, sandbox_exec_error=None,
-        sandbox_load_error=None, validation_error=None, other_error=None,
-        sandbox_called_error=None):
+                 sandbox_load_error=None, validation_error=None, other_error=None,
+                 sandbox_called_error=None):
 
         self.file_stack = file_stack
         self.trace = trace
         self.sandbox_called_error = sandbox_called_error
         self.sandbox_exec = sandbox_exec_error
         self.sandbox_load = sandbox_load_error
         self.validation_error = validation_error
         self.other = other_error
@@ -554,17 +559,17 @@ class BuildReaderError(Exception):
         if self.sandbox_load is not None:
             if len(self.sandbox_load.file_stack) > 1:
                 return self.sandbox_load.file_stack[-2]
 
             if len(self.file_stack) > 1:
                 return self.file_stack[-2]
 
         if self.sandbox_error is not None and \
-            len(self.sandbox_error.file_stack):
+                len(self.sandbox_error.file_stack):
             return self.sandbox_error.file_stack[-1]
 
         return self.file_stack[-1]
 
     @property
     def sandbox_error(self):
         return self.sandbox_exec or self.sandbox_load or \
             self.sandbox_called_error
@@ -597,17 +602,17 @@ class BuildReaderError(Exception):
             s.write('\n')
         else:
             s.write('The error appears to be part of the %s ' % __name__)
             s.write('Python module itself! It is possible you have stumbled ')
             s.write('across a legitimate bug.\n')
             s.write('\n')
 
             for l in traceback.format_exception(type(self.other), self.other,
-                self.trace):
+                                                self.trace):
                 s.write(unicode(l))
 
         return s.getvalue()
 
     def _print_sandbox_error(self, s):
         # Try to find the frame of the executed code.
         script_frame = None
 
@@ -755,17 +760,17 @@ class BuildReaderError(Exception):
             close_matches = difflib.get_close_matches(inner.args[2],
                                                       VARIABLES.keys(), 2)
             if close_matches:
                 s.write('Maybe you meant %s?\n' % ' or '.join(close_matches))
                 s.write('\n')
 
             if inner.args[2] in DEPRECATION_HINTS:
                 s.write('%s\n' %
-                    textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
+                        textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
                 return
 
             s.write('Please change the file to not use this variable.\n')
             s.write('\n')
             s.write('For reference, the set of valid variables is:\n')
             s.write('\n')
             s.write(', '.join(sorted(VARIABLES.keys())) + '\n')
             return
@@ -797,17 +802,17 @@ class BuildReaderError(Exception):
         s.write('    %s\n' % type(inner.args[3]).__name__)
         s.write('\n')
         s.write('This variable expects the following type(s):\n')
         s.write('\n')
         if type(inner.args[4]) == type_type:
             s.write('    %s\n' % inner.args[4].__name__)
         else:
             for t in inner.args[4]:
-                s.write( '    %s\n' % t.__name__)
+                s.write('    %s\n' % t.__name__)
         s.write('\n')
         s.write('Change the file to write a value of the appropriate type ')
         s.write('and try again.\n')
 
     def _print_exception(self, e, s):
         s.write('An error was encountered as part of executing the file ')
         s.write('itself. The error appears to be the fault of the script.\n')
         s.write('\n')
@@ -1048,33 +1053,33 @@ class BuildReader(object):
                                          metadata=metadata):
                 yield s
 
         except BuildReaderError as bre:
             raise bre
 
         except SandboxCalledError as sce:
             raise BuildReaderError(list(self._execution_stack),
-                sys.exc_info()[2], sandbox_called_error=sce)
+                                   sys.exc_info()[2], sandbox_called_error=sce)
 
         except SandboxExecutionError as se:
             raise BuildReaderError(list(self._execution_stack),
-                sys.exc_info()[2], sandbox_exec_error=se)
+                                   sys.exc_info()[2], sandbox_exec_error=se)
 
         except SandboxLoadError as sle:
             raise BuildReaderError(list(self._execution_stack),
-                sys.exc_info()[2], sandbox_load_error=sle)
+                                   sys.exc_info()[2], sandbox_load_error=sle)
 
         except SandboxValidationError as ve:
             raise BuildReaderError(list(self._execution_stack),
-                sys.exc_info()[2], validation_error=ve)
+                                   sys.exc_info()[2], validation_error=ve)
 
         except Exception as e:
             raise BuildReaderError(list(self._execution_stack),
-                sys.exc_info()[2], other_error=e)
+                                   sys.exc_info()[2], other_error=e)
 
     def _read_mozbuild(self, path, config, descend, metadata):
         path = mozpath.normpath(path)
         log(self._log, logging.DEBUG, 'read_mozbuild', {'path': path},
             'Reading file: {path}')
 
         if path in self._read_files:
             log(self._log, logging.WARNING, 'read_already', {'path': path},
@@ -1122,30 +1127,30 @@ class BuildReader(object):
 
         curdir = mozpath.dirname(path)
 
         for target_dir in context.get('GYP_DIRS', []):
             gyp_dir = context['GYP_DIRS'][target_dir]
             for v in ('input', 'variables'):
                 if not getattr(gyp_dir, v):
                     raise SandboxValidationError('Missing value for '
-                        'GYP_DIRS["%s"].%s' % (target_dir, v), context)
+                                                 'GYP_DIRS["%s"].%s' % (target_dir, v), context)
 
             # The make backend assumes contexts for sub-directories are
             # emitted after their parent, so accumulate the gyp contexts.
             # We could emit the parent context before processing gyp
             # configuration, but we need to add the gyp objdirs to that context
             # first.
             from .gyp_reader import GypProcessor
             non_unified_sources = set()
             for s in gyp_dir.non_unified_sources:
                 source = SourcePath(context, s)
                 if not self.finder.get(source.full_path):
                     raise SandboxValidationError('Cannot find %s.' % source,
-                        context)
+                                                 context)
                 non_unified_sources.add(source)
             action_overrides = {}
             for action, script in gyp_dir.action_overrides.iteritems():
                 action_overrides[action] = SourcePath(context, script)
 
             gyp_processor = GypProcessor(context.config,
                                          gyp_dir,
                                          mozpath.join(curdir, gyp_dir.input),
@@ -1184,17 +1189,17 @@ class BuildReader(object):
 
             # Ensure we don't break out of the topsrcdir. We don't do realpath
             # because it isn't necessary. If there are symlinks in the srcdir,
             # that's not our problem. We're not a hosted application: we don't
             # need to worry about security too much.
             if not is_read_allowed(child_path, context.config):
                 raise SandboxValidationError(
                     'Attempting to process file outside of allowed paths: %s' %
-                        child_path, context)
+                    child_path, context)
 
             if not descend:
                 continue
 
             for res in self.read_mozbuild(child_path, context.config,
                                           metadata=child_metadata):
                 yield res
 
@@ -1278,16 +1283,17 @@ class BuildReader(object):
                 target_dir = mozpath.dirname(mbpaths[i + 1])
 
                 d = mozpath.normpath(mozpath.join(topsrcdir, mbpath))
                 dirs[d].add(mozpath.relpath(target_dir, source_dir))
 
         # Exporting doesn't work reliably in tree traversal mode. Override
         # the function to no-op.
         functions = dict(FUNCTIONS)
+
         def export(sandbox):
             return lambda varname: None
         functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))
 
         metadata = {
             'functions': functions,
         }
 
@@ -1332,16 +1338,17 @@ class BuildReader(object):
         """
         paths, _ = self.read_relevant_mozbuilds(paths)
 
         # For thousands of inputs (say every file in a sub-tree),
         # test_defaults_for_path() gets called with the same contexts multiple
         # times (once for every path in a directory that doesn't have any
         # test metadata). So, we cache the function call.
         defaults_cache = {}
+
         def test_defaults_for_path(ctxs):
             key = tuple(ctx.current_path or ctx.main_path for ctx in ctxs)
 
             if key not in defaults_cache:
                 defaults_cache[key] = self.test_defaults_for_path(ctxs)
 
             return defaults_cache[key]
 
@@ -1389,17 +1396,18 @@ class BuildReader(object):
         return r
 
     def test_defaults_for_path(self, ctxs):
         # This names the context keys that will end up emitting a test
         # manifest.
         test_manifest_contexts = set(
             ['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
             ['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
-            ['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
+            ['%s_MANIFESTS' % flavor.upper().replace('-', '_')
+             for flavor in WEB_PLATFORM_TESTS_FLAVORS]
         )
 
         result_context = Files(Context())
         for ctx in ctxs:
             for key in ctx:
                 if key not in test_manifest_contexts:
                     continue
                 for paths, obj in ctx[key]:
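
(Illustrative sketch, not part of the patch: the RewriteName transformer in
the hunks above redirects variable accesses through a dict, since CPython
bypasses __getitem__/__setitem__ on custom globals mappings. A self-contained
miniature using the same py2-era ast API as the file, with an invented
'_data' namespace:)

    import ast

    class RewriteName(ast.NodeTransformer):
        def visit_Name(self, node):
            # FOO  ->  _data['FOO'], preserving load/store context.
            return ast.copy_location(
                ast.Subscript(
                    value=ast.Name(id='_data', ctx=ast.Load()),
                    slice=ast.Index(value=ast.Str(s=node.id)),
                    ctx=node.ctx),
                node)

    tree = ast.parse('FOO', mode='eval')
    tree = ast.fix_missing_locations(RewriteName().visit(tree))
    print(eval(compile(tree, '<sketch>', 'eval'), {'_data': {'FOO': 42}}))  # 42
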
--- a/python/mozbuild/mozbuild/frontend/sandbox.py
+++ b/python/mozbuild/mozbuild/frontend/sandbox.py
@@ -48,32 +48,34 @@ class SandboxError(Exception):
 
 
 class SandboxExecutionError(SandboxError):
     """Represents errors encountered during execution of a Sandbox.
 
     This is a simple container exception. Its purpose is to capture state
     so something else can report on it.
     """
+
     def __init__(self, file_stack, exc_type, exc_value, trace):
         SandboxError.__init__(self, file_stack)
 
         self.exc_type = exc_type
         self.exc_value = exc_value
         self.trace = trace
 
 
 class SandboxLoadError(SandboxError):
     """Represents errors encountered when loading a file for execution.
 
     This exception represents errors in a Sandbox that occurred as part of
     loading a file. The error could have occurred in the course of executing
     a file. If so, the file_stack will be non-empty and the file that caused
     the load will be on top of the stack.
     """
+
     def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
         SandboxError.__init__(self, file_stack)
 
         self.trace = trace
         self.illegal_path = illegal_path
         self.read_error = read_error
 
 
@@ -150,17 +152,17 @@ class Sandbox(dict):
         The path must be absolute.
         """
         assert os.path.isabs(path)
 
         try:
             source = self._finder.get(path).read()
         except Exception as e:
             raise SandboxLoadError(self._context.source_stack,
-                sys.exc_info()[2], read_error=path)
+                                   sys.exc_info()[2], read_error=path)
 
         self.exec_source(source, path)
 
     def exec_source(self, source, path=''):
         """Execute Python code within a string.
 
         The passed string should contain Python code to be executed. The string
         will be compiled and executed.
@@ -283,17 +285,17 @@ class Sandbox(dict):
             #   foo.__iadd__(['bar'])
             #   namespace.__setitem__('FOO', foo)
             # This means __setitem__ is called with the value that is already
             # in the dict, when doing +=, which is permitted.
             if key in self._context and self._context[key] is not value:
                 raise KeyError('global_ns', 'reassign', key)
 
             if (key not in self._context and isinstance(value, (list, dict))
-               and not value):
+                and not value):
                 raise KeyError('Variable %s assigned an empty value.' % key)
 
             self._context[key] = value
         else:
             dict.__setitem__(self, key, value)
 
     def get(self, key, default=None):
         raise NotImplementedError('Not supported')
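
(Illustrative sketch, not part of the patch: the __setitem__ logic in the
hunk above allows 'FOO += [...]' but rejects rebinding, by comparing object
identity. A minimal standalone dict subclass showing why the 'is not value'
check lets augmented assignment through:)

    class Namespace(dict):
        def __setitem__(self, key, value):
            # += hands back the same (mutated) list object, so an identity
            # comparison distinguishes it from an outright reassignment.
            if key in self and self[key] is not value:
                raise KeyError('global_ns', 'reassign', key)
            dict.__setitem__(self, key, value)

    ns = Namespace()
    dict.__setitem__(ns, 'FOO', [])
    ns['FOO'] += ['bar']       # allowed: the same list object is stored back
    try:
        ns['FOO'] = ['baz']    # rejected: a different object
    except KeyError as e:
        print(e.args)          # ('global_ns', 'reassign', 'FOO')
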
--- a/python/mozbuild/mozbuild/gn_processor.py
+++ b/python/mozbuild/mozbuild/gn_processor.py
@@ -73,17 +73,18 @@ class MozbuildWriter(object):
             else:
                 self.write_mozbuild_value(k, v)
 
     def write_mozbuild_list(self, key, value):
         if value:
             self.write('\n')
             self.write(self.indent + key)
             self.write(' += [\n    ' + self.indent)
-            self.write((',\n    ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
+            self.write(
+                (',\n    ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
             self.write('\n')
             self.write_ln(']')
 
     def write_mozbuild_value(self, key, value):
         if value:
             if key == 'LIBRARY_NAME':
                 self._library_name = value
             elif key == 'FORCE_SHARED_LIB':
@@ -107,17 +108,16 @@ class MozbuildWriter(object):
                 for flags, tmpl in replacements:
                     if subst_vals == flags:
                         self.write_ln(tmpl)
                         wrote_ln = True
 
                 if not wrote_ln:
                     self.write_ln("%s[%s] = %s" % subst_vals)
 
-
     def write_condition(self, values):
         def mk_condition(k, v):
             if not v:
                 return 'not CONFIG["%s"]' % k
             return 'CONFIG["%s"] == %s' % (k, self.mb_serialize(v))
 
         self.write('\n')
         self.write('if ')
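
(Illustrative sketch, not part of the patch: write_mozbuild_list() above
emits a sorted, one-entry-per-line moz.build list assignment. The same output
shape, using plain repr() and sorted() as stand-ins for the file's
mb_serialize() and alphabetical_sorted():)

    indent = ''
    key, value = 'SOURCES', ['b.cpp', 'a.cpp']
    out = indent + key + ' += [\n    ' + indent
    out += (',\n    ' + indent).join(sorted(repr(v) for v in value))
    out += '\n]'
    print(out)
    # SOURCES += [
    #     'a.cpp',
    #     'b.cpp'
    # ]
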
--- a/python/mozbuild/mozbuild/jar.py
+++ b/python/mozbuild/mozbuild/jar.py
@@ -85,17 +85,18 @@ class JarInfo(object):
             # base, the jar name is under chrome/
             if not self.base:
                 self.name = mozpath.join('chrome', self.name)
         self.relativesrcdir = None
         self.chrome_manifests = []
         self.entries = []
 
 
-class DeprecatedJarManifest(Exception): pass
+class DeprecatedJarManifest(Exception):
+    pass
 
 
 class JarManifestParser(object):
 
     ignore = re.compile('\s*(\#.*)?$')
     jarline = re.compile('''
         (?:
             (?:\[(?P<base>[\w\d.\-\_\\\/{}@]+)\]\s*)? # optional [base/path]
@@ -192,17 +193,17 @@ class JarManifestParser(object):
 
 
 class JarMaker(object):
     '''JarMaker reads jar.mn files and processes them into jar files or
       flat directories, along with chrome.manifest files.
       '''
 
     def __init__(self, outputFormat='flat', useJarfileManifest=True,
-        useChromeManifest=False):
+                 useChromeManifest=False):
 
         self.outputFormat = outputFormat
         self.useJarfileManifest = useJarfileManifest
         self.useChromeManifest = useChromeManifest
         self.pp = Preprocessor()
         self.topsourcedir = None
         self.sourcedirs = []
         self.localedirs = None
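A hedged sketch of driving JarMaker directly (in-tree callers normally go through main(); all paths here are made up):

    jm = JarMaker(outputFormat='flat')
    jm.topsourcedir = '/src/mozilla'
    jm.sourcedirs = ['/src/mozilla/browser']
    jm.makeJar('jar.mn', '/obj/dist/bin/chrome')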
@@ -219,37 +220,36 @@ class JarMaker(object):
         the options for the inner PreProcessor.
         '''
 
         # HACK, we need to unescape the string variables we get,
         # the perl versions didn't grok strings right
 
         p = self.pp.getCommandLineParser(unescapeDefines=True)
         p.add_option('-f', type='choice', default='jar',
-            choices=('jar', 'flat', 'symlink'),
-            help='fileformat used for output',
-            metavar='[jar, flat, symlink]',
-            )
+                     choices=('jar', 'flat', 'symlink'),
+                     help='fileformat used for output',
+                     metavar='[jar, flat, symlink]',
+                     )
         p.add_option('-v', action='store_true', dest='verbose',
                      help='verbose output')
         p.add_option('-q', action='store_false', dest='verbose',
                      help='verbose output')
         p.add_option('-e', action='store_true',
                      help='create chrome.manifest instead of jarfile.manifest'
                      )
         p.add_option('-s', type='string', action='append', default=[],
                      help='source directory')
         p.add_option('-t', type='string', help='top source directory')
-        p.add_option('-c', '--l10n-src', type='string', action='append'
-                     , help='localization directory')
+        p.add_option('-c', '--l10n-src', type='string',
+                     action='append', help='localization directory')
         p.add_option('--l10n-base', type='string', action='store',
                      help='base directory to be used for localization (requires relativesrcdir)'
                      )
-        p.add_option('--locale-mergedir', type='string', action='store'
-                     ,
+        p.add_option('--locale-mergedir', type='string', action='store',
                      help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
                      )
         p.add_option('--relativesrcdir', type='string',
                      help='relativesrcdir to be used for localization')
         p.add_option('-d', type='string', help='base directory')
         p.add_option('--root-manifest-entry-appid', type='string',
                      help='add an app id specific root chrome manifest entry.'
                      )
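Put together, those options support invocations along these lines (an illustrative command line; running jar.py directly and the paths shown are assumptions):

    python jar.py -f flat -t /src/mozilla -s /src/mozilla/browser \
        -d /obj/dist/bin/chrome jar.mn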
@@ -287,39 +287,39 @@ class JarMaker(object):
         if self.rootManifestAppId:
             rootChromeManifest = \
                 os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
                              '..', 'chrome.manifest')
             rootChromeManifest = os.path.normpath(rootChromeManifest)
             chromeDir = \
                 os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
             logging.info("adding '%s' entry to root chrome manifest appid=%s"
-                          % (chromeDir, self.rootManifestAppId))
+                         % (chromeDir, self.rootManifestAppId))
             addEntriesToListFile(rootChromeManifest,
                                  ['manifest %s/chrome.manifest application=%s'
                                   % (chromeDir,
-                                 self.rootManifestAppId)])
+                                     self.rootManifestAppId)])
 
     def updateManifest(self, manifestPath, chromebasepath, register):
         '''updateManifest replaces the % in the chrome registration entries
         with the given chrome base path, and updates the given manifest file.
         '''
         myregister = dict.fromkeys(map(lambda s: s.replace('%',
-            chromebasepath), register))
+                                                           chromebasepath), register))
         addEntriesToListFile(manifestPath, myregister.iterkeys())
 
     def makeJar(self, infile, jardir):
         '''makeJar is the main entry point to JarMaker.
 
         It takes the input file, the output directory, the source dirs and the
         top source dir as argument, and optionally the l10n dirs.
         '''
 
         # making paths absolute, guess srcdir if file and add to sourcedirs
-        _normpath = lambda p: os.path.normpath(os.path.abspath(p))
+        def _normpath(p): return os.path.normpath(os.path.abspath(p))
         self.topsourcedir = _normpath(self.topsourcedir)
         self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
         if self.localedirs:
             self.localedirs = [_normpath(p) for p in self.localedirs]
         elif self.relativesrcdir:
             self.localedirs = \
                 self.generateLocaleDirs(self.relativesrcdir)
         if isinstance(infile, basestring):
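The _normpath change above is flake8's E731 fix: a lambda bound to a name becomes a one-line def. The two spellings are equivalent, but the def carries a real __name__ for tracebacks:

    import os

    # Before: E731 flags assigning a lambda to a name.
    _normpath = lambda p: os.path.normpath(os.path.abspath(p))

    # After the --fix rewrite: same behavior, introspectable name.
    def _normpath(p): return os.path.normpath(os.path.abspath(p))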
@@ -343,17 +343,17 @@ class JarMaker(object):
         # generate locales dirs, merge, l10nbase, en-US
         if self.l10nmerge:
             locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir))
         if self.l10nbase:
             locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
         if self.l10nmerge or not self.l10nbase:
             # add en-US if we merge, or if it's not l10n
             locdirs.append(os.path.join(self.topsourcedir,
-                           relativesrcdir, 'en-US'))
+                                        relativesrcdir, 'en-US'))
         return locdirs
 
     def processJarSection(self, jarinfo, jardir):
         '''Internal method called by makeJar to actually process a section
         of a jar.mn file.
         '''
 
         # chromebasepath is used for chrome registration manifests
@@ -452,17 +452,17 @@ class JarMaker(object):
         for _srcdir in src_base:
             if os.path.isfile(os.path.join(_srcdir, src)):
                 realsrc = os.path.join(_srcdir, src)
                 break
         if realsrc is None:
             if jf is not None:
                 jf.close()
             raise RuntimeError('File "{0}" not found in {1}'.format(src,
-                               ', '.join(src_base)))
+                                                                    ', '.join(src_base)))
 
         if out in self._seen_output:
             raise RuntimeError('%s already added' % out)
         self._seen_output.add(out)
 
         if e.preprocess:
             outf = outHelper.getOutput(out)
             inf = open(realsrc)
@@ -580,17 +580,17 @@ def main(args=None):
             p.error('relativesrcdir required when using l10n-base')
         if options.l10n_src:
             p.error('both l10n-src and l10n-base are not supported')
         jm.l10nbase = options.l10n_base
         jm.relativesrcdir = options.relativesrcdir
         jm.l10nmerge = options.locale_mergedir
         if jm.l10nmerge and not os.path.isdir(jm.l10nmerge):
             logging.warning("WARNING: --locale-mergedir passed, but '%s' does not exist. "
-                "Ignore this message if the locale is complete." % jm.l10nmerge)
+                            "Ignore this message if the locale is complete." % jm.l10nmerge)
     elif options.locale_mergedir:
         p.error('l10n-base required when using locale-mergedir')
     jm.localedirs = options.l10n_src
     if options.root_manifest_entry_appid:
         jm.rootManifestAppId = options.root_manifest_entry_appid
     noise = logging.INFO
     if options.verbose is not None:
         noise = options.verbose and logging.DEBUG or logging.WARN
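The final line keeps the old and/or conditional idiom; it is safe here only because logging.DEBUG (10) is truthy. The modern equivalent, shown for comparison:

    import logging

    verbose = True
    noise = verbose and logging.DEBUG or logging.WARN   # old idiom
    noise = logging.DEBUG if verbose else logging.WARN  # conditional expr
    assert noise == logging.DEBUG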
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -135,29 +135,29 @@ class Watch(MachCommandBase):
 
 
 @CommandProvider
 class Build(MachCommandBase):
     """Interface to build the tree."""
 
     @Command('build', category='build', description='Build the tree.')
     @CommandArgument('--jobs', '-j', default='0', metavar='jobs', type=int,
-        help='Number of concurrent jobs to run. Default is the number of CPUs.')
+                     help='Number of concurrent jobs to run. Default is the number of CPUs.')
     @CommandArgument('-C', '--directory', default=None,
-        help='Change to a subdirectory of the build directory first.')
+                     help='Change to a subdirectory of the build directory first.')
     @CommandArgument('what', default=None, nargs='*', help=BUILD_WHAT_HELP)
     @CommandArgument('-X', '--disable-extra-make-dependencies',
                      default=False, action='store_true',
                      help='Do not add extra make dependencies.')
     @CommandArgument('-v', '--verbose', action='store_true',
-        help='Verbose output for what commands the build is running.')
+                     help='Verbose output for what commands the build is running.')
     @CommandArgument('--keep-going', action='store_true',
                      help='Keep building after an error has occurred')
     def build(self, what=None, disable_extra_make_dependencies=None, jobs=0,
-        directory=None, verbose=False, keep_going=False):
+              directory=None, verbose=False, keep_going=False):
         """Build the source tree.
 
         With no arguments, this will perform a full build.
 
         Positional arguments define targets to build. These can be make targets
         or patterns like "<dir>/<target>" to indicate a make target within a
         directory.
 
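For example (illustrative targets; the dir/target pattern is described above):

    ./mach build                     # full build
    ./mach build -j4 browser/app     # one make target, four jobs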
@@ -185,83 +185,83 @@ class Build(MachCommandBase):
             disable_extra_make_dependencies=disable_extra_make_dependencies,
             jobs=jobs,
             directory=directory,
             verbose=verbose,
             keep_going=keep_going,
             mach_context=self._mach_context)
 
     @Command('configure', category='build',
-        description='Configure the tree (run configure and config.status).')
+             description='Configure the tree (run configure and config.status).')
     @CommandArgument('options', default=None, nargs=argparse.REMAINDER,
                      help='Configure options')
     def configure(self, options=None, buildstatus_messages=False, line_handler=None):
         from mozbuild.controller.building import (
             BuildDriver,
         )
 
         self.log_manager.enable_all_structured_loggers()
         driver = self._spawn(BuildDriver)
 
         return driver.configure(
             options=options,
             buildstatus_messages=buildstatus_messages,
             line_handler=line_handler)
 
     @Command('resource-usage', category='post-build',
-        description='Show information about system resource usage for a build.')
+             description='Show information about system resource usage for a build.')
     @CommandArgument('--address', default='localhost',
-        help='Address the HTTP server should listen on.')
+                     help='Address the HTTP server should listen on.')
     @CommandArgument('--port', type=int, default=0,
-        help='Port number the HTTP server should listen on.')
+                     help='Port number the HTTP server should listen on.')
     @CommandArgument('--browser', default='firefox',
-        help='Web browser to automatically open. See webbrowser Python module.')
+                     help='Web browser to automatically open. See webbrowser Python module.')
     @CommandArgument('--url',
-        help='URL of JSON document to display')
+                     help='URL of JSON document to display')
     def resource_usage(self, address=None, port=None, browser=None, url=None):
         import webbrowser
         from mozbuild.html_build_viewer import BuildViewerServer
 
         server = BuildViewerServer(address, port)
 
         if url:
             server.add_resource_json_url('url', url)
         else:
             last = self._get_state_filename('build_resources.json')
             if not os.path.exists(last):
                 print('Build resources not available. If you have performed a '
-                    'build and receive this message, the psutil Python package '
-                    'likely failed to initialize properly.')
+                      'build and receive this message, the psutil Python package '
+                      'likely failed to initialize properly.')
                 return 1
 
             server.add_resource_json_file('last', last)
         try:
             webbrowser.get(browser).open_new_tab(server.url)
         except Exception:
             print('Cannot get browser specified, trying the default instead.')
             try:
                 browser = webbrowser.get().open_new_tab(server.url)
             except Exception:
                 print('Please open %s in a browser.' % server.url)
 
         print('Hit CTRL+c to stop server.')
         server.run()
 
     @Command('build-backend', category='build',
-        description='Generate a backend used to build the tree.')
+             description='Generate a backend used to build the tree.')
     @CommandArgument('-d', '--diff', action='store_true',
-        help='Show a diff of changes.')
+                     help='Show a diff of changes.')
     # It would be nice to filter the choices below based on
     # conditions, but that is for another day.
     @CommandArgument('-b', '--backend', nargs='+', choices=sorted(backends),
-        help='Which backend to build.')
+                     help='Which backend to build.')
     @CommandArgument('-v', '--verbose', action='store_true',
-        help='Verbose output.')
+                     help='Verbose output.')
     @CommandArgument('-n', '--dry-run', action='store_true',
-        help='Do everything except writing files out.')
+                     help='Do everything except writing files out.')
     def build_backend(self, backend, diff=False, verbose=False, dry_run=False):
         python = self.virtualenv_manager.python_path
         config_status = os.path.join(self.topobjdir, 'config.status')
 
         if not os.path.exists(config_status):
             print('config.status not found.  Please run |mach configure| '
                   'or |mach build| prior to building the %s build backend.'
                   % backend)
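Illustrative invocations of the two commands above (backend names come from the sorted(backends) choices; RecursiveMake is an assumption here):

    ./mach resource-usage --port 8000 --browser firefox
    ./mach build-backend -b RecursiveMake --diff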
@@ -274,33 +274,33 @@ class Build(MachCommandBase):
         if diff:
             args.append('--diff')
         if verbose:
             args.append('--verbose')
         if dry_run:
             args.append('--dry-run')
 
         return self._run_command_in_objdir(args=args, pass_thru=True,
-            ensure_exit_code=False)
+                                           ensure_exit_code=False)