bug 1463425 - autopep8 on config/ r=gps
author: Sylvestre Ledru <sledru@mozilla.com>
Tue, 22 May 2018 00:01:01 +0200
changeset 473816 fba6f974041a3d3c22ef95c44f6e4262e8e5c52f
parent 473815 afa720beeefb08b8f0c3e2a7f88fde4530eddd69
child 473817 da3c81f986fa63b42a2874b1791eddcdbfbe8424
push id: 9374
push user: jlund@mozilla.com
push date: Mon, 18 Jun 2018 21:43:20 +0000
treeherder: mozilla-beta@160e085dfb0b
reviewers: gps
bugs: 1463425
milestone: 62.0a1
bug 1463425 - autopep8 on config/ r=gps MozReview-Commit-ID: EaTAhH2CAee
config/MozZipFile.py
config/check_js_msg_encoding.py
config/check_js_opcode.py
config/check_macroassembler_style.py
config/check_source_count.py
config/check_spidermonkey_style.py
config/check_vanilla_allocations.py
config/createprecomplete.py
config/find_OOM_errors.py
config/make-stl-wrappers.py
config/nsinstall.py
config/printprereleasesuffix.py
config/pythonpath.py
config/rebuild_check.py
config/run_spidermonkey_checks.py
config/tests/test_mozbuild_reading.py
config/tests/unit-mozunit.py
config/tests/unit-nsinstall.py
config/tests/unit-printprereleasesuffix.py
config/tests/unitMozZipFile.py
--- a/config/MozZipFile.py
+++ b/config/MozZipFile.py
@@ -5,133 +5,134 @@
 import os
 import time
 import zipfile
 
 from mozbuild.util import lock_file
 
 
 class ZipFile(zipfile.ZipFile):
-  """ Class with methods to open, read, write, close, list zip files.
+    """ Class with methods to open, read, write, close, list zip files.
+
+    Subclassing zipfile.ZipFile to allow for overwriting of existing
+    entries, though only for writestr, not for write.
+    """
 
-  Subclassing zipfile.ZipFile to allow for overwriting of existing
-  entries, though only for writestr, not for write.
-  """
-  def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED,
-               lock = False):
-    if lock:
-      assert isinstance(file, basestring)
-      self.lockfile = lock_file(file + '.lck')
-    else:
-      self.lockfile = None
+    def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED,
+                 lock=False):
+        if lock:
+            assert isinstance(file, basestring)
+            self.lockfile = lock_file(file + '.lck')
+        else:
+            self.lockfile = None
 
-    if mode == 'a' and lock:
-      # appending to a file which doesn't exist fails, but we can't check
-      # existence until we hold the lock
-      if (not os.path.isfile(file)) or os.path.getsize(file) == 0:
-        mode = 'w'
+        if mode == 'a' and lock:
+            # appending to a file which doesn't exist fails, but we can't check
+            # existence until we hold the lock
+            if (not os.path.isfile(file)) or os.path.getsize(file) == 0:
+                mode = 'w'
 
-    zipfile.ZipFile.__init__(self, file, mode, compression)
-    self._remove = []
-    self.end = self.fp.tell()
-    self.debug = 0
+        zipfile.ZipFile.__init__(self, file, mode, compression)
+        self._remove = []
+        self.end = self.fp.tell()
+        self.debug = 0
 
-  def writestr(self, zinfo_or_arcname, bytes):
-    """Write contents into the archive.
+    def writestr(self, zinfo_or_arcname, bytes):
+        """Write contents into the archive.
 
-    The contents are the argument 'bytes'; 'zinfo_or_arcname' is either
-    a ZipInfo instance or the name of the file in the archive.
-    This method is overloaded to allow overwriting existing entries.
-    """
-    if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
-      zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname,
-                              date_time=time.localtime(time.time()))
-      zinfo.compress_type = self.compression
-      # Add some standard UNIX file access permissions (-rw-r--r--).
-      zinfo.external_attr = (0x81a4 & 0xFFFF) << 16L
-    else:
-      zinfo = zinfo_or_arcname
+        The contents are the argument 'bytes'; 'zinfo_or_arcname' is either
+        a ZipInfo instance or the name of the file in the archive.
+        This method is overloaded to allow overwriting existing entries.
+        """
+        if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
+            zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname,
+                                    date_time=time.localtime(time.time()))
+            zinfo.compress_type = self.compression
+            # Add some standard UNIX file access permissions (-rw-r--r--).
+            zinfo.external_attr = (0x81a4 & 0xFFFF) << 16L
+        else:
+            zinfo = zinfo_or_arcname
 
-    # Now to the point why we overwrote this in the first place,
-    # remember the entry numbers if we already had this entry.
-    # Optimizations:
-    # If the entry to overwrite is the last one, just reuse that.
-    # If we store uncompressed and the new content has the same size
-    # as the old, reuse the existing entry.
+        # Now to the point why we overwrote this in the first place,
+        # remember the entry numbers if we already had this entry.
+        # Optimizations:
+        # If the entry to overwrite is the last one, just reuse that.
+        # If we store uncompressed and the new content has the same size
+        # as the old, reuse the existing entry.
 
-    doSeek = False # store if we need to seek to the eof after overwriting
-    if self.NameToInfo.has_key(zinfo.filename):
-      # Find the last ZipInfo with our name.
-      # Last, because that's catching multiple overwrites
-      i = len(self.filelist)
-      while i > 0:
-        i -= 1
-        if self.filelist[i].filename == zinfo.filename:
-          break
-      zi = self.filelist[i]
-      if ((zinfo.compress_type == zipfile.ZIP_STORED
-           and zi.compress_size == len(bytes))
-          or (i + 1) == len(self.filelist)):
-        # make sure we're allowed to write, otherwise done by writestr below
-        self._writecheck(zi)
-        # overwrite existing entry
-        self.fp.seek(zi.header_offset)
-        if (i + 1) == len(self.filelist):
-          # this is the last item in the file, just truncate
-          self.fp.truncate()
-        else:
-          # we need to move to the end of the file afterwards again
-          doSeek = True
-        # unhook the current zipinfo, the writestr of our superclass
-        # will add a new one
-        self.filelist.pop(i)
-        self.NameToInfo.pop(zinfo.filename)
-      else:
-        # Couldn't optimize, sadly, just remember the old entry for removal
-        self._remove.append(self.filelist.pop(i))
-    zipfile.ZipFile.writestr(self, zinfo, bytes)
-    self.filelist.sort(lambda l, r: cmp(l.header_offset, r.header_offset))
-    if doSeek:
-      self.fp.seek(self.end)
-    self.end = self.fp.tell()
+        doSeek = False  # store if we need to seek to the eof after overwriting
+        if self.NameToInfo.has_key(zinfo.filename):
+            # Find the last ZipInfo with our name.
+            # Last, because that's catching multiple overwrites
+            i = len(self.filelist)
+            while i > 0:
+                i -= 1
+                if self.filelist[i].filename == zinfo.filename:
+                    break
+            zi = self.filelist[i]
+            if ((zinfo.compress_type == zipfile.ZIP_STORED
+                 and zi.compress_size == len(bytes))
+                    or (i + 1) == len(self.filelist)):
+                # make sure we're allowed to write, otherwise done by writestr below
+                self._writecheck(zi)
+                # overwrite existing entry
+                self.fp.seek(zi.header_offset)
+                if (i + 1) == len(self.filelist):
+                    # this is the last item in the file, just truncate
+                    self.fp.truncate()
+                else:
+                    # we need to move to the end of the file afterwards again
+                    doSeek = True
+                # unhook the current zipinfo, the writestr of our superclass
+                # will add a new one
+                self.filelist.pop(i)
+                self.NameToInfo.pop(zinfo.filename)
+            else:
+                # Couldn't optimize, sadly, just remember the old entry for removal
+                self._remove.append(self.filelist.pop(i))
+        zipfile.ZipFile.writestr(self, zinfo, bytes)
+        self.filelist.sort(lambda l, r: cmp(l.header_offset, r.header_offset))
+        if doSeek:
+            self.fp.seek(self.end)
+        self.end = self.fp.tell()
 
-  def close(self):
-    """Close the file, and for mode "w" and "a" write the ending
-    records.
+    def close(self):
+        """Close the file, and for mode "w" and "a" write the ending
+        records.
 
-    Overridden to compact overwritten entries.
-    """
-    if not self._remove:
-      # we don't have anything special to do, let's just call base
-      r = zipfile.ZipFile.close(self)
-      self.lockfile = None
-      return r
+        Overridden to compact overwritten entries.
+        """
+        if not self._remove:
+            # we don't have anything special to do, let's just call base
+            r = zipfile.ZipFile.close(self)
+            self.lockfile = None
+            return r
 
-    if self.fp.mode != 'r+b':
-      # adjust file mode if we originally just wrote, now we rewrite
-      self.fp.close()
-      self.fp = open(self.filename, 'r+b')
-    all = map(lambda zi: (zi, True), self.filelist) + \
-        map(lambda zi: (zi, False), self._remove)
-    all.sort(lambda l, r: cmp(l[0].header_offset, r[0].header_offset))
-    # empty _remove for multiple closes
-    self._remove = []
+        if self.fp.mode != 'r+b':
+            # adjust file mode if we originally just wrote, now we rewrite
+            self.fp.close()
+            self.fp = open(self.filename, 'r+b')
+        all = map(lambda zi: (zi, True), self.filelist) + \
+            map(lambda zi: (zi, False), self._remove)
+        all.sort(lambda l, r: cmp(l[0].header_offset, r[0].header_offset))
+        # empty _remove for multiple closes
+        self._remove = []
 
-    lengths = [all[i+1][0].header_offset - all[i][0].header_offset
-               for i in xrange(len(all)-1)]
-    lengths.append(self.end - all[-1][0].header_offset)
-    to_pos = 0
-    for (zi, keep), length in zip(all, lengths):
-      if not keep:
-        continue
-      oldoff = zi.header_offset
-      # python <= 2.4 has file_offset
-      if hasattr(zi, 'file_offset'):
-        zi.file_offset = zi.file_offset + to_pos - oldoff
-      zi.header_offset = to_pos
-      self.fp.seek(oldoff)
-      content = self.fp.read(length)
-      self.fp.seek(to_pos)
-      self.fp.write(content)
-      to_pos += length
-    self.fp.truncate()
-    zipfile.ZipFile.close(self)
-    self.lockfile = None
+        lengths = [all[i+1][0].header_offset - all[i][0].header_offset
+                   for i in xrange(len(all)-1)]
+        lengths.append(self.end - all[-1][0].header_offset)
+        to_pos = 0
+        for (zi, keep), length in zip(all, lengths):
+            if not keep:
+                continue
+            oldoff = zi.header_offset
+            # python <= 2.4 has file_offset
+            if hasattr(zi, 'file_offset'):
+                zi.file_offset = zi.file_offset + to_pos - oldoff
+            zi.header_offset = to_pos
+            self.fp.seek(oldoff)
+            content = self.fp.read(length)
+            self.fp.seek(to_pos)
+            self.fp.write(content)
+            to_pos += length
+        self.fp.truncate()
+        zipfile.ZipFile.close(self)
+        self.lockfile = None
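
The class above is unchanged in behavior by this patch (autopep8 only reindents it). What it does: writestr is overridden so that writing an archive name that already exists replaces the old entry instead of appending a duplicate, and close() compacts any superseded entries out of the file. A minimal usage sketch, assuming Python 2 (the module relies on basestring and a long literal) and an invented archive path:

    from MozZipFile import ZipFile
    import zipfile

    zf = ZipFile('example.zip', 'w', zipfile.ZIP_STORED)
    zf.writestr('greeting.txt', 'hello')
    zf.writestr('greeting.txt', 'hello, world')  # replaces the first entry
    zf.close()  # compacts entries that could not be overwritten in place
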
--- a/config/check_js_msg_encoding.py
+++ b/config/check_js_msg_encoding.py
@@ -12,57 +12,63 @@
 from __future__ import print_function
 
 import os
 import sys
 
 from mozversioncontrol import get_repository_from_env
 
 
-scriptname = os.path.basename(__file__);
+scriptname = os.path.basename(__file__)
 expected_encoding = 'ascii'
 
 # The following files don't define JSErrorFormatString.
 ignore_files = [
     'dom/base/domerr.msg',
     'js/xpconnect/src/xpc.msg',
 ]
 
+
 def log_pass(filename, text):
     print('TEST-PASS | {} | {} | {}'.format(scriptname, filename, text))
 
+
 def log_fail(filename, text):
     print('TEST-UNEXPECTED-FAIL | {} | {} | {}'.format(scriptname, filename,
                                                        text))
 
+
 def check_single_file(filename):
     with open(filename, 'rb') as f:
         data = f.read()
         try:
             data.decode(expected_encoding)
         except:
             log_fail(filename, 'not in {} encoding'.format(expected_encoding))
 
     log_pass(filename, 'ok')
     return True
 
+
 def check_files():
     result = True
 
     with get_repository_from_env() as repo:
         root = repo.path
 
         for filename in repo.get_files_in_working_directory():
             if filename.endswith('.msg'):
                 if filename not in ignore_files:
                     if not check_single_file(os.path.join(root, filename)):
                         result = False
 
     return result
 
+
 def main():
     if not check_files():
         sys.exit(1)
 
     sys.exit(0)
 
+
 if __name__ == '__main__':
     main()
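
The check above reduces to a single predicate: a .msg file passes if its raw bytes decode as ASCII. A self-contained restatement (the script's bare except is narrowed to UnicodeDecodeError here; the path is invented):

    def is_ascii(path):
        with open(path, 'rb') as f:
            try:
                f.read().decode('ascii')
                return True
            except UnicodeDecodeError:
                return False

    print(is_ascii('js/src/js.msg'))  # True when the file is pure ASCII
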
--- a/config/check_js_opcode.py
+++ b/config/check_js_opcode.py
@@ -7,37 +7,42 @@
 # This script checks bytecode documentation in js/src/vm/Opcodes.h
 #----------------------------------------------------------------------------
 
 from __future__ import print_function
 
 import os
 import sys
 
-scriptname = os.path.basename(__file__);
+scriptname = os.path.basename(__file__)
 topsrcdir = os.path.dirname(os.path.dirname(__file__))
 
+
 def log_pass(text):
     print('TEST-PASS | {} | {}'.format(scriptname, text))
 
+
 def log_fail(text):
     print('TEST-UNEXPECTED-FAIL | {} | {}'.format(scriptname, text))
 
+
 def check_opcode():
     sys.path.insert(0, os.path.join(topsrcdir, 'js', 'src', 'vm'))
     import opcode
 
     try:
         opcode.get_opcodes(topsrcdir)
     except Exception as e:
         log_fail(e.args[0])
 
     log_pass('ok')
     return True
 
+
 def main():
     if not check_opcode():
         sys.exit(1)
 
     sys.exit(0)
 
+
 if __name__ == '__main__':
     main()
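
The import inside check_opcode() works by putting the source directory at the front of sys.path, so the in-tree js/src/vm/opcode.py shadows the stdlib module of the same name. In isolation (the topsrcdir value is illustrative; get_opcodes is the function the script calls):

    import os
    import sys

    topsrcdir = '/path/to/mozilla-central'
    sys.path.insert(0, os.path.join(topsrcdir, 'js', 'src', 'vm'))
    import opcode  # resolves to js/src/vm/opcode.py, not the stdlib opcode

    opcode.get_opcodes(topsrcdir)  # raises if the bytecode docs are malformed
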
--- a/config/check_macroassembler_style.py
+++ b/config/check_macroassembler_style.py
@@ -32,17 +32,18 @@ all_unsupported_architectures_names = se
 all_architecture_names = set(['x86', 'x64', 'arm', 'arm64'])
 all_shared_architecture_names = set(['x86_shared', 'arm', 'arm64'])
 
 reBeforeArg = "(?<=[(,\s])"
 reArgType = "(?P<type>[\w\s:*&]+)"
 reArgName = "(?P<name>\s\w+)"
 reArgDefault = "(?P<default>(?:\s=[^,)]+)?)"
 reAfterArg = "(?=[,)])"
-reMatchArg = re.compile(reBeforeArg + reArgType + reArgName + reArgDefault + reAfterArg)
+reMatchArg = re.compile(reBeforeArg + reArgType +
+                        reArgName + reArgDefault + reAfterArg)
 
 
 def get_normalized_signatures(signature, fileAnnot=None):
     # Remove static
     signature = signature.replace('static', '')
     # Remove semicolon.
     signature = signature.replace(';', ' ')
     # Normalize spaces.
@@ -53,17 +54,18 @@ def get_normalized_signatures(signature,
     signature = signature.replace('MacroAssembler::', '')
 
     # Extract list of architectures
     archs = ['generic']
     if fileAnnot:
         archs = [fileAnnot['arch']]
 
     if 'DEFINED_ON(' in signature:
-        archs = re.sub(r'.*DEFINED_ON\((?P<archs>[^()]*)\).*', '\g<archs>', signature).split(',')
+        archs = re.sub(
+            r'.*DEFINED_ON\((?P<archs>[^()]*)\).*', '\g<archs>', signature).split(',')
         archs = [a.strip() for a in archs]
         signature = re.sub(r'\s+DEFINED_ON\([^()]*\)', '', signature)
 
     elif 'PER_ARCH' in signature:
         archs = all_architecture_names
         signature = re.sub(r'\s+PER_ARCH', '', signature)
 
     elif 'PER_SHARED_ARCH' in signature:
@@ -152,17 +154,18 @@ def get_macroassembler_definitions(filen
             elif '//}}} check_macroassembler_style' in line:
                 style_section = False
             if not style_section:
                 continue
 
             line = re.sub(r'//.*', '', line)
             if line.startswith('{') or line.strip() == "{}":
                 if 'MacroAssembler::' in lines:
-                    signatures.extend(get_normalized_signatures(lines, fileAnnot))
+                    signatures.extend(
+                        get_normalized_signatures(lines, fileAnnot))
                 if line.strip() != "{}":  # Empty declaration, no need to declare
                     # a new code section
                     code_section = True
                 continue
             if line.startswith('}'):
                 code_section = False
                 lines = ''
                 continue
@@ -239,17 +242,18 @@ def generate_file_content(signatures):
             else:
                 output.append('    is defined in MacroAssembler.cpp\n')
         else:
             if len(archs.symmetric_difference(all_architecture_names)) == 0:
                 output.append(s + ' PER_ARCH;\n')
             elif len(archs.symmetric_difference(all_shared_architecture_names)) == 0:
                 output.append(s + ' PER_SHARED_ARCH;\n')
             else:
-                output.append(s + ' DEFINED_ON(' + ', '.join(sorted(archs)) + ');\n')
+                output.append(
+                    s + ' DEFINED_ON(' + ', '.join(sorted(archs)) + ');\n')
             for a in sorted(archs):
                 a = a.replace('_', '-')
                 masm = '%s/MacroAssembler-%s' % (a, a)
                 if s.startswith('inline'):
                     output.append('    is defined in %s-inl.h\n' % masm)
                 else:
                     output.append('    is defined in %s.cpp\n' % masm)
     return output
@@ -266,18 +270,20 @@ def check_style():
     for dirpath, dirnames, filenames in os.walk(root_dir):
         for filename in filenames:
             if 'MacroAssembler' not in filename:
                 continue
 
             filepath = os.path.join(dirpath, filename).replace('\\', '/')
 
             if filepath.endswith('MacroAssembler.h'):
-                decls = append_signatures(decls, get_macroassembler_declaration(filepath))
-            defs = append_signatures(defs, get_macroassembler_definitions(filepath))
+                decls = append_signatures(
+                    decls, get_macroassembler_declaration(filepath))
+            defs = append_signatures(
+                defs, get_macroassembler_definitions(filepath))
 
     if not decls or not defs:
         raise Exception("Did not find any definitions or declarations")
 
     # Compare declarations and definitions output.
     difflines = difflib.unified_diff(generate_file_content(decls),
                                      generate_file_content(defs),
                                      fromfile='check_macroassembler_style.py declared syntax',
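
The regex constants reflowed above combine into reMatchArg, which extracts one (type, name, default) triple per parameter from a normalized signature. A hedged illustration with an invented signature:

    from __future__ import print_function
    import re

    reBeforeArg = "(?<=[(,\s])"
    reArgType = "(?P<type>[\w\s:*&]+)"
    reArgName = "(?P<name>\s\w+)"
    reArgDefault = "(?P<default>(?:\s=[^,)]+)?)"
    reAfterArg = "(?=[,)])"
    reMatchArg = re.compile(reBeforeArg + reArgType +
                            reArgName + reArgDefault + reAfterArg)

    sig = 'void branch32(Condition cond, Register lhs, Imm32 rhs, Label* label)'
    for m in reMatchArg.finditer(sig):
        print(m.group('type').strip(), m.group('name').strip())
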
--- a/config/check_source_count.py
+++ b/config/check_source_count.py
@@ -42,16 +42,15 @@ else:
     if count < expected_count:
         print("There are fewer occurrences of /{0}/ than expected. "
               "This may mean that you have removed some, but forgotten to "
               "account for it {1}.".format(search_string, error_location))
     else:
         print("There are more occurrences of /{0}/ than expected. We're trying "
               "to prevent an increase in the number of {1}'s, using {2} if "
               "possible. If it is unavoidable, you should update the expected "
-              "count {3}.".format(search_string, search_string, replacement, 
-                                 error_location))
+              "count {3}.".format(search_string, search_string, replacement,
+                                  error_location))
 
     print("Expected: {0}; found: {1}".format(expected_count, count))
     for k in sorted(details):
-        print("Found {0} occurrences in {1}".format(details[k],k))
+        print("Found {0} occurrences in {1}".format(details[k], k))
     sys.exit(-1)
-
--- a/config/check_spidermonkey_style.py
+++ b/config/check_spidermonkey_style.py
@@ -40,32 +40,32 @@ from __future__ import print_function
 import difflib
 import os
 import re
 import sys
 
 # We don't bother checking files in these directories, because they're (a) auxiliary or (b)
 # imported code that doesn't follow our coding style.
 ignored_js_src_dirs = [
-   'js/src/config/',            # auxiliary stuff
-   'js/src/ctypes/libffi/',     # imported code
-   'js/src/devtools/',          # auxiliary stuff
-   'js/src/editline/',          # imported code
-   'js/src/gdb/',               # auxiliary stuff
-   'js/src/vtune/'              # imported code
+    'js/src/config/',            # auxiliary stuff
+    'js/src/ctypes/libffi/',     # imported code
+    'js/src/devtools/',          # auxiliary stuff
+    'js/src/editline/',          # imported code
+    'js/src/gdb/',               # auxiliary stuff
+    'js/src/vtune/'              # imported code
 ]
 
 # We ignore #includes of these files, because they don't follow the usual rules.
 included_inclnames_to_ignore = set([
     'ffi.h',                    # generated in ctypes/libffi/
     'devtools/sharkctl.h',      # we ignore devtools/ in general
     'devtools/Instruments.h',   # we ignore devtools/ in general
-    'double-conversion/double-conversion.h', # strange MFBT case
+    'double-conversion/double-conversion.h',  # strange MFBT case
     'javascript-trace.h',       # generated in $OBJDIR if HAVE_DTRACE is defined
-    'frontend/ReservedWordsGenerated.h', # generated in $OBJDIR
+    'frontend/ReservedWordsGenerated.h',  # generated in $OBJDIR
     'gc/StatsPhasesGenerated.h',         # generated in $OBJDIR
     'gc/StatsPhasesGenerated.cpp',       # generated in $OBJDIR
     'jit/LOpcodes.h',           # generated in $OBJDIR
     'jit/MOpcodes.h',           # generated in $OBJDIR
     'jscustomallocator.h',      # provided by embedders;  allowed to be missing
     'js-config.h',              # generated in $OBJDIR
     'fdlibm.h',                 # fdlibm
     'FuzzerDefs.h',             # included without a path
@@ -87,17 +87,17 @@ included_inclnames_to_ignore = set([
     'unicode/timezone.h',       # ICU
     'unicode/plurrule.h',       # ICU
     'unicode/ucal.h',           # ICU
     'unicode/uchar.h',          # ICU
     'unicode/uclean.h',         # ICU
     'unicode/ucol.h',           # ICU
     'unicode/udat.h',           # ICU
     'unicode/udatpg.h',         # ICU
-    'unicode/udisplaycontext.h',# ICU
+    'unicode/udisplaycontext.h',  # ICU
     'unicode/uenum.h',          # ICU
     'unicode/uloc.h',           # ICU
     'unicode/unistr.h',         # ICU
     'unicode/unorm2.h',         # ICU
     'unicode/unum.h',           # ICU
     'unicode/unumsys.h',        # ICU
     'unicode/upluralrules.h',   # ICU
     'unicode/ureldatefmt.h',    # ICU
@@ -105,18 +105,19 @@ included_inclnames_to_ignore = set([
     'unicode/utypes.h',         # ICU
     'vtune/VTuneWrapper.h'      # VTune
 ])
 
 # These files have additional constraints on where they are #included, so we
 # ignore #includes of them when checking #include ordering.
 oddly_ordered_inclnames = set([
     'ctypes/typedefs.h',        # Included multiple times in the body of ctypes/CTypes.h
-    'frontend/BinSource-auto.h', # Included in the body of frontend/BinSource.h
-    'frontend/ReservedWordsGenerated.h', # Included in the body of frontend/TokenStream.h
+    'frontend/BinSource-auto.h',  # Included in the body of frontend/BinSource.h
+    # Included in the body of frontend/TokenStream.h
+    'frontend/ReservedWordsGenerated.h',
     'gc/StatsPhasesGenerated.h',         # Included in the body of gc/Statistics.h
     'gc/StatsPhasesGenerated.cpp',       # Included in the body of gc/Statistics.cpp
     'psapi.h',                  # Must be included after "util/Windows.h" on Windows
     'machine/endian.h',         # Must be included after <sys/types.h> on BSD
     'winbase.h',                # Must precede other system headers(?)
     'windef.h'                  # Must precede other system headers(?)
 ])
 
@@ -312,26 +313,27 @@ def check_style(enable_fixup):
             with open(filename) as f:
                 code = read_file(f)
 
             if enable_fixup:
                 code = code.sorted(inclname)
                 with open(filename, 'w') as f:
                     f.write(code.to_source())
 
-            check_file(filename, inclname, file_kind, code, all_inclnames, included_h_inclnames)
+            check_file(filename, inclname, file_kind, code,
+                       all_inclnames, included_h_inclnames)
 
         edges[inclname] = included_h_inclnames
 
     find_cycles(all_inclnames, edges)
 
     # Compare expected and actual output.
     difflines = difflib.unified_diff(expected_output, actual_output,
                                      fromfile='check_spidermonkey_style.py expected output',
-                                       tofile='check_spidermonkey_style.py actual output')
+                                     tofile='check_spidermonkey_style.py actual output')
     ok = True
     for diffline in difflines:
         ok = False
         print(diffline, end='')
 
     return ok
 
 
@@ -430,16 +432,17 @@ class CppBlock(object):
     '''C preprocessor block: a whole file or a single #if/#elif/#else block.
 
     A #if/#endif block is the contents of a #if/#endif (or similar) section.
     The top-level block, which is not within a #if/#endif pair, is also
     considered a block.
 
     Each kid is either an Include (representing a #include), OrdinaryCode, or
     a nested CppBlock.'''
+
     def __init__(self, start_line=""):
         self.start = start_line
         self.end = ''
         self.kids = []
 
     def is_style_relevant(self):
         return True
 
@@ -503,17 +506,18 @@ class CppBlock(object):
             last_include_index = -1
             for i, item in enumerate(batch):
                 if isinstance(item, Include):
                     includes.append(item)
                     last_include_index = i
             cutoff = last_include_index + 1
 
             if should_try_to_sort(includes):
-                output.extend(pretty_sorted_includes(includes) + batch[cutoff:])
+                output.extend(pretty_sorted_includes(
+                    includes) + batch[cutoff:])
             else:
                 output.extend(batch)
             del batch[:]
 
         for kid in self.kids:
             if isinstance(kid, CppBlock):
                 flush_batch()
                 output.append(kid.sorted(enclosing_inclname))
@@ -535,16 +539,17 @@ class CppBlock(object):
         return result
 
     def to_source(self):
         return self.start + ''.join(kid.to_source() for kid in self.kids) + self.end
 
 
 class OrdinaryCode(object):
     ''' A list of lines of code that aren't #include/#if/#else/#endif lines. '''
+
     def __init__(self, lines=None):
         self.lines = lines if lines is not None else []
 
     def is_style_relevant(self):
         return False
 
     def to_source(self):
         return ''.join(self.lines)
@@ -561,24 +566,26 @@ def read_file(f):
 
     # Extract the #include statements as a tree of snippets.
     for linenum, line in enumerate(f, start=1):
         if line.lstrip().startswith('#'):
             # Look for a |#include "..."| line.
             m = re.match(r'(\s*#\s*include\s+)"([^"]*)"(.*)', line)
             if m is not None:
                 prefix, inclname, suffix = m.groups()
-                block_stack[-1].kids.append(Include(prefix, inclname, suffix, linenum, is_system=False))
+                block_stack[-1].kids.append(Include(prefix,
+                                                    inclname, suffix, linenum, is_system=False))
                 continue
 
             # Look for a |#include <...>| line.
             m = re.match(r'(\s*#\s*include\s+)<([^>]*)>(.*)', line)
             if m is not None:
                 prefix, inclname, suffix = m.groups()
-                block_stack[-1].kids.append(Include(prefix, inclname, suffix, linenum, is_system=True))
+                block_stack[-1].kids.append(Include(prefix,
+                                                    inclname, suffix, linenum, is_system=True))
                 continue
 
             # Look for a |#{if,ifdef,ifndef}| line.
             m = re.match(r'\s*#\s*(if|ifdef|ifndef)\b', line)
             if m is not None:
                 # Open a new block.
                 new_block = CppBlock(line)
                 block_stack[-1].kids.append(new_block)
@@ -596,17 +603,18 @@ def read_file(f):
                 continue
 
             # Look for a |#endif| line.
             m = re.match(r'\s*#\s*endif\b', line)
             if m is not None:
                 # Close the current block.
                 block_stack.pop().end = line
                 if len(block_stack) == 0:
-                    raise ValueError("#endif without #if at line " + str(linenum))
+                    raise ValueError(
+                        "#endif without #if at line " + str(linenum))
                 continue
 
         # Otherwise, we have an ordinary line.
         block_stack[-1].append_ordinary_line(line)
 
     if len(block_stack) > 1:
         raise ValueError("unmatched #if")
     return block_stack[-1]
@@ -643,17 +651,18 @@ def check_file(filename, inclname, file_
                 # Check a H file doesn't #include an INL_H file.
                 if file_kind == FileKind.H and included_kind == FileKind.INL_H:
                     error(filename, include.linenum,
                           'vanilla header includes an inline-header file ' + include.quote())
 
                 # Check a file doesn't #include itself.  (We do this here because the cycle
                 # detection below doesn't detect this case.)
                 if inclname == include.inclname:
-                    error(filename, include.linenum, 'the file includes itself')
+                    error(filename, include.linenum,
+                          'the file includes itself')
 
     def check_includes_order(include1, include2):
         '''Check the ordering of two #include statements.'''
 
         if include1.inclname in oddly_ordered_inclnames or \
            include2.inclname in oddly_ordered_inclnames:
             return
 
@@ -684,32 +693,34 @@ def find_cycles(all_inclnames, edges):
 
     SCCs = tarjan(all_inclnames, edges)
 
     # The various sorted() calls below ensure the output is deterministic.
 
     def draw_SCC(c):
         cset = set(c)
         drawn = set()
+
         def draw(v, indent):
             out('   ' * indent + ('-> ' if indent else '   ') + v)
             if v in drawn:
                 return
             drawn.add(v)
             for succ in sorted(edges[v]):
                 if succ in cset:
                     draw(succ, indent + 1)
         draw(sorted(c)[0], 0)
         out('')
 
     have_drawn_an_SCC = False
     for scc in sorted(SCCs):
         if len(scc) != 1:
             if not have_drawn_an_SCC:
-                error('(multiple files)', None, 'header files form one or more cycles')
+                error('(multiple files)', None,
+                      'header files form one or more cycles')
                 have_drawn_an_SCC = True
 
             draw_SCC(scc)
 
 
 # Tarjan's algorithm for finding the strongly connected components (SCCs) of a graph.
 # https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
 def tarjan(V, E):
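
(The hunk ends at the function header; tarjan's body is untouched by this patch.) For reference, a compact self-contained sketch of the algorithm the comment names, as used here to find cyclic #include chains — an illustrative reimplementation, not the file's code:

    def tarjan_sketch(V, E):
        index, lowlink = {}, {}
        stack, on_stack = [], set()
        sccs, counter = [], [0]

        def strongconnect(v):
            index[v] = lowlink[v] = counter[0]
            counter[0] += 1
            stack.append(v)
            on_stack.add(v)
            for w in E.get(v, ()):
                if w not in index:
                    strongconnect(w)
                    lowlink[v] = min(lowlink[v], lowlink[w])
                elif w in on_stack:
                    lowlink[v] = min(lowlink[v], index[w])
            if lowlink[v] == index[v]:  # v is the root of an SCC
                scc = []
                while True:
                    w = stack.pop()
                    on_stack.remove(w)
                    scc.append(w)
                    if w == v:
                        break
                sccs.append(scc)

        for v in V:
            if v not in index:
                strongconnect(v)
        return sccs

    # a.h and b.h include each other, so they form an SCC of size two:
    print(tarjan_sketch(['a.h', 'b.h', 'c.h'],
                        {'a.h': ['b.h'], 'b.h': ['a.h'], 'c.h': []}))
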
--- a/config/check_vanilla_allocations.py
+++ b/config/check_vanilla_allocations.py
@@ -89,19 +89,19 @@ def main():
         # Matches |operator new(unsigned T)|, where |T| is |int| or |long|.
         r'operator new\(unsigned',
 
         # Matches |operator new[](unsigned T)|, where |T| is |int| or |long|.
         r'operator new\[\]\(unsigned',
 
         r'memalign',
         # These three aren't available on all Linux configurations.
-        #r'posix_memalign',
-        #r'aligned_alloc',
-        #r'valloc',
+        # r'posix_memalign',
+        # r'aligned_alloc',
+        # r'valloc',
     ]
 
     if args.aggressive:
         alloc_fns += [
             r'malloc',
             r'calloc',
             r'realloc',
             r'free',
@@ -154,17 +154,16 @@ def main():
         if filename == 'jsutil.o':
             jsutil_cpp.add(fn)
         else:
             # An allocation is present in a non-special file.  Fail!
             fail("'" + fn + "' present in " + filename)
             # Try to give more precise information about the offending code.
             emit_line_info = True
 
-
     # Check that all functions we expect are used in jsutil.cpp.  (This will
     # fail if the function-detection code breaks at any point.)
     for fn in alloc_fns_unescaped:
         if fn not in jsutil_cpp:
             fail("'" + fn + "' isn't used as expected in jsutil.cpp")
         else:
             jsutil_cpp.remove(fn)
 
@@ -194,17 +193,18 @@ def main():
         #
         #       U malloc jsutil.cpp:117
         #
         alloc_lines_re = r'U ((' + r'|'.join(alloc_fns) + r').*)\s+(\S+:\d+)$'
 
         for line in lines:
             m = re.search(alloc_lines_re, line)
             if m:
-                print('check_vanilla_allocations.py:', m.group(1), 'called at', m.group(3))
+                print('check_vanilla_allocations.py:',
+                      m.group(1), 'called at', m.group(3))
 
     if has_failed:
         sys.exit(1)
 
     print('TEST-PASS | check_vanilla_allocations.py | ok')
     sys.exit(0)
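
The report line wrapped in the hunk above comes from matching nm-style undefined-symbol output. A self-contained illustration with a trimmed function list and a fabricated input line:

    from __future__ import print_function
    import re

    alloc_fns = [r'malloc', r'calloc', r'realloc', r'free']
    alloc_lines_re = r'U ((' + r'|'.join(alloc_fns) + r').*)\s+(\S+:\d+)$'

    line = 'U malloc jsutil.cpp:117'
    m = re.search(alloc_lines_re, line)
    if m:
        print('check_vanilla_allocations.py:',
              m.group(1), 'called at', m.group(3))
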
 
 
--- a/config/createprecomplete.py
+++ b/config/createprecomplete.py
@@ -4,16 +4,17 @@
 # Creates the precomplete file containing the remove and rmdir application
 # update instructions which is used to remove files and directories that are no
 # longer present in a complete update. The current working directory is used for
 # the location to enumerate and to create the precomplete file.
 
 import sys
 import os
 
+
 def get_build_entries(root_path):
     """ Iterates through the root_path, creating a list for each file and
         directory. Excludes any file paths ending with channel-prefs.js.
     """
     rel_file_path_set = set()
     rel_dir_path_set = set()
     for root, dirs, files in os.walk(root_path):
         for file_name in files:
@@ -34,34 +35,36 @@ def get_build_entries(root_path):
 
     rel_file_path_list = list(rel_file_path_set)
     rel_file_path_list.sort(reverse=True)
     rel_dir_path_list = list(rel_dir_path_set)
     rel_dir_path_list.sort(reverse=True)
 
     return rel_file_path_list, rel_dir_path_list
 
+
 def generate_precomplete(root_path):
     """ Creates the precomplete file containing the remove and rmdir
         application update instructions. The given directory is used
         for the location to enumerate and to create the precomplete file.
     """
     rel_path_precomplete = "precomplete"
     # If inside a Mac bundle use the root of the bundle for the path.
     if os.path.basename(root_path) == "Resources":
         root_path = os.path.abspath(os.path.join(root_path, '../../'))
         rel_path_precomplete = "Contents/Resources/precomplete"
 
-    precomplete_file_path = os.path.join(root_path,rel_path_precomplete)
+    precomplete_file_path = os.path.join(root_path, rel_path_precomplete)
     # Open the file so it exists before building the list of files and open it
     # in binary mode to prevent OS specific line endings.
     precomplete_file = open(precomplete_file_path, "wb")
     rel_file_path_list, rel_dir_path_list = get_build_entries(root_path)
     for rel_file_path in rel_file_path_list:
         precomplete_file.writelines("remove \""+rel_file_path+"\"\n")
 
     for rel_dir_path in rel_dir_path_list:
         precomplete_file.writelines("rmdir \""+rel_dir_path+"\"\n")
 
     precomplete_file.close()
 
+
 if __name__ == "__main__":
     generate_precomplete(os.getcwd())
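
The precomplete file written above is plain text: one quoted relative path per line, a "remove" instruction per file and an "rmdir" instruction per directory, reverse-sorted so children are removed before their parents. A minimal sketch of the format with invented paths (Python 2, like the script, so str can be written to a binary-mode file):

    rel_files = ['b/two.txt', 'a/one.txt']
    rel_dirs = ['b/', 'a/']
    with open('precomplete', 'wb') as f:
        for p in sorted(rel_files, reverse=True):
            f.write('remove "' + p + '"\n')
        for d in sorted(rel_dirs, reverse=True):
            f.write('rmdir "' + d + '"\n')
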
--- a/config/find_OOM_errors.py
+++ b/config/find_OOM_errors.py
@@ -24,329 +24,347 @@ import sys
 import threading
 import time
 
 from optparse import OptionParser
 
 #####################################################################
 # Utility functions
 #####################################################################
+
+
 def run(args, stdin=None):
-  class ThreadWorker(threading.Thread):
-    def __init__(self, pipe):
-      super(ThreadWorker, self).__init__()
-      self.all = ""
-      self.pipe = pipe
-      self.setDaemon(True)
+    class ThreadWorker(threading.Thread):
+        def __init__(self, pipe):
+            super(ThreadWorker, self).__init__()
+            self.all = ""
+            self.pipe = pipe
+            self.setDaemon(True)
 
-    def run(self):
-      while True:
-        line = self.pipe.readline()
-        if line == '': break
-        else:
-          self.all += line
+        def run(self):
+            while True:
+                line = self.pipe.readline()
+                if line == '':
+                    break
+                else:
+                    self.all += line
 
-  try:
-    if type(args) == str:
-      args = shlex.split(args)
+    try:
+        if type(args) == str:
+            args = shlex.split(args)
 
-    args = [str(a) for a in args] # convert to strs
+        args = [str(a) for a in args]  # convert to strs
 
-    stdin_pipe = subprocess.PIPE if stdin else None
-    proc = subprocess.Popen(args, stdin=stdin_pipe, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    if stdin_pipe:
-      proc.stdin.write(stdin)
-      proc.stdin.close()
+        stdin_pipe = subprocess.PIPE if stdin else None
+        proc = subprocess.Popen(args, stdin=stdin_pipe,
+                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        if stdin_pipe:
+            proc.stdin.write(stdin)
+            proc.stdin.close()
 
-    stdout_worker = ThreadWorker(proc.stdout)
-    stderr_worker = ThreadWorker(proc.stderr)
-    stdout_worker.start()
-    stderr_worker.start()
+        stdout_worker = ThreadWorker(proc.stdout)
+        stderr_worker = ThreadWorker(proc.stderr)
+        stdout_worker.start()
+        stderr_worker.start()
 
-    proc.wait()
-    stdout_worker.join()
-    stderr_worker.join()
+        proc.wait()
+        stdout_worker.join()
+        stderr_worker.join()
 
-  except KeyboardInterrupt as e:
-    sys.exit(-1)
+    except KeyboardInterrupt as e:
+        sys.exit(-1)
 
-  stdout, stderr = stdout_worker.all, stderr_worker.all
-  result = (stdout, stderr, proc.returncode)
-  return result
+    stdout, stderr = stdout_worker.all, stderr_worker.all
+    result = (stdout, stderr, proc.returncode)
+    return result
+
 
 def get_js_files():
-  (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
-  if (err, exit) != ("", 0):
-    sys.exit("Wrong directory, run from an objdir")
-  return out.split()
-
+    (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
+    if (err, exit) != ("", 0):
+        sys.exit("Wrong directory, run from an objdir")
+    return out.split()
 
 
 #####################################################################
 # Blacklisting
 #####################################################################
 def in_blacklist(sig):
-  return sig in blacklist
+    return sig in blacklist
+
 
 def add_to_blacklist(sig):
-  blacklist[sig] = blacklist.get(sig, 0)
-  blacklist[sig] += 1
+    blacklist[sig] = blacklist.get(sig, 0)
+    blacklist[sig] += 1
 
 # How often is a particular line important for this.
+
+
 def count_lines():
-  """Keep track of the number of times individual lines occur, in order to
-     prioritize the errors which occur most frequently."""
-  counts = {}
-  for string,count in blacklist.items():
-    for line in string.split("\n"):
-      counts[line] = counts.get(line, 0) + count
+    """Keep track of the number of times individual lines occur, in order to
+       prioritize the errors which occur most frequently."""
+    counts = {}
+    for string, count in blacklist.items():
+        for line in string.split("\n"):
+            counts[line] = counts.get(line, 0) + count
 
-  lines = []
-  for k,v in counts.items():
-    lines.append("{0:6}: {1}".format(v, k))
+    lines = []
+    for k, v in counts.items():
+        lines.append("{0:6}: {1}".format(v, k))
 
-  lines.sort()
+    lines.sort()
 
-  countlog = file("../OOM_count_log", "w")
-  countlog.write("\n".join(lines))
-  countlog.flush()
-  countlog.close()
+    countlog = file("../OOM_count_log", "w")
+    countlog.write("\n".join(lines))
+    countlog.flush()
+    countlog.close()
 
 
 #####################################################################
 # Output cleaning
 #####################################################################
 def clean_voutput(err):
-  # Skip what we can't reproduce
-  err = re.sub(r"^--\d+-- run: /usr/bin/dsymutil \"shell/js\"$", "", err, flags=re.MULTILINE)
-  err = re.sub(r"^==\d+==", "", err, flags=re.MULTILINE)
-  err = re.sub(r"^\*\*\d+\*\*", "", err, flags=re.MULTILINE)
-  err = re.sub(r"^\s+by 0x[0-9A-Fa-f]+: ", "by: ", err, flags=re.MULTILINE)
-  err = re.sub(r"^\s+at 0x[0-9A-Fa-f]+: ", "at: ", err, flags=re.MULTILINE)
-  err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is not stack'd)", r"\1\2", err, flags=re.MULTILINE)
-  err = re.sub(r"(^\s+Invalid write of size )\d+", r"\1x", err, flags=re.MULTILINE)
-  err = re.sub(r"(^\s+Invalid read of size )\d+", r"\1x", err, flags=re.MULTILINE)
-  err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is )\d+( bytes inside a block of size )[0-9,]+( free'd)", r"\1\2\3\4", err, flags=re.MULTILINE)
+    # Skip what we can't reproduce
+    err = re.sub(r"^--\d+-- run: /usr/bin/dsymutil \"shell/js\"$",
+                 "", err, flags=re.MULTILINE)
+    err = re.sub(r"^==\d+==", "", err, flags=re.MULTILINE)
+    err = re.sub(r"^\*\*\d+\*\*", "", err, flags=re.MULTILINE)
+    err = re.sub(r"^\s+by 0x[0-9A-Fa-f]+: ", "by: ", err, flags=re.MULTILINE)
+    err = re.sub(r"^\s+at 0x[0-9A-Fa-f]+: ", "at: ", err, flags=re.MULTILINE)
+    err = re.sub(
+        r"(^\s+Address 0x)[0-9A-Fa-f]+( is not stack'd)", r"\1\2", err, flags=re.MULTILINE)
+    err = re.sub(r"(^\s+Invalid write of size )\d+",
+                 r"\1x", err, flags=re.MULTILINE)
+    err = re.sub(r"(^\s+Invalid read of size )\d+",
+                 r"\1x", err, flags=re.MULTILINE)
+    err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is )\d+( bytes inside a block of size )[0-9,]+( free'd)",
+                 r"\1\2\3\4", err, flags=re.MULTILINE)
 
-  # Skip the repeating bit due to the segfault
-  lines = []
-  for l in err.split('\n'):
-    if l == " Process terminating with default action of signal 11 (SIGSEGV)":
-      break
-    lines.append(l)
-  err = '\n'.join(lines)
+    # Skip the repeating bit due to the segfault
+    lines = []
+    for l in err.split('\n'):
+        if l == " Process terminating with default action of signal 11 (SIGSEGV)":
+            break
+        lines.append(l)
+    err = '\n'.join(lines)
 
-  return err
+    return err
+
 
 def remove_failed_allocation_backtraces(err):
-  lines = []
+    lines = []
 
-  add = True
-  for l in err.split('\n'):
+    add = True
+    for l in err.split('\n'):
 
-    # Set start and end conditions for including text
-    if l == " The site of the failed allocation is:":
-      add = False
-    elif l[:2] not in ['by: ', 'at:']:
-      add = True
+        # Set start and end conditions for including text
+        if l == " The site of the failed allocation is:":
+            add = False
+        elif l[:2] not in ['by: ', 'at:']:
+            add = True
 
-    if add:
-      lines.append(l)
-
+        if add:
+            lines.append(l)
 
-  err = '\n'.join(lines)
+    err = '\n'.join(lines)
 
-  return err
+    return err
 
 
 def clean_output(err):
-  err = re.sub(r"^js\(\d+,0x[0-9a-f]+\) malloc: \*\*\* error for object 0x[0-9a-f]+: pointer being freed was not allocated\n\*\*\* set a breakpoint in malloc_error_break to debug\n$", "pointer being freed was not allocated", err, flags=re.MULTILINE)
+    err = re.sub(r"^js\(\d+,0x[0-9a-f]+\) malloc: \*\*\* error for object 0x[0-9a-f]+: pointer being freed was not allocated\n\*\*\* set a breakpoint in malloc_error_break to debug\n$",
+                 "pointer being freed was not allocated", err, flags=re.MULTILINE)
 
-  return err
+    return err
 
 
 #####################################################################
 # Consts, etc
 #####################################################################
 
 command_template = 'shell/js' \
-                 + ' -m -j -p' \
-                 + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
-                 + ' -f ../jit-test/lib/prolog.js' \
-                 + ' -f {0}'
+    + ' -m -j -p' \
+    + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
+    + ' -f ../jit-test/lib/prolog.js' \
+    + ' -f {0}'
 
 
 # Blacklists are things we don't want to see in our logs again (though we do
 # want to count them when they happen). Whitelists we do want to see in our
 # logs again, principally because the information we have isn't enough.
 
 blacklist = {}
-add_to_blacklist(r"('', '', 1)") # 1 means OOM if the shell hasn't launched yet.
+# 1 means OOM if the shell hasn't launched yet.
+add_to_blacklist(r"('', '', 1)")
 add_to_blacklist(r"('', 'out of memory\n', 1)")
 
 whitelist = set()
-whitelist.add(r"('', 'out of memory\n', -11)") # -11 means OOM
+whitelist.add(r"('', 'out of memory\n', -11)")  # -11 means OOM
 whitelist.add(r"('', 'out of memory\nout of memory\n', -11)")
 
 
-
 #####################################################################
 # Program
 #####################################################################
 
 # Options
 parser = OptionParser(usage=usage)
 parser.add_option("-r", "--regression", action="store", metavar="REGRESSION_COUNT", help=help,
                   type="int", dest="regression", default=None)
-                  
+
 (OPTIONS, args) = parser.parse_args()
 
 
 if OPTIONS.regression != None:
-  # TODO: This should be expanded as we get a better hang of the OOM problems.
-  # For now, we'll just check that the number of OOMs in one short file does not
-  # increase.
-  files = ["../jit-test/tests/arguments/args-createontrace.js"]
+    # TODO: This should be expanded as we get a better hang of the OOM problems.
+    # For now, we'll just check that the number of OOMs in one short file does not
+    # increase.
+    files = ["../jit-test/tests/arguments/args-createontrace.js"]
 else:
-  files = get_js_files()
+    files = get_js_files()
 
-  # Use a command-line arg to reduce the set of files
-  if len (args):
-    files = [f for f in files if f.find(args[0]) != -1]
+    # Use a command-line arg to reduce the set of files
+    if len(args):
+        files = [f for f in files if f.find(args[0]) != -1]
 
 
 if OPTIONS.regression == None:
-  # Don't use a logfile, this is automated for tinderbox.
-  log = file("../OOM_log", "w")
+    # Don't use a logfile, this is automated for tinderbox.
+    log = file("../OOM_log", "w")
 
 
 num_failures = 0
 for f in files:
 
-  # Run it once to establish boundaries
-  command = (command_template + ' -O').format(f)
-  out, err, exit = run(command)
-  max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
-  max = int(max)
-  
-  # OOMs don't recover well for the first 20 allocations or so.
-  # TODO: revisit this.
-  for i in range(20, max): 
+    # Run it once to establish boundaries
+    command = (command_template + ' -O').format(f)
+    out, err, exit = run(command)
+    max = re.match(".*OOM max count: (\d+).*", out,
+                   flags=re.DOTALL).groups()[0]
+    max = int(max)
+
+    # OOMs don't recover well for the first 20 allocations or so.
+    # TODO: revisit this.
+    for i in range(20, max):
+
+        if OPTIONS.regression == None:
+            print("Testing allocation {0}/{1} in {2}".format(i, max, f))
+        else:
+            # something short for tinderbox, no space or \n
+            sys.stdout.write('.')
+
+        command = (command_template + ' -A {0}').format(f, i)
+        out, err, exit = run(command)
+
+        # Success (5 is SM's exit code for controlled errors)
+        if exit == 5 and err.find("out of memory") != -1:
+            continue
+
+        # Failure
+        else:
+
+            if OPTIONS.regression != None:
+                # Just count them
+                num_failures += 1
+                continue
+
+            #########################################################################
+            # The regression test ends above. The rest of this is for running the
+            # script manually.
+            #########################################################################
+
+            problem = str((out, err, exit))
+            if in_blacklist(problem) and problem not in whitelist:
+                add_to_blacklist(problem)
+                continue
+
+            add_to_blacklist(problem)
+
+            # Get valgrind output for a good stack trace
+            vcommand = "valgrind --dsymutil=yes -q --log-file=OOM_valgrind_log_file " + command
+            run(vcommand)
+            vout = file("OOM_valgrind_log_file").read()
+            vout = clean_voutput(vout)
+            sans_alloc_sites = remove_failed_allocation_backtraces(vout)
+
+            # Don't print duplicate information
+            if in_blacklist(sans_alloc_sites):
+                add_to_blacklist(sans_alloc_sites)
+                continue
+
+            add_to_blacklist(sans_alloc_sites)
+
+            log.write("\n")
+            log.write("\n")
+            log.write(
+                "=========================================================================")
+            log.write("\n")
+            log.write("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
+                      "causes problems (detected using bug 624094)"
+                      .format(i, max, f))
+            log.write("\n")
+            log.write("\n")
+
+            log.write(
+                "Command (from obj directory, using patch from bug 624094):\n  " + command)
+            log.write("\n")
+            log.write("\n")
+            log.write("stdout, stderr, exitcode:\n  " + problem)
+            log.write("\n")
+            log.write("\n")
+
+            double_free = err.find(
+                "pointer being freed was not allocated") != -1
+            oom_detected = err.find("out of memory") != -1
+            multiple_oom_detected = err.find(
+                "out of memory\nout of memory") != -1
+            segfault_detected = exit == -11
+
+            log.write("Diagnosis: ")
+            log.write("\n")
+            if multiple_oom_detected:
+                log.write("  - Multiple OOMs reported")
+                log.write("\n")
+            if segfault_detected:
+                log.write("  - segfault")
+                log.write("\n")
+            if not oom_detected:
+                log.write("  - No OOM checking")
+                log.write("\n")
+            if double_free:
+                log.write("  - Double free")
+                log.write("\n")
+
+            log.write("\n")
+
+            log.write("Valgrind info:\n" + vout)
+            log.write("\n")
+            log.write("\n")
+            log.flush()
 
     if OPTIONS.regression == None:
-      print("Testing allocation {0}/{1} in {2}".format(i,max,f))
-    else:
-      sys.stdout.write('.') # something short for tinderbox, no space or \n
-
-    command = (command_template + ' -A {0}').format(f, i)
-    out, err, exit = run(command)
-
-    # Success (5 is SM's exit code for controlled errors)
-    if exit == 5 and err.find("out of memory") != -1:
-      continue
-
-    # Failure
-    else:
-
-      if OPTIONS.regression != None:
-        # Just count them
-        num_failures += 1
-        continue
-
-      #########################################################################
-      # The regression test ends above. The rest of this is for running the
-      # script manually.
-      #########################################################################
-
-      problem = str((out, err, exit))
-      if in_blacklist(problem) and problem not in whitelist:
-        add_to_blacklist(problem)
-        continue
-
-      add_to_blacklist(problem)
-
-
-      # Get valgrind output for a good stack trace
-      vcommand = "valgrind --dsymutil=yes -q --log-file=OOM_valgrind_log_file " + command
-      run(vcommand)
-      vout = file("OOM_valgrind_log_file").read()
-      vout = clean_voutput(vout)
-      sans_alloc_sites = remove_failed_allocation_backtraces(vout)
-
-      # Don't print duplicate information
-      if in_blacklist(sans_alloc_sites):
-        add_to_blacklist(sans_alloc_sites)
-        continue
-
-      add_to_blacklist(sans_alloc_sites)
-
-      log.write ("\n")
-      log.write ("\n")
-      log.write ("=========================================================================")
-      log.write ("\n")
-      log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
-                 "causes problems (detected using bug 624094)"
-                 .format(i, max, f))
-      log.write ("\n")
-      log.write ("\n")
-
-      log.write ("Command (from obj directory, using patch from bug 624094):\n  " + command)
-      log.write ("\n")
-      log.write ("\n")
-      log.write ("stdout, stderr, exitcode:\n  " + problem)
-      log.write ("\n")
-      log.write ("\n")
-
-      double_free = err.find("pointer being freed was not allocated") != -1
-      oom_detected = err.find("out of memory") != -1
-      multiple_oom_detected = err.find("out of memory\nout of memory") != -1
-      segfault_detected = exit == -11
-
-      log.write ("Diagnosis: ")
-      log.write ("\n")
-      if multiple_oom_detected:
-        log.write ("  - Multiple OOMs reported")
-        log.write ("\n")
-      if segfault_detected:
-        log.write ("  - segfault")
-        log.write ("\n")
-      if not oom_detected:
-        log.write ("  - No OOM checking")
-        log.write ("\n")
-      if double_free:
-        log.write ("  - Double free")
-        log.write ("\n")
-
-      log.write ("\n")
-
-      log.write ("Valgrind info:\n" + vout)
-      log.write ("\n")
-      log.write ("\n")
-      log.flush()
-
-  if OPTIONS.regression == None:
-    count_lines()
+        count_lines()
 
 print()
 
 # Do the actual regression check
 if OPTIONS.regression != None:
-  expected_num_failures = OPTIONS.regression
+    expected_num_failures = OPTIONS.regression
 
-  if num_failures != expected_num_failures:
+    if num_failures != expected_num_failures:
 
-    print("TEST-UNEXPECTED-FAIL |", end='')
-    if num_failures > expected_num_failures:
-      print("More out-of-memory errors were found ({0}) than expected ({1}). "
-            "This probably means an allocation site has been added without a "
-            "NULL-check. If this is unavoidable, you can account for it by "
-            "updating Makefile.in.".format(num_failures, expected_num_failures),
-            end='')
+        print("TEST-UNEXPECTED-FAIL |", end='')
+        if num_failures > expected_num_failures:
+            print("More out-of-memory errors were found ({0}) than expected ({1}). "
+                  "This probably means an allocation site has been added without a "
+                  "NULL-check. If this is unavoidable, you can account for it by "
+                  "updating Makefile.in.".format(
+                      num_failures, expected_num_failures),
+                  end='')
+        else:
+            print("Congratulations, you have removed {0} out-of-memory error(s) "
+                  "({1} remain)! Please account for it by updating Makefile.in."
+                  .format(expected_num_failures - num_failures, num_failures),
+                  end='')
+        sys.exit(-1)
     else:
-      print("Congratulations, you have removed {0} out-of-memory error(s) "
-            "({1} remain)! Please account for it by updating Makefile.in." 
-            .format(expected_num_failures - num_failures, num_failures),
-            end='')
-    sys.exit(-1)
-  else:
-    print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
-          'errors ({0})'.format(expected_num_failures))
-
+        print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
+              'errors ({0})'.format(expected_num_failures))
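For reference, the regression check in find_OOM_errors.py reduces to comparing the observed failure count against the expected one; a condensed, standalone sketch of that logic follows (the function name and inputs are hypothetical; the real script derives num_failures from the instrumented runs):

    import sys

    def check_oom_regression(num_failures, expected_num_failures):
        # Mirrors the TEST-PASS / TEST-UNEXPECTED-FAIL branches above.
        if num_failures == expected_num_failures:
            print('TEST-PASS | find_OOM_errors | Found the expected number '
                  'of OOM errors ({0})'.format(expected_num_failures))
            return 0
        print('TEST-UNEXPECTED-FAIL | got {0} OOM errors, expected {1}'
              .format(num_failures, expected_num_failures))
        return -1

    sys.exit(check_oom_regression(3, 3))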
--- a/config/make-stl-wrappers.py
+++ b/config/make-stl-wrappers.py
@@ -1,34 +1,41 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 from __future__ import print_function
-import os, re, string, sys
+import os
+import re
+import string
+import sys
 from mozbuild.util import FileAvoidWrite
 
+
 def find_in_path(file, searchpath):
     for dir in searchpath.split(os.pathsep):
         f = os.path.join(dir, file)
         if os.path.exists(f):
             return f
     return ''
 
+
 def header_path(header, compiler):
     if compiler == 'gcc':
         # we use include_next on gcc
         return header
     elif compiler == 'msvc':
         return find_in_path(header, os.environ.get('INCLUDE', ''))
     else:
         # hope someone notices this ...
         raise NotImplementedError(compiler)
 
 # The 'unused' arg is the output file from the file_generate action. We actually
 # generate all the files in header_list
+
+
 def gen_wrappers(unused, outdir, compiler, template_file, *header_list):
     template = open(template_file, 'r').read()
 
     for header in header_list:
         path = header_path(header, compiler)
         with FileAvoidWrite(os.path.join(outdir, header)) as f:
             f.write(string.Template(template).substitute(HEADER=header,
                                                          HEADER_PATH=path))
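gen_wrappers() stamps out one wrapper per header via string.Template substitution; a small illustration of that mechanism (the one-line template here is hypothetical, the real one is read from template_file):

    import string

    # Hypothetical template; $HEADER and $HEADER_PATH are the two
    # placeholders gen_wrappers() substitutes for each header.
    template = '#include_next <${HEADER}>  /* found at: ${HEADER_PATH} */\n'
    print(string.Template(template).substitute(HEADER='vector',
                                               HEADER_PATH='vector'))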
--- a/config/nsinstall.py
+++ b/config/nsinstall.py
@@ -13,170 +13,177 @@ from __future__ import print_function
 from optparse import OptionParser
 import mozfile
 import os
 import os.path
 import sys
 import shutil
 import stat
 
+
 def _nsinstall_internal(argv):
-  usage = "usage: %prog [options] arg1 [arg2 ...] target-directory"
-  p = OptionParser(usage=usage)
+    usage = "usage: %prog [options] arg1 [arg2 ...] target-directory"
+    p = OptionParser(usage=usage)
 
-  p.add_option('-D', action="store_true",
-               help="Create a single directory only")
-  p.add_option('-t', action="store_true",
-               help="Preserve time stamp")
-  p.add_option('-m', action="store",
-               help="Set mode", metavar="mode")
-  p.add_option('-d', action="store_true",
-               help="Create directories in target")
-  p.add_option('-R', action="store_true",
-               help="Use relative symbolic links (ignored)")
-  p.add_option('-L', action="store", metavar="linkprefix",
-               help="Link prefix (ignored)")
-  p.add_option('-X', action="append", metavar="file",
-               help="Ignore a file when installing a directory recursively.")
+    p.add_option('-D', action="store_true",
+                 help="Create a single directory only")
+    p.add_option('-t', action="store_true",
+                 help="Preserve time stamp")
+    p.add_option('-m', action="store",
+                 help="Set mode", metavar="mode")
+    p.add_option('-d', action="store_true",
+                 help="Create directories in target")
+    p.add_option('-R', action="store_true",
+                 help="Use relative symbolic links (ignored)")
+    p.add_option('-L', action="store", metavar="linkprefix",
+                 help="Link prefix (ignored)")
+    p.add_option('-X', action="append", metavar="file",
+                 help="Ignore a file when installing a directory recursively.")
 
-  # The remaining arguments are not used in our tree, thus they're not
-  # implented.
-  def BadArg(option, opt, value, parser):
-    parser.error('option not supported: {0}'.format(opt))
+    # The remaining arguments are not used in our tree, thus they're not
+    # implemented.
+    def BadArg(option, opt, value, parser):
+        parser.error('option not supported: {0}'.format(opt))
 
-  p.add_option('-C', action="callback", metavar="CWD",
-               callback=BadArg,
-               help="NOT SUPPORTED")
-  p.add_option('-o', action="callback", callback=BadArg,
-               help="Set owner (NOT SUPPORTED)", metavar="owner")
-  p.add_option('-g', action="callback", callback=BadArg,
-               help="Set group (NOT SUPPORTED)", metavar="group")
+    p.add_option('-C', action="callback", metavar="CWD",
+                 callback=BadArg,
+                 help="NOT SUPPORTED")
+    p.add_option('-o', action="callback", callback=BadArg,
+                 help="Set owner (NOT SUPPORTED)", metavar="owner")
+    p.add_option('-g', action="callback", callback=BadArg,
+                 help="Set group (NOT SUPPORTED)", metavar="group")
 
-  (options, args) = p.parse_args(argv)
+    (options, args) = p.parse_args(argv)
 
-  if options.m:
-    # mode is specified
-    try:
-      options.m = int(options.m, 8)
-    except:
-      sys.stderr.write('nsinstall: {0} is not a valid mode\n'
-                       .format(options.m))
-      return 1
+    if options.m:
+        # mode is specified
+        try:
+            options.m = int(options.m, 8)
+        except:
+            sys.stderr.write('nsinstall: {0} is not a valid mode\n'
+                             .format(options.m))
+            return 1
 
-  # just create one directory?
-  def maybe_create_dir(dir, mode, try_again):
-    dir = os.path.abspath(dir)
-    if os.path.exists(dir):
-      if not os.path.isdir(dir):
-        print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
-        return 1
-      if mode:
-        os.chmod(dir, mode)
-      return 0
+    # just create one directory?
+    def maybe_create_dir(dir, mode, try_again):
+        dir = os.path.abspath(dir)
+        if os.path.exists(dir):
+            if not os.path.isdir(dir):
+                print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
+                return 1
+            if mode:
+                os.chmod(dir, mode)
+            return 0
 
-    try:
-      if mode:
-        os.makedirs(dir, mode)
-      else:
-        os.makedirs(dir)
-    except Exception as e:
-      # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
-      if try_again:
-        return maybe_create_dir(dir, mode, False)
-      print("nsinstall: failed to create directory {0}: {1}".format(dir, e))
-      return 1
-    else:
-      return 0
+        try:
+            if mode:
+                os.makedirs(dir, mode)
+            else:
+                os.makedirs(dir)
+        except Exception as e:
+            # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
+            if try_again:
+                return maybe_create_dir(dir, mode, False)
+            print(
+                "nsinstall: failed to create directory {0}: {1}".format(dir, e))
+            return 1
+        else:
+            return 0
 
-  if options.X:
-    options.X = [os.path.abspath(p) for p in options.X]
+    if options.X:
+        options.X = [os.path.abspath(p) for p in options.X]
 
-  if options.D:
-    return maybe_create_dir(args[0], options.m, True)
+    if options.D:
+        return maybe_create_dir(args[0], options.m, True)
 
-  # nsinstall arg1 [...] directory
-  if len(args) < 2:
-    p.error('not enough arguments')
+    # nsinstall arg1 [...] directory
+    if len(args) < 2:
+        p.error('not enough arguments')
 
-  def copy_all_entries(entries, target):
-    for e in entries:
-      e = os.path.abspath(e)
-      if options.X and e in options.X:
-        continue
+    def copy_all_entries(entries, target):
+        for e in entries:
+            e = os.path.abspath(e)
+            if options.X and e in options.X:
+                continue
 
-      dest = os.path.join(target, os.path.basename(e))
-      dest = os.path.abspath(dest)
-      handleTarget(e, dest)
-      if options.m:
-        os.chmod(dest, options.m)
+            dest = os.path.join(target, os.path.basename(e))
+            dest = os.path.abspath(dest)
+            handleTarget(e, dest)
+            if options.m:
+                os.chmod(dest, options.m)
 
-  # set up handler
-  if options.d:
-    # we're supposed to create directories
-    def handleTarget(srcpath, targetpath):
-      # target directory was already created, just use mkdir
-      os.mkdir(targetpath)
-  else:
-    # we're supposed to copy files
-    def handleTarget(srcpath, targetpath):
-      if os.path.isdir(srcpath):
-        if not os.path.exists(targetpath):
-          os.mkdir(targetpath)
-        entries = [os.path.join(srcpath, e) for e in os.listdir(srcpath)]
-        copy_all_entries(entries, targetpath)
-        # options.t is not relevant for directories
-        if options.m:
-          os.chmod(targetpath, options.m)
-      else:
-        if os.path.exists(targetpath):
-          if sys.platform == "win32":
-            mozfile.remove(targetpath)
-          else:
-            os.remove(targetpath)
-        if options.t:
-          shutil.copy2(srcpath, targetpath)
-        else:
-          shutil.copy(srcpath, targetpath)
+    # set up handler
+    if options.d:
+        # we're supposed to create directories
+        def handleTarget(srcpath, targetpath):
+            # target directory was already created, just use mkdir
+            os.mkdir(targetpath)
+    else:
+        # we're supposed to copy files
+        def handleTarget(srcpath, targetpath):
+            if os.path.isdir(srcpath):
+                if not os.path.exists(targetpath):
+                    os.mkdir(targetpath)
+                entries = [os.path.join(srcpath, e)
+                           for e in os.listdir(srcpath)]
+                copy_all_entries(entries, targetpath)
+                # options.t is not relevant for directories
+                if options.m:
+                    os.chmod(targetpath, options.m)
+            else:
+                if os.path.exists(targetpath):
+                    if sys.platform == "win32":
+                        mozfile.remove(targetpath)
+                    else:
+                        os.remove(targetpath)
+                if options.t:
+                    shutil.copy2(srcpath, targetpath)
+                else:
+                    shutil.copy(srcpath, targetpath)
 
-  # the last argument is the target directory
-  target = args.pop()
-  # ensure target directory (importantly, we do not apply a mode to the directory
-  # because we want to copy files into it and the mode might be read-only)
-  rv = maybe_create_dir(target, None, True)
-  if rv != 0:
-    return rv
+    # the last argument is the target directory
+    target = args.pop()
+    # ensure target directory (importantly, we do not apply a mode to the directory
+    # because we want to copy files into it and the mode might be read-only)
+    rv = maybe_create_dir(target, None, True)
+    if rv != 0:
+        return rv
 
-  copy_all_entries(args, target)
-  return 0
+    copy_all_entries(args, target)
+    return 0
 
 # nsinstall as a native command is always UTF-8
+
+
 def nsinstall(argv):
-  return _nsinstall_internal([unicode(arg, "utf-8") for arg in argv])
+    return _nsinstall_internal([unicode(arg, "utf-8") for arg in argv])
+
 
 if __name__ == '__main__':
-  # sys.argv corrupts characters outside the system code page on Windows
-  # <http://bugs.python.org/issue2128>. Use ctypes instead. This is also
-  # useful because switching to Unicode strings makes python use the wide
-  # Windows APIs, which is what we want here since the wide APIs normally do a
-  # better job at handling long paths and such.
-  if sys.platform == "win32":
-    import ctypes
-    from ctypes import wintypes
-    GetCommandLine = ctypes.windll.kernel32.GetCommandLineW
-    GetCommandLine.argtypes = []
-    GetCommandLine.restype = wintypes.LPWSTR
+    # sys.argv corrupts characters outside the system code page on Windows
+    # <http://bugs.python.org/issue2128>. Use ctypes instead. This is also
+    # useful because switching to Unicode strings makes python use the wide
+    # Windows APIs, which is what we want here since the wide APIs normally do a
+    # better job at handling long paths and such.
+    if sys.platform == "win32":
+        import ctypes
+        from ctypes import wintypes
+        GetCommandLine = ctypes.windll.kernel32.GetCommandLineW
+        GetCommandLine.argtypes = []
+        GetCommandLine.restype = wintypes.LPWSTR
 
-    CommandLineToArgv = ctypes.windll.shell32.CommandLineToArgvW
-    CommandLineToArgv.argtypes = [wintypes.LPWSTR, ctypes.POINTER(ctypes.c_int)]
-    CommandLineToArgv.restype = ctypes.POINTER(wintypes.LPWSTR)
+        CommandLineToArgv = ctypes.windll.shell32.CommandLineToArgvW
+        CommandLineToArgv.argtypes = [
+            wintypes.LPWSTR, ctypes.POINTER(ctypes.c_int)]
+        CommandLineToArgv.restype = ctypes.POINTER(wintypes.LPWSTR)
 
-    argc = ctypes.c_int(0)
-    argv_arr = CommandLineToArgv(GetCommandLine(), ctypes.byref(argc))
-    # The first argv will be "python", the second will be the .py file
-    argv = argv_arr[1:argc.value]
-  else:
-    # For consistency, do it on Unix as well
-    if sys.stdin.encoding is not None:
-      argv = [unicode(arg, sys.stdin.encoding) for arg in sys.argv]
+        argc = ctypes.c_int(0)
+        argv_arr = CommandLineToArgv(GetCommandLine(), ctypes.byref(argc))
+        # The first argv will be "python", the second will be the .py file
+        argv = argv_arr[1:argc.value]
     else:
-      argv = [unicode(arg) for arg in sys.argv]
+        # For consistency, do it on Unix as well
+        if sys.stdin.encoding is not None:
+            argv = [unicode(arg, sys.stdin.encoding) for arg in sys.argv]
+        else:
+            argv = [unicode(arg) for arg in sys.argv]
 
-  sys.exit(_nsinstall_internal(argv[1:]))
+    sys.exit(_nsinstall_internal(argv[1:]))
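nsinstall() (the Python 2 entry point the unit tests below import) takes an argv-style list of byte strings; a hedged usage sketch with hypothetical paths:

    from nsinstall import nsinstall

    # Copy two existing files into destdir, creating it first if
    # necessary; equivalent to `nsinstall file1.txt file2.txt destdir`.
    # Returns 0 on success.
    rv = nsinstall(['file1.txt', 'file2.txt', 'destdir'])
    assert rv == 0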
--- a/config/printprereleasesuffix.py
+++ b/config/printprereleasesuffix.py
@@ -9,23 +9,25 @@
 # 2.1a3pre > ""
 # 3.2b4    > " 3.2 Beta 4"
 # 3.2b4pre > ""
 from __future__ import print_function
 
 import sys
 import re
 
+
 def get_prerelease_suffix(version):
-  """ Returns the prerelease suffix from the version string argument """
+    """ Returns the prerelease suffix from the version string argument """
 
-  def mfunc(m):
-    return " {0} {1} {2}".format(m.group('prefix'),
-                                 {'a': 'Alpha', 'b': 'Beta'}[m.group('c')],
-                                 m.group('suffix'))
-  result, c = re.subn(r'^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$',
-                      mfunc, version)
-  if c != 1:
-    return ''
-  return result
+    def mfunc(m):
+        return " {0} {1} {2}".format(m.group('prefix'),
+                                     {'a': 'Alpha', 'b': 'Beta'}[m.group('c')],
+                                     m.group('suffix'))
+    result, c = re.subn(r'^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$',
+                        mfunc, version)
+    if c != 1:
+        return ''
+    return result
+
 
 if len(sys.argv) == 2:
-  print(get_prerelease_suffix(sys.argv[1]))
+    print(get_prerelease_suffix(sys.argv[1]))
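The function's behaviour matches the mapping documented in the header comment; for example:

    from printprereleasesuffix import get_prerelease_suffix

    # Taken from the examples at the top of the file.
    assert get_prerelease_suffix('3.2b4') == ' 3.2 Beta 4'
    assert get_prerelease_suffix('3.2b4pre') == ''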
--- a/config/pythonpath.py
+++ b/config/pythonpath.py
@@ -42,15 +42,17 @@ def main(args):
     sys.argv = args
     sys.argc = len(args)
 
     frozenglobals['__name__'] = '__main__'
     frozenglobals['__file__'] = script
 
     execfile(script, frozenglobals)
 
+
 # Freeze scope here ... why this makes things work I have no idea ...
 frozenglobals = globals()
 
-import sys, os
+import sys
+import os
 
 if __name__ == '__main__':
     main(sys.argv[1:])
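The execfile-into-frozen-globals trick makes the target script run as though it were __main__; a minimal Python 2 sketch of the same pattern (script path hypothetical):

    # Execute a script under this module's globals, posing as __main__.
    g = globals()
    g['__name__'] = '__main__'
    g['__file__'] = 'some_script.py'  # hypothetical path
    execfile('some_script.py', g)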
--- a/config/rebuild_check.py
+++ b/config/rebuild_check.py
@@ -1,23 +1,25 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 import errno
 
+
 def mtime(path):
     try:
         return os.stat(path).st_mtime
     except OSError as e:
         if e.errno == errno.ENOENT:
             return -1
         raise
 
+
 def rebuild_check(args):
     target = args[0]
     deps = args[1:]
     t = mtime(target)
     if t < 0:
         print target
         return
 
@@ -26,19 +28,22 @@ def rebuild_check(args):
     for dep in deps:
         deptime = mtime(dep)
         if deptime < 0:
             removed.append(dep)
         elif mtime(dep) > t:
             newer.append(dep)
 
     if newer and removed:
-        print 'Rebuilding %s because %s changed and %s was removed' % (target, ', '.join(newer), ', '.join(removed))
+        print 'Rebuilding %s because %s changed and %s was removed' % (
+            target, ', '.join(newer), ', '.join(removed))
     elif newer:
         print 'Rebuilding %s because %s changed' % (target, ', '.join(newer))
     elif removed:
-        print 'Rebuilding %s because %s was removed' % (target, ', '.join(removed))
+        print 'Rebuilding %s because %s was removed' % (
+            target, ', '.join(removed))
     else:
         print 'Rebuilding %s for an unknown reason' % target
 
+
 if __name__ == '__main__':
     import sys
     rebuild_check(sys.argv[1:])
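rebuild_check() reports why a target is stale by comparing mtimes (mtime() returns -1 for missing files); a hedged example invocation with hypothetical file names:

    from rebuild_check import rebuild_check

    # Prints e.g. "Rebuilding out.o because dep.c changed" when dep.c
    # has a newer mtime than out.o. Note the module is Python 2.
    rebuild_check(['out.o', 'dep.c', 'dep.h'])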
--- a/config/run_spidermonkey_checks.py
+++ b/config/run_spidermonkey_checks.py
@@ -1,13 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import buildconfig
 import subprocess
 import sys
 
+
 def main(output, lib_file, *scripts):
     for script in scripts:
-        retcode = subprocess.call([sys.executable, script], cwd=buildconfig.topsrcdir)
+        retcode = subprocess.call(
+            [sys.executable, script], cwd=buildconfig.topsrcdir)
         if retcode != 0:
             raise Exception(script + " failed")
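The runner is a fail-fast loop over subprocess.call; the same pattern in isolation (script names and working directory hypothetical):

    import subprocess
    import sys

    # Run each checker script from the source root; raise on first failure.
    for script in ['check_style.py', 'check_macros.py']:
        if subprocess.call([sys.executable, script], cwd='/srcdir') != 0:
            raise Exception(script + ' failed')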
--- a/config/tests/test_mozbuild_reading.py
+++ b/config/tests/test_mozbuild_reading.py
@@ -63,17 +63,16 @@ class TestMozbuildReading(unittest.TestC
         root = os.path.normpath(os.path.join(here, '..', '..'))
         config = EmptyConfig(root)
         reader = BuildReader(config)
         all_paths = self._mozbuilds(reader)
         paths, contexts = reader.read_relevant_mozbuilds(all_paths)
         self.assertEqual(set(paths.keys()), all_paths)
         self.assertGreaterEqual(len(contexts), len(paths))
 
-
     def test_orphan_file_patterns(self):
         if sys.platform == 'win32':
             raise unittest.SkipTest('failing on windows builds')
 
         mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
 
         try:
             config = mb.config_environment
@@ -106,10 +105,11 @@ class TestMozbuildReading(unittest.TestC
             test_files = ctx['IMPACTED_TESTS'].files
             for p in test_files:
                 if not pattern_exists(os.path.relpath(p.full_path, config.topsrcdir)):
                     self.fail("The pattern '%s' in a dependent tests entry "
                               "in '%s' corresponds to no files in the tree.\n"
                               "Please update this entry." %
                               (p, ctx.main_path))
 
+
 if __name__ == '__main__':
     main()
--- a/config/tests/unit-mozunit.py
+++ b/config/tests/unit-mozunit.py
@@ -3,22 +3,23 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import sys
 import os
 from mozunit import main, MockedOpen
 import unittest
 from tempfile import mkstemp
 
+
 class TestMozUnit(unittest.TestCase):
     def test_mocked_open(self):
         # Create a temporary file on the file system.
         (fd, path) = mkstemp()
         with os.fdopen(fd, 'w') as file:
-            file.write('foobar');
+            file.write('foobar')
 
         self.assertFalse(os.path.exists('file1'))
         self.assertFalse(os.path.exists('file2'))
 
         with MockedOpen({'file1': 'content1',
                          'file2': 'content2'}):
             self.assertTrue(os.path.exists('file1'))
             self.assertTrue(os.path.exists('file2'))
@@ -77,10 +78,11 @@ class TestMozUnit(unittest.TestCase):
         # Check that the file was not actually modified on the file system.
         self.assertEqual(open(path, 'r').read(), 'foobar')
         os.remove(path)
 
         # Check that the file created inside MockedOpen wasn't actually
         # created.
         self.assertRaises(IOError, open, 'file3', 'r')
 
+
 if __name__ == "__main__":
     main()
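As the test above exercises, MockedOpen substitutes an in-memory view of the named files for the duration of the with-block; a condensed sketch:

    import os
    from mozunit import MockedOpen

    with MockedOpen({'virtual.txt': 'content'}):
        # open() and os.path.exists() both see the mocked file.
        assert os.path.exists('virtual.txt')
        assert open('virtual.txt', 'r').read() == 'content'
    # Nothing was actually written to the real filesystem.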
--- a/config/tests/unit-nsinstall.py
+++ b/config/tests/unit-nsinstall.py
@@ -1,31 +1,36 @@
 import unittest
 
-import os, sys, os.path, time
+import os
+import sys
+import os.path
+import time
 from tempfile import mkdtemp
 from shutil import rmtree
 import mozunit
 from mozprocess import processhandler
 
 from nsinstall import nsinstall
 import nsinstall as nsinstall_module
 NSINSTALL_PATH = nsinstall_module.__file__
 
 # Run the non-ASCII tests on (a) Windows, or (b) any platform with
 # sys.stdin.encoding set to UTF-8
 import codecs
 RUN_NON_ASCII_TESTS = (sys.platform == "win32" or
                        (sys.stdin.encoding is not None and
                         codecs.lookup(sys.stdin.encoding) == codecs.lookup("utf-8")))
 
+
 class TestNsinstall(unittest.TestCase):
     """
     Unit tests for nsinstall.py
     """
+
     def setUp(self):
         self.tmpdir = mkdtemp()
 
     def tearDown(self):
         # Unicode strings means non-ASCII children can be deleted properly on
         # Windows
         if sys.stdin.encoding is None:
             tmpdir = unicode(self.tmpdir)
@@ -75,17 +80,18 @@ class TestNsinstall(unittest.TestCase):
                                     '-X', Xfile,
                                     '-X', Xdir]), 0)
 
         testdir = os.path.join(destdir, "sourcedir")
         self.assert_(os.path.isdir(testdir))
         self.assert_(os.path.isfile(os.path.join(testdir, "testfile")))
         self.assert_(not os.path.exists(os.path.join(testdir, "Xfile")))
         self.assert_(os.path.isdir(os.path.join(testdir, "copieddir")))
-        self.assert_(os.path.isfile(os.path.join(testdir, "copieddir", "testfile2")))
+        self.assert_(os.path.isfile(os.path.join(
+            testdir, "copieddir", "testfile2")))
         self.assert_(not os.path.exists(os.path.join(testdir, "Xdir")))
 
     def test_nsinstall_multiple(self):
         "Test nsinstall <three files> <dest dir>"
         testfiles = [self.touch("testfile1"),
                      self.touch("testfile2"),
                      self.touch("testfile3")]
         testdir = self.mkdirs("testdir")
@@ -163,12 +169,13 @@ class TestNsinstall(unittest.TestCase):
                                                     testfile, testdir])
             p.run()
             rv = p.waitForFinish()
 
             self.assertEqual(rv, 0)
             destfile = os.path.join(testdir, filename)
             self.assert_(os.path.isfile(destfile))
 
-    #TODO: implement -R, -l, -L and test them!
+    # TODO: implement -R, -l, -L and test them!
+
 
 if __name__ == '__main__':
-  mozunit.main()
+    mozunit.main()
--- a/config/tests/unit-printprereleasesuffix.py
+++ b/config/tests/unit-printprereleasesuffix.py
@@ -1,80 +1,82 @@
 import unittest
 
 import sys
 import os.path
 import mozunit
 
 from printprereleasesuffix import get_prerelease_suffix
 
+
 class TestGetPreReleaseSuffix(unittest.TestCase):
-  """
-  Unit tests for the get_prerelease_suffix function
-  """
+    """
+    Unit tests for the get_prerelease_suffix function
+    """
 
-  def test_alpha_1(self):
-    """test 1a1 version string"""
-    self.c = get_prerelease_suffix('1a1')
-    self.assertEqual(self.c, ' 1 Alpha 1')
+    def test_alpha_1(self):
+        """test 1a1 version string"""
+        self.c = get_prerelease_suffix('1a1')
+        self.assertEqual(self.c, ' 1 Alpha 1')
 
-  def test_alpha_10(self):
-    """test 1.2a10 version string"""
-    self.c = get_prerelease_suffix('1.2a10')
-    self.assertEqual(self.c, ' 1.2 Alpha 10')
+    def test_alpha_10(self):
+        """test 1.2a10 version string"""
+        self.c = get_prerelease_suffix('1.2a10')
+        self.assertEqual(self.c, ' 1.2 Alpha 10')
 
-  def test_beta_3(self):
-    """test 1.2.3b3 version string"""
-    self.c = get_prerelease_suffix('1.2.3b3')
-    self.assertEqual(self.c, ' 1.2.3 Beta 3')
+    def test_beta_3(self):
+        """test 1.2.3b3 version string"""
+        self.c = get_prerelease_suffix('1.2.3b3')
+        self.assertEqual(self.c, ' 1.2.3 Beta 3')
 
-  def test_beta_30(self):
-    """test 1.2.3.4b30 version string"""
-    self.c = get_prerelease_suffix('1.2.3.4b30')
-    self.assertEqual(self.c, ' 1.2.3.4 Beta 30')
+    def test_beta_30(self):
+        """test 1.2.3.4b30 version string"""
+        self.c = get_prerelease_suffix('1.2.3.4b30')
+        self.assertEqual(self.c, ' 1.2.3.4 Beta 30')
 
-  def test_release_1(self):
-    """test 1.2.3.4 version string"""
-    self.c = get_prerelease_suffix('1.2.3.4')
-    self.assertEqual(self.c, '')
+    def test_release_1(self):
+        """test 1.2.3.4 version string"""
+        self.c = get_prerelease_suffix('1.2.3.4')
+        self.assertEqual(self.c, '')
 
-  def test_alpha_1_pre(self):
-    """test 1.2a1pre version string"""
-    self.c = get_prerelease_suffix('1.2a1pre')
-    self.assertEqual(self.c, '')
+    def test_alpha_1_pre(self):
+        """test 1.2a1pre version string"""
+        self.c = get_prerelease_suffix('1.2a1pre')
+        self.assertEqual(self.c, '')
 
-  def test_beta_10_pre(self):
-    """test 3.4b10pre version string"""
-    self.c = get_prerelease_suffix('3.4b10pre')
-    self.assertEqual(self.c, '')
+    def test_beta_10_pre(self):
+        """test 3.4b10pre version string"""
+        self.c = get_prerelease_suffix('3.4b10pre')
+        self.assertEqual(self.c, '')
 
-  def test_pre_0(self):
-    """test 1.2pre0 version string"""
-    self.c = get_prerelease_suffix('1.2pre0')
-    self.assertEqual(self.c, '')
+    def test_pre_0(self):
+        """test 1.2pre0 version string"""
+        self.c = get_prerelease_suffix('1.2pre0')
+        self.assertEqual(self.c, '')
 
-  def test_pre_1_b(self):
-    """test 1.2pre1b version string"""
-    self.c = get_prerelease_suffix('1.2pre1b')
-    self.assertEqual(self.c, '')
+    def test_pre_1_b(self):
+        """test 1.2pre1b version string"""
+        self.c = get_prerelease_suffix('1.2pre1b')
+        self.assertEqual(self.c, '')
 
-  def test_a_a(self):
-    """test 1.2aa version string"""
-    self.c = get_prerelease_suffix('1.2aa')
-    self.assertEqual(self.c, '')
+    def test_a_a(self):
+        """test 1.2aa version string"""
+        self.c = get_prerelease_suffix('1.2aa')
+        self.assertEqual(self.c, '')
 
-  def test_b_b(self):
-    """test 1.2bb version string"""
-    self.c = get_prerelease_suffix('1.2bb')
-    self.assertEqual(self.c, '')
+    def test_b_b(self):
+        """test 1.2bb version string"""
+        self.c = get_prerelease_suffix('1.2bb')
+        self.assertEqual(self.c, '')
 
-  def test_a_b(self):
-    """test 1.2ab version string"""
-    self.c = get_prerelease_suffix('1.2ab')
-    self.assertEqual(self.c, '')
+    def test_a_b(self):
+        """test 1.2ab version string"""
+        self.c = get_prerelease_suffix('1.2ab')
+        self.assertEqual(self.c, '')
 
-  def test_plus(self):
-    """test 1.2+ version string """
-    self.c = get_prerelease_suffix('1.2+')
-    self.assertEqual(self.c, '')
+    def test_plus(self):
+        """test 1.2+ version string """
+        self.c = get_prerelease_suffix('1.2+')
+        self.assertEqual(self.c, '')
+
 
 if __name__ == '__main__':
-  mozunit.main()
+    mozunit.main()
--- a/config/tests/unitMozZipFile.py
+++ b/config/tests/unitMozZipFile.py
@@ -23,179 +23,188 @@ written is always the first file, the se
 the second, the third is one of the first three. That is, if we
 had 4 files, but only three writes, the fourth file would never even
 get tried.
 
 The content written to the jars is pseudorandom with a fixed seed.
 '''
 
 if not __file__:
-  __file__ = sys.argv[0]
+    __file__ = sys.argv[0]
 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
 
 from MozZipFile import ZipFile
 import zipfile
 
 leafs = (
-  'firstdir/oneleaf',
-  'seconddir/twoleaf',
-  'thirddir/with/sub/threeleaf')
+    'firstdir/oneleaf',
+    'seconddir/twoleaf',
+    'thirddir/with/sub/threeleaf')
 _lengths = map(lambda n: n * 64, [16, 64, 80])
 lengths = 3
 writes = 5
 
+
 def givenlength(i):
-  '''Return a length given in the _lengths array to allow manual
-  tuning of which lengths of zip entries to use.
-  '''
-  return _lengths[i]
+    '''Return a length given in the _lengths array to allow manual
+    tuning of which lengths of zip entries to use.
+    '''
+    return _lengths[i]
 
 
 def prod(*iterables):
-  ''''Tensor product of a list of iterables.
+    '''Tensor product of a list of iterables.
 
-  This generator returns lists of items, one of each given
-  iterable. It iterates over all possible combinations.
-  '''
-  for item in iterables[0]:
-    if len(iterables) == 1:
-      yield [item]
-    else:
-      for others in prod(*iterables[1:]):
-        yield [item] + others
+    This generator returns lists of items, one of each given
+    iterable. It iterates over all possible combinations.
+    '''
+    for item in iterables[0]:
+        if len(iterables) == 1:
+            yield [item]
+        else:
+            for others in prod(*iterables[1:]):
+                yield [item] + others
 
 
 def getid(descs):
-  'Convert a list of ints to a string.'
-  return reduce(lambda x,y: x+'{0}{1}'.format(*tuple(y)), descs,'')
+    'Convert a list of ints to a string.'
+    return reduce(lambda x, y: x+'{0}{1}'.format(*tuple(y)), descs, '')
 
 
 def getContent(length):
-  'Get pseudo random content of given length.'
-  rv = [None] * length
-  for i in xrange(length):
-    rv[i] = random.choice(letters)
-  return ''.join(rv)
+    'Get pseudo random content of given length.'
+    rv = [None] * length
+    for i in xrange(length):
+        rv[i] = random.choice(letters)
+    return ''.join(rv)
 
 
 def createWriter(sizer, *items):
-  'Helper method to fill in tests, one set of writes, one for each item'
-  locitems = copy.deepcopy(items)
-  for item in locitems:
-    item['length'] = sizer(item.pop('length', 0))
-  def helper(self):
-    mode  = 'w'
-    if os.path.isfile(self.f):
-      mode = 'a'
-    zf = ZipFile(self.f, mode, self.compression)
+    'Helper method to fill in tests, one set of writes, one for each item'
+    locitems = copy.deepcopy(items)
     for item in locitems:
-      self._write(zf, **item)
-    zf = None
-    pass
-  return helper
+        item['length'] = sizer(item.pop('length', 0))
+
+    def helper(self):
+        mode = 'w'
+        if os.path.isfile(self.f):
+            mode = 'a'
+        zf = ZipFile(self.f, mode, self.compression)
+        for item in locitems:
+            self._write(zf, **item)
+        zf = None
+        pass
+    return helper
+
 
 def createTester(name, *writes):
-  '''Helper method to fill in tests, calls into a list of write
-  helper methods.
-  '''
-  _writes = copy.copy(writes)
-  def tester(self):
-    for w in _writes:
-      getattr(self, w)()
-    self._verifyZip()
-    pass
-  # unit tests get confused if the method name isn't test...
-  tester.__name__ = name
-  return tester
+    '''Helper method to fill in tests, calls into a list of write
+    helper methods.
+    '''
+    _writes = copy.copy(writes)
+
+    def tester(self):
+        for w in _writes:
+            getattr(self, w)()
+        self._verifyZip()
+        pass
+    # unit tests get confused if the method name isn't test...
+    tester.__name__ = name
+    return tester
+
 
 class TestExtensiveStored(unittest.TestCase):
-  '''Unit tests for MozZipFile
+    '''Unit tests for MozZipFile
+
+    The test cases are actually populated by code following the class
+    definition.
+    '''
+
+    stage = "mozzipfilestage"
+    compression = zipfile.ZIP_STORED
+
+    def leaf(self, *leafs):
+        return os.path.join(self.stage, *leafs)
 
-  The testcase are actually populated by code following the class
-  definition.
-  '''
-  
-  stage = "mozzipfilestage"
-  compression = zipfile.ZIP_STORED
+    def setUp(self):
+        if os.path.exists(self.stage):
+            shutil.rmtree(self.stage)
+        os.mkdir(self.stage)
+        self.f = self.leaf('test.jar')
+        self.ref = {}
+        self.seed = 0
+
+    def tearDown(self):
+        self.f = None
+        self.ref = None
 
-  def leaf(self, *leafs):
-    return os.path.join(self.stage, *leafs)
-  def setUp(self):
-    if os.path.exists(self.stage):
-      shutil.rmtree(self.stage)
-    os.mkdir(self.stage)
-    self.f = self.leaf('test.jar')
-    self.ref = {}
-    self.seed = 0
-  
-  def tearDown(self):
-    self.f = None
-    self.ref = None
-  
-  def _verifyZip(self):
-    zf = zipfile.ZipFile(self.f)
-    badEntry = zf.testzip()
-    self.failIf(badEntry, badEntry)
-    zlist = zf.namelist()
-    zlist.sort()
-    vlist = self.ref.keys()
-    vlist.sort()
-    self.assertEqual(zlist, vlist)
-    for leaf, content in self.ref.iteritems():
-      zcontent = zf.read(leaf)
-      self.assertEqual(content, zcontent)
-  
-  def _write(self, zf, seed=None, leaf=0, length=0):
-    if seed is None:
-      seed = self.seed
-      self.seed += 1
-    random.seed(seed)
-    leaf = leafs[leaf]
-    content = getContent(length)
-    self.ref[leaf] = content
-    zf.writestr(leaf, content)
-    dir = os.path.dirname(self.leaf('stage', leaf))
-    if not os.path.isdir(dir):
-      os.makedirs(dir)
-    open(self.leaf('stage', leaf), 'w').write(content)
+    def _verifyZip(self):
+        zf = zipfile.ZipFile(self.f)
+        badEntry = zf.testzip()
+        self.failIf(badEntry, badEntry)
+        zlist = zf.namelist()
+        zlist.sort()
+        vlist = self.ref.keys()
+        vlist.sort()
+        self.assertEqual(zlist, vlist)
+        for leaf, content in self.ref.iteritems():
+            zcontent = zf.read(leaf)
+            self.assertEqual(content, zcontent)
+
+    def _write(self, zf, seed=None, leaf=0, length=0):
+        if seed is None:
+            seed = self.seed
+            self.seed += 1
+        random.seed(seed)
+        leaf = leafs[leaf]
+        content = getContent(length)
+        self.ref[leaf] = content
+        zf.writestr(leaf, content)
+        dir = os.path.dirname(self.leaf('stage', leaf))
+        if not os.path.isdir(dir):
+            os.makedirs(dir)
+        open(self.leaf('stage', leaf), 'w').write(content)
+
 
 # all leafs in all lengths
 atomics = list(prod(xrange(len(leafs)), xrange(lengths)))
 
 # populate TestExtensiveStore with testcases
 for w in xrange(writes):
-  # Don't iterate over all files for the the first n passes,
-  # those are redundant as long as w < lengths.
-  # There are symmetries in the trailing end, too, but I don't know
-  # how to reduce those out right now.
-  nonatomics = [list(prod(range(min(i,len(leafs))), xrange(lengths)))
-                for i in xrange(1, w+1)] + [atomics]
-  for descs in prod(*nonatomics):
-    suffix = getid(descs)
-    dicts = [dict(leaf=leaf, length=length) for leaf, length in descs]
-    setattr(TestExtensiveStored, '_write' + suffix,
-            createWriter(givenlength, *dicts))
-    setattr(TestExtensiveStored, 'test' + suffix,
-            createTester('test' + suffix, '_write' + suffix))
+    # Don't iterate over all files for the first n passes,
+    # those are redundant as long as w < lengths.
+    # There are symmetries in the trailing end, too, but I don't know
+    # how to reduce those out right now.
+    nonatomics = [list(prod(range(min(i, len(leafs))), xrange(lengths)))
+                  for i in xrange(1, w+1)] + [atomics]
+    for descs in prod(*nonatomics):
+        suffix = getid(descs)
+        dicts = [dict(leaf=leaf, length=length) for leaf, length in descs]
+        setattr(TestExtensiveStored, '_write' + suffix,
+                createWriter(givenlength, *dicts))
+        setattr(TestExtensiveStored, 'test' + suffix,
+                createTester('test' + suffix, '_write' + suffix))
 
 # now create another round of tests, with two writing passes
 # first, write all file combinations into the jar, close it,
 # and then write all atomics again.
 # This should catch more or less all artifacts generated
 # by the final ordering step when closing the jar.
 files = [list(prod([i], xrange(lengths))) for i in xrange(len(leafs))]
-allfiles = reduce(lambda l,r:l+r,
+allfiles = reduce(lambda l, r: l+r,
                   [list(prod(*files[:(i+1)])) for i in xrange(len(leafs))])
 
 for first in allfiles:
-  testbasename = 'test{0}_'.format(getid(first))
-  test = [None, '_write' + getid(first), None]
-  for second in atomics:
-    test[0] = testbasename + getid([second])
-    test[2] = '_write' + getid([second])
-    setattr(TestExtensiveStored, test[0], createTester(*test))
+    testbasename = 'test{0}_'.format(getid(first))
+    test = [None, '_write' + getid(first), None]
+    for second in atomics:
+        test[0] = testbasename + getid([second])
+        test[2] = '_write' + getid([second])
+        setattr(TestExtensiveStored, test[0], createTester(*test))
+
 
 class TestExtensiveDeflated(TestExtensiveStored):
-  'Test all that has been tested with ZIP_STORED with DEFLATED, too.'
-  compression = zipfile.ZIP_DEFLATED
+    'Test all that has been tested with ZIP_STORED with DEFLATED, too.'
+    compression = zipfile.ZIP_DEFLATED
+
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()
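For orientation, the test matrix above is generated by prod() (all combinations of leaf and length indices) and getid() (flattening one combination into a test-name suffix); a quick Python 2 illustration using those two functions from the module:

    # Every pair of (leaf index, length index) for two values of each:
    print(list(prod(xrange(2), xrange(2))))
    # -> [[0, 0], [0, 1], [1, 0], [1, 1]]

    # getid() concatenates the digit pairs into a suffix:
    print(getid([(0, 1), (1, 2)]))
    # -> '0112'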