Bug 812179 - Removed hacks for Python < 2.6 from config/ [r=ted]
authorYati Sagade <yati.sagade@gmail.com>
Wed, 27 Feb 2013 22:30:56 +0530
changeset 123443 8f83edc05fa4d458d82668a95853c907544d0b8d
parent 123442 f581b9c6b9169fc5a4d5f497bec0e912271c5a08
child 123444 1f5912993f1111d978d3d5901b6f7789daa35c1b
push id24382
push userryanvm@gmail.com
push dateFri, 01 Mar 2013 23:43:19 +0000
treeherdermozilla-central@3362afba690e [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersted
bugs812179
milestone22.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 812179 - Removed hacks for Python < 2.6 from config/ [r=ted]
config/Expression.py
config/JarMaker.py
config/Preprocessor.py
config/buildlist.py
config/check_source_count.py
config/expandlibs.py
config/expandlibs_exec.py
config/expandlibs_gen.py
config/find_OOM_errors.py
config/make-stl-wrappers.py
config/mozunit.py
config/nsinstall.py
config/printconfigsetting.py
config/printprereleasesuffix.py
config/tests/unit-JarMaker.py
config/tests/unit-Preprocessor.py
config/tests/unit-buildlist.py
config/tests/unit-expandlibs.py
config/tests/unit-mozunit.py
config/tests/unit-nsinstall.py
config/tests/unit-writemozinfo.py
config/tests/unitMozZipFile.py
config/utils.py
config/writemozinfo.py
js/src/config/Expression.py
js/src/config/Preprocessor.py
js/src/config/check_source_count.py
js/src/config/expandlibs.py
js/src/config/expandlibs_exec.py
js/src/config/expandlibs_gen.py
js/src/config/find_OOM_errors.py
js/src/config/nsinstall.py
--- a/config/Expression.py
+++ b/config/Expression.py
@@ -213,17 +213,18 @@ class Expression:
     Error raised when parsing fails.
     It has two members, offset and content, which give the offset of the
     error and the offending content.
     """
     def __init__(self, expression):
       self.offset = expression.offset
       self.content = expression.content[:3]
     def __str__(self):
-      return 'Unexpected content at offset %i, "%s"'%(self.offset, self.content)
+      return 'Unexpected content at offset {0}, "{1}"'.format(self.offset, 
+                                                              self.content)
 
 class Context(dict):
   """
   This class holds variable values by subclassing dict, and while it
   truthfully reports True and False on
   
   name in context
   
--- a/config/JarMaker.py
+++ b/config/JarMaker.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 '''jarmaker.py provides a python class to package up chrome content by
 processing jar.mn files.
 
 See the documentation for jar.mn on MDC for further details on the format.
 '''
-
 import sys
 import os
 import os.path
 import errno
 import re
 import logging
 from time import localtime
 from optparse import OptionParser
@@ -147,21 +146,22 @@ class JarMaker(object):
     # rewrite the manifest, if entries given
     if not register:
       return
 
     chromeManifest = os.path.join(os.path.dirname(jarPath),
                                   '..', 'chrome.manifest')
 
     if self.useJarfileManifest:
-      self.updateManifest(jarPath + '.manifest', chromebasepath % '',
+      self.updateManifest(jarPath + '.manifest', chromebasepath.format(''),
                           register)
-      addEntriesToListFile(chromeManifest, ['manifest chrome/%s.manifest' % (os.path.basename(jarPath),)])
+      addEntriesToListFile(chromeManifest, ['manifest chrome/{0}.manifest'
+                                            .format(os.path.basename(jarPath))])
     if self.useChromeManifest:
-      self.updateManifest(chromeManifest, chromebasepath % 'chrome/',
+      self.updateManifest(chromeManifest, chromebasepath.format('chrome/'),
                           register)
 
     # If requested, add a root chrome manifest entry (assumed to be in the parent directory
     # of chromeManifest) with the application specific id. In cases where we're building
     # lang packs, the root manifest must know about application sub directories.
     if self.rootManifestAppId:
       rootChromeManifest = os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
                                         '..', 'chrome.manifest')
@@ -253,31 +253,31 @@ class JarMaker(object):
     of a jar.mn file.
 
     jarfile is the basename of the jarfile or the directory name for 
     flat output, lines is a pushback_iterator of the lines of jar.mn,
     the remaining options are carried over from makeJar.
     '''
 
     # chromebasepath is used for chrome registration manifests
-    # %s is getting replaced with chrome/ for chrome.manifest, and with
+    # {0} is getting replaced with chrome/ for chrome.manifest, and with
     # an empty string for jarfile.manifest
-    chromebasepath = '%s' + os.path.basename(jarfile)
+    chromebasepath = '{0}' + os.path.basename(jarfile)
     if self.outputFormat == 'jar':
       chromebasepath = 'jar:' + chromebasepath + '.jar!'
     chromebasepath += '/'
 
     jarfile = os.path.join(jardir, jarfile)
     jf = None
     if self.outputFormat == 'jar':
       #jar
       jarfilepath = jarfile + '.jar'
       try:
         os.makedirs(os.path.dirname(jarfilepath))
-      except OSError, error:
+      except OSError as error:
         if error.errno != errno.EEXIST:
           raise
       jf = ZipFile(jarfilepath, 'a', lock = True)
       outHelper = self.OutputHelper_jar(jf)
     else:
       outHelper = getattr(self, 'OutputHelper_' + self.outputFormat)(jarfile)
     register = {}
     # This loop exits on either
@@ -340,17 +340,18 @@ class JarMaker(object):
       realsrc = None
       for _srcdir in src_base:
         if os.path.isfile(os.path.join(_srcdir, src)):
           realsrc = os.path.join(_srcdir, src)
           break
       if realsrc is None:
         if jf is not None:
           jf.close()
-        raise RuntimeError('File "%s" not found in %s' % (src, ', '.join(src_base)))
+        raise RuntimeError('File "{0}" not found in {1}'
+                           .format(src, ', '.join(src_base)))
       if m.group('optPreprocess'):
         outf = outHelper.getOutput(out)
         inf = open(realsrc)
         pp = self.pp.clone()
         if src[-4:] == '.css':
           pp.setMarker('%')
         pp.out = outf
         pp.do_include(inf)
@@ -396,41 +397,41 @@ class JarMaker(object):
       self.basepath = basepath
     def getDestModTime(self, aPath):
       return getModTime(os.path.join(self.basepath, aPath))
     def getOutput(self, name):
       out = self.ensureDirFor(name)
       # remove previous link or file
       try:
         os.remove(out)
-      except OSError, e:
+      except OSError as e:
         if e.errno != errno.ENOENT:
           raise
       return open(out, 'wb')
     def ensureDirFor(self, name):
       out = os.path.join(self.basepath, name)
       outdir = os.path.dirname(out)
       if not os.path.isdir(outdir):
         try:
           os.makedirs(outdir)
-        except OSError, error:
+        except OSError as error:
           if error.errno != errno.EEXIST:
             raise
       return out
 
   class OutputHelper_symlink(OutputHelper_flat):
     '''Subclass of OutputHelper_flat that provides a helper for
     creating a symlink including creating the parent directories.
     '''
     def symlink(self, src, dest):
       out = self.ensureDirFor(dest)
       # remove previous link or file
       try:
         os.remove(out)
-      except OSError, e:
+      except OSError as e:
         if e.errno != errno.ENOENT:
           raise
       if sys.platform != "win32":
         os.symlink(src, out)
       else:
         # On Win32, use ctypes to create a hardlink
         rv = CreateHardLink(out, src, None)
         if rv == 0:
--- a/config/Preprocessor.py
+++ b/config/Preprocessor.py
@@ -72,36 +72,38 @@ class Preprocessor:
       self.cmds[cmd] = (level, getattr(self, 'do_' + cmd))
     self.out = sys.stdout
     self.setMarker('#')
     self.LE = '\n'
     self.varsubst = re.compile('@(?P<VAR>\w+)@', re.U)
   
   def warnUnused(self, file):
     if self.actionLevel == 0:
-      sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
+      sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
     elif self.actionLevel == 1:
-      sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
+      sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
     pass
 
   def setLineEndings(self, aLE):
     """
     Set the line endings to be used for output.
     """
     self.LE = {'cr': '\x0D', 'lf': '\x0A', 'crlf': '\x0D\x0A'}[aLE]
   
   def setMarker(self, aMarker):
     """
     Set the marker to be used for processing directives.
     Used for handling CSS files, with pp.setMarker('%'), for example.
     The given marker may be None, in which case no markers are processed.
     """
     self.marker = aMarker
     if aMarker:
-      self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'%aMarker, re.U)
+      self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
+                                    .format(aMarker), 
+                                    re.U)
       self.comment = re.compile(aMarker, re.U)
     else:
       class NoMatch(object):
         def match(self, *args):
           return False
       self.instruction = self.comment = NoMatch()
   
   def clone(self):
@@ -124,19 +126,19 @@ class Preprocessor:
   def write(self, aLine):
     """
     Internal method for handling output.
     """
     if self.checkLineNumbers:
       self.writtenLines += 1
       ln = self.context['LINE']
       if self.writtenLines != ln:
-        self.out.write('//@line %(line)d "%(file)s"%(le)s'%{'line': ln,
-                                                            'file': self.context['FILE'],
-                                                            'le': self.LE})
+        self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
+                                                            file=self.context['FILE'],
+                                                            le=self.LE))
         self.writtenLines = ln
     filteredLine = self.applyFilters(aLine)
     if filteredLine != aLine:
       self.actionLevel = 2
     # ensure our line ending. Only need to handle \n, as we're reading
     # with universal line ending support, at least for files.
     filteredLine = re.sub('\n', self.LE, filteredLine)
     self.out.write(filteredLine)
--- a/config/buildlist.py
+++ b/config/buildlist.py
@@ -2,39 +2,41 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 '''A generic script to add entries to a file 
 if the entry does not already exist.
 
 Usage: buildlist.py <filename> <entry> [<entry> ...]
 '''
+from __future__ import print_function
 
 import sys
 import os
 from utils import lockFile
 
 def addEntriesToListFile(listFile, entries):
   """Given a file |listFile| containing one entry per line,
   add each entry in |entries| to the file, unless it is already
   present."""
   lock = lockFile(listFile + ".lck")
   try:
     if os.path.exists(listFile):
       f = open(listFile)
-      existing = set([x.strip() for x in f.readlines()])
+      existing = set(x.strip() for x in f.readlines())
       f.close()
     else:
       existing = set()
     f = open(listFile, 'a')
     for e in entries:
       if e not in existing:
-        f.write("%s\n" % e)
+        f.write("{0}\n".format(e))
         existing.add(e)
     f.close()
   finally:
     lock = None
 
 if __name__ == '__main__':
   if len(sys.argv) < 3:
-    print >>sys.stderr, "Usage: buildlist.py <list file> <entry> [<entry> ...]"
+    print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
+          file=sys.stderr)
     sys.exit(1)
   addEntriesToListFile(sys.argv[1], sys.argv[2:])
--- a/config/check_source_count.py
+++ b/config/check_source_count.py
@@ -4,17 +4,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 # Usage: check_source_count.py SEARCH_TERM COUNT ERROR_LOCATION REPLACEMENT [FILES...]
 #   Checks that FILES contains exactly COUNT matches of SEARCH_TERM. If it does
 #   not, an error message is printed, quoting ERROR_LOCATION, which should
 #   probably be the filename and line number of the erroneous call to
 #   check_source_count.py.
-
+from __future__ import print_function
 import sys
 import os
 import re
 
 search_string = sys.argv[1]
 expected_count = int(sys.argv[2])
 error_location = sys.argv[3]
 replacement = sys.argv[4]
@@ -27,22 +27,31 @@ for f in files:
     text = file(f).read()
     match = re.findall(search_string, text)
     if match:
         num = len(match)
         count += num
         details[f] = num
 
 if count == expected_count:
-    print "TEST-PASS | check_source_count.py %s | %d" % (search_string, expected_count)
+    print("TEST-PASS | check_source_count.py {0} | {1}"
+          .format(search_string, expected_count))
 
 else:
-    print "TEST-UNEXPECTED-FAIL | check_source_count.py %s | " % (search_string),
+    print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
+          .format(search_string),
+          end='')
     if count < expected_count:
-        print "There are fewer occurrences of /%s/ than expected. This may mean that you have removed some, but forgotten to account for it %s." % (search_string, error_location)
+        print("There are fewer occurrences of /{0}/ than expected. "
+              "This may mean that you have removed some, but forgotten to "
+              "account for it {1}.".format(search_string, error_location))
     else:
-        print "There are more occurrences of /%s/ than expected. We're trying to prevent an increase in the number of %s's, using %s if possible. If it in unavoidable, you should update the expected count %s." % (search_string, search_string, replacement, error_location)
+        print("There are more occurrences of /{0}/ than expected. We're trying "
+              "to prevent an increase in the number of {1}'s, using {2} if "
+              "possible. If it is unavoidable, you should update the expected "
+              "count {3}.".format(search_string, search_string, replacement, 
+                                 error_location))
 
-    print "Expected: %d; found: %d" % (expected_count, count)
+    print("Expected: {0}; found: {1}".format(expected_count, count))
     for k in sorted(details):
-        print "Found %d occurences in %s" % (details[k],k)
+        print("Found {0} occurrences in {1}".format(details[k],k))
     sys.exit(-1)
 
--- a/config/expandlibs.py
+++ b/config/expandlibs.py
@@ -21,27 +21,26 @@ given a list of files, expandlibs will r
 - If a ${DLL_PREFIX}${ROOT}.${DLL_SUFFIX} or
   ${DLL_PREFIX}${ROOT}.${IMPORT_LIB_SUFFIX} file exists, use that instead
 - If the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} file exists, use it
 - If a ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX}.${LIB_DESC_SUFFIX} file exists,
   replace ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} with the OBJS and LIBS the
   descriptor contains. And for each of these LIBS, also apply the same
   rules.
 '''
-from __future__ import with_statement
 import sys, os, errno
 import expandlibs_config as conf
 
 def ensureParentDir(file):
     '''Ensures the directory parent to the given file exists'''
     dir = os.path.dirname(file)
     if dir and not os.path.exists(dir):
         try:
             os.makedirs(dir)
-        except OSError, error:
+        except OSError as error:
             if error.errno != errno.EEXIST:
                 raise
 
 def relativize(path):
     '''Returns a path relative to the current working directory, if it is
     shorter than the given path'''
     def splitpath(path):
         dir, file = os.path.split(path)
@@ -85,17 +84,18 @@ class LibDescriptor(dict):
         if not content:
             return
         for key, value in [(s.strip() for s in item.split('=', 2)) for item in content if item.find('=') >= 0]:
             if key in self.KEYS:
                 self[key] = value.split()
 
     def __str__(self):
         '''Serializes the lib descriptor'''
-        return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
+        return '\n'.join('{0} = {1}'.format(k, ' '.join(self[k]))
+                         for k in self.KEYS if len(self[k]))
 
 class ExpandArgs(list):
     def __init__(self, args):
         '''Creates a clone of the |args| list and performs file expansion on
         each item it contains'''
         super(ExpandArgs, self).__init__()
         for arg in args:
             self += self._expand(arg)
@@ -130,9 +130,9 @@ class ExpandLibsDeps(ExpandArgs):
     '''Same as ExpandArgs, but also adds the library descriptor to the list'''
     def _expand_desc(self, arg):
         objs = super(ExpandLibsDeps, self)._expand_desc(arg)
         if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
             objs += [relativize(arg + conf.LIBS_DESC_SUFFIX)]
         return objs
 
 if __name__ == '__main__':
-    print " ".join(ExpandArgs(sys.argv[1:]))
+    print(" ".join(ExpandArgs(sys.argv[1:])))
--- a/config/expandlibs_exec.py
+++ b/config/expandlibs_exec.py
@@ -15,20 +15,21 @@ of a command line. The kind of list file
 EXPAND_LIBS_LIST_STYLE variable: 'list' for MSVC style lists (@file.list)
 or 'linkerscript' for GNU ld linker scripts.
 See https://bugzilla.mozilla.org/show_bug.cgi?id=584474#c59 for more details.
 
 With the --symbol-order argument, followed by a file name, it will add the
 relevant linker options to change the order in which the linker puts the
 symbols appear in the resulting binary. Only works for ELF targets.
 '''
-from __future__ import with_statement
+from __future__ import print_function
 import sys
 import os
-from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
+from expandlibs import (ExpandArgs, relativize, isObject, ensureParentDir,
+                        ExpandLibsDeps)
 import expandlibs_config as conf
 from optparse import OptionParser
 import subprocess
 import tempfile
 import shutil
 import subprocess
 import re
 
@@ -87,20 +88,20 @@ class ExpandArgsMore(ExpandArgs):
     def makelist(self):
         '''Replaces object file names with a temporary list file, using a
         list format depending on the EXPAND_LIBS_LIST_STYLE variable
         '''
         objs = [o for o in self if isObject(o)]
         if not len(objs): return
         fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
         if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
-            content = ['INPUT("%s")\n' % obj for obj in objs]
+            content = ['INPUT("{0}")\n'.format(obj) for obj in objs]
             ref = tmp
         elif conf.EXPAND_LIBS_LIST_STYLE == "list":
-            content = ["%s\n" % obj for obj in objs]
+            content = ["{0}\n".format(obj) for obj in objs]
             ref = "@" + tmp
         else:
             os.close(fd)
             os.remove(tmp)
             return
         self.tmp.append(tmp)
         f = os.fdopen(fd, "w")
         f.writelines(content)
@@ -134,19 +135,23 @@ class ExpandArgsMore(ExpandArgs):
                     result[quoted[5]].append(quoted[1])
                 else:
                     result[quoted[5]] = [quoted[1]]
         return result
 
     def _getOrderedSections(self, ordered_symbols):
         '''Given an ordered list of symbols, returns the corresponding list
         of sections following the order.'''
-        if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
-        finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
+        if conf.EXPAND_LIBS_ORDER_STYLE not in ['linkerscript',
+                                                'section-ordering-file']:
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
+        finder = SectionFinder([arg for arg in self 
+                                if isObject(arg) or 
+                                os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
         folded = self._getFoldedSections()
         sections = set()
         ordered_sections = []
         for symbol in ordered_symbols:
             symbol_sections = finder.getSections(symbol)
             all_symbol_sections = []
             for section in symbol_sections:
                 if section in folded:
@@ -177,59 +182,64 @@ class ExpandArgsMore(ExpandArgs):
                     else:
                         split_sections[linked_section] = [s]
                     break
         content = []
         # Order is important
         linked_sections = [s for s in linked_sections if s in split_sections]
 
         if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
-            option = '-Wl,--section-ordering-file,%s'
+            option = '-Wl,--section-ordering-file,{0}'
             content = sections
             for linked_section in linked_sections:
                 content.extend(split_sections[linked_section])
-                content.append('%s.*' % linked_section)
+                content.append('{0}.*'.format(linked_section))
                 content.append(linked_section)
 
         elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
-            option = '-Wl,-T,%s'
+            option = '-Wl,-T,{0}'
             section_insert_before = dict(SECTION_INSERT_BEFORE)
             for linked_section in linked_sections:
-                content.append('SECTIONS {')
-                content.append('  %s : {' % linked_section)
-                content.extend('    *(%s)' % s for s in split_sections[linked_section])
-                content.append('  }')
-                content.append('}')
-                content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
+                content.append('SECTIONS {')
+                content.append('  {0} : {{'.format(linked_section))
+                content.extend('    *({0})'.format(s)
+                               for s in split_sections[linked_section])
+                content.append('  }')
+                content.append('}')
+                content.append('INSERT BEFORE {0}'
+                               .format(section_insert_before[linked_section]))
         else:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
 
         fd, tmp = tempfile.mkstemp(dir=os.curdir)
         f = os.fdopen(fd, "w")
         f.write('\n'.join(content)+'\n')
         f.close()
         self.tmp.append(tmp)
-        self.append(option % tmp)
+        self.append(option.format(tmp))
 
 class SectionFinder(object):
     '''Instances of this class allow to map symbol names to sections in
     object files.'''
 
     def __init__(self, objs):
         '''Creates an instance, given a list of object files.'''
         if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
         self.mapping = {}
         for obj in objs:
             if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
-                raise Exception('%s is not an object nor a static library' % obj)
+                raise Exception('{0} is not an object nor a static library'
+                                .format(obj))
             for symbol, section in SectionFinder._getSymbols(obj):
                 sym = SectionFinder._normalize(symbol)
                 if sym in self.mapping:
-                    if not section in self.mapping[sym]:
+                    if section not in self.mapping[sym]:
                         self.mapping[sym].append(section)
                 else:
                     self.mapping[sym] = [section]
 
     def getSections(self, symbol):
         '''Given a symbol, returns a list of sections containing it or the
         corresponding thunks. When the given symbol is a thunk, returns the
         list of sections containing its corresponding normal symbol and the
@@ -263,21 +273,21 @@ class SectionFinder(object):
             # and where the [FfO] flag is either F (function) or O (object).
             if len(tmp) > 1 and len(tmp[1]) > 6 and tmp[1][6] in ['O', 'F']:
                 tmp = tmp[1][8:].split()
                 # That gives us ["<section>","<length>", "<symbol>"]
                 syms.append((tmp[-1], tmp[0]))
         return syms
 
 def print_command(out, args):
-    print >>out, "Executing: " + " ".join(args)
+    print("Executing: " + " ".join(args), file=out)
     for tmp in [f for f in args.tmp if os.path.isfile(f)]:
-        print >>out, tmp + ":"
+        print(tmp + ":", file=out)
         with open(tmp) as file:
-            print >>out, "".join(["    " + l for l in file.readlines()])
+            print("".join(["    " + l for l in file.readlines()]), file=out)
     out.flush()
 
 def main():
     parser = OptionParser()
     parser.add_option("--depend", dest="depend", metavar="FILE",
         help="generate dependencies for the given execution and store it in the given file")
     parser.add_option("--target", dest="target", metavar="FILE",
         help="designate the target for dependencies")
@@ -318,13 +328,16 @@ def main():
         sys.stderr.write(stdout)
         sys.stderr.flush()
         if proc.returncode:
             exit(proc.returncode)
     if not options.depend:
         return
     ensureParentDir(options.depend)
     with open(options.depend, 'w') as depfile:
-        depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
+        depfile.write("{0} : {1}\n"
+                      .format(options.target, ' '.join(dep for dep in deps 
+                                                       if os.path.isfile(dep) and 
+                                                       dep != options.target)))
 
 
 if __name__ == '__main__':
     main()
--- a/config/expandlibs_gen.py
+++ b/config/expandlibs_gen.py
@@ -1,47 +1,47 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
 '''Given a list of object files and library names, prints a library
 descriptor to standard output'''
+from __future__ import print_function
 
-from __future__ import with_statement
 import sys
 import os
 import expandlibs_config as conf
 from expandlibs import LibDescriptor, isObject, ensureParentDir, ExpandLibsDeps
 from optparse import OptionParser
 
 def generate(args):
     desc = LibDescriptor()
     for arg in args:
         if isObject(arg):
             if os.path.exists(arg):
                 desc['OBJS'].append(os.path.abspath(arg))
             else:
-                raise Exception("File not found: %s" % arg)
+                raise Exception("File not found: {0}".format(arg))
         elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
             if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
                 desc['LIBS'].append(os.path.abspath(arg))
             else:
-                raise Exception("File not found: %s" % arg)
+                raise Exception("File not found: {0}".format(arg))
     return desc
 
 if __name__ == '__main__':
     parser = OptionParser()
     parser.add_option("--depend", dest="depend", metavar="FILE",
         help="generate dependencies for the given execution and store it in the given file")
     parser.add_option("-o", dest="output", metavar="FILE",
         help="send output to the given file")
 
     (options, args) = parser.parse_args()
     if not options.output:
         raise Exception("Missing option: -o")
 
     ensureParentDir(options.output)
     with open(options.output, 'w') as outfile:
-        print >>outfile, generate(args)
+        print(generate(args), file=outfile)
     if options.depend:
         ensureParentDir(options.depend)
         with open(options.depend, 'w') as depfile:
-            depfile.write("%s : %s\n" % (options.output, ' '.join(ExpandLibsDeps(args))))
+            depfile.write("{0} : {1}\n".format(options.output, 
+                                               ' '.join(ExpandLibsDeps(args))))
--- a/config/find_OOM_errors.py
+++ b/config/find_OOM_errors.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
+from __future__ import print_function
 
 usage = """%prog: A test for OOM conditions in the shell.
 
 %prog finds segfaults and other errors caused by incorrect handling of
 allocation during OOM (out-of-memory) conditions.
 """
 
 help = """Check for regressions only. This runs a set of files with a known
@@ -60,17 +60,17 @@ def run(args, stdin=None):
     stderr_worker = ThreadWorker(proc.stderr)
     stdout_worker.start()
     stderr_worker.start()
 
     proc.wait()
     stdout_worker.join()
     stderr_worker.join()
 
-  except KeyboardInterrupt, e:
+  except KeyboardInterrupt as e:
     sys.exit(-1)
 
   stdout, stderr = stdout_worker.all, stderr_worker.all
   result = (stdout, stderr, proc.returncode)
   return result
 
 def get_js_files():
   (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
@@ -96,17 +96,17 @@ def count_lines():
      prioritize the errors which occur most frequently."""
   counts = {}
   for string,count in blacklist.items():
     for line in string.split("\n"):
       counts[line] = counts.get(line, 0) + count
 
   lines = []
   for k,v in counts.items():
-    lines.append("%6d: %s" % (v,k))
+    lines.append("{0:6}: {1}".format(v, k))
 
   lines.sort()
 
   countlog = file("../OOM_count_log", "w")
   countlog.write("\n".join(lines))
   countlog.flush()
   countlog.close()
 
@@ -166,17 +166,17 @@ def clean_output(err):
 #####################################################################
 # Consts, etc
 #####################################################################
 
 command_template = 'shell/js' \
                  + ' -m -j -p' \
                  + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
                  + ' -f ../jit-test/lib/prolog.js' \
-                 + ' -f %s'
+                 + ' -f {0}'
 
 
 # Blacklists are things we don't want to see in our logs again (though we do
 # want to count them when they happen). Whitelists we do want to see in our
 # logs again, principally because the information we have isn't enough.
 
 blacklist = {}
 add_to_blacklist(r"('', '', 1)") # 1 means OOM if the shell hasn't launched yet.
@@ -217,31 +217,31 @@ if OPTIONS.regression == None:
   # Don't use a logfile, this is automated for tinderbox.
   log = file("../OOM_log", "w")
 
 
 num_failures = 0
 for f in files:
 
   # Run it once to establish boundaries
-  command = (command_template + ' -O') % (f)
+  command = (command_template + ' -O').format(f)
   out, err, exit = run(command)
   max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
   max = int(max)
   
   # OOMs don't recover well for the first 20 allocations or so.
   # TODO: revisit this.
   for i in range(20, max): 
 
     if OPTIONS.regression == None:
-      print "Testing allocation %d/%d in %s" % (i,max,f)
+      print("Testing allocation {0}/{1} in {2}".format(i,max,f))
     else:
       sys.stdout.write('.') # something short for tinderbox, no space or \n
 
-    command = (command_template + ' -A %d') % (f, i)
+    command = (command_template + ' -A {1}').format(f, i)
     out, err, exit = run(command)
 
     # Success (5 is SM's exit code for controlled errors)
     if exit == 5 and err.find("out of memory") != -1:
       continue
 
     # Failure
     else:
@@ -277,17 +277,19 @@ for f in files:
         continue
 
       add_to_blacklist(sans_alloc_sites)
 
       log.write ("\n")
       log.write ("\n")
       log.write ("=========================================================================")
       log.write ("\n")
-      log.write ("An allocation failure at\n\tallocation %d/%d in %s\n\tcauses problems (detected using bug 624094)" % (i, max, f))
+      log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
+                 "causes problems (detected using bug 624094)"
+                 .format(i, max, f))
       log.write ("\n")
       log.write ("\n")
 
       log.write ("Command (from obj directory, using patch from bug 624094):\n  " + command)
       log.write ("\n")
       log.write ("\n")
       log.write ("stdout, stderr, exitcode:\n  " + problem)
       log.write ("\n")
@@ -318,25 +320,33 @@ for f in files:
       log.write ("Valgrind info:\n" + vout)
       log.write ("\n")
       log.write ("\n")
       log.flush()
 
   if OPTIONS.regression == None:
     count_lines()
 
-print '\n',
+print()
 
 # Do the actual regression check
 if OPTIONS.regression != None:
   expected_num_failures = OPTIONS.regression
 
   if num_failures != expected_num_failures:
 
-    print "TEST-UNEXPECTED-FAIL |",
+    print("TEST-UNEXPECTED-FAIL |", end=' ')
     if num_failures > expected_num_failures:
-      print "More out-of-memory errors were found (%s) than expected (%d). This probably means an allocation site has been added without a NULL-check. If this is unavoidable, you can account for it by updating Makefile.in." % (num_failures, expected_num_failures),
+      print("More out-of-memory errors were found ({0}) than expected ({1}). "
+            "This probably means an allocation site has been added without a "
+            "NULL-check. If this is unavoidable, you can account for it by "
+            "updating Makefile.in.".format(num_failures, expected_num_failures),
+            end='')
     else:
-      print "Congratulations, you have removed %d out-of-memory error(s) (%d remain)! Please account for it by updating Makefile.in." % (expected_num_failures - num_failures, num_failures),
+      print("Congratulations, you have removed {0} out-of-memory error(s) "
+            "({1} remain)! Please account for it by updating Makefile.in." 
+            .format(expected_num_failures - num_failures, num_failures),
+            end='')
     sys.exit(-1)
   else:
-    print 'TEST-PASS | find_OOM_errors | Found the expected number of OOM errors (%d)' % (expected_num_failures)
+    print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
+          'errors ({0})'.format(expected_num_failures))
 
--- a/config/make-stl-wrappers.py
+++ b/config/make-stl-wrappers.py
@@ -1,12 +1,12 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
+from __future__ import print_function
 import os, re, string, sys
 
 def find_in_path(file, searchpath):
     for dir in searchpath.split(os.pathsep):
         f = os.path.join(dir, file)
         if os.path.exists(f):
             return f
     return ''
@@ -14,17 +14,17 @@ def find_in_path(file, searchpath):
 def header_path(header, compiler):
     if compiler == 'gcc':
         # we use include_next on gcc
         return header
     elif compiler == 'msvc':
         return find_in_path(header, os.environ.get('INCLUDE', ''))
     else:
         # hope someone notices this ...
-        raise NotImplementedError, compiler
+        raise NotImplementedError(compiler)
 
 def is_comment(line):
     return re.match(r'\s*#.*', line)
 
 def main(outdir, compiler, template_file, header_list_file):
     if not os.path.isdir(outdir):
         os.mkdir(outdir)
 
@@ -43,14 +43,14 @@ def main(outdir, compiler, template_file
                                                          HEADER_PATH=path,
                                                          NEW_HEADER_PATH=path_to_new))
         finally:
             f.close()
 
 
 if __name__ == '__main__':
     if 5 != len(sys.argv):
-        print >>sys.stderr, """Usage:
-  python %s OUT_DIR ('msvc'|'gcc') TEMPLATE_FILE HEADER_LIST_FILE
-"""% (sys.argv[0])
+        print("""Usage:
+  python {0} OUT_DIR ('msvc'|'gcc') TEMPLATE_FILE HEADER_LIST_FILE
+""".format(sys.argv[0]), file=sys.stderr)
         sys.exit(1)
 
     main(*sys.argv[1:])
--- a/config/mozunit.py
+++ b/config/mozunit.py
@@ -31,17 +31,17 @@ class _MozTestResult(_TestResult):
             return test.shortDescription() or str(test)
         else:
             return str(test)
 
     def addSuccess(self, test):
         _TestResult.addSuccess(self, test)
         filename = inspect.getfile(test.__class__)
         testname = test._testMethodName
-        self.stream.writeln("TEST-PASS | %s | %s" % (filename, testname))
+        self.stream.writeln("TEST-PASS | {0} | {1}".format(filename, testname))
 
     def addError(self, test, err):
         _TestResult.addError(self, test, err)
         self.printFail(test, err)
 
     def addFailure(self, test, err):
         _TestResult.addFailure(self, test, err)
         self.printFail(test,err)
@@ -49,23 +49,23 @@ class _MozTestResult(_TestResult):
     def printFail(self, test, err):
         exctype, value, tb = err
         # Skip test runner traceback levels
         while tb and self._is_relevant_tb_level(tb):
             tb = tb.tb_next
         if not tb:
             self.stream.writeln("TEST-UNEXPECTED-FAIL | NO TRACEBACK |")
         _f, _ln, _t = inspect.getframeinfo(tb)[:3]
-        self.stream.writeln("TEST-UNEXPECTED-FAIL | %s | line %d, %s: %s" % 
-                            (_f, _ln, _t, value.message))
+        self.stream.writeln("TEST-UNEXPECTED-FAIL | {0} | line {1}, {2}: {3}" 
+                            .format(_f, _ln, _t, value.message))
 
     def printErrorList(self):
         for test, err in self.errors:
-            self.stream.writeln("ERROR: %s" % self.getDescription(test))
-            self.stream.writeln("%s" % err)
+            self.stream.writeln("ERROR: {0}".format(self.getDescription(test)))
+            self.stream.writeln("{0}".format(err))
 
 
 class MozTestRunner(_TestRunner):
     def _makeResult(self):
         return _MozTestResult(self.stream, self.descriptions)
     def run(self, test):
         result = self._makeResult()
         test(result)
--- a/config/nsinstall.py
+++ b/config/nsinstall.py
@@ -4,17 +4,17 @@
 
 # This is a partial python port of nsinstall.
 # It's intended to be used when there's no natively compile nsinstall
 # available, and doesn't intend to be fully equivalent.
 # Its major use is for l10n repackaging on systems that don't have
 # a full build environment set up.
 # The basic limitation is, it doesn't even try to link and ignores
 # all related options.
-
+from __future__ import print_function
 from optparse import OptionParser
 import os
 import os.path
 import sys
 import shutil
 import stat
 
 def _nsinstall_internal(argv):
@@ -34,57 +34,58 @@ def _nsinstall_internal(argv):
   p.add_option('-L', action="store", metavar="linkprefix",
                help="Link prefix (ignored)")
   p.add_option('-X', action="append", metavar="file",
                help="Ignore a file when installing a directory recursively.")
 
   # The remaining arguments are not used in our tree, thus they're not
   # implented.
   def BadArg(option, opt, value, parser):
-    parser.error('option not supported: %s' % opt)
+    parser.error('option not supported: {0}'.format(opt))
     
   p.add_option('-C', action="callback", metavar="CWD",
                callback=BadArg,
                help="NOT SUPPORTED")
   p.add_option('-o', action="callback", callback=BadArg,
                help="Set owner (NOT SUPPORTED)", metavar="owner")
   p.add_option('-g', action="callback", callback=BadArg,
                help="Set group (NOT SUPPORTED)", metavar="group")
 
   (options, args) = p.parse_args(argv)
 
   if options.m:
     # mode is specified
     try:
       options.m = int(options.m, 8)
     except:
-      sys.stderr.write('nsinstall: ' + options.m + ' is not a valid mode\n')
+      sys.stderr.write('nsinstall: {0} is not a valid mode\n'
+                       .format(options.m))
       return 1
 
   # just create one directory?
   def maybe_create_dir(dir, mode, try_again):
     dir = os.path.abspath(dir)
     if os.path.exists(dir):
       if not os.path.isdir(dir):
-        print >> sys.stderr, ('nsinstall: %s is not a directory' % dir)
+        print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
         return 1
       if mode:
         os.chmod(dir, mode)
       return 0
 
     try:
       if mode:
         os.makedirs(dir, mode)
       else:
         os.makedirs(dir)
-    except Exception, e:
+    except Exception as e:
       # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
       if try_again:
         return maybe_create_dir(dir, mode, False)
-      print >> sys.stderr, ("nsinstall: failed to create directory %s: %s" % (dir, e))
+      print("nsinstall: failed to create directory {0}: {1}".format(dir, e),
+            file=sys.stderr)
       return 1
     else:
       return 0
 
   if options.X:
     options.X = [os.path.abspath(p) for p in options.X]
 
   if options.D:
--- a/config/printconfigsetting.py
+++ b/config/printconfigsetting.py
@@ -1,25 +1,26 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import print_function
 
 import configobj, sys
 
 try:
     (file, section, key) = sys.argv[1:]
 except ValueError:
-    print "Usage: printconfigsetting.py <file> <section> <setting>"
+    print("Usage: printconfigsetting.py <file> <section> <setting>")
     sys.exit(1)
 
 c = configobj.ConfigObj(file)
 
 try:
     s = c[section]
 except KeyError:
-    print >>sys.stderr, "Section [%s] not found." % section
+    print("Section [{0}] not found.".format(section), file=sys.stderr)
     sys.exit(1)
 
 try:
-    print s[key]
+    print(s[key])
 except KeyError:
-    print >>sys.stderr, "Key %s not found." % key
+    print("Key {0} not found.".format(key), file=sys.stderr)
     sys.exit(1)
--- a/config/printprereleasesuffix.py
+++ b/config/printprereleasesuffix.py
@@ -4,27 +4,28 @@
 
 # Prints the pre-release version suffix based on the version string
 #
 # Examples:
 # 2.1a3    > " 2.1 Alpha 3"
 # 2.1a3pre > ""
 # 3.2b4    > " 3.2 Beta 4"
 # 3.2b4pre > ""
+from __future__ import print_function
 
 import sys
 import re
 
 def get_prerelease_suffix(version):
   """ Returns the prerelease suffix from the version string argument """
 
   def mfunc(m):
-    return " %s %s %s" % (m.group('prefix'),
-                         {'a': 'Alpha', 'b': 'Beta'}[m.group('c')],
-                         m.group('suffix'))
+    return " {0} {1} {2}".format(m.group('prefix'),
+                                 {'a': 'Alpha', 'b': 'Beta'}[m.group('c')],
+                                 m.group('suffix'))
   result, c = re.subn(r'^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$',
                       mfunc, version)
   if c != 1:
     return ''
   return result
 
 if len(sys.argv) == 2:
-  print get_prerelease_suffix(sys.argv[1])
+  print(get_prerelease_suffix(sys.argv[1]))
--- a/config/tests/unit-JarMaker.py
+++ b/config/tests/unit-JarMaker.py
@@ -1,8 +1,9 @@
+from __future__ import print_function
 import unittest
 
 import os, sys, os.path, time, inspect
 from filecmp import dircmp
 from tempfile import mkdtemp
 from shutil import rmtree, copy2
 from StringIO import StringIO
 from zipfile import ZipFile
@@ -104,39 +105,39 @@ def is_symlink_to(dest, src):
             return False
         target = os.path.abspath(os.readlink(dest))
         abssrc = os.path.abspath(src)
         return target == abssrc
 
 class _TreeDiff(dircmp):
     """Helper to report rich results on difference between two directories.
     """
-    def _fillDiff(self, dc, rv, basepath="%s"):
-        rv['right_only'] += map(lambda l: basepath % l, dc.right_only)
-        rv['left_only'] += map(lambda l: basepath % l, dc.left_only)
-        rv['diff_files'] += map(lambda l: basepath % l, dc.diff_files)
-        rv['funny'] += map(lambda l: basepath % l, dc.common_funny)
-        rv['funny'] += map(lambda l: basepath % l, dc.funny_files)
+    def _fillDiff(self, dc, rv, basepath="{0}"):
+        rv['right_only'] += map(lambda l: basepath.format(l), dc.right_only)
+        rv['left_only'] += map(lambda l: basepath.format(l), dc.left_only)
+        rv['diff_files'] += map(lambda l: basepath.format(l), dc.diff_files)
+        rv['funny'] += map(lambda l: basepath.format(l), dc.common_funny)
+        rv['funny'] += map(lambda l: basepath.format(l), dc.funny_files)
         for subdir, _dc in dc.subdirs.iteritems():
-            self._fillDiff(_dc, rv, basepath % (subdir + "/%s"))
+            self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
     def allResults(self, left, right):
         rv = {'right_only':[], 'left_only':[],
               'diff_files':[], 'funny': []}
         self._fillDiff(self, rv)
         chunks = []
         if rv['right_only']:
-            chunks.append('%s only in %s' % (', '.join(rv['right_only']),
-                                            right))
+            chunks.append('{0} only in {1}'.format(', '.join(rv['right_only']),
+                                                   right))
         if rv['left_only']:
-            chunks.append('%s only in %s' % (', '.join(rv['left_only']),
-                                            left))
+            chunks.append('{0} only in {1}'.format(', '.join(rv['left_only']),
+                                                   left))
         if rv['diff_files']:
-            chunks.append('%s differ' % ', '.join(rv['diff_files']))
+            chunks.append('{0} differ'.format(', '.join(rv['diff_files'])))
         if rv['funny']:
-            chunks.append("%s don't compare" % ', '.join(rv['funny']))
+            chunks.append("{0} don't compare".format(', '.join(rv['funny'])))
         return '; '.join(chunks)
 
 class TestJarMaker(unittest.TestCase):
     """
     Unit tests for JarMaker.py
     """
     debug = False # set to True to debug failing tests on disk
     def setUp(self):
@@ -147,17 +148,17 @@ class TestJarMaker(unittest.TestCase):
         os.mkdir(self.builddir)
         self.refdir = os.path.join(self.tmpdir, 'ref')
         os.mkdir(self.refdir)
         self.stagedir = os.path.join(self.tmpdir, 'stage')
         os.mkdir(self.stagedir)
 
     def tearDown(self):
         if self.debug:
-            print self.tmpdir
+            print(self.tmpdir)
         elif sys.platform != "win32":
             # can't clean up on windows
             rmtree(self.tmpdir)
 
     def _jar_and_compare(self, infile, **kwargs):
         jm = JarMaker(outputFormat='jar')
         jardir = os.path.join(self.builddir, 'chrome')
         if 'topsourcedir' not in kwargs:
@@ -233,17 +234,17 @@ class TestJarMaker(unittest.TestCase):
         jm.sourcedirs = [self.srcdir]
         jm.topsourcedir = self.srcdir
         jardir = os.path.join(self.builddir, 'chrome')
         jm.makeJar(os.path.join(self.srcdir,'jar.mn'), jardir)
         # All we do is check that srcdir/bar points to builddir/chrome/test/dir/foo
         srcbar = os.path.join(self.srcdir, 'bar')
         destfoo = os.path.join(self.builddir, 'chrome', 'test', 'dir', 'foo')
         self.assertTrue(is_symlink_to(destfoo, srcbar),
-                        "%s is not a symlink to %s" % (destfoo, srcbar))
+                        "{0} is not a symlink to {1}".format(destfoo, srcbar))
 
 
 class Test_relativesrcdir(unittest.TestCase):
     def setUp(self):
         self.jm = JarMaker()
         self.jm.topsourcedir = '/TOPSOURCEDIR'
         self.jm.relativesrcdir = 'browser/locales'
         self.fake_empty_file = StringIO()
--- a/config/tests/unit-Preprocessor.py
+++ b/config/tests/unit-Preprocessor.py
@@ -1,9 +1,8 @@
-from __future__ import with_statement
 import unittest
 
 from StringIO import StringIO
 import os
 import sys
 import os.path
 from mozunit import main, MockedOpen
 
--- a/config/tests/unit-buildlist.py
+++ b/config/tests/unit-buildlist.py
@@ -28,19 +28,23 @@ class TestBuildList(unittest.TestCase):
   def assertFileContains(self, filename, l):
     """Assert that the lines in the file |filename| are equal
     to the contents of the list |l|, in order."""
     l = l[:]
     f = open(filename, 'r')
     lines = [line.rstrip() for line in f.readlines()]
     f.close()
     for line in lines:
-      self.assert_(len(l) > 0, "ran out of expected lines! (expected '%s', got '%s')" % (l, lines))
+      self.assert_(len(l) > 0,
+                   "ran out of expected lines! (expected '{0}', got '{1}')"
+                   .format(l, lines))
       self.assertEqual(line, l.pop(0))
-    self.assert_(len(l) == 0, "not enough lines in file! (expected '%s', got '%s'" % (l, lines))
+    self.assert_(len(l) == 0, 
+                 "not enough lines in file! (expected '{0}',"
+                 " got '{1}'".format(l, lines))
 
   def test_basic(self):
     "Test that addEntriesToListFile works when file doesn't exist."
     testfile = os.path.join(self.tmpdir, "test.list")
     l = ["a", "b", "c"]
     addEntriesToListFile(testfile, l)
     self.assertFileContains(testfile, l)
     # ensure that attempting to add the same entries again doesn't change it
--- a/config/tests/unit-expandlibs.py
+++ b/config/tests/unit-expandlibs.py
@@ -1,9 +1,8 @@
-from __future__ import with_statement
 import subprocess
 import unittest
 import sys
 import os
 import imp
 from tempfile import mkdtemp
 from shutil import rmtree
 import mozunit
@@ -67,31 +66,33 @@ class TestRelativize(unittest.TestCase):
         self.assertEqual(relativize(os.sep), os.sep)
         os.path.exists = os.path.exists
 
 class TestLibDescriptor(unittest.TestCase):
     def test_serialize(self):
         '''Test LibDescriptor's serialization'''
         desc = LibDescriptor()
         desc[LibDescriptor.KEYS[0]] = ['a', 'b']
-        self.assertEqual(str(desc), "%s = a b" % LibDescriptor.KEYS[0])
+        self.assertEqual(str(desc), "{0} = a b".format(LibDescriptor.KEYS[0]))
         desc['unsupported-key'] = ['a']
-        self.assertEqual(str(desc), "%s = a b" % LibDescriptor.KEYS[0])
+        self.assertEqual(str(desc), "{0} = a b".format(LibDescriptor.KEYS[0]))
         desc[LibDescriptor.KEYS[1]] = ['c', 'd', 'e']
-        self.assertEqual(str(desc), "%s = a b\n%s = c d e" % (LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]))
+        self.assertEqual(str(desc),
+                         "{0} = a b\n{1} = c d e"
+                         .format(LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]))
         desc[LibDescriptor.KEYS[0]] = []
-        self.assertEqual(str(desc), "%s = c d e" % (LibDescriptor.KEYS[1]))
+        self.assertEqual(str(desc), "{0} = c d e".format(LibDescriptor.KEYS[1]))
 
     def test_read(self):
         '''Test LibDescriptor's initialization'''
         desc_list = ["# Comment",
-                     "%s = a b" % LibDescriptor.KEYS[1],
+                     "{0} = a b".format(LibDescriptor.KEYS[1]),
                      "", # Empty line
                      "foo = bar", # Should be discarded
-                     "%s = c d e" % LibDescriptor.KEYS[0]]
+                     "{0} = c d e".format(LibDescriptor.KEYS[0])]
         desc = LibDescriptor(desc_list)
         self.assertEqual(desc[LibDescriptor.KEYS[1]], ['a', 'b'])
         self.assertEqual(desc[LibDescriptor.KEYS[0]], ['c', 'd', 'e'])
         self.assertEqual(False, 'foo' in desc)
 
 def wrap_method(conf, wrapped_method):
     '''Wrapper used to call a test with a specific configuration'''
     def _method(self):
@@ -224,17 +225,17 @@ class TestExpandArgsMore(TestExpandInit)
             self.assertRelEqual(args[:3], ['foo', '-bar'] + self.files[:1])
             self.assertRelEqual(args[4:], [self.files[3]] + self.files[5:] + [self.tmpfile('liby', Lib('z'))])
 
             # Check the list file content
             objs = [f for f in self.files + self.liby_files + self.libx_files if f.endswith(config.OBJ_SUFFIX)]
             if config.EXPAND_LIBS_LIST_STYLE == "linkerscript":
                 self.assertNotEqual(args[3][0], '@')
                 filename = args[3]
-                content = ['INPUT("%s")' % relativize(f) for f in objs]
+                content = ['INPUT("{0}")'.format(relativize(f)) for f in objs]
                 with open(filename, 'r') as f:
                     self.assertEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
             elif config.EXPAND_LIBS_LIST_STYLE == "list":
                 self.assertEqual(args[3][0], '@')
                 filename = args[3][1:]
                 content = objs
                 with open(filename, 'r') as f:
                     self.assertRelEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
@@ -251,17 +252,17 @@ class TestExpandArgsMore(TestExpandInit)
         def call(args, **kargs):
             # The command called is always AR_EXTRACT
             ar_extract = config.AR_EXTRACT.split()
             self.assertRelEqual(args[:len(ar_extract)], ar_extract)
             # Remaining argument is always one library
             self.assertRelEqual([os.path.splitext(arg)[1] for arg in args[len(ar_extract):]], [config.LIB_SUFFIX])
             # Simulate AR_EXTRACT extracting one object file for the library
             lib = os.path.splitext(os.path.basename(args[len(ar_extract)]))[0]
-            extracted[lib] = os.path.join(kargs['cwd'], "%s" % Obj(lib))
+            extracted[lib] = os.path.join(kargs['cwd'], "{0}".format(Obj(lib)))
             self.touch([extracted[lib]])
         subprocess.call = call
 
         # ExpandArgsMore does the same as ExpandArgs
         self.touch([self.tmpfile('liby', Lib('y'))])
         with ExpandArgsMore(['foo', '-bar'] + self.arg_files + [self.tmpfile('liby', Lib('y'))]) as args:
             self.assertRelEqual(args, ['foo', '-bar'] + self.files + [self.tmpfile('liby', Lib('y'))])
 
--- a/config/tests/unit-mozunit.py
+++ b/config/tests/unit-mozunit.py
@@ -1,13 +1,12 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import with_statement
 import sys
 import os
 from mozunit import main, MockedOpen
 import unittest
 from tempfile import mkstemp
 
 class TestMozUnit(unittest.TestCase):
     def test_mocked_open(self):
--- a/config/tests/unit-nsinstall.py
+++ b/config/tests/unit-nsinstall.py
@@ -117,17 +117,18 @@ class TestNsinstall(unittest.TestCase):
     if sys.platform != "win32":
         # can't run this test on windows, don't have real file modes there
         def test_nsinstall_m(self):
             "Test that nsinstall -m works (set mode)"
             testfile = self.touch("testfile")
             mode = 0600
             os.chmod(testfile, mode)
             testdir = self.mkdirs("testdir")
-            self.assertEqual(nsinstall(["-m", "%04o" % mode, testfile, testdir]), 0)
+            self.assertEqual(nsinstall(["-m", "{0:04o}"
+                                        .format(mode), testfile, testdir]), 0)
             destfile = os.path.join(testdir, "testfile")
             self.assert_(os.path.isfile(destfile))
             self.assertEqual(os.stat(testfile).st_mode,
                              os.stat(destfile).st_mode)
 
     def test_nsinstall_d(self):
         "Test that nsinstall -d works (create directories in target)"
         # -d makes no sense to me, but ok!
--- a/config/tests/unit-writemozinfo.py
+++ b/config/tests/unit-writemozinfo.py
@@ -1,16 +1,15 @@
 #!/usr/bin/env python
-from __future__ import with_statement
 import unittest
-import os, sys, time, tempfile
+import json, os, sys, time, tempfile
 from StringIO import StringIO
 import mozunit
 
-from writemozinfo import build_dict, write_json, JsonValue, jsonify
+from writemozinfo import build_dict, write_json
 
 class TestBuildDict(unittest.TestCase):
     def testMissing(self):
         """
         Test that missing required values raises.
         """
         self.assertRaises(Exception, build_dict, {})
         self.assertRaises(Exception, build_dict, {'OS_TARGET':'foo'})
@@ -154,55 +153,16 @@ class TestBuildDict(unittest.TestCase):
         self.assertEqual(False, d['crashreporter'])
         
         d = build_dict({'OS_TARGET':'Linux',
                         'TARGET_CPU':'i386',
                         'MOZ_WIDGET_TOOLKIT':'gtk2',
                         'MOZ_CRASHREPORTER':'1'})
         self.assertEqual(True, d['crashreporter'])
 
-class TestJsonValue(unittest.TestCase):
-    def testNone(self):
-        self.assertEqual("null", repr(JsonValue(None)))
-        
-    def testBool(self):
-        self.assertEqual("true", repr(JsonValue(True)))
-        self.assertEqual("false", repr(JsonValue(False)))
-
-    def testStr(self):
-        self.assertEqual("'abc'", repr(JsonValue("abc")))
-
-    def testInt(self):
-        self.assertEqual("100", repr(JsonValue(100)))
-
-    def testInvalid(self):
-        self.assertRaises(Exception, JsonValue, unicode("abc"))
-        self.assertRaises(Exception, JsonValue, 123.45)
-
-def parse_json(j):
-    """
-    Awful hack to parse a restricted subset of JSON strings into Python dicts.
-    """
-    return eval(j, {'true':True,'false':False,'null':None})
-
-class TestJsonify(unittest.TestCase):
-    """
-    Test the jsonify function.
-    """
-    def testBasic(self):
-        """
-        Sanity check the set of accepted Python value types.
-        """
-        j = parse_json(jsonify({'a':True,'b':False,'c':None,'d':100,'e':"abc"}))
-        self.assertEquals(True, j['a'])
-        self.assertEquals(False, j['b'])
-        self.assertEquals(None, j['c'])
-        self.assertEquals(100, j['d'])
-        self.assertEquals("abc", j['e'])
-
 class TestWriteJson(unittest.TestCase):
     """
     Test the write_json function.
     """
     def setUp(self):
         fd, self.f = tempfile.mkstemp()
         os.close(fd)
 
@@ -212,30 +172,30 @@ class TestWriteJson(unittest.TestCase):
     def testBasic(self):
         """
         Test that writing to a file produces correct output.
         """
         write_json(self.f, env={'OS_TARGET':'WINNT',
                                 'TARGET_CPU':'i386',
                                 'MOZ_WIDGET_TOOLKIT':'windows'})
         with open(self.f) as f:
-            d = parse_json(f.read())
+            d = json.load(f)
             self.assertEqual('win', d['os'])
             self.assertEqual('x86', d['processor'])
             self.assertEqual('windows', d['toolkit'])
             self.assertEqual(32, d['bits'])
 
     def testFileObj(self):
         """
         Test that writing to a file-like object produces correct output.
         """
         s = StringIO()
         write_json(s, env={'OS_TARGET':'WINNT',
                            'TARGET_CPU':'i386',
                            'MOZ_WIDGET_TOOLKIT':'windows'})
-        d = parse_json(s.getvalue())
+        d = json.loads(s.getvalue())
         self.assertEqual('win', d['os'])
         self.assertEqual('x86', d['processor'])
         self.assertEqual('windows', d['toolkit'])
         self.assertEqual(32, d['bits'])
 
 if __name__ == '__main__':
     mozunit.main()
--- a/config/tests/unitMozZipFile.py
+++ b/config/tests/unitMozZipFile.py
@@ -60,17 +60,17 @@ def prod(*iterables):
       yield [item]
     else:
       for others in prod(*iterables[1:]):
         yield [item] + others
 
 
 def getid(descs):
   'Convert a list of ints to a string.'
-  return reduce(lambda x,y: x+'%d%d'%tuple(y), descs,'')
+  return reduce(lambda x,y: x+'{0}{1}'.format(*tuple(y)), descs,'')
 
 
 def getContent(length):
   'Get pseudo random content of given length.'
   rv = [None] * length
   for i in xrange(length):
     rv[i] = random.choice(letters)
   return ''.join(rv)
@@ -181,17 +181,17 @@ for w in xrange(writes):
 # and then write all atomics again.
 # This should catch more or less all artifacts generated
 # by the final ordering step when closing the jar.
 files = [list(prod([i], xrange(lengths))) for i in xrange(len(leafs))]
 allfiles = reduce(lambda l,r:l+r,
                   [list(prod(*files[:(i+1)])) for i in xrange(len(leafs))])
 
 for first in allfiles:
-  testbasename = 'test%s_' % getid(first)
+  testbasename = 'test{0}_'.format(getid(first))
   test = [None, '_write' + getid(first), None]
   for second in atomics:
     test[0] = testbasename + getid([second])
     test[2] = '_write' + getid([second])
     setattr(TestExtensiveStored, test[0], createTester(*test))
 
 class TestExtensiveDeflated(TestExtensiveStored):
   'Test all that has been tested with ZIP_STORED with DEFLATED, too.'
--- a/config/utils.py
+++ b/config/utils.py
@@ -1,9 +1,9 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
+# This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 '''Utility methods to be used by python build infrastructure.
 '''
 
 import os
 import errno
@@ -19,17 +19,17 @@ class LockFile(object):
   '''
   def __init__(self, lockfile):
     self.lockfile = lockfile
   def __del__(self):
     while True:
       try:
         os.remove(self.lockfile)
         break
-      except OSError, e:
+      except OSError as e:
         if e.errno == errno.EACCES:
           # another process probably has the file open, we'll retry.
           # just a short sleep since we want to drop the lock ASAP
           # (but we need to let some other process close the file first)
           time.sleep(0.1)
         else:
           # re-raise unknown errors
           raise
@@ -39,64 +39,63 @@ def lockFile(lockfile, max_wait = 600):
 
   To release the lock, delete the returned object.
   '''
   while True:
     try:
       fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
       # we created the lockfile, so we're the owner
       break
-    except OSError, e:
-      if e.errno == errno.EEXIST or \
-         (sys.platform == "win32" and e.errno == errno.EACCES):
+    except OSError as e:
+      if (e.errno == errno.EEXIST or 
+          (sys.platform == "win32" and e.errno == errno.EACCES)):
         pass
       else:
         # should not occur
         raise
   
     try:
       # the lock file exists, try to stat it to get its age
       # and read its contents to report the owner PID
       f = open(lockfile, "r")
       s = os.stat(lockfile)
-    except EnvironmentError, e:
+    except EnvironmentError as e:
       if e.errno == errno.ENOENT or e.errno == errno.EACCES:
         # we didn't create the lockfile, so it did exist, but it's
         # gone now. Just try again
         continue
-      sys.exit("%s exists but stat() failed: %s" %
-               (lockfile, e.strerror))
+      sys.exit("{0} exists but stat() failed: {1}"
+               .format(lockfile, e.strerror))
   
     # we didn't create the lockfile and it's still there, check
     # its age
     now = int(time.time())
     if now - s[stat.ST_MTIME] > max_wait:
       pid = f.readline().rstrip()
-      sys.exit("%s has been locked for more than " \
-               "%d seconds (PID %s)" % (lockfile, max_wait,
-                                        pid))
+      sys.exit("{0} has been locked for more than "
+               "{1} seconds (PID {2})".format(lockfile, max_wait, pid))
   
     # it's not been locked too long, wait a while and retry
     f.close()
     time.sleep(1)
   
   # if we get here. we have the lockfile. Convert the os.open file
   # descriptor into a Python file object and record our PID in it
   
   f = os.fdopen(fd, "w")
-  f.write("%d\n" % os.getpid())
+  f.write("{0}\n".format(os.getpid()))
   f.close()
   return LockFile(lockfile)
 
 class pushback_iter(object):
   '''Utility iterator that can deal with pushed back elements.
 
   This behaves like a regular iterable, just that you can call
     iter.pushback(item)
-  to get the givem item as next item in the iteration.
+  to get the given item as next item in the iteration.
   '''
   def __init__(self, iterable):
     self.it = iter(iterable)
     self.pushed_back = []
 
   def __iter__(self):
     return self
 
--- a/config/writemozinfo.py
+++ b/config/writemozinfo.py
@@ -4,18 +4,22 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 #
 # This script is run during configure, taking variables set in configure
 # and producing a JSON file that describes some portions of the build
 # configuration, such as the target OS and CPU.
 #
 # The output file is intended to be used as input to the mozinfo package.
-from __future__ import with_statement
-import os, re, sys
+from __future__ import print_function
+import os
+import re
+import sys
+import json
+
 
 def build_dict(env=os.environ):
     """
     Build a dict containing data about the build configuration from
     the environment.
     """
     d = {}
     # Check that all required variables are present first.
@@ -66,54 +70,29 @@ def build_dict(env=os.environ):
 
     # debug
     d["debug"] = 'MOZ_DEBUG' in env and env['MOZ_DEBUG'] == '1'
 
     # crashreporter
     d["crashreporter"] = 'MOZ_CRASHREPORTER' in env and env['MOZ_CRASHREPORTER'] == '1'
     return d
 
-#TODO: replace this with the json module when Python >= 2.6 is a requirement.
-class JsonValue:
-    """
-    A class to serialize Python values into JSON-compatible representations.
-    """
-    def __init__(self, v):
-        if v is not None and not (isinstance(v,str) or isinstance(v,bool) or isinstance(v,int)):
-            raise Exception("Unhandled data type: %s" % type(v))
-        self.v = v
-    def __repr__(self):
-        if self.v is None:
-            return "null"
-        if isinstance(self.v,bool):
-            return str(self.v).lower()
-        return repr(self.v)
-
-def jsonify(d):
-    """
-    Return a JSON string of the dict |d|. Only handles a subset of Python
-    value types: bool, str, int, None.
-    """
-    jd = {}
-    for k, v in d.iteritems():
-        jd[k] = JsonValue(v)
-    return repr(jd)
-
 def write_json(file, env=os.environ):
     """
     Write JSON data about the configuration specified in |env|
     to |file|, which may be a filename or file-like object.
     See build_dict for information about what  environment variables are used,
     and what keys are produced.
     """
-    s = jsonify(build_dict(env))
+    build_conf = build_dict(env)
     if isinstance(file, basestring):
         with open(file, "w") as f:
-            f.write(s)
+            json.dump(build_conf, f)
     else:
-        file.write(s)
+        json.dump(build_conf, file)
+
 
 if __name__ == '__main__':
     try:
         write_json(sys.argv[1] if len(sys.argv) > 1 else sys.stdout)
-    except Exception, e:
-        print >>sys.stderr, str(e)
+    except Exception as e:
+        print(str(e), file=sys.stderr)
         sys.exit(1)
--- a/js/src/config/Expression.py
+++ b/js/src/config/Expression.py
@@ -213,17 +213,18 @@ class Expression:
     Error raised when parsing fails.
     It has two members, offset and content, which give the offset of the
     error and the offending content.
     """
     def __init__(self, expression):
       self.offset = expression.offset
       self.content = expression.content[:3]
     def __str__(self):
-      return 'Unexpected content at offset %i, "%s"'%(self.offset, self.content)
+      return 'Unexpected content at offset {0}, "{1}"'.format(self.offset, 
+                                                              self.content)
 
 class Context(dict):
   """
   This class holds variable values by subclassing dict, and while it
   truthfully reports True and False on
   
   name in context
   
--- a/js/src/config/Preprocessor.py
+++ b/js/src/config/Preprocessor.py
@@ -72,36 +72,38 @@ class Preprocessor:
       self.cmds[cmd] = (level, getattr(self, 'do_' + cmd))
     self.out = sys.stdout
     self.setMarker('#')
     self.LE = '\n'
     self.varsubst = re.compile('@(?P<VAR>\w+)@', re.U)
   
   def warnUnused(self, file):
     if self.actionLevel == 0:
-      sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
+      sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
     elif self.actionLevel == 1:
-      sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
+      sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
     pass
 
   def setLineEndings(self, aLE):
     """
     Set the line endings to be used for output.
     """
     self.LE = {'cr': '\x0D', 'lf': '\x0A', 'crlf': '\x0D\x0A'}[aLE]
   
   def setMarker(self, aMarker):
     """
     Set the marker to be used for processing directives.
     Used for handling CSS files, with pp.setMarker('%'), for example.
     The given marker may be None, in which case no markers are processed.
     """
     self.marker = aMarker
     if aMarker:
-      self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'%aMarker, re.U)
+      self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
+                                    .format(aMarker), 
+                                    re.U)
       self.comment = re.compile(aMarker, re.U)
     else:
       class NoMatch(object):
         def match(self, *args):
           return False
       self.instruction = self.comment = NoMatch()
   
   def clone(self):
@@ -124,19 +126,19 @@ class Preprocessor:
   def write(self, aLine):
     """
     Internal method for handling output.
     """
     if self.checkLineNumbers:
       self.writtenLines += 1
       ln = self.context['LINE']
       if self.writtenLines != ln:
-        self.out.write('//@line %(line)d "%(file)s"%(le)s'%{'line': ln,
-                                                            'file': self.context['FILE'],
-                                                            'le': self.LE})
+        self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
+                                                            file=self.context['FILE'],
+                                                            le=self.LE))
         self.writtenLines = ln
     filteredLine = self.applyFilters(aLine)
     if filteredLine != aLine:
       self.actionLevel = 2
     # ensure our line ending. Only need to handle \n, as we're reading
     # with universal line ending support, at least for files.
     filteredLine = re.sub('\n', self.LE, filteredLine)
     self.out.write(filteredLine)
--- a/js/src/config/check_source_count.py
+++ b/js/src/config/check_source_count.py
@@ -4,17 +4,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 # Usage: check_source_count.py SEARCH_TERM COUNT ERROR_LOCATION REPLACEMENT [FILES...]
 #   Checks that FILES contains exactly COUNT matches of SEARCH_TERM. If it does
 #   not, an error message is printed, quoting ERROR_LOCATION, which should
 #   probably be the filename and line number of the erroneous call to
 #   check_source_count.py.
-
+from __future__ import print_function
 import sys
 import os
 import re
 
 search_string = sys.argv[1]
 expected_count = int(sys.argv[2])
 error_location = sys.argv[3]
 replacement = sys.argv[4]
@@ -27,22 +27,31 @@ for f in files:
     text = file(f).read()
     match = re.findall(search_string, text)
     if match:
         num = len(match)
         count += num
         details[f] = num
 
 if count == expected_count:
-    print "TEST-PASS | check_source_count.py %s | %d" % (search_string, expected_count)
+    print("TEST-PASS | check_source_count.py {0} | {1}"
+          .format(search_string, expected_count))
 
 else:
-    print "TEST-UNEXPECTED-FAIL | check_source_count.py %s | " % (search_string),
+    print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
+          .format(search_string),
+          end='')
     if count < expected_count:
-        print "There are fewer occurrences of /%s/ than expected. This may mean that you have removed some, but forgotten to account for it %s." % (search_string, error_location)
+        print("There are fewer occurrences of /{0}/ than expected. "
+              "This may mean that you have removed some, but forgotten to "
+              "account for it {1}.".format(search_string, error_location))
     else:
-        print "There are more occurrences of /%s/ than expected. We're trying to prevent an increase in the number of %s's, using %s if possible. If it in unavoidable, you should update the expected count %s." % (search_string, search_string, replacement, error_location)
+        print("There are more occurrences of /{0}/ than expected. We're trying "
+              "to prevent an increase in the number of {1}'s, using {2} if "
+              "possible. If it is unavoidable, you should update the expected "
+              "count {3}.".format(search_string, search_string, replacement, 
+                                 error_location))
 
-    print "Expected: %d; found: %d" % (expected_count, count)
+    print("Expected: {0}; found: {1}".format(expected_count, count))
     for k in sorted(details):
-        print "Found %d occurences in %s" % (details[k],k)
+        print("Found {0} occurences in {1}".format(details[k],k))
     sys.exit(-1)
 
--- a/js/src/config/expandlibs.py
+++ b/js/src/config/expandlibs.py
@@ -21,27 +21,26 @@ given a list of files, expandlibs will r
 - If a ${DLL_PREFIX}${ROOT}.${DLL_SUFFIX} or
   ${DLL_PREFIX}${ROOT}.${IMPORT_LIB_SUFFIX} file exists, use that instead
 - If the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} file exists, use it
 - If a ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX}.${LIB_DESC_SUFFIX} file exists,
   replace ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} with the OBJS and LIBS the
   descriptor contains. And for each of these LIBS, also apply the same
   rules.
 '''
-from __future__ import with_statement
 import sys, os, errno
 import expandlibs_config as conf
 
 def ensureParentDir(file):
     '''Ensures the directory parent to the given file exists'''
     dir = os.path.dirname(file)
     if dir and not os.path.exists(dir):
         try:
             os.makedirs(dir)
-        except OSError, error:
+        except OSError as error:
             if error.errno != errno.EEXIST:
                 raise
 
 def relativize(path):
     '''Returns a path relative to the current working directory, if it is
     shorter than the given path'''
     def splitpath(path):
         dir, file = os.path.split(path)
@@ -85,17 +84,18 @@ class LibDescriptor(dict):
         if not content:
             return
         for key, value in [(s.strip() for s in item.split('=', 2)) for item in content if item.find('=') >= 0]:
             if key in self.KEYS:
                 self[key] = value.split()
 
     def __str__(self):
         '''Serializes the lib descriptor'''
-        return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
+        return '\n'.join('{0} = {1}'.format(k, ' '.join(self[k]))
+                         for k in self.KEYS if len(self[k]))
 
 class ExpandArgs(list):
     def __init__(self, args):
         '''Creates a clone of the |args| list and performs file expansion on
         each item it contains'''
         super(ExpandArgs, self).__init__()
         for arg in args:
             self += self._expand(arg)
@@ -130,9 +130,9 @@ class ExpandLibsDeps(ExpandArgs):
     '''Same as ExpandArgs, but also adds the library descriptor to the list'''
     def _expand_desc(self, arg):
         objs = super(ExpandLibsDeps, self)._expand_desc(arg)
         if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
             objs += [relativize(arg + conf.LIBS_DESC_SUFFIX)]
         return objs
 
 if __name__ == '__main__':
-    print " ".join(ExpandArgs(sys.argv[1:]))
+    print(" ".join(ExpandArgs(sys.argv[1:])))
--- a/js/src/config/expandlibs_exec.py
+++ b/js/src/config/expandlibs_exec.py
@@ -15,20 +15,21 @@ of a command line. The kind of list file
 EXPAND_LIBS_LIST_STYLE variable: 'list' for MSVC style lists (@file.list)
 or 'linkerscript' for GNU ld linker scripts.
 See https://bugzilla.mozilla.org/show_bug.cgi?id=584474#c59 for more details.
 
 With the --symbol-order argument, followed by a file name, it will add the
 relevant linker options to change the order in which the linker puts the
 symbols appear in the resulting binary. Only works for ELF targets.
 '''
-from __future__ import with_statement
+from __future__ import print_function
 import sys
 import os
-from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
+from expandlibs import (ExpandArgs, relativize, isObject, ensureParentDir,
+                        ExpandLibsDeps)
 import expandlibs_config as conf
 from optparse import OptionParser
 import subprocess
 import tempfile
 import shutil
 import subprocess
 import re
 
@@ -87,20 +88,20 @@ class ExpandArgsMore(ExpandArgs):
     def makelist(self):
         '''Replaces object file names with a temporary list file, using a
         list format depending on the EXPAND_LIBS_LIST_STYLE variable
         '''
         objs = [o for o in self if isObject(o)]
         if not len(objs): return
         fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
         if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
-            content = ['INPUT("%s")\n' % obj for obj in objs]
+            content = ['INPUT("{0}")\n'.format(obj) for obj in objs]
             ref = tmp
         elif conf.EXPAND_LIBS_LIST_STYLE == "list":
-            content = ["%s\n" % obj for obj in objs]
+            content = ["{0}\n".format(obj) for obj in objs]
             ref = "@" + tmp
         else:
             os.close(fd)
             os.remove(tmp)
             return
         self.tmp.append(tmp)
         f = os.fdopen(fd, "w")
         f.writelines(content)
@@ -134,19 +135,23 @@ class ExpandArgsMore(ExpandArgs):
                     result[quoted[5]].append(quoted[1])
                 else:
                     result[quoted[5]] = [quoted[1]]
         return result
 
     def _getOrderedSections(self, ordered_symbols):
         '''Given an ordered list of symbols, returns the corresponding list
         of sections following the order.'''
-        if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
-        finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
+        if conf.EXPAND_LIBS_ORDER_STYLE not in ['linkerscript',
+                                                'section-ordering-file']:
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
+        finder = SectionFinder([arg for arg in self 
+                                if isObject(arg) or 
+                                os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
         folded = self._getFoldedSections()
         sections = set()
         ordered_sections = []
         for symbol in ordered_symbols:
             symbol_sections = finder.getSections(symbol)
             all_symbol_sections = []
             for section in symbol_sections:
                 if section in folded:
@@ -177,59 +182,64 @@ class ExpandArgsMore(ExpandArgs):
                     else:
                         split_sections[linked_section] = [s]
                     break
         content = []
         # Order is important
         linked_sections = [s for s in linked_sections if s in split_sections]
 
         if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
-            option = '-Wl,--section-ordering-file,%s'
+            option = '-Wl,--section-ordering-file,{0}'
             content = sections
             for linked_section in linked_sections:
                 content.extend(split_sections[linked_section])
-                content.append('%s.*' % linked_section)
+                content.append('{0}.*'.format(linked_section))
                 content.append(linked_section)
 
         elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
-            option = '-Wl,-T,%s'
+            option = '-Wl,-T,{0}'
             section_insert_before = dict(SECTION_INSERT_BEFORE)
             for linked_section in linked_sections:
-                content.append('SECTIONS {')
-                content.append('  %s : {' % linked_section)
-                content.extend('    *(%s)' % s for s in split_sections[linked_section])
-                content.append('  }')
-                content.append('}')
-                content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
+                content.append('SECTIONS {')
+                content.append('  {0} : {{'.format(linked_section))
+                content.extend('    *({0})'.format(s)
+                               for s in split_sections[linked_section])
+                content.append('  }')
+                content.append('}')
+                content.append('INSERT BEFORE {0}'
+                               .format(section_insert_before[linked_section]))
         else:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
 
         fd, tmp = tempfile.mkstemp(dir=os.curdir)
         f = os.fdopen(fd, "w")
         f.write('\n'.join(content)+'\n')
         f.close()
         self.tmp.append(tmp)
-        self.append(option % tmp)
+        self.append(option.format(tmp))
 
 class SectionFinder(object):
     '''Instances of this class allow to map symbol names to sections in
     object files.'''
 
     def __init__(self, objs):
         '''Creates an instance, given a list of object files.'''
         if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
-            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
+            raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
+                            .format(conf.EXPAND_LIBS_ORDER_STYLE))
         self.mapping = {}
         for obj in objs:
             if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
-                raise Exception('%s is not an object nor a static library' % obj)
+                raise Exception('{0} is not an object nor a static library'
+                                .format(obj))
             for symbol, section in SectionFinder._getSymbols(obj):
                 sym = SectionFinder._normalize(symbol)
                 if sym in self.mapping:
-                    if not section in self.mapping[sym]:
+                    if section not in self.mapping[sym]:
                         self.mapping[sym].append(section)
                 else:
                     self.mapping[sym] = [section]
 
     def getSections(self, symbol):
         '''Given a symbol, returns a list of sections containing it or the
         corresponding thunks. When the given symbol is a thunk, returns the
         list of sections containing its corresponding normal symbol and the
@@ -263,21 +273,21 @@ class SectionFinder(object):
             # and where the [FfO] flag is either F (function) or O (object).
             if len(tmp) > 1 and len(tmp[1]) > 6 and tmp[1][6] in ['O', 'F']:
                 tmp = tmp[1][8:].split()
                 # That gives us ["<section>","<length>", "<symbol>"]
                 syms.append((tmp[-1], tmp[0]))
         return syms
 
 def print_command(out, args):
-    print >>out, "Executing: " + " ".join(args)
+    print("Executing: " + " ".join(args), file=out)
     for tmp in [f for f in args.tmp if os.path.isfile(f)]:
-        print >>out, tmp + ":"
+        print(tmp + ":", file=out)
         with open(tmp) as file:
-            print >>out, "".join(["    " + l for l in file.readlines()])
+            print("".join(["    " + l for l in file.readlines()]), file=out)
     out.flush()
 
 def main():
     parser = OptionParser()
     parser.add_option("--depend", dest="depend", metavar="FILE",
         help="generate dependencies for the given execution and store it in the given file")
     parser.add_option("--target", dest="target", metavar="FILE",
         help="designate the target for dependencies")
@@ -318,13 +328,16 @@ def main():
         sys.stderr.write(stdout)
         sys.stderr.flush()
         if proc.returncode:
             exit(proc.returncode)
     if not options.depend:
         return
     ensureParentDir(options.depend)
     with open(options.depend, 'w') as depfile:
-        depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
+        depfile.write("{0} : {1}\n"
+                      .format(options.target, ' '.join(dep for dep in deps 
+                                                       if os.path.isfile(dep) and 
+                                                       dep != options.target)))
 
 
 if __name__ == '__main__':
     main()
--- a/js/src/config/expandlibs_gen.py
+++ b/js/src/config/expandlibs_gen.py
@@ -1,47 +1,47 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
 '''Given a list of object files and library names, prints a library
descriptor to standard output'''
 
-from __future__ import with_statement
+from __future__ import print_function
 import sys
 import os
 import expandlibs_config as conf
 from expandlibs import LibDescriptor, isObject, ensureParentDir, ExpandLibsDeps
 from optparse import OptionParser
 
 def generate(args):
     desc = LibDescriptor()
     for arg in args:
         if isObject(arg):
             if os.path.exists(arg):
                 desc['OBJS'].append(os.path.abspath(arg))
             else:
-                raise Exception("File not found: %s" % arg)
+                raise Exception("File not found: {0}".format(arg))
         elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
             if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
                 desc['LIBS'].append(os.path.abspath(arg))
             else:
-                raise Exception("File not found: %s" % arg)
+                raise Exception("File not found: {0}".format(arg))
     return desc
 
 if __name__ == '__main__':
     parser = OptionParser()
     parser.add_option("--depend", dest="depend", metavar="FILE",
         help="generate dependencies for the given execution and store it in the given file")
     parser.add_option("-o", dest="output", metavar="FILE",
         help="send output to the given file")
 
     (options, args) = parser.parse_args()
     if not options.output:
         raise Exception("Missing option: -o")
 
     ensureParentDir(options.output)
     with open(options.output, 'w') as outfile:
-        print >>outfile, generate(args)
+        print(generate(args), file=outfile)
     if options.depend:
         ensureParentDir(options.depend)
         with open(options.depend, 'w') as depfile:
-            depfile.write("%s : %s\n" % (options.output, ' '.join(ExpandLibsDeps(args))))
+            depfile.write("{0} : {1}\n".format(options.output, 
+                                               ' '.join(ExpandLibsDeps(args))))
--- a/js/src/config/find_OOM_errors.py
+++ b/js/src/config/find_OOM_errors.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
+from __future__ import print_function
 
 usage = """%prog: A test for OOM conditions in the shell.
 
 %prog finds segfaults and other errors caused by incorrect handling of
 allocation during OOM (out-of-memory) conditions.
 """
 
 help = """Check for regressions only. This runs a set of files with a known
@@ -60,17 +60,17 @@ def run(args, stdin=None):
     stderr_worker = ThreadWorker(proc.stderr)
     stdout_worker.start()
     stderr_worker.start()
 
     proc.wait()
     stdout_worker.join()
     stderr_worker.join()
 
-  except KeyboardInterrupt, e:
+  except KeyboardInterrupt as e:
     sys.exit(-1)
 
   stdout, stderr = stdout_worker.all, stderr_worker.all
   result = (stdout, stderr, proc.returncode)
   return result
 
 def get_js_files():
   (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
@@ -96,17 +96,17 @@ def count_lines():
      prioritize the errors which occur most frequently."""
   counts = {}
   for string,count in blacklist.items():
     for line in string.split("\n"):
       counts[line] = counts.get(line, 0) + count
 
   lines = []
   for k,v in counts.items():
-    lines.append("%6d: %s" % (v,k))
+    lines.append("{0:6}: {1}".format(v, k))
 
   lines.sort()
 
   countlog = file("../OOM_count_log", "w")
   countlog.write("\n".join(lines))
   countlog.flush()
   countlog.close()
 
@@ -166,17 +166,17 @@ def clean_output(err):
 #####################################################################
 # Consts, etc
 #####################################################################
 
 command_template = 'shell/js' \
                  + ' -m -j -p' \
                  + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
                  + ' -f ../jit-test/lib/prolog.js' \
-                 + ' -f %s'
+                 + ' -f {0}'
 
 
 # Blacklists are things we don't want to see in our logs again (though we do
 # want to count them when they happen). Whitelists we do want to see in our
 # logs again, principally because the information we have isn't enough.
 
 blacklist = {}
 add_to_blacklist(r"('', '', 1)") # 1 means OOM if the shell hasn't launched yet.
@@ -217,31 +217,31 @@ if OPTIONS.regression == None:
   # Don't use a logfile, this is automated for tinderbox.
   log = file("../OOM_log", "w")
 
 
 num_failures = 0
 for f in files:
 
   # Run it once to establish boundaries
-  command = (command_template + ' -O') % (f)
+  command = (command_template + ' -O').format(f)
   out, err, exit = run(command)
   max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
   max = int(max)
   
   # OOMs don't recover well for the first 20 allocations or so.
   # TODO: revisit this.
   for i in range(20, max): 
 
     if OPTIONS.regression == None:
-      print "Testing allocation %d/%d in %s" % (i,max,f)
+      print("Testing allocation {0}/{1} in {2}".format(i,max,f))
     else:
       sys.stdout.write('.') # something short for tinderbox, no space or \n
 
-    command = (command_template + ' -A %d') % (f, i)
+    command = (command_template + ' -A {1}').format(f, i)
     out, err, exit = run(command)
 
     # Success (5 is SM's exit code for controlled errors)
     if exit == 5 and err.find("out of memory") != -1:
       continue
 
     # Failure
     else:
@@ -277,17 +277,19 @@ for f in files:
         continue
 
       add_to_blacklist(sans_alloc_sites)
 
       log.write ("\n")
       log.write ("\n")
       log.write ("=========================================================================")
       log.write ("\n")
-      log.write ("An allocation failure at\n\tallocation %d/%d in %s\n\tcauses problems (detected using bug 624094)" % (i, max, f))
+      log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
+                 "causes problems (detected using bug 624094)"
+                 .format(i, max, f))
       log.write ("\n")
       log.write ("\n")
 
       log.write ("Command (from obj directory, using patch from bug 624094):\n  " + command)
       log.write ("\n")
       log.write ("\n")
       log.write ("stdout, stderr, exitcode:\n  " + problem)
       log.write ("\n")
@@ -318,25 +320,33 @@ for f in files:
       log.write ("Valgrind info:\n" + vout)
       log.write ("\n")
       log.write ("\n")
       log.flush()
 
   if OPTIONS.regression == None:
     count_lines()
 
-print '\n',
+print()
 
 # Do the actual regression check
 if OPTIONS.regression != None:
   expected_num_failures = OPTIONS.regression
 
   if num_failures != expected_num_failures:
 
-    print "TEST-UNEXPECTED-FAIL |",
+    print("TEST-UNEXPECTED-FAIL |", end=' ')
     if num_failures > expected_num_failures:
-      print "More out-of-memory errors were found (%s) than expected (%d). This probably means an allocation site has been added without a NULL-check. If this is unavoidable, you can account for it by updating Makefile.in." % (num_failures, expected_num_failures),
+      print("More out-of-memory errors were found ({0}) than expected ({1}). "
+            "This probably means an allocation site has been added without a "
+            "NULL-check. If this is unavoidable, you can account for it by "
+            "updating Makefile.in.".format(num_failures, expected_num_failures),
+            end='')
     else:
-      print "Congratulations, you have removed %d out-of-memory error(s) (%d remain)! Please account for it by updating Makefile.in." % (expected_num_failures - num_failures, num_failures),
+      print("Congratulations, you have removed {0} out-of-memory error(s) "
+            "({1} remain)! Please account for it by updating Makefile.in." 
+            .format(expected_num_failures - num_failures, num_failures),
+            end='')
     sys.exit(-1)
   else:
-    print 'TEST-PASS | find_OOM_errors | Found the expected number of OOM errors (%d)' % (expected_num_failures)
+    print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
+          'errors ({0})'.format(expected_num_failures))
 
--- a/js/src/config/nsinstall.py
+++ b/js/src/config/nsinstall.py
@@ -4,17 +4,17 @@
 
 # This is a partial python port of nsinstall.
 # It's intended to be used when there's no natively compile nsinstall
 # available, and doesn't intend to be fully equivalent.
 # Its major use is for l10n repackaging on systems that don't have
 # a full build environment set up.
 # The basic limitation is, it doesn't even try to link and ignores
 # all related options.
-
+from __future__ import print_function
 from optparse import OptionParser
 import os
 import os.path
 import sys
 import shutil
 import stat
 
 def _nsinstall_internal(argv):
@@ -34,57 +34,58 @@ def _nsinstall_internal(argv):
   p.add_option('-L', action="store", metavar="linkprefix",
                help="Link prefix (ignored)")
   p.add_option('-X', action="append", metavar="file",
                help="Ignore a file when installing a directory recursively.")
 
   # The remaining arguments are not used in our tree, thus they're not
   # implented.
   def BadArg(option, opt, value, parser):
-    parser.error('option not supported: %s' % opt)
+    parser.error('option not supported: {0}'.format(opt))
     
   p.add_option('-C', action="callback", metavar="CWD",
                callback=BadArg,
                help="NOT SUPPORTED")
   p.add_option('-o', action="callback", callback=BadArg,
                help="Set owner (NOT SUPPORTED)", metavar="owner")
   p.add_option('-g', action="callback", callback=BadArg,
                help="Set group (NOT SUPPORTED)", metavar="group")
 
   (options, args) = p.parse_args(argv)
 
   if options.m:
     # mode is specified
     try:
       options.m = int(options.m, 8)
     except:
-      sys.stderr.write('nsinstall: ' + options.m + ' is not a valid mode\n')
+      sys.stderr.write('nsinstall: {0} is not a valid mode\n'
+                       .format(options.m))
       return 1
 
   # just create one directory?
   def maybe_create_dir(dir, mode, try_again):
     dir = os.path.abspath(dir)
     if os.path.exists(dir):
       if not os.path.isdir(dir):
-        print >> sys.stderr, ('nsinstall: %s is not a directory' % dir)
+        print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
         return 1
       if mode:
         os.chmod(dir, mode)
       return 0
 
     try:
       if mode:
         os.makedirs(dir, mode)
       else:
         os.makedirs(dir)
-    except Exception, e:
+    except Exception as e:
       # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
       if try_again:
         return maybe_create_dir(dir, mode, False)
-      print >> sys.stderr, ("nsinstall: failed to create directory %s: %s" % (dir, e))
+      print("nsinstall: failed to create directory {0}: {1}".format(dir, e),
+            file=sys.stderr)
       return 1
     else:
       return 0
 
   if options.X:
     options.X = [os.path.abspath(p) for p in options.X]
 
   if options.D: