Bug 1530587 - Don't optimize jars without preloading/reordering data. r=chmanchester
author: Mike Hommey <mh+mozilla@glandium.org>
date: Wed, 27 Feb 2019 01:26:46 +0000
changeset 519273 2bb574d4377e1d7f40d247519cac8fc586aecf19
parent 519161 b3e646df6c5e74f625bae22528ab6a6ff739033f
child 519274 2a034c02b842f00f2bad8e6a1894da33cd21def9
push id: 10862
push user: ffxbld-merge
push date: Mon, 11 Mar 2019 13:01:11 +0000
treeherder: mozilla-beta@a2e7f5c935da
reviewers: chmanchester
bugs: 1530587
milestone: 67.0a1
Bug 1530587 - Don't optimize jars without preloading/reordering data. r=chmanchester

Optimizing jars without preloading/reordering data only moves the jar central
directory to the beginning of the file, which, without preloading information,
is not very useful. Let's just stop doing it if there's not going to be
preloading/reordering information at all.

Differential Revision: https://phabricator.services.mozilla.com/D21170
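In practice, callers now simply omit the old optimize argument, and JarWriter only produces the Gecko-optimized layout when preload information is actually supplied. The sketch below is not part of the changeset; file names and contents are illustrative, while JarWriter, add(), preload() and compress_level come from the tree:

    from mozpack.mozjar import JarWriter

    # Regular archive: no preload information is recorded, so the central
    # directory is written at the end of the file, as in a standard zip/jar.
    with JarWriter(file='plain.jar', compress_level=5) as jar:
        jar.add('chrome.manifest', 'manifest chrome/chrome.manifest')

    # Optimized archive: entries passed to preload() are reordered to the
    # front, and a 4-byte preload size followed by the central directory is
    # written at the start of the file.
    with JarWriter(file='omni.ja') as jar:
        jar.add('hot.js', '// read at startup')
        jar.add('cold.js', '// rarely used')
        jar.preload(['hot.js'])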
build/windows_toolchain.py
python/mozbuild/mozbuild/action/package_fennec_apk.py
python/mozbuild/mozbuild/action/symbols_archive.py
python/mozbuild/mozbuild/action/test_archive.py
python/mozbuild/mozbuild/action/zip.py
python/mozbuild/mozbuild/artifacts.py
python/mozbuild/mozbuild/codecoverage/packager.py
python/mozbuild/mozpack/copier.py
python/mozbuild/mozpack/mozjar.py
python/mozbuild/mozpack/packager/formats.py
python/mozbuild/mozpack/packager/l10n.py
python/mozbuild/mozpack/packager/unpack.py
python/mozbuild/mozpack/test/test_mozjar.py
testing/tps/mach_commands.py
toolkit/mozapps/installer/packager.mk
toolkit/mozapps/installer/packager.py
toolkit/mozapps/installer/upload-files.mk
--- a/build/windows_toolchain.py
+++ b/build/windows_toolchain.py
@@ -230,17 +230,17 @@ def format_manifest(manifest):
     return b'\n'.join(sha256_lines)
 
 
 def write_zip(zip_path, prefix=None):
     """Write toolchain data to a zip file."""
     if isinstance(prefix, unicode): # noqa Special case for Python 2
         prefix = prefix.encode('utf-8')
 
-    with JarWriter(file=zip_path, optimize=False, compress_level=5) as zip:
+    with JarWriter(file=zip_path, compress_level=5) as zip:
         manifest = {}
         for p, data, mode in resolve_files_and_hash(manifest):
             print(p)
             if prefix:
                 p = mozpath.join(prefix, p)
 
             zip.add(p, data, mode=mode)
 
--- a/python/mozbuild/mozbuild/action/package_fennec_apk.py
+++ b/python/mozbuild/mozbuild/action/package_fennec_apk.py
@@ -25,17 +25,17 @@ import mozpack.path as mozpath
 
 
 def package_fennec_apk(inputs=[], omni_ja=None,
                        lib_dirs=[],
                        assets_dirs=[],
                        features_dirs=[],
                        root_files=[],
                        verbose=False):
-    jarrer = Jarrer(optimize=False)
+    jarrer = Jarrer()
 
     # First, take input files.  The contents of the later files overwrites the
     # content of earlier files.  Multidexing requires special care: we want a
     # coherent set of classesN.dex files, so we only take DEX files from a
     # single input.  This avoids taking, say, classes{1,2,3}.dex from the first
     # input and only classes{1,2}.dex from the second input, leading to
     # (potentially) duplicated symbols at runtime.
     last_input_with_dex_files = None
--- a/python/mozbuild/mozbuild/action/symbols_archive.py
+++ b/python/mozbuild/mozbuild/action/symbols_archive.py
@@ -14,17 +14,17 @@ import mozpack.path as mozpath
 
 def make_archive(archive_name, base, exclude, include):
     compress = ['**/*.sym']
     finder = FileFinder(base, ignore=exclude)
     if not include:
         include = ['*']
     archive_basename = os.path.basename(archive_name)
     with open(archive_name, 'wb') as fh:
-        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
+        with JarWriter(fileobj=fh, compress_level=5) as writer:
             for pat in include:
                 for p, f in finder.find(pat):
                     print('  Adding to "%s":\n\t"%s"' % (archive_basename, p))
                     should_compress = any(mozpath.match(p, pat) for pat in compress)
                     writer.add(p.encode('utf-8'), f, mode=f.mode,
                                compress=should_compress, skip_duplicates=True)
 
 def main(argv):
--- a/python/mozbuild/mozbuild/action/test_archive.py
+++ b/python/mozbuild/mozbuild/action/test_archive.py
@@ -770,17 +770,17 @@ def main(argv):
         # marginally larger sizes than higher values and is the sweet spot
         # for optimal compression. Read the detailed commit message that
         # introduced this for raw numbers.
         if out_file.endswith('.tar.gz'):
             files = dict(res)
             create_tar_gz_from_files(fh, files, compresslevel=5)
             file_count = len(files)
         elif out_file.endswith('.zip'):
-            with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
+            with JarWriter(fileobj=fh, compress_level=5) as writer:
                 for p, f in res:
                     writer.add(p.encode('utf-8'), f.read(), mode=f.mode,
                                skip_duplicates=True)
                     file_count += 1
         else:
             raise Exception('unhandled file extension: %s' % out_file)
 
     duration = time.time() - t_start
--- a/python/mozbuild/mozbuild/action/zip.py
+++ b/python/mozbuild/mozbuild/action/zip.py
@@ -26,17 +26,17 @@ def main(args):
                         help="Strip executables")
     parser.add_argument("-x", metavar='EXCLUDE', default=[], action='append',
                         help="Exclude files that match the pattern")
     parser.add_argument("zip", help="Path to zip file to write")
     parser.add_argument("input", nargs="+",
                         help="Path to files to add to zip")
     args = parser.parse_args(args)
 
-    jarrer = Jarrer(optimize=False)
+    jarrer = Jarrer()
 
     with errors.accumulate():
         finder = FileFinder(args.C, find_executables=args.strip)
         for path in args.input:
             for p, f in finder.find(path):
                 if not any([match(p, exclude) for exclude in args.x]):
                     jarrer.add(p, f)
         jarrer.copy(mozpath.join(args.C, args.zip))
--- a/python/mozbuild/mozbuild/artifacts.py
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -209,17 +209,17 @@ class ArtifactJob(object):
 
     def process_package_artifact(self, filename, processed_filename):
         raise NotImplementedError("Subclasses must specialize process_package_artifact!")
 
     def process_tests_zip_artifact(self, filename, processed_filename):
         from mozbuild.action.test_archive import OBJDIR_TEST_FILES
         added_entry = False
 
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             reader = JarReader(filename)
             for filename, entry in reader.entries.iteritems():
                 for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                     if not mozpath.match(filename, pattern):
                         continue
                     destpath = mozpath.relpath(filename, src_prefix)
                     destpath = mozpath.join(dest_prefix, destpath)
                     self.log(logging.INFO, 'artifact',
@@ -245,17 +245,17 @@ class ArtifactJob(object):
             raise ValueError('Archive format changed! No pattern from "{patterns}"'
                              'matched an archive path.'.format(
                                  patterns=LinuxArtifactJob.test_artifact_patterns))
 
     def process_tests_tar_artifact(self, filename, processed_filename):
         from mozbuild.action.test_archive import OBJDIR_TEST_FILES
         added_entry = False
 
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             with tarfile.open(filename) as reader:
                 for filename, entry in TarFinder(filename, reader):
                     for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                         if not mozpath.match(filename, pattern):
                             continue
 
                         destpath = mozpath.relpath(filename, src_prefix)
                         destpath = mozpath.join(dest_prefix, destpath)
@@ -279,27 +279,27 @@ class ArtifactJob(object):
                             writer.add(destpath.encode('utf-8'), entry.open(), mode=mode)
 
         if not added_entry:
             raise ValueError('Archive format changed! No pattern from "{patterns}"'
                              'matched an archive path.'.format(
                                  patterns=LinuxArtifactJob.test_artifact_patterns))
 
     def process_symbols_archive(self, filename, processed_filename):
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             reader = JarReader(filename)
             for filename in reader.entries:
                 destpath = mozpath.join('crashreporter-symbols', filename)
                 self.log(logging.INFO, 'artifact',
                          {'destpath': destpath},
                          'Adding {destpath} to processed archive')
                 writer.add(destpath.encode('utf-8'), reader[filename])
 
     def process_host_bin(self, filename, processed_filename):
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             # Turn 'HASH-mar.exe' into 'mar.exe'.  `filename` is a path on disk
             # without any of the path parts of the artifact, so we must inject
             # the desired `host/bin` prefix here.
             orig_basename = os.path.basename(filename).split('-', 1)[1]
             destpath = mozpath.join('host/bin', orig_basename)
             writer.add(destpath.encode('utf-8'), open(filename, 'rb'))
 
 
@@ -310,17 +310,17 @@ class AndroidArtifactJob(ArtifactJob):
     package_artifact_patterns = {
         'application.ini',
         'platform.ini',
         '**/*.so',
     }
 
     def process_package_artifact(self, filename, processed_filename):
         # Extract all .so files into the root, which will get copied into dist/bin.
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                 if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                     continue
 
                 dirname, basename = os.path.split(p)
                 self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                    'Adding {basename} to processed archive')
@@ -348,17 +348,17 @@ class LinuxArtifactJob(ArtifactJob):
         'firefox/plugin-container',
         'firefox/updater',
         'firefox/**/*.so',
     }
 
     def process_package_artifact(self, filename, processed_filename):
         added_entry = False
 
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             with tarfile.open(filename) as reader:
                 for p, f in UnpackFinder(TarFinder(filename, reader)):
                     if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                         continue
 
                     # We strip off the relative "firefox/" bit from the path,
                     # but otherwise preserve it.
                     destpath = mozpath.join('bin',
@@ -444,17 +444,17 @@ class MacArtifactJob(ArtifactJob):
                     'dependentlibs.list',
                     # 'firefox',
                     'gmp-clearkey/0.1/libclearkey.dylib',
                     # 'gmp-fake/1.0/libfake.dylib',
                     # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                 ]),
             ]
 
-            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+            with JarWriter(file=processed_filename, compress_level=5) as writer:
                 root, paths = paths_no_keep_path
                 finder = UnpackFinder(mozpath.join(source, root))
                 for path in paths:
                     for p, f in finder.find(path):
                         self.log(logging.INFO, 'artifact',
                             {'path': p},
                             'Adding {path} to processed archive')
                         destpath = mozpath.join('bin', os.path.basename(p))
@@ -509,17 +509,17 @@ class WinArtifactJob(ArtifactJob):
         ('bin/xpcshell.exe', ('bin', 'bin')),
         ('bin/plugins/gmp-*/*/*', ('bin/plugins', 'bin')),
         ('bin/plugins/*', ('bin/plugins', 'plugins')),
         ('bin/components/*', ('bin/components', 'bin/components')),
     }
 
     def process_package_artifact(self, filename, processed_filename):
         added_entry = False
-        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+        with JarWriter(file=processed_filename, compress_level=5) as writer:
             for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                 if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                     continue
 
                 # strip off the relative "firefox/" bit from the path:
                 basename = mozpath.relpath(p, "firefox")
                 basename = mozpath.join('bin', basename)
                 self.log(logging.INFO, 'artifact',
--- a/python/mozbuild/mozbuild/codecoverage/packager.py
+++ b/python/mozbuild/mozbuild/codecoverage/packager.py
@@ -38,17 +38,17 @@ def describe_install_manifest(manifest, 
 
 
 def package_coverage_data(root, output_file):
     # XXX JarWriter doesn't support unicode strings, see bug 1056859
     if isinstance(root, unicode):
         root = root.encode('utf-8')
 
     finder = FileFinder(root)
-    jarrer = Jarrer(optimize=False)
+    jarrer = Jarrer()
     for p, f in finder.find("**/*.gcno"):
         jarrer.add(p, f)
 
     dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                          '_build_manifests',
                                          'install',
                                          'dist_include')
     linked_files = describe_install_manifest(dist_include_manifest,
--- a/python/mozbuild/mozpack/copier.py
+++ b/python/mozbuild/mozpack/copier.py
@@ -502,23 +502,22 @@ class FileCopier(FileRegistry):
         return result
 
 
 class Jarrer(FileRegistry, BaseFile):
     '''
     FileRegistry with the ability to copy and pack the registered files as a
     jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
     '''
-    def __init__(self, compress=True, optimize=True):
+    def __init__(self, compress=True):
         '''
         Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
-        for details on the compress and optimize arguments.
+        for details on the compress argument.
         '''
         self.compress = compress
-        self.optimize = optimize
         self._preload = []
         self._compress_options = {}  # Map path to compress boolean option.
         FileRegistry.__init__(self)
 
     def add(self, path, content, compress=None):
         FileRegistry.add(self, path, content)
         if compress is not None:
             self._compress_options[path] = compress
@@ -569,18 +568,17 @@ class Jarrer(FileRegistry, BaseFile):
         from mozpack.mozjar import JarWriter, JarReader, JAR_BROTLI
         try:
             old_jar = JarReader(fileobj=dest)
         except Exception:
             old_jar = []
 
         old_contents = dict([(f.filename, f) for f in old_jar])
 
-        with JarWriter(fileobj=dest, compress=self.compress,
-                       optimize=self.optimize) as jar:
+        with JarWriter(fileobj=dest, compress=self.compress) as jar:
             for path, file in self:
                 compress = self._compress_options.get(path, self.compress)
                 # Temporary: Because l10n repacks can't handle brotli just yet,
                 # but need to be able to decompress those files, per
                 # UnpackFinder and formatters, we force deflate on them.
                 if compress == JAR_BROTLI and (
                         isinstance(file, ManifestFile) or
                         mozpath.basename(path) == 'install.rdf'):
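The same applies to Jarrer: with the optimize argument gone, calling Jarrer.preload() before copy() is what now determines whether the written jar uses the optimized layout. A small sketch, assuming the existing Jarrer.preload()/copy() API and mozpack.files.GeneratedFile; paths and contents are illustrative:

    from mozpack.copier import Jarrer
    from mozpack.files import GeneratedFile

    jarrer = Jarrer()  # no optimize argument anymore
    jarrer.add('content/hot.js', GeneratedFile('// preloaded'))
    jarrer.add('content/cold.js', GeneratedFile('// not preloaded'))
    # Only this preload() call makes copy() emit the Gecko-optimized layout;
    # without it the jar is laid out like a regular zip.
    jarrer.preload(['content/hot.js'])
    jarrer.copy('example.jar')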
--- a/python/mozbuild/mozpack/mozjar.py
+++ b/python/mozbuild/mozpack/mozjar.py
@@ -469,18 +469,17 @@ class JarReader(object):
 
 
 class JarWriter(object):
     '''
     Class with methods to write Jar files. Can write more-or-less standard jar
     archives as well as jar archives optimized for Gecko. See the documentation
     for the close() member function for a description of both layouts.
     '''
-    def __init__(self, file=None, fileobj=None, compress=True, optimize=True,
-                 compress_level=9):
+    def __init__(self, file=None, fileobj=None, compress=True, compress_level=9):
         '''
         Initialize a Jar archive in the given file. Use the given file-like
         object if one is given instead of opening the given file name.
         The compress option determines the default behavior for storing data
         in the jar archive. The optimize options determines whether the jar
         archive should be optimized for Gecko or not. ``compress_level``
         defines the zlib compression level. It must be a value between 0 and 9
         and defaults to 9, the highest and slowest level of compression.
@@ -490,17 +489,16 @@ class JarWriter(object):
         else:
             self._data = open(file, 'wb')
         if compress is True:
             compress = JAR_DEFLATED
         self._compress = compress
         self._compress_level = compress_level
         self._contents = OrderedDict()
         self._last_preloaded = None
-        self._optimize = optimize
 
     def __enter__(self):
         '''
         Context manager __enter__ method for JarWriter.
         '''
         return self
 
     def __exit__(self, type, value, tb):
@@ -560,32 +558,31 @@ class JarWriter(object):
         # Prepare end of central directory
         end = JarCdirEnd()
         end['disk_entries'] = len(self._contents)
         end['cdir_entries'] = end['disk_entries']
         end['cdir_size'] = reduce(lambda x, y: x + y[0].size,
                                   self._contents.values(), 0)
         # On optimized archives, store the preloaded size and the central
         # directory entries, followed by the first end of central directory.
-        if self._optimize:
+        if preload_size:
             end['cdir_offset'] = 4
             offset = end['cdir_size'] + end['cdir_offset'] + end.size
-            if preload_size:
-                preload_size += offset
+            preload_size += offset
             self._data.write(struct.pack('<I', preload_size))
             for entry, _ in self._contents.itervalues():
                 entry['offset'] += offset
                 self._data.write(entry.serialize())
             self._data.write(end.serialize())
         # Store local file entries followed by compressed data
         for entry, content in self._contents.itervalues():
             self._data.write(headers[entry].serialize())
             self._data.write(content)
         # On non optimized archives, store the central directory entries.
-        if not self._optimize:
+        if not preload_size:
             end['cdir_offset'] = offset
             for entry, _ in self._contents.itervalues():
                 self._data.write(entry.serialize())
         # Store the end of central directory.
         self._data.write(end.serialize())
         self._data.close()
 
     def add(self, name, data, compress=None, mode=None, skip_duplicates=False):
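The close() change above is the core of the patch: the optimized layout (preload size and central directory at the front of the file) is now emitted if and only if some preload data was recorded, rather than being gated on a separate optimize flag. A round-trip sketch of the resulting behavior, using the MockDest helper from the test suite and illustrative entry names (not part of the changeset):

    from mozpack.mozjar import JarReader, JarWriter
    from mozpack.test.test_files import MockDest

    # Without preload() the central directory stays at the end, as in a
    # plain zip, and the reader does not see an optimized archive.
    plain = MockDest()
    with JarWriter(fileobj=plain) as jar:
        jar.add('foo.js', 'foo')
        jar.add('bar.js', 'bar')
    assert not JarReader(fileobj=plain).is_optimized

    # With preload() the preloaded entries move to the front and the
    # optimized layout is written; the reader reports the last preloaded
    # entry, which is what UnpackFinder uses to reconstruct jar logs.
    optimized = MockDest()
    with JarWriter(fileobj=optimized) as jar:
        jar.add('hot.js', 'hot')
        jar.add('cold.js', 'cold')
        jar.preload(['hot.js'])
    reader = JarReader(fileobj=optimized)
    assert reader.is_optimized
    assert reader.last_preloaded == 'hot.js'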
--- a/python/mozbuild/mozpack/packager/formats.py
+++ b/python/mozbuild/mozpack/packager/formats.py
@@ -185,44 +185,42 @@ class FlatSubFormatter(object):
 
 class JarFormatter(PiecemealFormatter):
     '''
     Formatter for the jar package format. Assumes manifest entries related to
     chrome are registered before the chrome data files are added. Also assumes
     manifest entries for resources are registered after chrome manifest
     entries.
     '''
-    def __init__(self, copier, compress=True, optimize=True):
+    def __init__(self, copier, compress=True):
         PiecemealFormatter.__init__(self, copier)
         self._compress=compress
-        self._optimize=optimize
 
     def _add_base(self, base, addon=False):
         if addon is True:
-            jarrer = Jarrer(self._compress, self._optimize)
+            jarrer = Jarrer(self._compress)
             self.copier.add(base + '.xpi', jarrer)
             self._sub_formatter[base] = FlatSubFormatter(jarrer)
         else:
             self._sub_formatter[base] = JarSubFormatter(
                 FileRegistrySubtree(base, self.copier),
-                self._compress, self._optimize)
+                self._compress)
 
 
 class JarSubFormatter(PiecemealFormatter):
     '''
     Sub-formatter for the jar package format. It is a PiecemealFormatter that
     dispatches between further sub-formatter for each of the jar files it
     dispatches the chrome data to, and a FlatSubFormatter for the non-chrome
     files.
     '''
-    def __init__(self, copier, compress=True, optimize=True):
+    def __init__(self, copier, compress=True):
         PiecemealFormatter.__init__(self, copier)
         self._frozen_chrome = False
         self._compress = compress
-        self._optimize = optimize
         self._sub_formatter[''] = FlatSubFormatter(copier)
 
     def _jarize(self, entry, relpath):
         '''
         Transform a manifest entry in one pointing to chrome data in a jar.
         Return the corresponding chrome path and the new entry.
         '''
         base = entry.base
@@ -234,66 +232,63 @@ class JarSubFormatter(PiecemealFormatter
         return chromepath, entry
 
     def add_manifest(self, entry):
         if isinstance(entry, ManifestChrome) and \
                 not urlparse(entry.relpath).scheme:
             chromepath, entry = self._jarize(entry, entry.relpath)
             assert not self._frozen_chrome
             if chromepath not in self._sub_formatter:
-                jarrer = Jarrer(self._compress, self._optimize)
+                jarrer = Jarrer(self._compress)
                 self.copier.add(chromepath + '.jar', jarrer)
                 self._sub_formatter[chromepath] = FlatSubFormatter(jarrer)
         elif isinstance(entry, ManifestResource) and \
                 not urlparse(entry.target).scheme:
             chromepath, new_entry = self._jarize(entry, entry.target)
             if chromepath in self._sub_formatter:
                 entry = new_entry
         PiecemealFormatter.add_manifest(self, entry)
 
 
 class OmniJarFormatter(JarFormatter):
     '''
     Formatter for the omnijar package format.
     '''
-    def __init__(self, copier, omnijar_name, compress=True, optimize=True,
-                 non_resources=()):
-        JarFormatter.__init__(self, copier, compress, optimize)
+    def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
+        JarFormatter.__init__(self, copier, compress)
         self._omnijar_name = omnijar_name
         self._non_resources = non_resources
 
     def _add_base(self, base, addon=False):
         if addon:
             JarFormatter._add_base(self, base, addon)
         else:
             # Initialize a chrome.manifest next to the omnijar file so that
             # there's always a chrome.manifest file, even an empty one.
             path = mozpath.normpath(mozpath.join(base, 'chrome.manifest'))
             if not self.copier.contains(path):
                 self.copier.add(path, ManifestFile(''))
             self._sub_formatter[base] = OmniJarSubFormatter(
                 FileRegistrySubtree(base, self.copier), self._omnijar_name,
-                self._compress, self._optimize, self._non_resources)
+                self._compress, self._non_resources)
 
 
 class OmniJarSubFormatter(PiecemealFormatter):
     '''
     Sub-formatter for the omnijar package format. It is a PiecemealFormatter
     that dispatches between a FlatSubFormatter for the resources data and
     another FlatSubFormatter for the other files.
     '''
-    def __init__(self, copier, omnijar_name, compress=True, optimize=True,
-                 non_resources=()):
+    def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
         PiecemealFormatter.__init__(self, copier)
         self._omnijar_name = omnijar_name
         self._compress = compress
-        self._optimize = optimize
         self._non_resources = non_resources
         self._sub_formatter[''] = FlatSubFormatter(copier)
-        jarrer = Jarrer(self._compress, self._optimize)
+        jarrer = Jarrer(self._compress)
         self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer)
 
     def _get_base(self, path):
         base = self._omnijar_name if self.is_resource(path) else ''
         # Only add the omnijar file if something ends up in it.
         if base and not self.copier.contains(base):
             self.copier.add(base, self._sub_formatter[base].copier)
         return base, path
--- a/python/mozbuild/mozpack/packager/l10n.py
+++ b/python/mozbuild/mozpack/packager/l10n.py
@@ -293,20 +293,18 @@ def repack(source, l10n, extra_l10n={}, 
             finders[base] = UnpackFinder(path)
         l10n_finder = ComposedFinder(finders)
     copier = FileCopier()
     compress = min(app_finder.compressed, JAR_DEFLATED)
     if app_finder.kind == 'flat':
         formatter = FlatFormatter(copier)
     elif app_finder.kind == 'jar':
         formatter = JarFormatter(copier,
-                                 optimize=app_finder.optimizedjars,
                                  compress=compress)
     elif app_finder.kind == 'omni':
         formatter = OmniJarFormatter(copier, app_finder.omnijar,
-                                     optimize=app_finder.optimizedjars,
                                      compress=compress,
                                      non_resources=non_resources)
 
     with errors.accumulate():
         _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
     copier.copy(source, skip_if_older=False)
     generate_precomplete(source)
--- a/python/mozbuild/mozpack/packager/unpack.py
+++ b/python/mozbuild/mozpack/packager/unpack.py
@@ -45,17 +45,16 @@ class UnpackFinder(BaseFinder):
             self._finder = source
         else:
             self._finder = FileFinder(source)
         self.base = self._finder.base
         self.files = FileRegistry()
         self.kind = 'flat'
         self.omnijar = None
         self.jarlogs = {}
-        self.optimizedjars = False
         self.compressed = False
 
         jars = set()
 
         for p, f in self._finder.find('*'):
             # Skip the precomplete file, which is generated at packaging time.
             if p == 'precomplete':
                 continue
@@ -136,18 +135,16 @@ class UnpackFinder(BaseFinder):
         return entry
 
     def _open_jar(self, path, file):
         '''
         Return a JarReader for the given BaseFile instance, keeping a log of
         the preloaded entries it has.
         '''
         jar = JarReader(fileobj=file.open())
-        if jar.is_optimized:
-            self.optimizedjars = True
         self.compressed = max(self.compressed, jar.compression)
         if jar.last_preloaded:
             jarlog = jar.entries.keys()
             self.jarlogs[path] = jarlog[:jarlog.index(jar.last_preloaded) + 1]
         return jar
 
     def find(self, path):
         for p in self.files.match(path):
--- a/python/mozbuild/mozpack/test/test_mozjar.py
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -134,21 +134,19 @@ class TestDeflater(unittest.TestCase):
 
 
 class TestDeflaterMemoryView(TestDeflater):
     def wrap(self, data):
         return memoryview(data)
 
 
 class TestJar(unittest.TestCase):
-    optimize = False
-
     def test_jar(self):
         s = MockDest()
-        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+        with JarWriter(fileobj=s) as jar:
             jar.add('foo', 'foo')
             self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
             jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
             jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
             jar.add('baz\\backslash', 'aaaaaaaaaaaaaaa')
 
         files = [j for j in JarReader(fileobj=s)]
 
@@ -167,18 +165,17 @@ class TestJar(unittest.TestCase):
         if os.sep == '\\':
             self.assertEqual(files[3].filename, 'baz/backslash',
                 'backslashes in filenames on Windows should get normalized')
         else:
             self.assertEqual(files[3].filename, 'baz\\backslash',
                 'backslashes in filenames on POSIX platform are untouched')
 
         s = MockDest()
-        with JarWriter(fileobj=s, compress=False,
-                       optimize=self.optimize) as jar:
+        with JarWriter(fileobj=s, compress=False) as jar:
             jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
             jar.add('foo', 'foo')
             jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)
 
         jar = JarReader(fileobj=s)
         files = [j for j in jar]
 
         self.assertEqual(files[0].filename, 'bar')
@@ -220,23 +217,23 @@ class TestJar(unittest.TestCase):
 
         files[2].seek(0)
         self.assertEqual(jar['baz/qux'].filename, files[2].filename)
         self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
         self.assertEqual(jar['baz/qux'].read(), files[2].read())
 
     def test_rejar(self):
         s = MockDest()
-        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+        with JarWriter(fileobj=s) as jar:
             jar.add('foo', 'foo')
             jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
             jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
 
         new = MockDest()
-        with JarWriter(fileobj=new, optimize=self.optimize) as jar:
+        with JarWriter(fileobj=new) as jar:
             for j in JarReader(fileobj=s):
                 jar.add(j.filename, j)
 
         jar = JarReader(fileobj=new)
         files = [j for j in jar]
 
         self.assertEqual(files[0].filename, 'foo')
         self.assertFalse(files[0].compressed)
@@ -247,33 +244,29 @@ class TestJar(unittest.TestCase):
         self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
 
         self.assertEqual(files[2].filename, 'baz/qux')
         self.assertTrue(files[2].compressed)
         self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
 
     def test_add_from_finder(self):
         s = MockDest()
-        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+        with JarWriter(fileobj=s) as jar:
             finder = FileFinder(test_data_path)
             for p, f in finder.find('test_data'):
                 jar.add('test_data', f)
 
         jar = JarReader(fileobj=s)
         files = [j for j in jar]
 
         self.assertEqual(files[0].filename, 'test_data')
         self.assertFalse(files[0].compressed)
         self.assertEqual(files[0].read(), 'test_data')
 
 
-class TestOptimizeJar(TestJar):
-    optimize = True
-
-
 class TestPreload(unittest.TestCase):
     def test_preload(self):
         s = MockDest()
         with JarWriter(fileobj=s) as jar:
             jar.add('foo', 'foo')
             jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
             jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
 
--- a/testing/tps/mach_commands.py
+++ b/testing/tps/mach_commands.py
@@ -22,14 +22,14 @@ class MachCommands(MachCommandBase):
         dest = os.path.join(dest or os.path.join(self.topobjdir, 'services', 'sync'), 'tps.xpi')
 
         if not os.path.exists(os.path.dirname(dest)):
             os.makedirs(os.path.dirname(dest))
 
         if os.path.isfile(dest):
             os.unlink(dest)
 
-        jarrer = Jarrer(optimize=False)
+        jarrer = Jarrer()
         for p, f in FileFinder(src).find('*'):
             jarrer.add(p, f)
         jarrer.copy(dest)
 
         print('Built TPS add-on as %s' % dest)
--- a/toolkit/mozapps/installer/packager.mk
+++ b/toolkit/mozapps/installer/packager.mk
@@ -29,17 +29,16 @@ stage-package: multilocale.txt locale-ma
 		--format $(MOZ_PACKAGER_FORMAT) \
 		$(addprefix --removals ,$(MOZ_PKG_REMOVALS)) \
 		$(if $(filter-out 0,$(MOZ_PKG_FATAL_WARNINGS)),,--ignore-errors) \
 		$(if $(MOZ_PACKAGER_MINIFY),--minify) \
 		$(if $(MOZ_PACKAGER_MINIFY_JS),--minify-js \
 		  $(addprefix --js-binary ,$(JS_BINARY)) \
 		) \
 		$(addprefix --jarlog ,$(wildcard $(JARLOG_FILE_AB_CD))) \
-		$(if $(OPTIMIZEJARS),--optimizejars) \
 		$(addprefix --compress ,$(JAR_COMPRESSION)) \
 		$(MOZ_PKG_MANIFEST) '$(DIST)' '$(DIST)'/$(MOZ_PKG_DIR)$(if $(MOZ_PKG_MANIFEST),,$(_BINPATH)) \
 		$(if $(filter omni,$(MOZ_PACKAGER_FORMAT)),$(if $(NON_OMNIJAR_FILES),--non-resource $(NON_OMNIJAR_FILES)))
 ifdef RUN_FIND_DUPES
 	$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/find-dupes.py $(DEFINES) $(ACDEFINES) $(MOZ_PKG_DUPEFLAGS) $(DIST)/$(MOZ_PKG_DIR)
 endif # RUN_FIND_DUPES
 ifndef MOZ_IS_COMM_TOPDIR
 	# Package mozharness
--- a/toolkit/mozapps/installer/packager.py
+++ b/toolkit/mozapps/installer/packager.py
@@ -205,18 +205,16 @@ def main():
     parser.add_argument('--minify-js', action='store_true',
                         help='Minify JavaScript files while packaging.')
     parser.add_argument('--js-binary',
                         help='Path to js binary. This is used to verify '
                         'minified JavaScript. If this is not defined, '
                         'minification verification will not be performed.')
     parser.add_argument('--jarlog', default='', help='File containing jar ' +
                         'access logs')
-    parser.add_argument('--optimizejars', action='store_true', default=False,
-                        help='Enable jar optimizations')
     parser.add_argument('--compress', choices=('none', 'deflate', 'brotli'),
                         default='deflate',
                         help='Use given jar compression (default: deflate)')
     parser.add_argument('manifest', default=None, nargs='?',
                         help='Manifest file name')
     parser.add_argument('source', help='Source directory')
     parser.add_argument('destination', help='Destination directory')
     parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
@@ -237,22 +235,21 @@ def main():
         'deflate': True,
         'brotli': JAR_BROTLI,
     }[args.compress]
 
     copier = FileCopier()
     if args.format == 'flat':
         formatter = FlatFormatter(copier)
     elif args.format == 'jar':
-        formatter = JarFormatter(copier, compress=compress, optimize=args.optimizejars)
+        formatter = JarFormatter(copier, compress=compress)
     elif args.format == 'omni':
         formatter = OmniJarFormatter(copier,
                                      buildconfig.substs['OMNIJAR_NAME'],
                                      compress=compress,
-                                     optimize=args.optimizejars,
                                      non_resources=args.non_resource)
     else:
         errors.fatal('Unknown format: %s' % args.format)
 
     # Adjust defines according to the requested format.
     if isinstance(formatter, OmniJarFormatter):
         defines['MOZ_OMNIJAR'] = 1
     elif 'MOZ_OMNIJAR' in defines:
--- a/toolkit/mozapps/installer/upload-files.mk
+++ b/toolkit/mozapps/installer/upload-files.mk
@@ -312,17 +312,16 @@ GARBAGE		+= $(DIST)/$(PACKAGE) $(PACKAGE
 
 PKG_ARG = , '$(pkg)'
 
 ifndef MOZ_PACKAGER_FORMAT
   MOZ_PACKAGER_FORMAT = $(error MOZ_PACKAGER_FORMAT is not set)
 endif
 
 ifneq (android,$(MOZ_WIDGET_TOOLKIT))
-  OPTIMIZEJARS = 1
   JAR_COMPRESSION ?= none
 endif
 
 # A js binary is needed to perform verification of JavaScript minification.
 # We can only use the built binary when not cross-compiling. Environments
 # (such as release automation) can provide their own js binary to enable
 # verification when cross-compiling.
 ifndef JS_BINARY