Backed out changeset 7cb5f70b086d (bug 1377543) for breaking Android multi-locale builds. r=backout a=backout
author: Sebastian Hengst <archaeopteryx@coole-files.de>
date: Sun, 09 Jul 2017 23:29:32 +0200
changeset 367959 a418121d46250f91728b86d9eea331029c264c30
parent 367958 6e542037f6d4013f9e0cffd4e8d801790bbe091d
child 367963 615ab8b98b263e059355d31c514788e2ab54f064
push id: 32152
push user: archaeopteryx@coole-files.de
push date: Sun, 09 Jul 2017 21:29:50 +0000
treeherder: mozilla-central@a418121d4625 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: backout, backout
bugs: 1377543
milestone: 56.0a1
backs out: 7cb5f70b086d886007b2022da83c3056b9e5d311
first release with
nightly linux32
a418121d4625 / 56.0a1 / 20170710100245 / files
nightly linux64
a418121d4625 / 56.0a1 / 20170710100245 / files
nightly mac
a418121d4625 / 56.0a1 / 20170710100238 / files
nightly win32
a418121d4625 / 56.0a1 / 20170710030203 / files
nightly win64
a418121d4625 / 56.0a1 / 20170710030203 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Backed out changeset 7cb5f70b086d (bug 1377543) for breaking Android multi-locale builds. r=backout a=backout MozReview-Commit-ID: 1Ow65BnlErT
toolkit/mozapps/installer/find-dupes.py
--- a/toolkit/mozapps/installer/find-dupes.py
+++ b/toolkit/mozapps/installer/find-dupes.py
@@ -1,16 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import sys
 import hashlib
 import re
-import os
 from mozbuild.preprocessor import Preprocessor
 from mozbuild.util import DefinesAction
 from mozpack.packager.unpack import UnpackFinder
 from mozpack.files import DeflatedFile
 from collections import OrderedDict
 from StringIO import StringIO
 import argparse
 import buildconfig
@@ -28,21 +27,39 @@ def normalize_osx_path(p):
     >>> normalize_osx_path('Nightly.app/foo/bar/baz')
     'baz'
     '''
     bits = p.split('/')
     if len(bits) > 3 and bits[0].endswith('.app'):
         return '/'.join(bits[3:])
     return p
 
-def is_l10n_file(path):
-    return os.path.splitext(path)[1] in ['.properties', '.dtd', '.ftl']
+
+def normalize_l10n_path(p):
+    '''
+    Normalizes localized paths to en-US
+
+    >>> normalize_l10n_path('chrome/es-ES/locale/branding/brand.properties')
+    'chrome/en-US/locale/branding/brand.properties'
+    >>> normalize_l10n_path('chrome/fr/locale/fr/browser/aboutHome.dtd')
+    'chrome/en-US/locale/en-US/browser/aboutHome.dtd'
+    '''
+    # Keep a trailing slash here! e.g. locales like 'br' can transform
+    # 'chrome/br/locale/branding/' into 'chrome/en-US/locale/en-USanding/'
+    p = re.sub(r'chrome/(\S+)/locale/\1/',
+               'chrome/en-US/locale/en-US/',
+               p)
+    p = re.sub(r'chrome/(\S+)/locale/',
+               'chrome/en-US/locale/',
+               p)
+    return p
+
 
 def normalize_path(p):
-    return normalize_osx_path(p)
+    return normalize_osx_path(normalize_l10n_path(p))
 
 
 def find_dupes(source, allowed_dupes, bail=True):
     allowed_dupes = set(allowed_dupes)
     md5s = OrderedDict()
     for p, f in UnpackFinder(source):
         content = f.open().read()
         m = hashlib.md5(content).digest()
@@ -63,19 +80,17 @@ def find_dupes(source, allowed_dupes, ba
             print 'Duplicates %d bytes%s%s:' % (size,
                   ' (%d compressed)' % compressed if compressed != size else '',
                   ' (%d times)' % (len(paths) - 1) if len(paths) > 2 else '')
             print ''.join('  %s\n' % p for p in paths)
             total += (len(paths) - 1) * size
             total_compressed += (len(paths) - 1) * compressed
             num_dupes += 1
 
-            for p in paths:
-                if not is_l10n_file(p) and normalize_path(p) not in allowed_dupes:
-                    unexpected_dupes.append(p)
+            unexpected_dupes.extend([p for p in paths if normalize_path(p) not in allowed_dupes])
 
     if num_dupes:
         print "WARNING: Found %d duplicated files taking %d bytes (%s)" % \
               (num_dupes, total,
                '%d compressed' % total_compressed if total_compressed != total
                                                   else 'uncompressed')
 
     if unexpected_dupes: