Merge m-c to m-i
authorPhil Ringnalda <philringnalda@gmail.com>
Thu, 21 Sep 2017 20:49:43 -0700
changeset 668889 0b39d5cbf3d1cf23fc162fb8b4e9261cbb3d6521
parent 668888 40023b79401d49e45e1eb53acb2b96976f24a939 (current diff)
parent 668887 97282b0c985aa1778ced171514d1ae61945c634c (diff)
child 668890 a33c563d5428ddc49e06d802ea04fbea69999e40
push id81146
push userbmo:topwu.tw@gmail.com
push dateFri, 22 Sep 2017 05:24:51 +0000
milestone58.0a1
Merge m-c to m-i MozReview-Commit-ID: LQmIsTRxble
mobile/android/services/src/main/java/org/mozilla/gecko/sync/DelayedWorkTracker.java
third_party/python/compare-locales/compare_locales/tests/test_webapps.py
third_party/python/compare-locales/compare_locales/webapps.py
toolkit/components/payments/content/paymentRequest.css
toolkit/components/payments/content/paymentRequest.js
toolkit/components/payments/content/paymentRequest.xhtml
--- a/browser/base/content/popup-notifications.inc
+++ b/browser/base/content/popup-notifications.inc
@@ -76,14 +76,20 @@
       <popupnotificationcontent class="addon-webext-perm-notification-content" orient="vertical">
         <description id="addon-webext-perm-header" class="addon-webext-perm-header"/>
         <description id="addon-webext-perm-text" class="addon-webext-perm-text"/>
         <label id="addon-webext-perm-intro" class="addon-webext-perm-text"/>
         <html:ul id="addon-webext-perm-list" class="addon-webext-perm-list"/>
       </popupnotificationcontent>
     </popupnotification>
 
+    <popupnotification id="addon-webext-defaultsearch-notification" hidden="true">
+      <popupnotificationcontent class="addon-webext-defaultsearch-notification-content" orient="vertical">
+        <description id="addon-webext-defaultsearch-text" class="addon-webext-perm-header"/>
+      </popupnotificationcontent>
+    </popupnotification>
+
     <popupnotification id="addon-installed-notification" hidden="true">
       <popupnotificationcontent class="addon-installed-notification-content" orient="vertical">
         <description id="addon-installed-notification-header"/>
         <description id="addon-installed-notification-message"/>
       </popupnotificationcontent>
     </popupnotification>
--- a/browser/components/extensions/ext-chrome-settings-overrides.js
+++ b/browser/components/extensions/ext-chrome-settings-overrides.js
@@ -1,29 +1,31 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+ /* globals windowTracker */
+
 "use strict";
 
 const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
 
 XPCOMUtils.defineLazyModuleGetter(this, "ExtensionPreferencesManager",
                                   "resource://gre/modules/ExtensionPreferencesManager.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "ExtensionSettingsStore",
                                   "resource://gre/modules/ExtensionSettingsStore.jsm");
 
 const DEFAULT_SEARCH_STORE_TYPE = "default_search";
 const DEFAULT_SEARCH_SETTING_NAME = "defaultSearch";
 
 const searchInitialized = () => {
+  if (Services.search.isInitialized) {
+    return;
+  }
   return new Promise(resolve => {
-    if (Services.search.isInitialized) {
-      resolve();
-    }
     const SEARCH_SERVICE_TOPIC = "browser-search-service";
     Services.obs.addObserver(function observer(subject, topic, data) {
       if (data != "init-complete") {
         return;
       }
 
       Services.obs.removeObserver(observer, SEARCH_SERVICE_TOPIC);
       resolve();
@@ -65,113 +67,144 @@ this.chrome_settings_overrides = class e
 
     await ExtensionSettingsStore.initialize();
     if (manifest.chrome_settings_overrides.homepage) {
       ExtensionPreferencesManager.setSetting(extension, "homepage_override",
                                              manifest.chrome_settings_overrides.homepage);
     }
     if (manifest.chrome_settings_overrides.search_provider) {
       await searchInitialized();
+      extension.callOnClose({
+        close: () => {
+          if (extension.shutdownReason == "ADDON_DISABLE" ||
+              extension.shutdownReason == "ADDON_UNINSTALL") {
+            switch (extension.shutdownReason) {
+              case "ADDON_DISABLE":
+                this.processDefaultSearchSetting("disable");
+                break;
+
+              case "ADDON_UNINSTALL":
+                this.processDefaultSearchSetting("removeSetting");
+                break;
+            }
+            // We shouldn't need to wait for search initialized here
+            // because the search service should be ready to go.
+            let engines = Services.search.getEnginesByExtensionID(extension.id);
+            for (let engine of engines) {
+              try {
+                Services.search.removeEngine(engine);
+              } catch (e) {
+                Components.utils.reportError(e);
+              }
+            }
+          }
+        },
+      });
+
       let searchProvider = manifest.chrome_settings_overrides.search_provider;
+      let engineName = searchProvider.name.trim();
       if (searchProvider.is_default) {
-        let engineName = searchProvider.name.trim();
         let engine = Services.search.getEngineByName(engineName);
         if (engine && Services.search.getDefaultEngines().includes(engine)) {
-          // Only add onclose handlers if we would definitely
-          // be setting the default engine.
-          extension.callOnClose({
-            close: () => {
-              switch (extension.shutdownReason) {
-                case "ADDON_DISABLE":
-                  this.processDefaultSearchSetting("disable");
-                  break;
-
-                case "ADDON_UNINSTALL":
-                  this.processDefaultSearchSetting("removeSetting");
-                  break;
-              }
-            },
-          });
-          if (extension.startupReason === "ADDON_INSTALL") {
-            let item = await ExtensionSettingsStore.addSetting(
-              extension, DEFAULT_SEARCH_STORE_TYPE, DEFAULT_SEARCH_SETTING_NAME, engineName, () => {
-                return Services.search.currentEngine.name;
-              });
-            Services.search.currentEngine = Services.search.getEngineByName(item.value);
-          } else if (extension.startupReason === "ADDON_ENABLE") {
-            this.processDefaultSearchSetting("enable");
-          }
-          // If we would have set the default engine,
-          // we don't allow a search provider to be added.
+          // Needs to be called every time to handle reenabling, but
+          // only sets default for install or enable.
+          await this.setDefault(engineName);
+          // For built in search engines, we don't do anything further
           return;
         }
-        Components.utils.reportError("is_default can only be used for built-in engines.");
-      }
-      let isCurrent = false;
-      let index = -1;
-      if (extension.startupReason === "ADDON_UPGRADE") {
-        let engines = Services.search.getEnginesByExtensionID(extension.id);
-        if (engines.length > 0) {
-          // There can be only one engine right now
-          isCurrent = Services.search.currentEngine == engines[0];
-          // Get position of engine and store it
-          index = Services.search.getEngines().indexOf(engines[0]);
-          Services.search.removeEngine(engines[0]);
-        }
       }
-      try {
-        let params = {
-          template: searchProvider.search_url,
-          iconURL: searchProvider.favicon_url,
-          alias: searchProvider.keyword,
-          extensionID: extension.id,
-          suggestURL: searchProvider.suggest_url,
-        };
-        Services.search.addEngineWithDetails(searchProvider.name.trim(), params);
-        if (extension.startupReason === "ADDON_UPGRADE") {
-          let engine = Services.search.getEngineByName(searchProvider.name.trim());
-          if (isCurrent) {
-            Services.search.currentEngine = engine;
-          }
-          if (index != -1) {
-            Services.search.moveEngine(engine, index);
+      this.addSearchEngine(searchProvider);
+      if (searchProvider.is_default) {
+        if (extension.startupReason === "ADDON_INSTALL") {
+          // Don't ask if it is already the current engine
+          let engine = Services.search.getEngineByName(engineName);
+          if (Services.search.currentEngine != engine) {
+            let allow = await new Promise(resolve => {
+              let subject = {
+                wrappedJSObject: {
+                  // This is a hack because we don't have the browser of
+                  // the actual install. This means the popup might show
+                  // in a different window. Will be addressed in a followup bug.
+                  browser: windowTracker.topWindow.gBrowser.selectedBrowser,
+                  name: this.extension.name,
+                  icon: this.extension.iconURL,
+                  currentEngine: Services.search.currentEngine.name,
+                  newEngine: engineName,
+                  resolve,
+                },
+              };
+              Services.obs.notifyObservers(subject, "webextension-defaultsearch-prompt");
+            });
+            if (!allow) {
+              return;
+            }
           }
         }
-      } catch (e) {
-        Components.utils.reportError(e);
+        // Needs to be called every time to handle reenabling, but
+        // only sets default for install or enable.
+        await this.setDefault(engineName);
+      } else if (ExtensionSettingsStore.hasSetting(
+                extension, DEFAULT_SEARCH_STORE_TYPE, DEFAULT_SEARCH_SETTING_NAME)) {
+        // is_default has been removed, but we still have a setting. Remove it.
+        // This won't cover the case where the entire search_provider is removed.
+        this.processDefaultSearchSetting("removeSetting");
       }
     }
-    // If the setting exists for the extension, but is missing from the manifest,
-    // remove it. This can happen if the extension removes is_default.
-    // There's really no good place to put this, because the entire search section
-    // could be removed.
-    // We'll never get here in the normal case because we always return early
-    // if we have an is_default value that we use.
-    if (ExtensionSettingsStore.hasSetting(
-               extension, DEFAULT_SEARCH_STORE_TYPE, DEFAULT_SEARCH_SETTING_NAME)) {
-      await searchInitialized();
-      this.processDefaultSearchSetting("removeSetting");
+  }
+
+  async setDefault(engineName) {
+    let {extension} = this;
+    if (extension.startupReason === "ADDON_INSTALL") {
+      let item = await ExtensionSettingsStore.addSetting(
+        extension, DEFAULT_SEARCH_STORE_TYPE, DEFAULT_SEARCH_SETTING_NAME, engineName, () => {
+          return Services.search.currentEngine.name;
+        });
+      Services.search.currentEngine = Services.search.getEngineByName(item.value);
+    } else if (extension.startupReason === "ADDON_ENABLE") {
+      this.processDefaultSearchSetting("enable");
     }
   }
-  async onShutdown(reason) {
+
+  addSearchEngine(searchProvider) {
     let {extension} = this;
-    if (reason == "ADDON_DISABLE" ||
-        reason == "ADDON_UNINSTALL") {
-      if (extension.manifest.chrome_settings_overrides.search_provider) {
-        await searchInitialized();
-        let engines = Services.search.getEnginesByExtensionID(extension.id);
-        for (let engine of engines) {
-          try {
-            Services.search.removeEngine(engine);
-          } catch (e) {
-            Components.utils.reportError(e);
-          }
+    let isCurrent = false;
+    let index = -1;
+    if (extension.startupReason === "ADDON_UPGRADE") {
+      let engines = Services.search.getEnginesByExtensionID(extension.id);
+      if (engines.length > 0) {
+        // There can be only one engine right now
+        isCurrent = Services.search.currentEngine == engines[0];
+        // Get position of engine and store it
+        index = Services.search.getEngines().indexOf(engines[0]);
+        Services.search.removeEngine(engines[0]);
+      }
+    }
+    try {
+      let params = {
+        template: searchProvider.search_url,
+        iconURL: searchProvider.favicon_url,
+        alias: searchProvider.keyword,
+        extensionID: extension.id,
+        suggestURL: searchProvider.suggest_url,
+      };
+      Services.search.addEngineWithDetails(searchProvider.name.trim(), params);
+      if (extension.startupReason === "ADDON_UPGRADE") {
+        let engine = Services.search.getEngineByName(searchProvider.name.trim());
+        if (isCurrent) {
+          Services.search.currentEngine = engine;
+        }
+        if (index != -1) {
+          Services.search.moveEngine(engine, index);
         }
       }
+    } catch (e) {
+      Components.utils.reportError(e);
+      return false;
     }
+    return true;
   }
 };
 
 ExtensionPreferencesManager.addSetting("homepage_override", {
   prefNames: [
     "browser.startup.homepage",
   ],
   setCallback(value) {
--- a/browser/components/extensions/test/browser/browser_ext_settings_overrides_default_search.js
+++ b/browser/components/extensions/test/browser/browser_ext_settings_overrides_default_search.js
@@ -50,42 +50,16 @@ add_task(async function test_extension_s
 
   is(Services.search.currentEngine.name, "DuckDuckGo", "Default engine is DuckDuckGo");
 
   await ext1.unload();
 
   is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
 });
 
-/* This tests that using an invalid engine does nothing. */
-add_task(async function test_extension_setting_invalid_name_default_engine() {
-  let defaultEngineName = Services.search.currentEngine.name;
-
-  let ext1 = ExtensionTestUtils.loadExtension({
-    manifest: {
-      "chrome_settings_overrides": {
-        "search_provider": {
-          "name": "InvalidName",
-          "search_url": "https://example.com/?q={searchTerms}",
-          "is_default": true,
-        },
-      },
-    },
-    useAddonManager: "temporary",
-  });
-
-  await ext1.startup();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-
-  await ext1.unload();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-});
-
 /* This tests that uninstalling add-ons maintains the proper
  * search default. */
 add_task(async function test_extension_setting_multiple_default_engine() {
   let defaultEngineName = Services.search.currentEngine.name;
   let ext1 = ExtensionTestUtils.loadExtension({
     manifest: {
       "chrome_settings_overrides": {
         "search_provider": {
@@ -170,66 +144,16 @@ add_task(async function test_extension_s
 
   is(Services.search.currentEngine.name, "Twitter", "Default engine is Twitter");
 
   await ext2.unload();
 
   is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
 });
 
-/* This tests adding an engine with one add-on and trying to make it the
- *default with anoth. */
-add_task(async function test_extension_setting_invalid_default_engine() {
-  let defaultEngineName = Services.search.currentEngine.name;
-  let ext1 = ExtensionTestUtils.loadExtension({
-    manifest: {
-      "chrome_settings_overrides": {
-        "search_provider": {
-          "name": "MozSearch",
-          "keyword": "MozSearch",
-          "search_url": "https://example.com/?q={searchTerms}",
-        },
-      },
-    },
-    useAddonManager: "temporary",
-  });
-
-  let ext2 = ExtensionTestUtils.loadExtension({
-    manifest: {
-      "chrome_settings_overrides": {
-        "search_provider": {
-          "name": "MozSearch",
-          "search_url": "https://example.com/?q={searchTerms}",
-          "is_default": true,
-        },
-      },
-    },
-    useAddonManager: "temporary",
-  });
-
-  await ext1.startup();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-
-  let engine = Services.search.getEngineByName("MozSearch");
-  ok(engine, "Engine should exist.");
-
-  await ext2.startup();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-
-  await ext2.unload();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-
-  await ext1.unload();
-
-  is(Services.search.currentEngine.name, defaultEngineName, `Default engine is ${defaultEngineName}`);
-});
-
 /* This tests that when the user changes the search engine and the add-on
  * is unistalled, search stays with the user's choice. */
 add_task(async function test_user_changing_default_engine() {
   let ext1 = ExtensionTestUtils.loadExtension({
     manifest: {
       "chrome_settings_overrides": {
         "search_provider": {
           "name": "DuckDuckGo",
--- a/browser/locales/en-US/chrome/browser/browser.properties
+++ b/browser/locales/en-US/chrome/browser/browser.properties
@@ -140,16 +140,26 @@ webextPerms.hostDescription.oneSite=Acce
 
 # LOCALIZATION NOTE (webextPerms.hostDescription.tooManySites)
 # Semi-colon list of plural forms.
 # See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
 # #1 will be replaced by an integer indicating the number of additional
 # hosts for which this webextension is requesting permission.
 webextPerms.hostDescription.tooManySites=Access your data on #1 other site;Access your data on #1 other sites
 
+# LOCALIZATION NOTE (webext.defaultSearch.description)
+# %1$S is replaced with the localized name of the extension that is asking to change the default search engine.
+# %2$S is replaced with the name of the current search engine
+# %3$S is replaced with the name of the new search engine
+webext.defaultSearch.description=%1$S would like to change your default search engine from %2$S to %3$S. Is that OK?
+webext.defaultSearchYes.label=Yes
+webext.defaultSearchYes.accessKey=Y
+webext.defaultSearchNo.label=No
+webext.defaultSearchNo.accessKey=N
+
 # LOCALIZATION NOTE (addonPostInstall.message)
 # %1$S is replaced with the localized named of the extension that was
 # just installed.
 # %2$S is replaced with the localized name of the application.
 addonPostInstall.message1=%1$S has been added to %2$S.
 
 # LOCALIZATION NOTE (addonPostInstall.messageDetail)
 # %1$S is replaced with the icon for the add-ons menu.
--- a/browser/modules/ExtensionsUI.jsm
+++ b/browser/modules/ExtensionsUI.jsm
@@ -41,16 +41,17 @@ this.ExtensionsUI = {
 
   async init() {
     this.histogram = Services.telemetry.getHistogramById("EXTENSION_INSTALL_PROMPT_RESULT");
 
     Services.obs.addObserver(this, "webextension-permission-prompt");
     Services.obs.addObserver(this, "webextension-update-permissions");
     Services.obs.addObserver(this, "webextension-install-notify");
     Services.obs.addObserver(this, "webextension-optional-permission-prompt");
+    Services.obs.addObserver(this, "webextension-defaultsearch-prompt");
 
     await Services.wm.getMostRecentWindow("navigator:browser").delayedStartupPromise;
 
     this._checkForSideloaded();
   },
 
   async _checkForSideloaded() {
     let sideloaded = await AddonManagerPrivate.getNewSideloads();
@@ -232,18 +233,31 @@ this.ExtensionsUI = {
         permissions,
       });
 
       // If we don't have any promptable permissions, just proceed
       if (strings.msgs.length == 0) {
         resolve(true);
         return;
       }
+      resolve(this.showPermissionsPrompt(browser, strings, icon));
+    } else if (topic == "webextension-defaultsearch-prompt") {
+      let {browser, name, icon, resolve, currentEngine, newEngine} = subject.wrappedJSObject;
 
-      resolve(this.showPermissionsPrompt(browser, strings, icon));
+      let bundle = Services.strings.createBundle(BROWSER_PROPERTIES);
+
+      let strings = {};
+      strings.acceptText = bundle.GetStringFromName("webext.defaultSearchYes.label");
+      strings.acceptKey = bundle.GetStringFromName("webext.defaultSearchYes.accessKey");
+      strings.cancelText = bundle.GetStringFromName("webext.defaultSearchNo.label");
+      strings.cancelKey = bundle.GetStringFromName("webext.defaultSearchNo.accessKey");
+      let addonName = `<span class="addon-webext-name">${this._sanitizeName(name)}</span>`;
+      strings.text = bundle.formatStringFromName("webext.defaultSearch.description",
+                                               [addonName, currentEngine, newEngine], 3);
+      resolve(this.showDefaultSearchPrompt(browser, strings, icon));
     }
   },
 
   // Escape &, <, and > characters in a string so that it may be
   // injected as part of raw markup.
   _sanitizeName(name) {
     return name.replace(/&/g, "&amp;")
                .replace(/</g, "&lt;")
@@ -322,17 +336,62 @@ this.ExtensionsUI = {
               this.histogram.add(histkey + "Rejected");
             }
             resolve(false);
           },
         },
       ];
 
       win.PopupNotifications.show(browser, "addon-webext-permissions", "",
-      // eslint-disable-next-line no-unsanitized/property
+                                  "addons-notification-icon",
+                                  action, secondaryActions, popupOptions);
+    });
+  },
+
+  showDefaultSearchPrompt(browser, strings, icon) {
+//    const kDefaultSearchHistKey = "defaultSearch";
+    return new Promise(resolve => {
+      let popupOptions = {
+        hideClose: true,
+        popupIconURL: icon || DEFAULT_EXTENSION_ICON,
+        persistent: false,
+        removeOnDismissal: true,
+        eventCallback(topic) {
+          if (topic == "showing") {
+            let doc = this.browser.ownerDocument;
+            // eslint-disable-next-line no-unsanitized/property
+            doc.getElementById("addon-webext-defaultsearch-text").innerHTML = strings.text;
+          } else if (topic == "removed") {
+            resolve(false);
+          }
+        }
+      };
+
+      let action = {
+        label: strings.acceptText,
+        accessKey: strings.acceptKey,
+        disableHighlight: true,
+        callback: () => {
+//          this.histogram.add(kDefaultSearchHistKey + "Accepted");
+          resolve(true);
+        },
+      };
+      let secondaryActions = [
+        {
+          label: strings.cancelText,
+          accessKey: strings.cancelKey,
+          callback: () => {
+//            this.histogram.add(kDefaultSearchHistKey + "Rejected");
+            resolve(false);
+          },
+        },
+      ];
+
+      let win = browser.ownerGlobal;
+      win.PopupNotifications.show(browser, "addon-webext-defaultsearch", "",
                                   "addons-notification-icon",
                                   action, secondaryActions, popupOptions);
     });
   },
 
   showInstallNotification(target, addon) {
     let win = target.ownerGlobal;
     let popups = win.PopupNotifications;
--- a/build/sparse-profiles/taskgraph
+++ b/build/sparse-profiles/taskgraph
@@ -2,16 +2,20 @@
 
 [include]
 # This file is read as part of validating the taskgraph.
 path:browser/locales/all-locales
 
 # Lots of random files in here are read. Just pull in the whole thing.
 path:build/
 
+# TODO remove once bug 1402010 is resolved and test manifests aren't
+# processed in Files() reading mode in moz.build files.
+path:layout/tools/reftest/
+
 # This file is read as part of validating the taskgraph.
 path:mobile/locales/l10n-changesets.json
 
 # The main meat of this profile.
 path:taskcluster/
 
 # Various files in these directories are read by taskgraph. Just pull
 # them all in.
--- a/build/virtualenv_packages.txt
+++ b/build/virtualenv_packages.txt
@@ -3,16 +3,17 @@ mozilla.pth:python/mozboot
 mozilla.pth:python/mozbuild
 mozilla.pth:python/mozlint
 mozilla.pth:python/mozversioncontrol
 mozilla.pth:third_party/python/blessings
 mozilla.pth:third_party/python/compare-locales
 mozilla.pth:third_party/python/configobj
 mozilla.pth:third_party/python/cram
 mozilla.pth:third_party/python/dlmanager
+mozilla.pth:third_party/python/fluent
 mozilla.pth:third_party/python/futures
 mozilla.pth:third_party/python/hglib
 mozilla.pth:third_party/python/jsmin
 optional:setup.py:third_party/python/psutil:build_ext:--inplace
 mozilla.pth:third_party/python/psutil
 mozilla.pth:third_party/python/pylru
 mozilla.pth:third_party/python/which
 mozilla.pth:third_party/python/pystache
--- a/devtools/client/jsonview/converter-child.js
+++ b/devtools/client/jsonview/converter-child.js
@@ -249,21 +249,18 @@ function onContentMessage(e) {
       copyString(win, value);
       break;
 
     case "copy-headers":
       copyHeaders(win, value);
       break;
 
     case "save":
-      // The window ID is needed when the JSON Viewer is inside an iframe.
-      let windowID = win.QueryInterface(Ci.nsIInterfaceRequestor)
-        .getInterface(Ci.nsIDOMWindowUtils).outerWindowID;
       childProcessMessageManager.sendAsyncMessage(
-        "devtools:jsonview:save", {url: value, windowID: windowID});
+        "devtools:jsonview:save", value);
   }
 }
 
 function copyHeaders(win, headers) {
   let value = "";
   let eol = (Services.appinfo.OS !== "WINNT") ? "\n" : "\r\n";
 
   let responseHeaders = headers.response;
--- a/devtools/client/jsonview/converter-observer.js
+++ b/devtools/client/jsonview/converter-observer.js
@@ -68,16 +68,19 @@ JsonViewSniffer.prototype = {
     }
     return false;
   },
 
   getMIMETypeFromContent: function (request, data, length) {
     if (request instanceof Ci.nsIChannel) {
       // JSON View is enabled only for top level loads only.
       if (!this.isTopLevelLoad(request)) {
+        if (request.contentType === JSON_VIEW_MIME_TYPE) {
+          return "application/json";
+        }
         return "";
       }
       try {
         if (request.contentDisposition ==
           Ci.nsIChannel.DISPOSITION_ATTACHMENT) {
           return "";
         }
       } catch (e) {
--- a/devtools/shim/devtools-startup.js
+++ b/devtools/shim/devtools-startup.js
@@ -618,29 +618,29 @@ const JsonView = {
 
   /**
    * Save JSON to a file needs to be implemented here
    * in the parent process.
    */
   onSave: function (message) {
     let chrome = Services.wm.getMostRecentWindow("navigator:browser");
     let browser = chrome.gBrowser.selectedBrowser;
-    if (message.data.url === null) {
+    if (message.data === null) {
       // Save original contents
-      chrome.saveBrowser(browser, false, message.data.windowID);
+      chrome.saveBrowser(browser);
     } else {
       // The following code emulates saveBrowser, but:
       // - Uses the given blob URL containing the custom contents to save.
       // - Obtains the file name from the URL of the document, not the blob.
       let persistable = browser.frameLoader;
-      persistable.startPersistence(message.data.windowID, {
+      persistable.startPersistence(0, {
         onDocumentReady(doc) {
           let uri = chrome.makeURI(doc.documentURI, doc.characterSet);
           let filename = chrome.getDefaultFileName(undefined, uri, doc, null);
-          chrome.internalSave(message.data.url, doc, filename, null, doc.contentType,
+          chrome.internalSave(message.data, doc, filename, null, doc.contentType,
             false, null, null, null, doc, false, null, undefined);
         },
         onError(status) {
           throw new Error("JSON Viewer's onSave failed in startPersistence");
         }
       });
     }
   }
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -691,16 +691,17 @@ MediaFormatReader::DecoderFactory::RunSt
       MOZ_ASSERT(!aData.mDecoder);
       MOZ_ASSERT(!aData.mInitRequest.Exists());
 
       MediaResult rv = DoCreateDecoder(aData);
       if (NS_FAILED(rv)) {
         NS_WARNING("Error constructing decoders");
         aData.mToken = nullptr;
         aData.mStage = Stage::None;
+        aData.mOwnerData.mDescription = rv.Description();
         mOwner->NotifyError(aData.mTrack, rv);
         return;
       }
 
       aData.mDecoder = new Wrapper(aData.mDecoder.forget(), aData.mToken.forget());
       DoInitDecoder(aData);
       aData.mStage = Stage::WaitForInit;
       break;
@@ -722,17 +723,22 @@ MediaFormatReader::DecoderFactory::DoCre
   if (!mOwner->mPlatform) {
     mOwner->mPlatform = new PDMFactory();
     if (mOwner->IsEncrypted()) {
       MOZ_ASSERT(mOwner->mCDMProxy);
       mOwner->mPlatform->SetCDMProxy(mOwner->mCDMProxy);
     }
   }
 
-  MediaResult result(NS_OK);
+  // result may not be updated by PDMFactory::CreateDecoder, as such it must be
+  // initialized to a fatal error by default.
+  MediaResult result = MediaResult(
+    NS_ERROR_DOM_MEDIA_FATAL_ERR,
+    nsPrintfCString("error creating %s decoder", TrackTypeToStr(aData.mTrack)));
+
   switch (aData.mTrack) {
     case TrackInfo::kAudioTrack: {
       aData.mDecoder = mOwner->mPlatform->CreateDecoder({
         ownerData.mInfo
         ? *ownerData.mInfo->GetAsAudioInfo()
         : *ownerData.mOriginalInfo->GetAsAudioInfo(),
         ownerData.mTaskQueue,
         mOwner->mCrashHelper,
@@ -765,19 +771,17 @@ MediaFormatReader::DecoderFactory::DoCre
     default:
       break;
   }
 
   if (aData.mDecoder) {
     return NS_OK;
   }
 
-  if (NS_FAILED(result)) {
-    ownerData.mDescription = result.Description();
-  }
+  MOZ_RELEASE_ASSERT(NS_FAILED(result), "PDM returned an invalid error code");
 
   return result;
 }
 
 void
 MediaFormatReader::DecoderFactory::DoInitDecoder(Data& aData)
 {
   auto& ownerData = aData.mOwnerData;
--- a/dom/media/platforms/PDMFactory.cpp
+++ b/dom/media/platforms/PDMFactory.cpp
@@ -292,18 +292,17 @@ PDMFactory::CreateDecoderWithPDM(Platfor
   if (MP4Decoder::IsH264(config.mMimeType) && !aParams.mUseNullDecoder.mUse) {
     RefPtr<H264Converter> h = new H264Converter(aPDM, aParams);
     const MediaResult result = h->GetLastError();
     if (NS_SUCCEEDED(result) || result == NS_ERROR_NOT_INITIALIZED) {
       // The H264Converter either successfully created the wrapped decoder,
       // or there wasn't enough AVCC data to do so. Otherwise, there was some
       // problem, for example WMF DLLs were missing.
       m = h.forget();
-    }
-    if (NS_FAILED(result) && aParams.mError) {
+    } else if (aParams.mError) {
       *aParams.mError = result;
     }
   } else {
     m = aPDM->CreateVideoDecoder(aParams);
   }
 
   return m.forget();
 }
--- a/dom/media/systemservices/MediaParent.cpp
+++ b/dom/media/systemservices/MediaParent.cpp
@@ -32,16 +32,17 @@ mozilla::LazyLogModule gMediaParentLog("
 // deviceIds to be unique per origin, to avoid them being supercookies.
 
 #define ORIGINKEYS_FILE "enumerate_devices.txt"
 #define ORIGINKEYS_VERSION "1"
 
 namespace mozilla {
 namespace media {
 
+StaticMutex sOriginKeyStoreMutex;
 static OriginKeyStore* sOriginKeyStore = nullptr;
 
 class OriginKeyStore : public nsISupports
 {
   NS_DECL_THREADSAFE_ISUPPORTS
   class OriginKey
   {
   public:
@@ -391,24 +392,26 @@ class OriginKeyStore : public nsISupport
     }
   private:
     nsCOMPtr<nsIFile> mProfileDir;
   };
 
 private:
   virtual ~OriginKeyStore()
   {
+    StaticMutexAutoLock lock(sOriginKeyStoreMutex);
     sOriginKeyStore = nullptr;
     LOG((__FUNCTION__));
   }
 
 public:
   static OriginKeyStore* Get()
   {
     MOZ_ASSERT(NS_IsMainThread());
+    StaticMutexAutoLock lock(sOriginKeyStoreMutex);
     if (!sOriginKeyStore) {
       sOriginKeyStore = new OriginKeyStore();
     }
     return sOriginKeyStore;
   }
 
   // Only accessed on StreamTS thread
   OriginKeysLoader mOriginKeys;
@@ -442,37 +445,41 @@ Parent<Super>::RecvGetPrincipalKey(const
   MOZ_ASSERT(NS_IsMainThread());
   nsCOMPtr<nsIFile> profileDir;
   nsresult rv = NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
                                        getter_AddRefs(profileDir));
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return IPCResult(this, false);
   }
 
-  // Then over to stream-transport thread to do the actual file io.
-  // Stash a pledge to hold the answer and get an id for this request.
+  // Then over to stream-transport thread (a thread pool) to do the actual
+  // file io. Stash a pledge to hold the answer and get an id for this request.
 
   RefPtr<Pledge<nsCString>> p = new Pledge<nsCString>();
   uint32_t id = mOutstandingPledges.Append(*p);
 
   nsCOMPtr<nsIEventTarget> sts = do_GetService(NS_STREAMTRANSPORTSERVICE_CONTRACTID);
   MOZ_ASSERT(sts);
   RefPtr<Parent<Super>> that(this);
 
   rv = sts->Dispatch(NewRunnableFrom([this, that, id, profileDir,
                                       aPrincipalInfo, aPersist]() -> nsresult {
     MOZ_ASSERT(!NS_IsMainThread());
-    mOriginKeyStore->mOriginKeys.SetProfileDir(profileDir);
+    StaticMutexAutoLock lock(sOriginKeyStoreMutex);
+    if (!sOriginKeyStore) {
+      return NS_ERROR_FAILURE;
+    }
+    sOriginKeyStore->mOriginKeys.SetProfileDir(profileDir);
 
     nsresult rv;
     nsAutoCString result;
     if (IsPincipalInfoPrivate(aPrincipalInfo)) {
-      rv = mOriginKeyStore->mPrivateBrowsingOriginKeys.GetPrincipalKey(aPrincipalInfo, result);
+      rv = sOriginKeyStore->mPrivateBrowsingOriginKeys.GetPrincipalKey(aPrincipalInfo, result);
     } else {
-      rv = mOriginKeyStore->mOriginKeys.GetPrincipalKey(aPrincipalInfo, result, aPersist);
+      rv = sOriginKeyStore->mOriginKeys.GetPrincipalKey(aPrincipalInfo, result, aPersist);
     }
 
     if (NS_WARN_IF(NS_FAILED(rv))) {
       return rv;
     }
 
     // Pass result back to main thread.
     rv = NS_DispatchToMainThread(NewRunnableFrom([this, that, id,
@@ -513,29 +520,32 @@ Parent<Super>::RecvSanitizeOriginKeys(co
 {
   MOZ_ASSERT(NS_IsMainThread());
   nsCOMPtr<nsIFile> profileDir;
   nsresult rv = NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
                                          getter_AddRefs(profileDir));
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return IPCResult(this, false);
   }
-  // Over to stream-transport thread to do the file io.
+  // Over to stream-transport thread (a thread pool) to do the file io.
 
   nsCOMPtr<nsIEventTarget> sts = do_GetService(NS_STREAMTRANSPORTSERVICE_CONTRACTID);
   MOZ_ASSERT(sts);
-  RefPtr<OriginKeyStore> store(mOriginKeyStore);
 
-  rv = sts->Dispatch(NewRunnableFrom([profileDir, store, aSinceWhen,
+  rv = sts->Dispatch(NewRunnableFrom([profileDir, aSinceWhen,
                                       aOnlyPrivateBrowsing]() -> nsresult {
     MOZ_ASSERT(!NS_IsMainThread());
-    store->mPrivateBrowsingOriginKeys.Clear(aSinceWhen);
+    StaticMutexAutoLock lock(sOriginKeyStoreMutex);
+    if (!sOriginKeyStore) {
+      return NS_ERROR_FAILURE;
+    }
+    sOriginKeyStore->mPrivateBrowsingOriginKeys.Clear(aSinceWhen);
     if (!aOnlyPrivateBrowsing) {
-      store->mOriginKeys.SetProfileDir(profileDir);
-      store->mOriginKeys.Clear(aSinceWhen);
+      sOriginKeyStore->mOriginKeys.SetProfileDir(profileDir);
+      sOriginKeyStore->mOriginKeys.Clear(aSinceWhen);
     }
     return NS_OK;
   }), NS_DISPATCH_NORMAL);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return IPCResult(this, false);
   }
   return IPC_OK();
 }
new file mode 100644
--- /dev/null
+++ b/layout/style/crashtests/1401256.html
@@ -0,0 +1,5 @@
+<script>
+  let o1 = document.createElement('p');
+  document.documentElement.appendChild(o1);
+  o1.animate({'minWidth':['-moz-max-content']});
+</script>
\ No newline at end of file
--- a/layout/style/crashtests/crashtests.list
+++ b/layout/style/crashtests/crashtests.list
@@ -217,8 +217,9 @@ load 1397439-1.html
 load 1395719.html
 load 1397091.html
 load 1398479.html
 load 1398581.html
 load 1400035.html
 load 1399546.html
 load 1400325.html
 load 1400926.html
+load 1401256.html
--- a/mobile/android/app/src/main/res/layout/activity_stream_main_highlightstitle.xml
+++ b/mobile/android/app/src/main/res/layout/activity_stream_main_highlightstitle.xml
@@ -40,11 +40,12 @@
         android:visibility="gone"/>
 
     <ImageView
         android:id="@+id/arrow_link"
         android:layout_width="wrap_content"
         android:layout_height="match_parent"
         android:layout_marginEnd="@dimen/activity_stream_base_margin"
         android:layout_marginRight="@dimen/activity_stream_base_margin"
-        android:visibility="gone"/>
+        android:visibility="gone"
+        android:importantForAccessibility="no" />
 
 </LinearLayout>
--- a/mobile/android/app/src/main/res/values/arrays.xml
+++ b/mobile/android/app/src/main/res/values/arrays.xml
@@ -131,23 +131,16 @@
         <item>@string/pref_update_autodownload_wifi</item>
         <item>@string/pref_update_autodownload_disabled</item>
     </string-array>
     <string-array name="pref_update_autodownload_values">
         <item>enabled</item>
         <item>wifi</item>
         <item>disabled</item>
     </string-array>
-    <!-- This value is similar to config_longPressVibePattern in android frameworks/base/core/res/res/values/config.xml-->
-    <integer-array name="long_press_vibrate_msec">
-        <item>0</item>
-        <item>1</item>
-        <item>20</item>
-        <item>21</item>
-    </integer-array>
     <!-- browser.image_blocking -->
     <string-array name="pref_browser_image_blocking_entries">
         <item>@string/pref_tap_to_load_images_enabled</item>
         <item>@string/pref_tap_to_load_images_data</item>
         <item>@string/pref_tap_to_load_images_disabled2</item>
     </string-array>
     <string-array name="pref_browser_image_blocking_values">
         <item>1</item> <!-- Always -->
--- a/mobile/android/base/android-services.mozbuild
+++ b/mobile/android/base/android-services.mozbuild
@@ -878,17 +878,16 @@ sync_java_files = [TOPSRCDIR + '/mobile/
     'sync/crypto/HMACVerificationException.java',
     'sync/crypto/KeyBundle.java',
     'sync/crypto/MissingCryptoInputException.java',
     'sync/crypto/NoKeyBundleException.java',
     'sync/crypto/PBKDF2.java',
     'sync/crypto/PersistedCrypto5Keys.java',
     'sync/CryptoKeysChangedException.java',
     'sync/CryptoRecord.java',
-    'sync/DelayedWorkTracker.java',
     'sync/delegates/ClientsDataDelegate.java',
     'sync/delegates/FreshStartDelegate.java',
     'sync/delegates/GlobalSessionCallback.java',
     'sync/delegates/JSONRecordFetchDelegate.java',
     'sync/delegates/KeyUploadDelegate.java',
     'sync/delegates/MetaGlobalDelegate.java',
     'sync/delegates/WipeServerDelegate.java',
     'sync/EngineSettings.java',
--- a/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java
+++ b/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java
@@ -44,16 +44,17 @@ import android.support.design.widget.Sna
 import android.support.v4.app.Fragment;
 import android.support.v4.app.FragmentManager;
 import android.support.v4.app.NotificationCompat;
 import android.support.v4.content.res.ResourcesCompat;
 import android.support.v4.view.MenuItemCompat;
 import android.text.TextUtils;
 import android.util.AttributeSet;
 import android.util.Log;
+import android.view.HapticFeedbackConstants;
 import android.view.InputDevice;
 import android.view.KeyEvent;
 import android.view.LayoutInflater;
 import android.view.Menu;
 import android.view.MenuInflater;
 import android.view.MenuItem;
 import android.view.MotionEvent;
 import android.view.SubMenu;
@@ -693,17 +694,17 @@ public class BrowserApp extends GeckoApp
                             // being called. Hence we need to guard against the Activity being
                             // shut down (in which case trying to perform UI changes, such as showing
                             // fragments below, will crash).
                             return;
                         }
 
                         final TabHistoryFragment fragment = TabHistoryFragment.newInstance(historyPageList, toIndex);
                         final FragmentManager fragmentManager = getSupportFragmentManager();
-                        GeckoAppShell.vibrateOnHapticFeedbackEnabled(getResources().getIntArray(R.array.long_press_vibrate_msec));
+                        GeckoAppShell.getHapticFeedbackDelegate().performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
                         fragment.show(R.id.tab_history_panel, fragmentManager.beginTransaction(), TAB_HISTORY_FRAGMENT_TAG);
                     }
                 });
             }
         });
         mBrowserToolbar.setTabHistoryController(tabHistoryController);
 
         final String action = intent.getAction();
@@ -1653,17 +1654,16 @@ public class BrowserApp extends GeckoApp
 
         mDoorHangerPopup.setAnchor(mBrowserToolbar.getDoorHangerAnchor());
         mDoorHangerPopup.setOnVisibilityChangeListener(this);
 
         if (mLayerView != null) {
             mLayerView.getDynamicToolbarAnimator().addMetricsListener(this);
             mLayerView.getDynamicToolbarAnimator().setToolbarChromeProxy(this);
         }
-        mDynamicToolbar.setLayerView(mLayerView);
         setDynamicToolbarEnabled(mDynamicToolbar.isEnabled());
 
         // Intercept key events for gamepad shortcuts
         mLayerView.setOnKeyListener(this);
 
         // Initialize the actionbar menu items on startup for both large and small tablets
         if (HardwareUtils.isTablet()) {
             onCreatePanelMenu(Window.FEATURE_OPTIONS_PANEL, null);
--- a/mobile/android/base/java/org/mozilla/gecko/GeckoApp.java
+++ b/mobile/android/base/java/org/mozilla/gecko/GeckoApp.java
@@ -668,17 +668,17 @@ public abstract class GeckoApp extends G
                     getSharedPreferences().edit().putInt(PREFS_CRASHED_COUNT, 0).apply();
                 }
             }, STARTUP_PHASE_DURATION_MS);
 
         } else if ("Accessibility:Ready".equals(event)) {
             GeckoAccessibility.updateAccessibilitySettings(this);
 
         } else if ("Accessibility:Event".equals(event)) {
-            GeckoAccessibility.sendAccessibilityEvent(message);
+            GeckoAccessibility.sendAccessibilityEvent(mLayerView, message);
 
         } else if ("Bookmark:Insert".equals(event)) {
             final BrowserDB db = BrowserDB.from(getProfile());
             final boolean bookmarkAdded = db.addBookmark(
                     getContentResolver(), message.getString("title"), message.getString("url"));
             final int resId = bookmarkAdded ? R.string.bookmark_added
                                             : R.string.bookmark_already_added;
             ThreadUtils.postToUiThread(new Runnable() {
--- a/mobile/android/base/java/org/mozilla/gecko/GeckoApplication.java
+++ b/mobile/android/base/java/org/mozilla/gecko/GeckoApplication.java
@@ -52,17 +52,18 @@ import org.mozilla.gecko.util.GeckoBundl
 import org.mozilla.gecko.util.HardwareUtils;
 import org.mozilla.gecko.util.PRNGFixes;
 import org.mozilla.gecko.util.ThreadUtils;
 
 import java.io.File;
 import java.lang.reflect.Method;
 import java.util.UUID;
 
-public class GeckoApplication extends Application {
+public class GeckoApplication extends Application
+                              implements HapticFeedbackDelegate {
     private static final String LOG_TAG = "GeckoApplication";
     private static final String MEDIA_DECODING_PROCESS_CRASH = "MEDIA_DECODING_PROCESS_CRASH";
 
     private boolean mInBackground;
     private boolean mPausedGecko;
     private boolean mIsInitialResume;
 
     private LightweightTheme mLightweightTheme;
@@ -224,16 +225,17 @@ public class GeckoApplication extends Ap
 
         sSessionUUID = UUID.randomUUID().toString();
 
         GeckoActivityMonitor.getInstance().initialize(this);
         MemoryMonitor.getInstance().init(this);
 
         final Context context = getApplicationContext();
         GeckoAppShell.setApplicationContext(context);
+        GeckoAppShell.setHapticFeedbackDelegate(this);
         GeckoAppShell.setGeckoInterface(new GeckoAppShell.GeckoInterface() {
             @Override
             public boolean openUriExternal(final String targetURI, final String mimeType,
                                            final String packageName, final String className,
                                            final String action, final String title) {
                 // Default to showing prompt in private browsing to be safe.
                 return IntentHelper.openUriExternal(targetURI, mimeType, packageName,
                                                     className, action, title, true);
@@ -629,9 +631,18 @@ public class GeckoApplication extends Ap
                 new Rect(halfSize - sWidth,
                         halfSize - sHeight,
                         halfSize + sWidth,
                         halfSize + sHeight),
                 null);
 
         return bitmap;
     }
+
+    @Override // HapticFeedbackDelegate
+    public void performHapticFeedback(final int effect) {
+        final Activity currentActivity =
+                GeckoActivityMonitor.getInstance().getCurrentActivity();
+        if (currentActivity != null) {
+            currentActivity.getWindow().getDecorView().performHapticFeedback(effect);
+        }
+    }
 }
--- a/mobile/android/base/java/org/mozilla/gecko/activitystream/ActivityStreamTelemetry.java
+++ b/mobile/android/base/java/org/mozilla/gecko/activitystream/ActivityStreamTelemetry.java
@@ -45,16 +45,17 @@ public class ActivityStreamTelemetry {
         public final static String ITEM_REMOVE_BOOKMARK = "remove_bookmark";
         public final static String ITEM_PIN = "pin";
         public final static String ITEM_UNPIN = "unpin";
         public final static String ITEM_COPY = "copy";
         public final static String ITEM_ADD_TO_HOMESCREEN = "homescreen";
         public final static String ITEM_NEW_TAB = "newtab";
         public final static String ITEM_DISMISS = "dismiss";
         public final static String ITEM_DELETE_HISTORY = "delete";
+        public final static String ITEM_LINK_MORE = "link_more";
         public final static String INTERACTION_MENU_BUTTON = "menu_button";
         public final static String INTERACTION_LONG_CLICK = "long_click";
     }
 
     /**
      * A helper class used for composing an 'extras' field. It encapsulates a holder of "global"
      * key/value pairs which will be present in every 'extras' constructed by this class, and a
      * static builder which is aware of Activity Stream telemetry needs.
@@ -139,19 +140,16 @@ public class ActivityStreamTelemetry {
             public Builder forHighlightSource(Utils.HighlightSource source) {
                 switch (source) {
                     case VISITED:
                         this.set(Contract.SOURCE_SUBTYPE, Contract.SUBTYPE_VISITED);
                         break;
                     case BOOKMARKED:
                         this.set(Contract.SOURCE_SUBTYPE, Contract.SUBTYPE_BOOKMARKED);
                         break;
-                    case POCKET:
-                        this.set(Contract.SOURCE_TYPE, Contract.TYPE_POCKET);
-                        break;
                     default:
                         throw new IllegalStateException("Unknown highlight source: " + source);
                 }
                 return this;
             }
 
             public Builder forTopSite(final TopSite topSite) {
                 this.set(
--- a/mobile/android/base/java/org/mozilla/gecko/activitystream/homepanel/StreamRecyclerAdapter.java
+++ b/mobile/android/base/java/org/mozilla/gecko/activitystream/homepanel/StreamRecyclerAdapter.java
@@ -236,33 +236,33 @@ public class StreamRecyclerAdapter exten
         final WebpageRowModel model = (WebpageRowModel) recyclerViewModel.get(position);
 
         final String sourceType;
         final int actionPosition;
         final int size;
         final String referrerUri;
         final int viewType = getItemViewType(position);
 
+        final ActivityStreamTelemetry.Extras.Builder extras = ActivityStreamTelemetry.Extras.builder();
         if (viewType == RowItemType.HIGHLIGHT_ITEM.getViewType()) {
+            extras.forHighlightSource(model.getSource());
             sourceType = ActivityStreamTelemetry.Contract.TYPE_HIGHLIGHTS;
             actionPosition = getHighlightsIndexFromAdapterPosition(position);
             size = getNumOfTypeShown(RowItemType.HIGHLIGHT_ITEM);
             referrerUri = null;
         } else {
             sourceType = ActivityStreamTelemetry.Contract.TYPE_POCKET;
             actionPosition = getTopStoriesIndexFromAdapterPosition(position);
             size = getNumOfTypeShown(RowItemType.TOP_STORIES_ITEM);
             referrerUri = PocketStoriesLoader.POCKET_REFERRER_URI;
         }
 
-        ActivityStreamTelemetry.Extras.Builder extras = ActivityStreamTelemetry.Extras.builder()
-                .forHighlightSource(model.getSource())
-                .set(ActivityStreamTelemetry.Contract.SOURCE_TYPE, sourceType)
-                .set(ActivityStreamTelemetry.Contract.ACTION_POSITION, actionPosition)
-                .set(ActivityStreamTelemetry.Contract.COUNT, size);
+        extras.set(ActivityStreamTelemetry.Contract.SOURCE_TYPE, sourceType)
+              .set(ActivityStreamTelemetry.Contract.ACTION_POSITION, actionPosition)
+              .set(ActivityStreamTelemetry.Contract.COUNT, size);
 
         Telemetry.sendUIEvent(
                 TelemetryContract.Event.LOAD_URL,
                 TelemetryContract.Method.LIST_ITEM,
                 extras.build()
         );
 
         // NB: This is hacky. We need to process telemetry data first, otherwise we run a risk of
@@ -312,31 +312,31 @@ public class StreamRecyclerAdapter exten
     @Override
     public void openContextMenu(final WebpageItemRow webpageItemRow, final int position, @NonNull final String interactionExtra) {
         final WebpageRowModel model = (WebpageRowModel) recyclerViewModel.get(position);
 
         final String sourceType;
         final int actionPosition;
         final ActivityStreamContextMenu.MenuMode menuMode;
 
+        ActivityStreamTelemetry.Extras.Builder extras = ActivityStreamTelemetry.Extras.builder();
         if (model.getRowItemType() == RowItemType.HIGHLIGHT_ITEM) {
+            extras.forHighlightSource(model.getSource());
             sourceType = ActivityStreamTelemetry.Contract.TYPE_HIGHLIGHTS;
             actionPosition = getHighlightsIndexFromAdapterPosition(position);
             menuMode = ActivityStreamContextMenu.MenuMode.HIGHLIGHT;
         } else {
             sourceType = ActivityStreamTelemetry.Contract.TYPE_POCKET;
             actionPosition = getTopStoriesIndexFromAdapterPosition(position);
             menuMode = ActivityStreamContextMenu.MenuMode.TOPSTORY;
         }
 
-        ActivityStreamTelemetry.Extras.Builder extras = ActivityStreamTelemetry.Extras.builder()
-                .set(ActivityStreamTelemetry.Contract.SOURCE_TYPE, sourceType)
-                .set(ActivityStreamTelemetry.Contract.ACTION_POSITION, actionPosition)
-                .set(ActivityStreamTelemetry.Contract.INTERACTION, interactionExtra)
-                .forHighlightSource(model.getSource());
+        extras.set(ActivityStreamTelemetry.Contract.SOURCE_TYPE, sourceType)
+              .set(ActivityStreamTelemetry.Contract.ACTION_POSITION, actionPosition)
+              .set(ActivityStreamTelemetry.Contract.INTERACTION, interactionExtra);
 
         ActivityStreamContextMenu.show(webpageItemRow.itemView.getContext(),
                 webpageItemRow.getContextMenuAnchor(),
                 extras,
                 menuMode,
                 model,
                 /* shouldOverrideWithImageProvider */ true, // we use image providers in HighlightItem.pageIconLayout.
                 onUrlOpenListener, onUrlOpenInBackgroundListener,
--- a/mobile/android/base/java/org/mozilla/gecko/activitystream/homepanel/stream/StreamTitleRow.java
+++ b/mobile/android/base/java/org/mozilla/gecko/activitystream/homepanel/stream/StreamTitleRow.java
@@ -7,16 +7,19 @@ package org.mozilla.gecko.activitystream
 
 import android.support.annotation.NonNull;
 import android.support.annotation.StringRes;
 import android.view.View;
 import android.widget.ImageView;
 import android.widget.TextView;
 
 import org.mozilla.gecko.R;
+import org.mozilla.gecko.Telemetry;
+import org.mozilla.gecko.TelemetryContract;
+import org.mozilla.gecko.activitystream.ActivityStreamTelemetry;
 import org.mozilla.gecko.home.HomePager;
 import org.mozilla.gecko.util.DrawableUtil;
 
 import java.util.EnumSet;
 
 public class StreamTitleRow extends StreamViewHolder {
     public static final int LAYOUT_ID = R.layout.activity_stream_main_highlightstitle;
 
@@ -37,16 +40,22 @@ public class StreamTitleRow extends Stre
         final ImageView titleArrow = (ImageView) itemView.findViewById(R.id.arrow_link);
         titleArrow.setImageDrawable(DrawableUtil.tintDrawableWithColorRes(itemView.getContext(), R.drawable.menu_item_more, R.color.ob_click));
         titleArrow.setVisibility(View.VISIBLE);
 
         final View.OnClickListener clickListener = new View.OnClickListener() {
             @Override
             public void onClick(View view) {
                 onUrlOpenListener.onUrlOpen(url, EnumSet.of(HomePager.OnUrlOpenListener.Flags.ALLOW_SWITCH_TO_TAB));
+
+                ActivityStreamTelemetry.Extras.Builder extras = ActivityStreamTelemetry.Extras.builder()
+                        .set(ActivityStreamTelemetry.Contract.SOURCE_TYPE, ActivityStreamTelemetry.Contract.TYPE_POCKET)
+                        .set(ActivityStreamTelemetry.Contract.ITEM, ActivityStreamTelemetry.Contract.ITEM_LINK_MORE);
+
+                Telemetry.sendUIEvent(TelemetryContract.Event.ACTION, TelemetryContract.Method.BUTTON, extras.build());
             }
         };
 
         titleLink.setOnClickListener(clickListener);
         titleArrow.setOnClickListener(clickListener);
     }
 }
 
--- a/mobile/android/base/java/org/mozilla/gecko/menu/GeckoMenu.java
+++ b/mobile/android/base/java/org/mozilla/gecko/menu/GeckoMenu.java
@@ -12,16 +12,17 @@ import org.mozilla.gecko.util.ThreadUtil
 import org.mozilla.gecko.widget.GeckoActionProvider;
 
 import android.content.ComponentName;
 import android.content.Context;
 import android.content.Intent;
 import android.util.AttributeSet;
 import android.util.Log;
 import android.util.SparseArray;
+import android.view.HapticFeedbackConstants;
 import android.view.KeyEvent;
 import android.view.LayoutInflater;
 import android.view.Menu;
 import android.view.MenuItem;
 import android.view.SubMenu;
 import android.view.View;
 import android.view.ViewGroup;
 import android.widget.AdapterView;
@@ -254,34 +255,36 @@ public class GeckoMenu extends ListView
                 public void onClick(View view) {
                     handleMenuItemClick(menuItem);
                 }
             });
             ((MenuItemActionBar) actionView).setOnLongClickListener(new View.OnLongClickListener() {
                 @Override
                 public boolean onLongClick(View view) {
                     if (handleMenuItemLongClick(menuItem)) {
-                        GeckoAppShell.vibrateOnHapticFeedbackEnabled(getResources().getIntArray(R.array.long_press_vibrate_msec));
+                        GeckoAppShell.getHapticFeedbackDelegate().performHapticFeedback(
+                                HapticFeedbackConstants.LONG_PRESS);
                         return true;
                     }
                     return false;
                 }
             });
         } else if (actionView instanceof MenuItemSwitcherLayout) {
             ((MenuItemSwitcherLayout) actionView).setMenuItemClickListener(new View.OnClickListener() {
                 @Override
                 public void onClick(View view) {
                     handleMenuItemClick(menuItem);
                 }
             });
             ((MenuItemSwitcherLayout) actionView).setMenuItemLongClickListener(new View.OnLongClickListener() {
                 @Override
                 public boolean onLongClick(View view) {
                     if (handleMenuItemLongClick(menuItem)) {
-                        GeckoAppShell.vibrateOnHapticFeedbackEnabled(getResources().getIntArray(R.array.long_press_vibrate_msec));
+                        GeckoAppShell.getHapticFeedbackDelegate().performHapticFeedback(
+                                HapticFeedbackConstants.LONG_PRESS);
                         return true;
                     }
                     return false;
                 }
             });
         }
 
         return added;
--- a/mobile/android/base/moz.build
+++ b/mobile/android/base/moz.build
@@ -410,16 +410,17 @@ gvjar.sources += [geckoview_source_dir +
     'gfx/PointUtils.java',
     'gfx/RenderTask.java',
     'gfx/StackScroller.java',
     'gfx/SurfaceAllocator.java',
     'gfx/SurfaceAllocatorService.java',
     'gfx/SurfaceTextureListener.java',
     'gfx/ViewTransform.java',
     'gfx/VsyncSource.java',
+    'HapticFeedbackDelegate.java',
     'InputConnectionListener.java',
     'InputMethods.java',
     'media/AsyncCodec.java',
     'media/AsyncCodecFactory.java',
     'media/BaseHlsPlayer.java',
     'media/Codec.java',
     'media/CodecProxy.java',
     'media/FormatParam.java',
--- a/mobile/android/docs/activitystreamtelemetry.rst
+++ b/mobile/android/docs/activitystreamtelemetry.rst
@@ -52,45 +52,84 @@ For each click event (1/2), in addition 
     }
 
 Subtype indicates a reason an item which is being interacted with appeared in the Top Sites:
 
 - "pinned": a pinned top site, specifically a non-positioned "Activity Stream pinned" site
 - "suggested": a suggested top site, one of the default ones displayed when there's not enough browsing history available
 - "top": a frecency-based top site, based on browsing history. Neither "pinned" nor "suggested".
 
+Top Stories (Pocket) interactions
+---------------------------------
+
+Two event types are recorded for row items (links):
+1) User clicked on a Story item: event="loadurl.1", method="listitem"
+2) User clicked on the menu button: event="show.1", method="contextmenu"
+
+For both event types, in addition to global extras, the following information is recorded:
+
+.. code-block:: js
+
+    extras: {
+        ...
+        "source_type": "pocket",
+        "action_position": number /* 0-based index of a story being interacted with */
+    }
+
+For "loadurl.1" event, the following extra information is also recorded:
+
+.. code-block:: js
+
+    extras: {
+        ...
+        "count": number /* total number of stories displayed */
+    }
+
+One event type is recorded for interaction with the Top Stories section title UI:
+1) User clicked on the "MORE" link in the Top Stories section title: event="action.1", method="button"
+
+In addition to global extras, the following information is included:
+
+.. code-block:: js
+
+    extras: {
+        ...
+        "source_type": "pocket",
+        "item": "link_more"
+    }
+
 Highlight interactions
 ----------------------
 Two event types are recorded:
 
 1) User clicked on a Highlight: event="loadurl.1", method="listitem"
 2) User clicked on the menu button: event="show.1", method="contextmenu"
 
 For both event types, in addition to global extras, the following information is recorded:
 
 .. code-block:: js
 
     extras: {
         ...
         "source_type": "highlights",
-        "source_subtype": "visited"/"bookmarked"
+        "source_subtype": "visited"/"bookmarked",
+        "action_position": number, /* 0-based index of a highlight being interacted with */
     }
 
 Subtype indicates reason an item being which is being interacted with appeared in the Highlights:
 - "visited": a website has been visited recently
 - "bookmarked": a website has been bookmarked recently
 
 For "loadurl.1" event, the following extra information is also recorded:
 
 .. code-block:: js
 
     extras: {
         ...
-        "action_position": number, /* 0-based index of a highlight being interacted with */
-        "count": number, /* total number of highlights displayed */
+        "count": number /* total number of highlights displayed */
     }
 
 Context Menu interactions
 -------------------------
 Every interaction with a context menu item is recorded using: event="action.1", method="contextmenu"
 
 For all interactions, in addition to global extras, the following information is recorded:
 
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoAccessibility.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoAccessibility.java
@@ -2,18 +2,16 @@
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 package org.mozilla.gecko;
 
 import org.json.JSONException;
 import org.json.JSONObject;
-import org.mozilla.gecko.EventDispatcher;
-import org.mozilla.gecko.gfx.LayerView;
 import org.mozilla.gecko.util.GeckoBundle;
 import org.mozilla.gecko.util.ThreadUtils;
 
 import android.content.Context;
 import android.graphics.Rect;
 import android.os.Build;
 import android.os.Bundle;
 import android.util.Log;
@@ -105,30 +103,32 @@ public class GeckoAccessibility {
             // Accessibility is off.
         }
     }
 
     public static boolean isEnabled() {
         return sEnabled;
     }
 
-    public static void sendAccessibilityEvent(final GeckoBundle message) {
+    public static void sendAccessibilityEvent(final GeckoView view,
+                                              final GeckoBundle message) {
         if (!sEnabled)
             return;
 
         final int eventType = message.getInt("eventType", -1);
         if (eventType < 0) {
             Log.e(LOGTAG, "No accessibility event type provided");
             return;
         }
 
-        sendAccessibilityEvent(message, eventType);
+        sendAccessibilityEvent(view, message, eventType);
     }
 
-    public static void sendAccessibilityEvent(final GeckoBundle message, final int eventType) {
+    public static void sendAccessibilityEvent(final GeckoView view, final GeckoBundle message,
+                                              final int eventType) {
         if (!sEnabled)
             return;
 
         final String exitView = message.getString("exitView", "");
         if (exitView.equals("moveNext")) {
             sCurrentNode = VIRTUAL_ENTRY_POINT_AFTER;
         } else if (exitView.equals("movePrevious")) {
             sCurrentNode = VIRTUAL_ENTRY_POINT_BEFORE;
@@ -143,20 +143,16 @@ public class GeckoAccessibility {
                     @Override
                     public void run() {
                         sendDirectAccessibilityEvent(eventType, message);
                 }
             });
         } else {
             // In Jelly Bean we populate an AccessibilityNodeInfo with the minimal amount of data to have
             // it work with TalkBack.
-            final LayerView view = GeckoAppShell.getLayerView();
-            if (view == null)
-                return;
-
             if (sVirtualCursorNode == null) {
                 sVirtualCursorNode = AccessibilityNodeInfo.obtain(view, VIRTUAL_CURSOR_POSITION);
             }
             sVirtualCursorNode.setEnabled(message.getBoolean("enabled", true));
             sVirtualCursorNode.setClickable(message.getBoolean("clickable"));
             sVirtualCursorNode.setCheckable(message.getBoolean("checkable"));
             sVirtualCursorNode.setChecked(message.getBoolean("checked"));
             sVirtualCursorNode.setPassword(message.getBoolean("password"));
@@ -261,17 +257,22 @@ public class GeckoAccessibility {
             EventDispatcher.getInstance().dispatch("Accessibility:Focus", data);
         }
     }
 
     public static class GeckoAccessibilityDelegate extends View.AccessibilityDelegate {
         AccessibilityNodeProvider mAccessibilityNodeProvider;
 
         @Override
-        public AccessibilityNodeProvider getAccessibilityNodeProvider(final View host) {
+        public AccessibilityNodeProvider getAccessibilityNodeProvider(final View hostView) {
+            if (!(hostView instanceof GeckoView)) {
+                return super.getAccessibilityNodeProvider(hostView);
+            }
+            final GeckoView host = (GeckoView) hostView;
+
             if (mAccessibilityNodeProvider == null)
                 // The accessibility node structure for web content consists of 3 LayerView child nodes:
                 // 1. VIRTUAL_ENTRY_POINT_BEFORE: Represents the entry point before the LayerView.
                 // 2. VIRTUAL_CURSOR_POSITION: Represents the current position of the virtual cursor.
                 // 3. VIRTUAL_ENTRY_POINT_AFTER: Represents the entry point after the LayerView.
                 mAccessibilityNodeProvider = new AccessibilityNodeProvider() {
                         @Override
                         public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualDescendantId) {
@@ -314,17 +315,17 @@ public class GeckoAccessibility {
                         }
 
                         @Override
                         public boolean performAction (int virtualViewId, int action, Bundle arguments) {
                             if (action == AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS) {
                                 // The accessibility focus is permanently on the middle node, VIRTUAL_CURSOR_POSITION.
                                 // When we enter the view forward or backward we just ask Gecko to get focus, keeping the current position.
                                 if (virtualViewId == VIRTUAL_CURSOR_POSITION && sHoverEnter != null) {
-                                    GeckoAccessibility.sendAccessibilityEvent(sHoverEnter, AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED);
+                                    GeckoAccessibility.sendAccessibilityEvent(host, sHoverEnter, AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED);
                                 } else {
                                     final GeckoBundle data = new GeckoBundle(1);
                                     data.putBoolean("gainFocus", true);
                                     EventDispatcher.getInstance().dispatch("Accessibility:Focus", data);
                                 }
                                 return true;
                             } else if (action == AccessibilityNodeInfo.ACTION_CLICK && virtualViewId == VIRTUAL_CURSOR_POSITION) {
                                 EventDispatcher.getInstance().dispatch("Accessibility:ActivateObject", null);
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoAppShell.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoAppShell.java
@@ -23,17 +23,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
 import java.util.TreeMap;
 
 import org.mozilla.gecko.annotation.JNITarget;
 import org.mozilla.gecko.annotation.RobocopTarget;
 import org.mozilla.gecko.annotation.WrapForJNI;
 import org.mozilla.gecko.gfx.BitmapUtils;
-import org.mozilla.gecko.gfx.LayerView;
 import org.mozilla.gecko.permissions.Permissions;
 import org.mozilla.gecko.process.GeckoProcessManager;
 import org.mozilla.gecko.util.HardwareCodecCapabilityUtils;
 import org.mozilla.gecko.util.HardwareUtils;
 import org.mozilla.gecko.util.IOUtils;
 import org.mozilla.gecko.util.ProxySelector;
 import org.mozilla.gecko.util.ThreadUtils;
 
@@ -217,31 +216,18 @@ public class GeckoAppShell
 
     // helper methods
     @WrapForJNI
     /* package */ static native void reportJavaCrash(Throwable exc, String stackTrace);
 
     @WrapForJNI(dispatchTo = "gecko")
     public static native void notifyUriVisited(String uri);
 
-    private static LayerView sLayerView;
     private static Rect sScreenSize;
 
-    public static void setLayerView(LayerView lv) {
-        if (sLayerView == lv) {
-            return;
-        }
-        sLayerView = lv;
-    }
-
-    @RobocopTarget
-    public static LayerView getLayerView() {
-        return sLayerView;
-    }
-
     @WrapForJNI(stubName = "NotifyObservers", dispatchTo = "gecko")
     private static native void nativeNotifyObservers(String topic, String data);
 
     @RobocopTarget
     public static void notifyObservers(final String topic, final String data) {
         notifyObservers(topic, data, GeckoThread.State.RUNNING);
     }
 
@@ -376,17 +362,18 @@ public class GeckoAppShell
     /* package */ static native void onLocationChanged(double latitude, double longitude,
                                                        double altitude, float accuracy,
                                                        float bearing, float speed, long time);
 
     private static class DefaultListeners implements SensorEventListener,
                                                      LocationListener,
                                                      NotificationListener,
                                                      ScreenOrientationDelegate,
-                                                     WakeLockDelegate {
+                                                     WakeLockDelegate,
+                                                     HapticFeedbackDelegate {
         @Override
         public void onAccuracyChanged(Sensor sensor, int accuracy) {
         }
 
         private static int HalSensorAccuracyFor(int androidAccuracy) {
             switch (androidAccuracy) {
             case SensorManager.SENSOR_STATUS_UNRELIABLE:
                 return GeckoHalDefines.SENSOR_ACCURACY_UNRELIABLE;
@@ -556,23 +543,40 @@ public class GeckoAppShell
 
                 wl.acquire();
                 mWakeLocks.put(lock, wl);
             } else if (state != WakeLockDelegate.STATE_LOCKED_FOREGROUND && wl != null) {
                 wl.release();
                 mWakeLocks.remove(lock);
             }
         }
+
+        @Override
+        public void performHapticFeedback(final int effect) {
+            final int[] pattern;
+            // Use default platform values.
+            if (effect == HapticFeedbackConstants.KEYBOARD_TAP) {
+                pattern = new int[] { 40 };
+            } else if (effect == HapticFeedbackConstants.LONG_PRESS) {
+                pattern = new int[] { 0, 1, 20, 21 };
+            } else if (effect == HapticFeedbackConstants.VIRTUAL_KEY) {
+                pattern = new int[] { 0, 10, 20, 30 };
+            } else {
+                return;
+            }
+            vibrateOnHapticFeedbackEnabled(pattern);
+        }
     }
 
     private static final DefaultListeners DEFAULT_LISTENERS = new DefaultListeners();
     private static SensorEventListener sSensorListener = DEFAULT_LISTENERS;
     private static LocationListener sLocationListener = DEFAULT_LISTENERS;
     private static NotificationListener sNotificationListener = DEFAULT_LISTENERS;
     private static WakeLockDelegate sWakeLockDelegate = DEFAULT_LISTENERS;
+    private static HapticFeedbackDelegate sHapticFeedbackDelegate = DEFAULT_LISTENERS;
 
     /**
      * A delegate for supporting the Screen Orientation API.
      */
     private static ScreenOrientationDelegate sScreenOrientationDelegate = DEFAULT_LISTENERS;
 
     public static SensorEventListener getSensorListener() {
         return sSensorListener;
@@ -609,16 +613,24 @@ public class GeckoAppShell
     public static WakeLockDelegate getWakeLockDelegate() {
         return sWakeLockDelegate;
     }
 
     public void setWakeLockDelegate(final WakeLockDelegate delegate) {
         sWakeLockDelegate = (delegate != null) ? delegate : DEFAULT_LISTENERS;
     }
 
+    public static HapticFeedbackDelegate getHapticFeedbackDelegate() {
+        return sHapticFeedbackDelegate;
+    }
+
+    public static void setHapticFeedbackDelegate(final HapticFeedbackDelegate delegate) {
+        sHapticFeedbackDelegate = (delegate != null) ? delegate : DEFAULT_LISTENERS;
+    }
+
     @WrapForJNI(calledFrom = "gecko")
     private static void enableSensor(int aSensortype) {
         final SensorManager sm = (SensorManager)
             getApplicationContext().getSystemService(Context.SENSOR_SERVICE);
 
         switch (aSensortype) {
         case GeckoHalDefines.SENSOR_GAME_ROTATION_VECTOR:
             if (gGameRotationVectorSensor == null) {
@@ -997,20 +1009,21 @@ public class GeckoAppShell
         sScreenDepth = aScreenDepth;
     }
 
     @WrapForJNI(calledFrom = "gecko")
     private static void performHapticFeedback(boolean aIsLongPress) {
         // Don't perform haptic feedback if a vibration is currently playing,
         // because the haptic feedback will nuke the vibration.
         if (!sVibrationMaybePlaying || System.nanoTime() >= sVibrationEndTime) {
-            LayerView layerView = getLayerView();
-            layerView.performHapticFeedback(aIsLongPress ?
-                                            HapticFeedbackConstants.LONG_PRESS :
-                                            HapticFeedbackConstants.VIRTUAL_KEY);
+            getHapticFeedbackDelegate().performHapticFeedback(
+                    aIsLongPress ? HapticFeedbackConstants.LONG_PRESS
+                                 : HapticFeedbackConstants.VIRTUAL_KEY);
+            sVibrationMaybePlaying = false;
+            sVibrationEndTime = 0;
         }
     }
 
     private static Vibrator vibrator() {
         return (Vibrator) getApplicationContext().getSystemService(Context.VIBRATOR_SERVICE);
     }
 
     // Helper method to convert integer array to long array.
@@ -1018,36 +1031,40 @@ public class GeckoAppShell
         long[] output = new long[input.length];
         for (int i = 0; i < input.length; i++) {
             output[i] = input[i];
         }
         return output;
     }
 
     // Vibrate only if haptic feedback is enabled.
-    public static void vibrateOnHapticFeedbackEnabled(int[] milliseconds) {
+    private static void vibrateOnHapticFeedbackEnabled(int[] milliseconds) {
         if (Settings.System.getInt(getApplicationContext().getContentResolver(),
                                    Settings.System.HAPTIC_FEEDBACK_ENABLED, 0) > 0) {
-            vibrate(convertIntToLongArray(milliseconds), -1);
+            if (milliseconds.length == 1) {
+                vibrate(milliseconds[0]);
+            } else {
+                vibrate(convertIntToLongArray(milliseconds), -1);
+            }
         }
     }
 
     @WrapForJNI(calledFrom = "gecko")
     private static void vibrate(long milliseconds) {
         sVibrationEndTime = System.nanoTime() + milliseconds * 1000000;
         sVibrationMaybePlaying = true;
         vibrator().vibrate(milliseconds);
     }
 
     @WrapForJNI(calledFrom = "gecko")
     private static void vibrate(long[] pattern, int repeat) {
-        // If pattern.length is even, the last element in the pattern is a
+        // If pattern.length is odd, the last element in the pattern is a
         // meaningless delay, so don't include it in vibrationDuration.
         long vibrationDuration = 0;
-        int iterLen = pattern.length - (pattern.length % 2 == 0 ? 1 : 0);
+        int iterLen = pattern.length & ~1;
         for (int i = 0; i < iterLen; i++) {
           vibrationDuration += pattern[i];
         }
 
         sVibrationEndTime = System.nanoTime() + vibrationDuration * 1000000;
         sVibrationMaybePlaying = true;
         vibrator().vibrate(pattern, repeat);
     }
@@ -1548,31 +1565,16 @@ public class GeckoAppShell
 
     /* Called by JNI from AndroidBridge, and by reflection from tests/BaseTest.java.in */
     @WrapForJNI(calledFrom = "gecko")
     @RobocopTarget
     public static boolean isTablet() {
         return HardwareUtils.isTablet();
     }
 
-    private static boolean sImeWasEnabledOnLastResize = false;
-    public static void viewSizeChanged() {
-        GeckoView v = (GeckoView) getLayerView();
-        if (v == null) {
-            return;
-        }
-        boolean imeIsEnabled = v.isIMEEnabled();
-        if (imeIsEnabled && !sImeWasEnabledOnLastResize) {
-            // The IME just came up after not being up, so let's scroll
-            // to the focused input.
-            EventDispatcher.getInstance().dispatch("ScrollTo:FocusedInput", null);
-        }
-        sImeWasEnabledOnLastResize = imeIsEnabled;
-    }
-
     @WrapForJNI(calledFrom = "gecko")
     private static double[] getCurrentNetworkInformation() {
         return GeckoNetworkManager.getInstance().getCurrentInformation();
     }
 
     @WrapForJNI(calledFrom = "gecko")
     private static void enableNetworkNotifications() {
         ThreadUtils.postToUiThread(new Runnable() {
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoInputConnection.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoInputConnection.java
@@ -67,36 +67,36 @@ class GeckoInputConnection
     private String mIMEModeHint = "";
     private String mIMEActionHint = "";
     private boolean mInPrivateBrowsing;
     private boolean mIsUserAction;
     private boolean mFocused;
 
     private String mCurrentInputMethod = "";
 
-    private final View mView;
+    private final GeckoView mView;
     private final GeckoEditableClient mEditableClient;
     protected int mBatchEditCount;
     private ExtractedTextRequest mUpdateRequest;
     private final ExtractedText mUpdateExtract = new ExtractedText();
     private final InputConnection mKeyInputConnection;
     private CursorAnchorInfo.Builder mCursorAnchorInfoBuilder;
 
     // Prevent showSoftInput and hideSoftInput from causing reentrant calls on some devices.
     private volatile boolean mSoftInputReentrancyGuard;
 
-    public static GeckoEditableListener create(View targetView,
+    public static GeckoEditableListener create(GeckoView targetView,
                                                GeckoEditableClient editable) {
         if (DEBUG)
             return DebugGeckoInputConnection.create(targetView, editable);
         else
             return new GeckoInputConnection(targetView, editable);
     }
 
-    protected GeckoInputConnection(View targetView,
+    protected GeckoInputConnection(GeckoView targetView,
                                    GeckoEditableClient editable) {
         super(targetView, true);
         mView = targetView;
         mEditableClient = editable;
         mIMEState = IME_STATE_DISABLED;
         // InputConnection that sends keys for plugins, which don't have full editors
         mKeyInputConnection = new BaseInputConnection(targetView, false);
     }
@@ -199,17 +199,17 @@ class GeckoInputConnection
         if ((req.flags & GET_TEXT_WITH_STYLES) != 0) {
             extract.text = new SpannableString(editable);
         } else {
             extract.text = editable.toString();
         }
         return extract;
     }
 
-    private View getView() {
+    private GeckoView getView() {
         return mView;
     }
 
     private InputMethodManager getInputMethodManager() {
         View view = getView();
         if (view == null) {
             return null;
         }
@@ -231,17 +231,20 @@ class GeckoInputConnection
             @Override
             public void run() {
                 if (v.hasFocus() && !imm.isActive(v)) {
                     // Marshmallow workaround: The view has focus but it is not the active
                     // view for the input method. (Bug 1211848)
                     v.clearFocus();
                     v.requestFocus();
                 }
-                GeckoAppShell.getLayerView().getDynamicToolbarAnimator().showToolbar(/*immediately*/true);
+                final GeckoView view = getView();
+                if (view != null) {
+                    view.getDynamicToolbarAnimator().showToolbar(/*immediately*/ true);
+                }
                 mSoftInputReentrancyGuard = true;
                 imm.showSoftInput(v, 0);
                 mSoftInputReentrancyGuard = false;
             }
         });
     }
 
     private void hideSoftInput() {
@@ -354,28 +357,28 @@ class GeckoInputConnection
         }
 
         if (mCursorAnchorInfoBuilder == null) {
             mCursorAnchorInfoBuilder = new CursorAnchorInfo.Builder();
         }
         mCursorAnchorInfoBuilder.reset();
 
         // Calculate Gecko logical coords to screen coords
-        final View v = getView();
-        if (v == null) {
+        final GeckoView view = getView();
+        if (view == null) {
             return;
         }
 
         int[] viewCoords = new int[2];
-        v.getLocationOnScreen(viewCoords);
+        view.getLocationOnScreen(viewCoords);
 
-        DynamicToolbarAnimator animator = GeckoAppShell.getLayerView().getDynamicToolbarAnimator();
-        float toolbarHeight = (float)animator.getCurrentToolbarHeight();
+        DynamicToolbarAnimator animator = view.getDynamicToolbarAnimator();
+        float toolbarHeight = (float) animator.getCurrentToolbarHeight();
 
-        Matrix matrix = GeckoAppShell.getLayerView().getMatrixForLayerRectToViewRect();
+        Matrix matrix = view.getMatrixForLayerRectToViewRect();
         if (matrix == null) {
             if (DEBUG) {
                 Log.d(LOGTAG, "Cannot get Matrix to convert from Gecko coords to layer view coords");
             }
             return;
         }
         matrix.postTranslate(viewCoords[0], viewCoords[1] + toolbarHeight);
         mCursorAnchorInfoBuilder.setMatrix(matrix);
@@ -1029,23 +1032,23 @@ class GeckoInputConnection
 
 final class DebugGeckoInputConnection
         extends GeckoInputConnection
         implements InvocationHandler {
 
     private InputConnection mProxy;
     private final StringBuilder mCallLevel;
 
-    private DebugGeckoInputConnection(View targetView,
+    private DebugGeckoInputConnection(GeckoView targetView,
                                       GeckoEditableClient editable) {
         super(targetView, editable);
         mCallLevel = new StringBuilder();
     }
 
-    public static GeckoEditableListener create(View targetView,
+    public static GeckoEditableListener create(GeckoView targetView,
                                                GeckoEditableClient editable) {
         final Class<?>[] PROXY_INTERFACES = { InputConnection.class,
                 InputConnectionListener.class,
                 GeckoEditableListener.class };
         DebugGeckoInputConnection dgic =
                 new DebugGeckoInputConnection(targetView, editable);
         dgic.mProxy = (InputConnection)Proxy.newProxyInstance(
                 GeckoInputConnection.class.getClassLoader(),
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoView.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoView.java
@@ -483,19 +483,16 @@ public class GeckoView extends LayerView
                                         /* debugging */ false)) {
             GeckoThread.launch();
         }
     }
 
     private void init(final Context context, final GeckoViewSettings settings) {
         preload(context);
 
-        // Perform common initialization for Fennec/GeckoView.
-        GeckoAppShell.setLayerView(this);
-
         initializeView();
         mListener.registerListeners();
 
         if (settings == null) {
             mSettings = new GeckoViewSettings(getEventDispatcher());
         } else {
             mSettings = settings;
         }
@@ -746,17 +743,18 @@ public class GeckoView extends LayerView
     public boolean onKeyMultiple(int keyCode, int repeatCount, KeyEvent event) {
         if (super.onKeyMultiple(keyCode, repeatCount, event)) {
             return true;
         }
         return mInputConnectionListener != null &&
                 mInputConnectionListener.onKeyMultiple(keyCode, repeatCount, event);
     }
 
-    /* package */ boolean isIMEEnabled() {
+    @Override
+    public boolean isIMEEnabled() {
         return mInputConnectionListener != null &&
                 mInputConnectionListener.isIMEEnabled();
     }
 
     public void importScript(final String url) {
         if (url.startsWith("resource://android/assets/")) {
             final GeckoBundle data = new GeckoBundle(1);
             data.putString("scriptURL", url);
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/HapticFeedbackDelegate.java
@@ -0,0 +1,20 @@
+/* -*- Mode: Java; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko;
+
+import android.view.HapticFeedbackConstants;
+
+/**
+ * A <code>HapticFeedbackDelegate</code> is responsible for performing haptic feedback.
+ */
+public interface HapticFeedbackDelegate {
+    /**
+     * Perform a haptic feedback effect. Called from the Gecko thread.
+     *
+     * @param effect Effect to perform from <code>android.view.HapticFeedbackConstants</code>.
+     */
+    void performHapticFeedback(int effect);
+}
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/gfx/GeckoLayerClient.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/gfx/GeckoLayerClient.java
@@ -28,16 +28,17 @@ class GeckoLayerClient implements LayerV
 {
     private static final String LOGTAG = "GeckoLayerClient";
 
     private final Context mContext;
     private IntSize mScreenSize;
     private IntSize mWindowSize;
 
     private boolean mForceRedraw;
+    private boolean mImeWasEnabledOnLastResize;
 
     /* The current viewport metrics.
      * This is volatile so that we can read and write to it from different threads.
      * We avoid synchronization to make getting the viewport metrics from
      * the compositor as cheap as possible. The viewport is immutable so
      * we don't need to worry about anyone mutating it while we're reading from it.
      * Specifically:
      * 1) reading mViewportMetrics from any thread is fine without synchronization
@@ -148,17 +149,23 @@ class GeckoLayerClient implements LayerV
             // here we send gecko a resize message. The code in browser.js is responsible for
             // picking up on that resize event, modifying the viewport as necessary, and informing
             // us of the new viewport.
             sendResizeEventIfNecessary(true);
 
             // the following call also sends gecko a message, which will be processed after the resize
             // message above has updated the viewport. this message ensures that if we have just put
             // focus in a text field, we scroll the content so that the text field is in view.
-            GeckoAppShell.viewSizeChanged();
+            final boolean imeIsEnabled = mView.isIMEEnabled();
+            if (imeIsEnabled && !mImeWasEnabledOnLastResize) {
+                // The IME just came up after not being up, so let's scroll
+                // to the focused input.
+                EventDispatcher.getInstance().dispatch("ScrollTo:FocusedInput", null);
+            }
+            mImeWasEnabledOnLastResize = imeIsEnabled;
         }
         return true;
     }
 
     PanZoomController getPanZoomController() {
         return mPanZoomController;
     }
 
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/gfx/LayerView.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/gfx/LayerView.java
@@ -819,10 +819,14 @@ public class LayerView extends FrameLayo
             });
             return;
         }
 
         mDefaultClearColor = color;
         if (isCompositorReady()) {
             mCompositor.setDefaultClearColor(mDefaultClearColor);
         }
-   }
+    }
+
+    public boolean isIMEEnabled() {
+        return false;
+    }
 }
deleted file mode 100644
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/DelayedWorkTracker.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-package org.mozilla.gecko.sync;
-
-import org.mozilla.gecko.background.common.log.Logger;
-
-/**
- * A little class to allow us to maintain a count of extant
- * things (in our case, callbacks that need to fire), and
- * some work that we want done when that count hits 0.
- *
- * @author rnewman
- *
- */
-public class DelayedWorkTracker {
-  private static final String LOG_TAG = "DelayedWorkTracker";
-  protected Runnable workItem = null;
-  protected int outstandingCount = 0;
-
-  public int incrementOutstanding() {
-    Logger.trace(LOG_TAG, "Incrementing outstanding.");
-    synchronized(this) {
-      return ++outstandingCount;
-    }
-  }
-  public int decrementOutstanding() {
-    Logger.trace(LOG_TAG, "Decrementing outstanding.");
-    Runnable job = null;
-    int count;
-    synchronized(this) {
-      if ((count = --outstandingCount) == 0 &&
-          workItem != null) {
-        job = workItem;
-        workItem = null;
-      } else {
-        return count;
-      }
-    }
-    job.run();
-    // In case it's changed.
-    return getOutstandingOperations();
-  }
-  public int getOutstandingOperations() {
-    synchronized(this) {
-      return outstandingCount;
-    }
-  }
-  public void delayWorkItem(Runnable item) {
-    Logger.trace(LOG_TAG, "delayWorkItem.");
-    boolean runnableNow = false;
-    synchronized(this) {
-      Logger.trace(LOG_TAG, "outstandingCount: " + outstandingCount);
-      if (outstandingCount == 0) {
-        runnableNow = true;
-      } else {
-        if (workItem != null) {
-          throw new IllegalStateException("Work item already set!");
-        }
-        workItem = item;
-      }
-    }
-    if (runnableNow) {
-      Logger.trace(LOG_TAG, "Running item now.");
-      item.run();
-    }
-  }
-}
\ No newline at end of file
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/repositories/downloaders/BatchingDownloader.java
+++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/repositories/downloaders/BatchingDownloader.java
@@ -7,33 +7,34 @@ package org.mozilla.gecko.sync.repositor
 import android.net.Uri;
 import android.os.SystemClock;
 import android.support.annotation.Nullable;
 import android.support.annotation.VisibleForTesting;
 
 import org.mozilla.gecko.background.common.log.Logger;
 import org.mozilla.gecko.sync.CollectionConcurrentModificationException;
 import org.mozilla.gecko.sync.CryptoRecord;
-import org.mozilla.gecko.sync.DelayedWorkTracker;
 import org.mozilla.gecko.sync.SyncDeadlineReachedException;
 import org.mozilla.gecko.sync.Utils;
 import org.mozilla.gecko.sync.net.AuthHeaderProvider;
 import org.mozilla.gecko.sync.net.SyncResponse;
 import org.mozilla.gecko.sync.net.SyncStorageCollectionRequest;
 import org.mozilla.gecko.sync.net.SyncStorageResponse;
 import org.mozilla.gecko.sync.repositories.RepositorySession;
 import org.mozilla.gecko.sync.repositories.RepositoryStateProvider;
 import org.mozilla.gecko.sync.repositories.delegates.RepositorySessionFetchRecordsDelegate;
 
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 /**
  * Batching Downloader implements batching protocol as supported by Sync 1.5.
  *
  * Downloader's batching behaviour is configured via two parameters, obtained from the repository:
  * - Per-batch limit, which specified how many records may be fetched in an individual GET request.
  * - allowMultipleBatches, which determines if downloader is allowed to perform more than one fetch.
@@ -53,31 +54,32 @@ import java.util.concurrent.TimeUnit;
  * header. Server will ensure that our collection did not change while we are batching, if it did it will
  * fail our fetch with a 412 error. Additionally, we perform the same checks locally.
  */
 public class BatchingDownloader {
     public static final String LOG_TAG = "BatchingDownloader";
     private static final String DEFAULT_SORT_ORDER = "index";
 
     private final RepositorySession repositorySession;
-    private final DelayedWorkTracker workTracker = new DelayedWorkTracker();
     private final Uri baseCollectionUri;
     private final long fetchDeadline;
     private final boolean allowMultipleBatches;
     private final boolean keepTrackOfHighWaterMark;
 
     private RepositoryStateProvider stateProvider;
 
     /* package-local */ final AuthHeaderProvider authHeaderProvider;
 
     // Used to track outstanding requests, so that we can abort them as needed.
     @VisibleForTesting
     protected final Set<SyncStorageCollectionRequest> pending = Collections.synchronizedSet(new HashSet<SyncStorageCollectionRequest>());
     /* @GuardedBy("this") */ private String lastModified;
 
+    private final ExecutorService taskQueue = Executors.newSingleThreadExecutor();
+
     public BatchingDownloader(
             AuthHeaderProvider authHeaderProvider,
             Uri baseCollectionUri,
             long fetchDeadline,
             boolean allowMultipleBatches,
             boolean keepTrackOfHighWaterMark,
             RepositoryStateProvider stateProvider,
             RepositorySession repositorySession) {
@@ -86,17 +88,17 @@ public class BatchingDownloader {
         this.baseCollectionUri = baseCollectionUri;
         this.allowMultipleBatches = allowMultipleBatches;
         this.keepTrackOfHighWaterMark = keepTrackOfHighWaterMark;
         this.fetchDeadline = fetchDeadline;
         this.stateProvider = stateProvider;
     }
 
     @VisibleForTesting
-    protected static String flattenIDs(String[] guids) {
+    /* package-private */ static String flattenIDs(String[] guids) {
         // Consider using Utils.toDelimitedString if and when the signature changes
         // to Collection<String> guids.
         if (guids.length == 0) {
             return "";
         }
         if (guids.length == 1) {
             return guids[0];
         }
@@ -105,28 +107,33 @@ public class BatchingDownloader {
         for (String guid : guids) {
             b.append(guid);
             b.append(",");
         }
         return b.substring(0, b.length() - 1);
     }
 
     @VisibleForTesting
-    protected void fetchWithParameters(long newer,
-                                    long batchLimit,
-                                    boolean full,
-                                    String sort,
-                                    String ids,
-                                    SyncStorageCollectionRequest request,
-                                    RepositorySessionFetchRecordsDelegate fetchRecordsDelegate)
+    protected void fetchWithParameters(final long newer,
+                                    final long batchLimit,
+                                    final boolean full,
+                                    final String sort,
+                                    final String ids,
+                                    final SyncStorageCollectionRequest request,
+                                    final RepositorySessionFetchRecordsDelegate fetchRecordsDelegate)
             throws URISyntaxException, UnsupportedEncodingException {
-        request.delegate = new BatchingDownloaderDelegate(this, fetchRecordsDelegate, request,
-                newer, batchLimit, full, sort, ids);
-        this.pending.add(request);
-        request.get();
+        runTaskOnQueue(new Runnable() {
+            @Override
+            public void run() {
+                request.delegate = new BatchingDownloaderDelegate(BatchingDownloader.this, fetchRecordsDelegate, request,
+                        newer, batchLimit, full, sort, ids);
+                pending.add(request);
+                request.get();
+            }
+        });
     }
 
     @VisibleForTesting
     protected SyncStorageCollectionRequest makeSyncStorageCollectionRequest(long newer,
                                                   long batchLimit,
                                                   boolean full,
                                                   String sort,
                                                   String ids,
@@ -210,20 +217,20 @@ public class BatchingDownloader {
             // sync we'll erroneously try to resume downloading. If resume proceeds, we will fetch
             // from an older timestamp, but offset by the amount of records we've fetched prior.
             // Since we're diligent about setting a X-I-U-S header, any remote collection changes
             // will be caught and we'll receive a 412.
             if (!BatchingDownloaderController.resetResumeContextAndCommit(this.stateProvider)) {
                 Logger.warn(LOG_TAG, "Failed to reset resume context while completing a batch");
             }
 
-            this.workTracker.delayWorkItem(new Runnable() {
+            runTaskOnQueue(new Runnable() {
                 @Override
                 public void run() {
-                    Logger.debug(LOG_TAG, "Delayed onFetchCompleted running.");
+                    Logger.debug(LOG_TAG, "onFetchCompleted running.");
                     fetchRecordsDelegate.onFetchCompleted();
                 }
             });
             return;
         }
 
         // This is unfortunate, but largely just means that in case we need to resume later on, it
         // either won't be possible (and we'll fetch w/o resuming), or won't be as efficient (i.e.
@@ -235,19 +242,20 @@ public class BatchingDownloader {
         } else {
             if (!BatchingDownloaderController.setInitialResumeContextAndCommit(this.stateProvider, offset, newer, sort)) {
                 Logger.warn(LOG_TAG, "Failed to set initial resume context while processing a batch.");
             }
         }
 
         // We need to make another batching request!
         // Let the delegate know that a batch fetch just completed before we proceed.
-        // This operation needs to run after every call to onFetchedRecord for this batch has been
-        // processed, hence the delayWorkItem call.
-        this.workTracker.delayWorkItem(new Runnable() {
+        // Beware that while this operation will run after every call to onFetchedRecord has returned,
+        // it's not guaranteed that the 'sink' session has actually processed all of the fetched records.
+        // See https://bugzilla.mozilla.org/show_bug.cgi?id=1351673#c28 for details.
+        runTaskOnQueue(new Runnable() {
             @Override
             public void run() {
                 Logger.debug(LOG_TAG, "Running onBatchCompleted.");
                 fetchRecordsDelegate.onBatchCompleted();
             }
         });
 
         // Should we proceed, however? Do we have enough time?
@@ -260,26 +268,31 @@ public class BatchingDownloader {
         try {
             final SyncStorageCollectionRequest newRequest = makeSyncStorageCollectionRequest(newer,
                     limit, full, sort, ids, offset);
             this.fetchWithParameters(newer, limit, full, sort, ids, newRequest, fetchRecordsDelegate);
         } catch (final URISyntaxException | UnsupportedEncodingException e) {
             if (!this.stateProvider.commit()) {
                 Logger.warn(LOG_TAG, "Failed to commit repository state while handling request creation error");
             }
-            this.workTracker.delayWorkItem(new Runnable() {
+            runTaskOnQueue(new Runnable() {
                 @Override
                 public void run() {
-                    Logger.debug(LOG_TAG, "Delayed onFetchCompleted running.");
+                    Logger.debug(LOG_TAG, "onFetchFailed running.");
                     fetchRecordsDelegate.onFetchFailed(e);
                 }
             });
         }
     }
 
+    @VisibleForTesting
+    /* package-private */ void runTaskOnQueue(Runnable task) {
+        taskQueue.execute(task);
+    }
+
     private void handleFetchFailed(final RepositorySessionFetchRecordsDelegate fetchRecordsDelegate,
                                   final Exception ex) {
         handleFetchFailed(fetchRecordsDelegate, ex, null);
     }
 
     /* package-local */ void handleFetchFailed(final RepositorySessionFetchRecordsDelegate fetchRecordsDelegate,
                               final Exception ex,
                               @Nullable final SyncStorageCollectionRequest request) {
@@ -299,40 +312,36 @@ public class BatchingDownloader {
         } else {
             // Failing to commit the context here means that we didn't commit the latest high-water-mark,
             // and won't be as efficient once we re-sync. That is, we might download more records than necessary.
             if (!this.stateProvider.commit()) {
                 Logger.warn(LOG_TAG, "Failed to commit resume context while processing a deadline exception");
             }
         }
 
-        this.workTracker.delayWorkItem(new Runnable() {
+        runTaskOnQueue(new Runnable() {
             @Override
             public void run() {
                 Logger.debug(LOG_TAG, "Running onFetchFailed.");
                 fetchRecordsDelegate.onFetchFailed(ex);
             }
         });
     }
 
     public void onFetchedRecord(CryptoRecord record,
                                 RepositorySessionFetchRecordsDelegate fetchRecordsDelegate) {
-        this.workTracker.incrementOutstanding();
-
         try {
             fetchRecordsDelegate.onFetchedRecord(record);
             // NB: changes to stateProvider are committed in either onFetchCompleted or handleFetchFailed.
             if (this.keepTrackOfHighWaterMark) {
                 this.stateProvider.setLong(RepositoryStateProvider.KEY_HIGH_WATER_MARK, record.lastModified);
             }
         } catch (Exception ex) {
             Logger.warn(LOG_TAG, "Got exception calling onFetchedRecord with WBO.", ex);
             throw new RuntimeException(ex);
-        } finally {
-            this.workTracker.decrementOutstanding();
         }
     }
 
     private void removeRequestFromPending(SyncStorageCollectionRequest request) {
         if (request == null) {
             return;
         }
         this.pending.remove(request);
@@ -357,17 +366,17 @@ public class BatchingDownloader {
     private static boolean mayProceedWithBatching(long deadline) {
         // For simplicity, allow batching to proceed if there's at least a minute left for the sync.
         // This should be enough to fetch and process records in the batch.
         final long timeLeft = deadline - SystemClock.elapsedRealtime();
         return timeLeft > TimeUnit.MINUTES.toMillis(1);
     }
 
     @VisibleForTesting
-    public static URI buildCollectionURI(Uri baseCollectionUri, boolean full, long newer, long limit, String sort, String ids, String offset) throws URISyntaxException {
+    /* package-private */ static URI buildCollectionURI(Uri baseCollectionUri, boolean full, long newer, long limit, String sort, String ids, String offset) throws URISyntaxException {
         Uri.Builder uriBuilder = baseCollectionUri.buildUpon();
 
         if (full) {
             uriBuilder.appendQueryParameter("full", "1");
         }
 
         if (newer >= 0) {
             // Translate local millisecond timestamps into server decimal seconds.
--- a/mobile/android/tests/background/junit4/src/org/mozilla/gecko/sync/repositories/downloaders/BatchingDownloaderTest.java
+++ b/mobile/android/tests/background/junit4/src/org/mozilla/gecko/sync/repositories/downloaders/BatchingDownloaderTest.java
@@ -209,16 +209,21 @@ public class BatchingDownloaderTest {
                       boolean full,
                       String sort,
                       String ids,
                       String offset)
                 throws URISyntaxException, UnsupportedEncodingException {
             this.offset = offset;
             return super.makeSyncStorageCollectionRequest(newer, batchLimit, full, sort, ids, offset);
         }
+
+        @Override
+        void runTaskOnQueue(Runnable task) {
+            task.run();
+        }
     }
 
     static class MockSever15Repository extends Server15Repository {
         MockSever15Repository(@NonNull String collection, @NonNull String storageURL,
                                      AuthHeaderProvider authHeaderProvider, @NonNull InfoCollections infoCollections,
                                      @NonNull InfoConfiguration infoConfiguration) throws URISyntaxException {
             super(collection, SystemClock.elapsedRealtime() + TimeUnit.MINUTES.toMillis(30),
                     storageURL, authHeaderProvider, infoCollections, infoConfiguration,
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/FennecNativeActions.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/FennecNativeActions.java
@@ -351,17 +351,17 @@ public class FennecNativeActions impleme
         private boolean mPaintDone;
         private boolean mListening;
 
         private final LayerView mLayerView;
         private final DrawListener mDrawListener;
 
         PaintExpecter() {
             final PaintExpecter expecter = this;
-            mLayerView = GeckoAppShell.getLayerView();
+            mLayerView = (LayerView) mSolo.getView(R.id.layer_view);
             mDrawListener = new DrawListener() {
                 @Override
                 public void drawFinished() {
                     FennecNativeDriver.log(FennecNativeDriver.LogLevel.DEBUG,
                             "Received drawFinished notification");
                     expecter.notifyOfEvent();
                 }
             };
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/BaseTest.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/BaseTest.java
@@ -351,17 +351,18 @@ abstract class BaseTest extends BaseRobo
         if (!foundText) {
             if ((mScreenMidWidth == 0) || (mScreenMidHeight == 0)) {
                 mScreenMidWidth = mDriver.getGeckoWidth()/2;
                 mScreenMidHeight = mDriver.getGeckoHeight()/2;
             }
 
             // If we don't see the item, scroll down once in case it's off-screen.
             // Hacky way to scroll down.  solo.scroll* does not work in dialogs.
-            MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mDriver.getGeckoLeft(), mDriver.getGeckoTop());
+            MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mSolo,
+                                                          mDriver.getGeckoLeft(), mDriver.getGeckoTop());
             meh.dragSync(mScreenMidWidth, mScreenMidHeight+100, mScreenMidWidth, mScreenMidHeight-100);
 
             foundText = mSolo.waitForText(txt);
         }
         return foundText;
     }
 
     /**
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/MotionEventHelper.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/MotionEventHelper.java
@@ -1,17 +1,20 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 package org.mozilla.gecko.tests;
 
 import org.mozilla.gecko.GeckoAppShell;
+import org.mozilla.gecko.PrefsHelper;
+import org.mozilla.gecko.R;
 import org.mozilla.gecko.gfx.LayerView;
-import org.mozilla.gecko.PrefsHelper;
+
+import com.robotium.solo.Solo;
 
 import android.app.Instrumentation;
 import android.os.SystemClock;
 import android.util.Log;
 import android.view.MotionEvent;
 
 class MotionEventHelper {
     private static final String LOGTAG = "RobocopMotionEventHelper";
@@ -21,21 +24,22 @@ class MotionEventHelper {
     private final Instrumentation mInstrumentation;
     private final int mSurfaceOffsetX;
     private final int mSurfaceOffsetY;
     private final LayerView layerView;
     private boolean mApzEnabled;
     private float mTouchStartTolerance;
     private final int mDpi;
 
-    public MotionEventHelper(Instrumentation inst, int surfaceOffsetX, int surfaceOffsetY) {
+    public MotionEventHelper(Instrumentation inst, Solo solo,
+                             int surfaceOffsetX, int surfaceOffsetY) {
         mInstrumentation = inst;
         mSurfaceOffsetX = surfaceOffsetX;
         mSurfaceOffsetY = surfaceOffsetY;
-        layerView = GeckoAppShell.getLayerView();
+        layerView = (LayerView) solo.getView(R.id.layer_view);
         mApzEnabled = false;
         mTouchStartTolerance = 0.0f;
         mDpi = GeckoAppShell.getDpi();
         Log.i(LOGTAG, "Initialized using offset (" + mSurfaceOffsetX + "," + mSurfaceOffsetY + ")");
         PrefsHelper.getPref("layers.async-pan-zoom.enabled", new PrefsHelper.PrefHandlerBase() {
             @Override public void prefValue(String pref, boolean value) {
                 mApzEnabled = value;
             }
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testAxisLocking.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testAxisLocking.java
@@ -14,17 +14,18 @@ import org.mozilla.gecko.PaintedSurface;
  * - Verify that the 5-degree angle was thrown out and it dragged vertically
  * - Drag page upwards at a 45-degree angle
  * - Verify that the 45-degree angle was not thrown out and it dragged diagonally
  */
 public class testAxisLocking extends PixelTest {
     public void testAxisLocking() {
         String url = getAbsoluteUrl(mStringHelper.ROBOCOP_BOXES_URL);
 
-        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mDriver.getGeckoLeft(), mDriver.getGeckoTop());
+        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mSolo,
+                                                      mDriver.getGeckoLeft(), mDriver.getGeckoTop());
 
         blockForGeckoReady();
 
         // load page and check we're at 0,0
         loadAndVerifyBoxes(url);
 
         // drag page upwards by 100 pixels with a slight angle. verify that
         // axis locking prevents any horizontal scrolling
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testFlingCorrectness.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testFlingCorrectness.java
@@ -12,17 +12,18 @@ import org.mozilla.gecko.PaintedSurface;
  * - Loads a page and verifies it draws
  * - Drags page upwards by 200 pixels to get ready for a fling
  * - Fling the page downwards so we get back to the top and verify.
  */
 public class testFlingCorrectness extends PixelTest {
     public void testFlingCorrectness() {
         String url = getAbsoluteUrl(mStringHelper.ROBOCOP_BOXES_URL);
 
-        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mDriver.getGeckoLeft(), mDriver.getGeckoTop());
+        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mSolo,
+                                                      mDriver.getGeckoLeft(), mDriver.getGeckoTop());
 
         blockForGeckoReady();
 
         // load page and check we're at 0,0
         loadAndVerifyBoxes(url);
 
         // drag page upwards by 200 pixels (use two drags instead of one in case
         // the screen size is small)
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testPanCorrectness.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testPanCorrectness.java
@@ -12,17 +12,18 @@ import org.mozilla.gecko.PaintedSurface;
  * - Loads a page and verifies it draws
  * - drags page upwards by 100 pixels and verifies it draws
  * - drags page leftwards by 100 pixels and verifies it draws
  */
 public class testPanCorrectness extends PixelTest {
     public void testPanCorrectness() {
         String url = getAbsoluteUrl(mStringHelper.ROBOCOP_BOXES_URL);
 
-        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mDriver.getGeckoLeft(), mDriver.getGeckoTop());
+        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mSolo,
+                                                      mDriver.getGeckoLeft(), mDriver.getGeckoTop());
 
         blockForGeckoReady();
 
         // load page and check we're at 0,0
         loadAndVerifyBoxes(url);
 
         // drag page upwards by 100 pixels
         Actions.RepeatedEventExpecter paintExpecter = mActions.expectPaint();
--- a/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testVkbOverlap.java
+++ b/mobile/android/tests/browser/robocop/src/org/mozilla/gecko/tests/testVkbOverlap.java
@@ -24,17 +24,18 @@ public class testVkbOverlap extends Pixe
         testSetup("", "phone".equals(mDevice.type));
     }
 
     private void testSetup(String viewport, boolean shouldZoom) {
         loadAndPaint(getAbsoluteUrl("/robocop/test_viewport.sjs?metadata=" + Uri.encode(viewport)));
 
         // scroll to the bottom of the page and let it settle
         Actions.RepeatedEventExpecter paintExpecter = mActions.expectPaint();
-        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mDriver.getGeckoLeft(), mDriver.getGeckoTop());
+        MotionEventHelper meh = new MotionEventHelper(getInstrumentation(), mSolo,
+                                                      mDriver.getGeckoLeft(), mDriver.getGeckoTop());
         meh.dragSync(10, 150, 10, 50);
 
         // the input field has a green background, so let's count the number of green pixels
         int greenPixelCount = 0;
 
         PaintedSurface painted = waitForPaint(paintExpecter);
         paintExpecter.unregisterListener();
         try {
--- a/services/fxaccounts/tests/xpcshell/test_push_service.js
+++ b/services/fxaccounts/tests/xpcshell/test_push_service.js
@@ -182,21 +182,30 @@ add_task(async function observePushTopic
         }
       })
     },
     QueryInterface() {
       return this;
     }
   };
 
+  let signoutCalled = false;
   let { FxAccounts } = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
-  const fxAccountsMock = new FxAccounts({});
-  fxAccountsMock.internal.currentAccountState.getUserAccountData = async () => {
-    return { deviceId };
-  };
+  const fxAccountsMock = new FxAccounts({
+    newAccountState() {
+      return {
+        async getUserAccountData() {
+          return { deviceId };
+        }
+      }
+    },
+    signOut() {
+      signoutCalled = true;
+    }
+  });
 
   const deviceDisconnectedNotificationObserved = new Promise(resolve => {
     Services.obs.addObserver(function obs(subject, topic, data) {
       Services.obs.removeObserver(obs, topic);
       equal(data, JSON.stringify({ isLocalDevice: true }));
       resolve();
     }, ON_DEVICE_DISCONNECTED_NOTIFICATION);
   });
@@ -204,16 +213,17 @@ add_task(async function observePushTopic
   let pushService = new FxAccountsPushService({
     pushService: mockPushService,
     fxAccounts: fxAccountsMock,
   });
 
   pushService.observe(msg, mockPushService.pushTopic, FXA_PUSH_SCOPE_ACCOUNT_UPDATE);
 
   await deviceDisconnectedNotificationObserved;
+  ok(signoutCalled);
 });
 
 add_task(async function observePushTopicDeviceDisconnected_another_device() {
   const deviceId = "bogusid";
   let msg = {
     data: {
       json: () => ({
         command: ON_DEVICE_DISCONNECTED_NOTIFICATION,
@@ -222,21 +232,30 @@ add_task(async function observePushTopic
         }
       })
     },
     QueryInterface() {
       return this;
     }
   };
 
+  let signoutCalled = false;
   let { FxAccounts } = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
-  const fxAccountsMock = new FxAccounts({});
-  fxAccountsMock.internal.currentAccountState.getUserAccountData = async () => {
-    return { deviceId: "thelocaldevice" };
-  };
+  const fxAccountsMock = new FxAccounts({
+    newAccountState() {
+      return {
+        async getUserAccountData() {
+          return { deviceId: "thelocaldevice" };
+        }
+      }
+    },
+    signOut() {
+      signoutCalled = true;
+    }
+  });
 
   const deviceDisconnectedNotificationObserved = new Promise(resolve => {
     Services.obs.addObserver(function obs(subject, topic, data) {
       Services.obs.removeObserver(obs, topic);
       equal(data, JSON.stringify({ isLocalDevice: false }));
       resolve();
     }, ON_DEVICE_DISCONNECTED_NOTIFICATION);
   });
@@ -244,16 +263,17 @@ add_task(async function observePushTopic
   let pushService = new FxAccountsPushService({
     pushService: mockPushService,
     fxAccounts: fxAccountsMock,
   });
 
   pushService.observe(msg, mockPushService.pushTopic, FXA_PUSH_SCOPE_ACCOUNT_UPDATE);
 
   await deviceDisconnectedNotificationObserved;
+  ok(!signoutCalled);
 });
 
 add_test(function observePushTopicAccountDestroyed() {
   const uid = "bogusuid";
   let msg = {
     data: {
       json: () => ({
         command: ON_ACCOUNT_DESTROYED_NOTIFICATION,
--- a/servo/components/gfx/lib.rs
+++ b/servo/components/gfx/lib.rs
@@ -2,17 +2,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 // For SIMD
 #![cfg_attr(any(target_os = "linux", target_os = "android"), feature(allocator_api))]
 #![feature(box_syntax)]
 #![feature(cfg_target_feature)]
 #![feature(range_contains)]
-#![feature(unique)]
 
 #![deny(unsafe_code)]
 
 extern crate app_units;
 #[macro_use]
 extern crate bitflags;
 
 // Mac OS-specific library dependencies
--- a/servo/components/gfx/text/shaping/harfbuzz.rs
+++ b/servo/components/gfx/text/shaping/harfbuzz.rs
@@ -159,17 +159,17 @@ impl Shaper {
             hb_font_set_ppem(hb_font, pt_size as c_uint, pt_size as c_uint);
 
             // Set scaling. Note that this takes 16.16 fixed point.
             hb_font_set_scale(hb_font,
                               Shaper::float_to_fixed(pt_size) as c_int,
                               Shaper::float_to_fixed(pt_size) as c_int);
 
             // configure static function callbacks.
-            hb_font_set_funcs(hb_font, HB_FONT_FUNCS.as_ptr(), font as *mut Font as *mut c_void, None);
+            hb_font_set_funcs(hb_font, HB_FONT_FUNCS.0, font as *mut Font as *mut c_void, None);
 
             Shaper {
                 hb_face: hb_face,
                 hb_font: hb_font,
                 font: font,
             }
         }
     }
@@ -406,27 +406,31 @@ impl Shaper {
             let (length, percent) = options.word_spacing;
             advance = (advance + length) + Au::new((advance.0 as f32 * percent.into_inner()) as i32);
         }
 
         advance
     }
 }
 
-// Callbacks from Harfbuzz when font map and glyph advance lookup needed.
+/// Callbacks from Harfbuzz when font map and glyph advance lookup needed.
+struct FontFuncs(*mut hb_font_funcs_t);
+
+unsafe impl Sync for FontFuncs {}
+
 lazy_static! {
-    static ref HB_FONT_FUNCS: ptr::Unique<hb_font_funcs_t> = unsafe {
+    static ref HB_FONT_FUNCS: FontFuncs = unsafe {
         let hb_funcs = hb_font_funcs_create();
         hb_font_funcs_set_glyph_func(hb_funcs, Some(glyph_func), ptr::null_mut(), None);
         hb_font_funcs_set_glyph_h_advance_func(
             hb_funcs, Some(glyph_h_advance_func), ptr::null_mut(), None);
         hb_font_funcs_set_glyph_h_kerning_func(
             hb_funcs, Some(glyph_h_kerning_func), ptr::null_mut(), None);
 
-        ptr::Unique::new_unchecked(hb_funcs)
+        FontFuncs(hb_funcs)
     };
 }
 
 extern fn glyph_func(_: *mut hb_font_t,
                      font_data: *mut c_void,
                      unicode: hb_codepoint_t,
                      _: hb_codepoint_t,
                      glyph: *mut hb_codepoint_t,
--- a/servo/components/script/lib.rs
+++ b/servo/components/script/lib.rs
@@ -9,19 +9,17 @@
 #![feature(const_ptr_null)]
 #![feature(const_ptr_null_mut)]
 #![feature(core_intrinsics)]
 #![feature(mpsc_select)]
 #![feature(nonzero)]
 #![feature(on_unimplemented)]
 #![feature(plugin)]
 #![feature(proc_macro)]
-#![feature(stmt_expr_attributes)]
 #![feature(try_from)]
-#![feature(unboxed_closures)]
 #![feature(untagged_unions)]
 
 #![deny(unsafe_code)]
 #![allow(non_snake_case)]
 
 #![doc = "The script crate contains all matters DOM."]
 
 #![plugin(script_plugins)]
--- a/servo/components/script_plugins/lib.rs
+++ b/servo/components/script_plugins/lib.rs
@@ -10,17 +10,20 @@
 //!  - `#[derive(JSTraceable)]` : Auto-derives an implementation of `JSTraceable` for a struct in the script crate
 //!  - `#[must_root]` : Prevents data of the marked type from being used on the stack.
 //!                     See the lints module for more details
 //!  - `#[dom_struct]` : Implies #[derive(JSTraceable, DenyPublicFields)]`, and `#[must_root]`.
 //!                       Use this for structs that correspond to a DOM type
 
 
 #![deny(unsafe_code)]
-#![feature(box_syntax, plugin, plugin_registrar, rustc_private)]
+#![feature(box_syntax)]
+#![feature(plugin)]
+#![feature(plugin_registrar)]
+#![feature(rustc_private)]
 
 #[macro_use]
 extern crate rustc;
 extern crate rustc_plugin;
 extern crate syntax;
 
 use rustc_plugin::Registry;
 use syntax::feature_gate::AttributeType::Whitelisted;
--- a/servo/components/style/properties/helpers/animated_properties.mako.rs
+++ b/servo/components/style/properties/helpers/animated_properties.mako.rs
@@ -571,16 +571,22 @@ impl AnimationValue {
         initial: &ComputedValues
     ) -> Option<Self> {
         use properties::LonghandId;
 
         match *decl {
             % for prop in data.longhands:
             % if prop.animatable:
             PropertyDeclaration::${prop.camel_case}(ref val) => {
+                context.for_non_inherited_property =
+                    % if prop.style_struct.inherited:
+                        None;
+                    % else:
+                        Some(LonghandId::${prop.camel_case});
+                    % endif
             % if prop.ident in SYSTEM_FONT_LONGHANDS and product == "gecko":
                 if let Some(sf) = val.get_system() {
                     longhands::system_font::resolve_system_font(sf, context);
                 }
             % endif
             % if prop.boxed:
             let computed = (**val).to_computed_value(context);
             % else:
--- a/servo/ports/servo/main.rs
+++ b/servo/ports/servo/main.rs
@@ -10,17 +10,17 @@
 //! This browser's implementation of `WindowMethods` is built on top
 //! of [glutin], the cross-platform OpenGL utility and windowing
 //! library.
 //!
 //! For the engine itself look next door in `components/servo/lib.rs`.
 //!
 //! [glutin]: https://github.com/tomaka/glutin
 
-#![feature(start, core_intrinsics)]
+#![feature(core_intrinsics)]
 
 #[cfg(target_os = "android")]
 extern crate android_injected_glue;
 extern crate backtrace;
 // The window backed by glutin
 extern crate glutin_app as app;
 #[macro_use]
 extern crate log;
--- a/taskcluster/taskgraph/actions/backfill.py
+++ b/taskcluster/taskgraph/actions/backfill.py
@@ -4,16 +4,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 
 import requests
+from requests.exceptions import HTTPError
 
 from .registry import register_callback_action
 from .util import find_decision_task, create_tasks
 from taskgraph.util.taskcluster import get_artifact_from_index
 from taskgraph.taskgraph import TaskGraph
 
 PUSHLOG_TMPL = '{}/json-pushes?version=2&startID={}&endID={}'
 INDEX_TMPL = 'gecko.v2.{}.pushlog-id.{}.decision'
@@ -65,25 +66,29 @@ def backfill_action(parameters, input, t
         end_id = start_id - 1
         start_id -= depth
         if start_id < 0:
             break
 
     pushes = sorted(pushes)[-depth:]
 
     for push in pushes:
-        full_task_graph = get_artifact_from_index(
-                INDEX_TMPL.format(parameters['project'], push),
-                'public/full-task-graph.json')
-        _, full_task_graph = TaskGraph.from_json(full_task_graph)
-        label_to_taskid = get_artifact_from_index(
-                INDEX_TMPL.format(parameters['project'], push),
-                'public/label-to-taskid.json')
-        push_params = get_artifact_from_index(
-                INDEX_TMPL.format(parameters['project'], push),
-                'public/parameters.yml')
-        push_decision_task_id = find_decision_task(push_params)
+        try:
+            full_task_graph = get_artifact_from_index(
+                    INDEX_TMPL.format(parameters['project'], push),
+                    'public/full-task-graph.json')
+            _, full_task_graph = TaskGraph.from_json(full_task_graph)
+            label_to_taskid = get_artifact_from_index(
+                    INDEX_TMPL.format(parameters['project'], push),
+                    'public/label-to-taskid.json')
+            push_params = get_artifact_from_index(
+                    INDEX_TMPL.format(parameters['project'], push),
+                    'public/parameters.yml')
+            push_decision_task_id = find_decision_task(push_params)
+        except HTTPError as e:
+            logger.info('Skipping {} due to missing index artifacts! Error: {}'.format(push, e))
+            continue
 
         if label in full_task_graph.tasks.keys():
             create_tasks(
                     [label], full_task_graph, label_to_taskid, push_params, push_decision_task_id)
         else:
             logging.info('Could not find {} on {}. Skipping.'.format(label, push))
--- a/third_party/python/compare-locales/compare_locales/__init__.py
+++ b/third_party/python/compare-locales/compare_locales/__init__.py
@@ -1,1 +1,1 @@
-version = "1.2.3"
+version = "2.1"
--- a/third_party/python/compare-locales/compare_locales/checks.py
+++ b/third_party/python/compare-locales/compare_locales/checks.py
@@ -1,44 +1,57 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import re
+from collections import Counter
 from difflib import SequenceMatcher
 from xml import sax
 try:
     from cStringIO import StringIO
 except ImportError:
     from StringIO import StringIO
 
-from compare_locales.parser import DTDParser, PropertiesParser
+from compare_locales.parser import DTDParser, PropertiesEntity
 
 
 class Checker(object):
     '''Abstract class to implement checks per file type.
     '''
     pattern = None
+    # if a check uses all reference entities, set this to True
+    needs_reference = False
 
     @classmethod
     def use(cls, file):
         return cls.pattern.match(file.file)
 
+    def __init__(self, extra_tests):
+        self.extra_tests = extra_tests
+        self.reference = None
+
     def check(self, refEnt, l10nEnt):
         '''Given the reference and localized Entities, performs checks.
 
         This is a generator yielding tuples of
         - "warning" or "error", depending on what should be reported,
         - tuple of line, column info for the error within the string
         - description string to be shown in the report
         '''
         if True:
             raise NotImplementedError("Need to subclass")
         yield ("error", (0, 0), "This is an example error", "example")
 
+    def set_reference(self, reference):
+        '''Set the reference entities.
+        Only do this if self.needs_reference is True.
+        '''
+        self.reference = reference
+
 
 class PrintfException(Exception):
     def __init__(self, msg, pos):
         self.pos = pos
         self.msg = msg
 
 
 class PropertiesChecker(Checker):
@@ -53,17 +66,18 @@ class PropertiesChecker(Checker):
 
     def check(self, refEnt, l10nEnt):
         '''Test for the different variable formats.
         '''
         refValue, l10nValue = refEnt.val, l10nEnt.val
         refSpecs = None
         # check for PluralForm.jsm stuff, should have the docs in the
         # comment
-        if 'Localization_and_Plurals' in refEnt.pre_comment:
+        if (refEnt.pre_comment
+                and 'Localization_and_Plurals' in refEnt.pre_comment.all):
             # For plurals, common variable pattern is #1. Try that.
             pats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
                                                             refValue))
             if len(pats) == 0:
                 return
             lpats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
                                                              l10nValue))
             if pats - lpats:
@@ -72,19 +86,19 @@ class PropertiesChecker(Checker):
                 return
             if lpats - pats:
                 yield ('error', 0, 'unreplaced variables in l10n',
                        'plural')
                 return
             return
         # check for lost escapes
         raw_val = l10nEnt.raw_val
-        for m in PropertiesParser.escape.finditer(raw_val):
+        for m in PropertiesEntity.escape.finditer(raw_val):
             if m.group('single') and \
-               m.group('single') not in PropertiesParser.known_escapes:
+               m.group('single') not in PropertiesEntity.known_escapes:
                 yield ('warning', m.start(),
                        'unknown escape sequence, \\' + m.group('single'),
                        'escape')
         try:
             refSpecs = self.getPrintfSpecs(refValue)
         except PrintfException:
             refSpecs = []
         if refSpecs:
@@ -154,20 +168,16 @@ class PropertiesChecker(Checker):
                 pos = int(m.group('number')) - 1
                 ls = len(specs)
                 if pos >= ls:
                     # pad specs
                     nones = pos - ls
                     specs[ls:pos] = nones*[None]
                     specs.append(m.group('spec'))
                 else:
-                    if specs[pos] is not None:
-                        raise PrintfException('Double ordered argument %d' %
-                                              (pos+1),
-                                              m.start())
                     specs[pos] = m.group('spec')
             else:
                 specs.append(m.group('spec'))
         # check for missing args
         if hasNumber and not all(specs):
             raise PrintfException('Ordered argument missing', 0)
         return specs
 
@@ -179,25 +189,29 @@ class DTDChecker(Checker):
 
     The code tries to parse until it doesn't find any unresolved entities
     anymore. If it finds one, it tries to grab the key, and adds an empty
     <!ENTITY key ""> definition to the header.
 
     Also checks for some CSS and number heuristics in the values.
     """
     pattern = re.compile('.*\.dtd$')
+    needs_reference = True  # to cast a wider net for known entity references
 
     eref = re.compile('&(%s);' % DTDParser.Name)
     tmpl = '''<!DOCTYPE elem [%s]>
 <elem>%s</elem>
 '''
     xmllist = set(('amp', 'lt', 'gt', 'apos', 'quot'))
 
-    def __init__(self, reference):
-        self.reference = reference
+    def __init__(self, extra_tests):
+        super(DTDChecker, self).__init__(extra_tests)
+        self.processContent = False
+        if self.extra_tests is not None and 'android-dtd' in self.extra_tests:
+            self.processContent = True
         self.__known_entities = None
 
     def known_entities(self, refValue):
         if self.__known_entities is None and self.reference is not None:
             self.__known_entities = set()
             for ent in self.reference:
                 self.__known_entities.update(self.entities_for_value(ent.val))
         return self.__known_entities if self.__known_entities is not None \
@@ -223,18 +237,16 @@ class DTDChecker(Checker):
     num = re.compile('^%s$' % numPattern)
     lengthPattern = '%s(em|px|ch|cm|in)' % numPattern
     length = re.compile('^%s$' % lengthPattern)
     spec = re.compile(r'((?:min\-)?(?:width|height))\s*:\s*%s' %
                       lengthPattern)
     style = re.compile(r'^%(spec)s\s*(;\s*%(spec)s\s*)*;?$' %
                        {'spec': spec.pattern})
 
-    processContent = None
-
     def check(self, refEnt, l10nEnt):
         """Try to parse the refvalue inside a dummy element, and keep
         track of entities that we need to define to make that work.
 
         Return a checker that offers just those entities.
         """
         refValue, l10nValue = refEnt.val, l10nEnt.val
         # find entities the refValue references,
@@ -258,17 +270,17 @@ class DTDChecker(Checker):
                    (0, 0),
                    "can't parse en-US value", 'xmlparse')
 
         # find entities the l10nValue references,
         # reusing markup from DTDParser.
         l10nlist = self.entities_for_value(l10nValue)
         missing = sorted(l10nlist - reflist)
         _entities = entities + ''.join('<!ENTITY %s "">' % s for s in missing)
-        if self.processContent is not None:
+        if self.processContent:
             self.texthandler.textcontent = ''
             parser.setContentHandler(self.texthandler)
         try:
             parser.parse(StringIO(self.tmpl % (_entities,
                          l10nValue.encode('utf-8'))))
             # also catch stray %
             # if this fails, we need to substract the entity definition
             parser.setContentHandler(self.defaulthandler)
@@ -342,27 +354,20 @@ class DTDChecker(Checker):
                         if u != ru:
                             msgs.append("units for %s don't match "
                                         "(%s != %s)" % (s, u, ru))
                 for s in refMap.iterkeys():
                     msgs.insert(0, '%s only in reference' % s)
                 if msgs:
                     yield ('warning', 0, ', '.join(msgs), 'css')
 
-        if self.processContent is not None:
-            for t in self.processContent(self.texthandler.textcontent):
+        if self.extra_tests is not None and 'android-dtd' in self.extra_tests:
+            for t in self.processAndroidContent(self.texthandler.textcontent):
                 yield t
 
-
-class PrincessAndroid(DTDChecker):
-    """Checker for the string values that Android puts into an XML container.
-
-    http://developer.android.com/guide/topics/resources/string-resource.html#FormattingAndStyling  # noqa
-    has more info. Check for unescaped apostrophes and bad unicode escapes.
-    """
     quoted = re.compile("(?P<q>[\"']).*(?P=q)$")
 
     def unicode_escape(self, str):
         """Helper method to try to decode all unicode escapes in a string.
 
         This code uses the standard python decode for unicode-escape, but
         that's somewhat tricky, as its input needs to be ascii. To get to
         ascii, the unicode string gets converted to ascii with
@@ -380,25 +385,21 @@ class PrincessAndroid(DTDChecker):
         except UnicodeDecodeError, e:
             args = list(e.args)
             badstring = args[1][args[2]:args[3]]
             i = len(args[1][:args[2]].decode('unicode-escape'))
             args[2] = i
             args[3] = i + len(badstring)
             raise UnicodeDecodeError(*args)
 
-    @classmethod
-    def use(cls, file):
-        """Use this Checker only for DTD files in embedding/android."""
-        return (file.module in ("embedding/android",
-                                "mobile/android/base") and
-                cls.pattern.match(file.file))
+    def processAndroidContent(self, val):
+        """Check for the string values that Android puts into an XML container.
 
-    def processContent(self, val):
-        """Actual check code.
+        http://developer.android.com/guide/topics/resources/string-resource.html#FormattingAndStyling  # noqa
+
         Check for unicode escapes and unescaped quotes and apostrophes,
         if string's not quoted.
         """
         # first, try to decode unicode escapes
         try:
             self.unicode_escape(val)
         except UnicodeDecodeError, e:
             yield ('error', e.args[2], e.args[4], 'android')
@@ -423,16 +424,73 @@ class PrincessAndroid(DTDChecker):
                           u"or \\u0022, or put string in apostrophes."
                 else:
                     msg = u"Apostrophes in Android DTDs need escaping with "\
                           u"\\' or \\u0027, or use \u2019, or put string in "\
                           u"quotes."
                 yield ('error', m.end(0)+offset, msg, 'android')
 
 
-def getChecker(file, reference=None):
+class FluentChecker(Checker):
+    '''Tests to run on Fluent (FTL) files.
+    '''
+    pattern = re.compile('.*\.ftl')
+
+    # Positions yielded by FluentChecker.check are absolute offsets from the
+    # beginning of the file.  This is different from the base Checker behavior
+    # which yields offsets from the beginning of the current entity's value.
+    def check(self, refEnt, l10nEnt):
+        ref_entry = refEnt.entry
+        l10n_entry = l10nEnt.entry
+        # verify that values match, either both have a value or none
+        if ref_entry.value is not None and l10n_entry.value is None:
+            yield ('error', l10n_entry.span.start,
+                   'Missing value', 'fluent')
+        if ref_entry.value is None and l10n_entry.value is not None:
+            yield ('error', l10n_entry.value.span.start,
+                   'Obsolete value', 'fluent')
+
+        # verify that we're having the same set of attributes
+        ref_attr_names = set((attr.id.name for attr in ref_entry.attributes))
+        ref_pos = dict((attr.id.name, i)
+                       for i, attr in enumerate(ref_entry.attributes))
+        l10n_attr_counts = \
+            Counter(attr.id.name for attr in l10n_entry.attributes)
+        l10n_attr_names = set(l10n_attr_counts)
+        l10n_pos = dict((attr.id.name, i)
+                        for i, attr in enumerate(l10n_entry.attributes))
+        # check for duplicate Attributes
+        # only warn to not trigger a merge skip
+        for attr_name, cnt in l10n_attr_counts.items():
+            if cnt > 1:
+                yield (
+                    'warning',
+                    l10n_entry.attributes[l10n_pos[attr_name]].span.start,
+                    'Attribute "{}" occurs {} times'.format(
+                        attr_name, cnt),
+                    'fluent')
+
+        missing_attr_names = sorted(ref_attr_names - l10n_attr_names,
+                                    key=lambda k: ref_pos[k])
+        for attr_name in missing_attr_names:
+            yield ('error', l10n_entry.span.start,
+                   'Missing attribute: ' + attr_name, 'fluent')
+
+        obsolete_attr_names = sorted(l10n_attr_names - ref_attr_names,
+                                     key=lambda k: l10n_pos[k])
+        obsolete_attrs = [
+            attr
+            for attr in l10n_entry.attributes
+            if attr.id.name in obsolete_attr_names
+        ]
+        for attr in obsolete_attrs:
+            yield ('error', attr.span.start,
+                   'Obsolete attribute: ' + attr.id.name, 'fluent')
+
+
+def getChecker(file, extra_tests=None):
     if PropertiesChecker.use(file):
-        return PropertiesChecker()
-    if PrincessAndroid.use(file):
-        return PrincessAndroid(reference)
+        return PropertiesChecker(extra_tests)
     if DTDChecker.use(file):
-        return DTDChecker(reference)
+        return DTDChecker(extra_tests)
+    if FluentChecker.use(file):
+        return FluentChecker(extra_tests)
     return None
--- a/third_party/python/compare-locales/compare_locales/commands.py
+++ b/third_party/python/compare-locales/compare_locales/commands.py
@@ -1,28 +1,30 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 'Commands exposed to commandlines'
 
 import logging
 from argparse import ArgumentParser
+import os
 
 from compare_locales import version
-from compare_locales.paths import EnumerateApp
-from compare_locales.compare import compareApp, compareDirs
-from compare_locales.webapps import compare_web_app
+from compare_locales.paths import EnumerateApp, TOMLParser, ConfigNotFound
+from compare_locales.compare import compareProjects, Observer
 
 
-class BaseCommand(object):
-    """Base class for compare-locales commands.
-    This handles command line parsing, and general sugar for setuptools
-    entry_points.
-    """
+class CompareLocales(object):
+    """Check the localization status of gecko applications.
+The first arguments are paths to the l10n.ini or toml files for the
+applications, followed by the base directory of the localization repositories.
+Then you pass in the list of locale codes you want to compare. If no
+locales are given, the list of locales will be taken from the l10n.toml file
+or the all-locales file referenced by the application\'s l10n.ini."""
 
     def __init__(self):
         self.parser = None
 
     def get_parser(self):
         """Get an ArgumentParser, with class docstring as description.
         """
         parser = ArgumentParser(description=self.__doc__)
@@ -30,126 +32,135 @@ class BaseCommand(object):
                             version='%(prog)s ' + version)
         parser.add_argument('-v', '--verbose', action='count', dest='v',
                             default=0, help='Make more noise')
         parser.add_argument('-q', '--quiet', action='count', dest='q',
                             default=0, help='Make less noise')
         parser.add_argument('-m', '--merge',
                             help='''Use this directory to stage merged files,
 use {ab_CD} to specify a different directory for each locale''')
-        return parser
-
-    def add_data_argument(self, parser):
+        parser.add_argument('config_paths', metavar='l10n.toml', nargs='+',
+                            help='TOML or INI file for the project')
+        parser.add_argument('l10n_base_dir', metavar='l10n-base-dir',
+                            help='Parent directory of localizations')
+        parser.add_argument('locales', nargs='*', metavar='locale-code',
+                            help='Locale code and top-level directory of '
+                                 'each localization')
+        parser.add_argument('-D', action='append', metavar='var=value',
+                            default=[], dest='defines',
+                            help='Overwrite variables in TOML files')
+        parser.add_argument('--unified', action="store_true",
+                            help="Show output for all projects unified")
+        parser.add_argument('--full', action="store_true",
+                            help="Compare projects that are disabled")
+        parser.add_argument('--clobber-merge', action="store_true",
+                            default=False, dest='clobber',
+                            help="""WARNING: DATALOSS.
+Use this option with care. If specified, the merge directory will
+be clobbered for each module. That means, the subdirectory will
+be completely removed, any files that were there are lost.
+Be careful to specify the right merge directory when using this option.""")
         parser.add_argument('--data', choices=['text', 'exhibit', 'json'],
                             default='text',
                             help='''Choose data and format (one of text,
 exhibit, json); text: (default) Show which files miss which strings, together
 with warnings and errors. Also prints a summary; json: Serialize the internal
 tree, useful for tools. Also always succeeds; exhibit: Serialize the summary
 data in a json useful for Exhibit
 ''')
+        return parser
 
     @classmethod
     def call(cls):
         """Entry_point for setuptools.
         The actual command handling is done in the handle() method of the
         subclasses.
         """
         cmd = cls()
-        cmd.handle_()
+        return cmd.handle_()
 
     def handle_(self):
         """The instance part of the classmethod call."""
         self.parser = self.get_parser()
         args = self.parser.parse_args()
         # log as verbose or quiet as we want, warn by default
         logging.basicConfig()
         logging.getLogger().setLevel(logging.WARNING -
                                      (args.v - args.q) * 10)
-        observer = self.handle(args)
-        print observer.serialize(type=args.data).encode('utf-8', 'replace')
-
-    def handle(self, args):
-        """Subclasses need to implement this method for the actual
-        command handling.
-        """
-        raise NotImplementedError
-
-
-class CompareLocales(BaseCommand):
-    """Check the localization status of a gecko application.
-The first argument is a path to the l10n.ini file for the application,
-followed by the base directory of the localization repositories.
-Then you pass in the list of locale codes you want to compare. If there are
-not locales given, the list of locales will be taken from the all-locales file
-of the application\'s l10n.ini."""
+        kwargs = vars(args)
+        # strip handled arguments
+        kwargs.pop('q')
+        kwargs.pop('v')
+        return self.handle(**kwargs)
 
-    def get_parser(self):
-        parser = super(CompareLocales, self).get_parser()
-        parser.add_argument('ini_file', metavar='l10n.ini',
-                            help='INI file for the project')
-        parser.add_argument('l10n_base_dir', metavar='l10n-base-dir',
-                            help='Parent directory of localizations')
-        parser.add_argument('locales', nargs='*', metavar='locale-code',
-                            help='Locale code and top-level directory of '
-                                 'each localization')
-        parser.add_argument('--clobber-merge', action="store_true",
-                            default=False, dest='clobber',
-                            help="""WARNING: DATALOSS.
-Use this option with care. If specified, the merge directory will
-be clobbered for each module. That means, the subdirectory will
-be completely removed, any files that were there are lost.
-Be careful to specify the right merge directory when using this option.""")
-        parser.add_argument('-r', '--reference', default='en-US',
-                            dest='reference',
-                            help='Explicitly set the reference '
-                            'localization. [default: en-US]')
-        self.add_data_argument(parser)
-        return parser
-
-    def handle(self, args):
-        app = EnumerateApp(args.ini_file, args.l10n_base_dir, args.locales)
-        app.reference = args.reference
+    def handle(self, config_paths, l10n_base_dir, locales,
+               merge=None, defines=None, unified=False, full=False,
+               clobber=False, data='text'):
+        # using nargs multiple times in argparser totally screws things
+        # up, repair that.
+        # First files are configs, then the base dir, everything else is
+        # locales
+        all_args = config_paths + [l10n_base_dir] + locales
+        config_paths = []
+        locales = []
+        if defines is None:
+            defines = []
+        while all_args and not os.path.isdir(all_args[0]):
+            config_paths.append(all_args.pop(0))
+        if not config_paths:
+            self.parser.error('no configuration file given')
+        for cf in config_paths:
+            if not os.path.isfile(cf):
+                self.parser.error('config file %s not found' % cf)
+        if not all_args:
+            self.parser.error('l10n-base-dir not found')
+        l10n_base_dir = all_args.pop(0)
+        locales.extend(all_args)
+        # when we compare disabled projects, we set our locales
+        # on all subconfigs, so deep is True.
+        locales_deep = full
+        configs = []
+        config_env = {}
+        for define in defines:
+            var, _, value = define.partition('=')
+            config_env[var] = value
+        for config_path in config_paths:
+            if config_path.endswith('.toml'):
+                try:
+                    config = TOMLParser.parse(config_path, env=config_env)
+                except ConfigNotFound as e:
+                    self.parser.exit('config file %s not found' % e.filename)
+                config.add_global_environment(l10n_base=l10n_base_dir)
+                if locales:
+                    config.set_locales(locales, deep=locales_deep)
+                configs.append(config)
+            else:
+                app = EnumerateApp(
+                    config_path, l10n_base_dir, locales)
+                configs.append(app.asConfig())
         try:
-            observer = compareApp(app, merge_stage=args.merge,
-                                  clobber=args.clobber)
+            unified_observer = None
+            if unified:
+                unified_observer = Observer()
+            observers = compareProjects(
+                configs,
+                stat_observer=unified_observer,
+                merge_stage=merge, clobber_merge=clobber)
         except (OSError, IOError), exc:
             print "FAIL: " + str(exc)
             self.parser.exit(2)
-        return observer
-
-
-class CompareDirs(BaseCommand):
-    """Check the localization status of a directory tree.
-The first argument is a path to the reference data,the second is the
-localization to be tested."""
-
-    def get_parser(self):
-        parser = super(CompareDirs, self).get_parser()
-        parser.add_argument('reference')
-        parser.add_argument('localization')
-        self.add_data_argument(parser)
-        return parser
-
-    def handle(self, args):
-        observer = compareDirs(args.reference, args.localization,
-                               merge_stage=args.merge)
-        return observer
+        if unified:
+            observers = [unified_observer]
 
-
-class CompareWebApp(BaseCommand):
-    """Check the localization status of a gaia-style web app.
-The first argument is the directory of the web app.
-Following arguments explicitly state the locales to test.
-If none are given, test all locales in manifest.webapp or files."""
-
-    def get_parser(self):
-        parser = super(CompareWebApp, self).get_parser()
-        parser.add_argument('webapp')
-        parser.add_argument('locales', nargs='*', metavar='locale-code',
-                            help='Locale code and top-level directory of '
-                                 'each localization')
-        self.add_data_argument(parser)
-        return parser
-
-    def handle(self, args):
-        observer = compare_web_app(args.webapp, args.locales)
-        return observer
+        rv = 0
+        for observer in observers:
+            print observer.serialize(type=data).encode('utf-8', 'replace')
+            # summary is a dict of lang-summary dicts
+            # find out if any of our results has errors, return 1 if so
+            if rv > 0:
+                continue  # we already have errors
+            for loc, summary in observer.summary.items():
+                if summary.get('errors', 0) > 0:
+                    rv = 1
+                    # no need to check further summaries, but
+                    # continue to run through observers
+                    break
+        return rv
--- a/third_party/python/compare-locales/compare_locales/compare.py
+++ b/third_party/python/compare-locales/compare_locales/compare.py
@@ -1,43 +1,43 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 'Mozilla l10n compare locales tool'
 
 import codecs
 import os
-import os.path
 import shutil
 import re
-from difflib import SequenceMatcher
 from collections import defaultdict
 
 try:
     from json import dumps
 except:
     from simplejson import dumps
 
 from compare_locales import parser
-from compare_locales import paths
+from compare_locales import paths, mozpath
 from compare_locales.checks import getChecker
 
 
 class Tree(object):
     def __init__(self, valuetype):
         self.branches = dict()
         self.valuetype = valuetype
         self.value = None
 
     def __getitem__(self, leaf):
         parts = []
         if isinstance(leaf, paths.File):
-            parts = [p for p in [leaf.locale, leaf.module] if p] + \
-                leaf.file.split('/')
+            parts = [] if not leaf.locale else [leaf.locale]
+            if leaf.module:
+                parts += leaf.module.split('/')
+            parts += leaf.file.split('/')
         else:
             parts = leaf.split('/')
         return self.__get(parts)
 
     def __get(self, parts):
         common = None
         old = None
         new = tuple(parts)
@@ -88,180 +88,156 @@ class Tree(object):
             for child in self.branches[key].getContent(depth + 1):
                 yield child
 
     def toJSON(self):
         '''
         Returns this Tree as a JSON-able tree of hashes.
         Only the values need to take care that they're JSON-able.
         '''
-        json = {}
-        keys = self.branches.keys()
-        keys.sort()
         if self.value is not None:
-            json['value'] = self.value
-        children = [('/'.join(key), self.branches[key].toJSON())
-                    for key in keys]
-        if children:
-            json['children'] = children
-        return json
+            return self.value
+        return dict(('/'.join(key), self.branches[key].toJSON())
+                    for key in self.branches.keys())
 
     def getStrRows(self):
         def tostr(t):
             if t[1] == 'key':
                 return self.indent * t[0] + '/'.join(t[2])
             return self.indent * (t[0] + 1) + str(t[2])
 
         return map(tostr, self.getContent())
 
     def __str__(self):
         return '\n'.join(self.getStrRows())
 
 
-class AddRemove(SequenceMatcher):
+class AddRemove(object):
     def __init__(self):
-        SequenceMatcher.__init__(self, None, None, None)
+        self.left = self.right = None
 
     def set_left(self, left):
         if not isinstance(left, list):
-            left = [l for l in left]
-        self.set_seq1(left)
+            left = list(l for l in left)
+        self.left = left
 
     def set_right(self, right):
         if not isinstance(right, list):
-            right = [l for l in right]
-        self.set_seq2(right)
+            right = list(l for l in right)
+        self.right = right
 
     def __iter__(self):
-        for tag, i1, i2, j1, j2 in self.get_opcodes():
-            if tag == 'equal':
-                for pair in zip(self.a[i1:i2], self.b[j1:j2]):
-                    yield ('equal', pair)
-            elif tag == 'delete':
-                for item in self.a[i1:i2]:
-                    yield ('delete', item)
-            elif tag == 'insert':
-                for item in self.b[j1:j2]:
-                    yield ('add', item)
+        # order_map stores index in left and then index in right
+        order_map = dict((item, (i, -1)) for i, item in enumerate(self.left))
+        left_items = set(order_map)
+        # as we go through the right side, keep track of which left
+        # item we had in right last, and for items not in left,
+        # set the sortmap to (left_offset, right_index)
+        left_offset = -1
+        right_items = set()
+        for i, item in enumerate(self.right):
+            right_items.add(item)
+            if item in order_map:
+                left_offset = order_map[item][0]
             else:
-                # tag == 'replace'
-                for item in self.a[i1:i2]:
-                    yield ('delete', item)
-                for item in self.b[j1:j2]:
-                    yield ('add', item)
-
-
-class DirectoryCompare(SequenceMatcher):
-    def __init__(self, reference):
-        SequenceMatcher.__init__(self, None, [i for i in reference],
-                                 [])
-        self.watcher = None
-
-    def setWatcher(self, watcher):
-        self.watcher = watcher
-
-    def compareWith(self, other):
-        if not self.watcher:
-            return
-        self.set_seq2([i for i in other])
-        for tag, i1, i2, j1, j2 in self.get_opcodes():
-            if tag == 'equal':
-                for i, j in zip(xrange(i1, i2), xrange(j1, j2)):
-                    self.watcher.compare(self.a[i], self.b[j])
-            elif tag == 'delete':
-                for i in xrange(i1, i2):
-                    self.watcher.add(self.a[i], other.cloneFile(self.a[i]))
-            elif tag == 'insert':
-                for j in xrange(j1, j2):
-                    self.watcher.remove(self.b[j])
+                order_map[item] = (left_offset, i)
+        for item in sorted(order_map, key=lambda item: order_map[item]):
+            if item in left_items and item in right_items:
+                yield ('equal', item)
+            elif item in left_items:
+                yield ('delete', item)
             else:
-                for j in xrange(j1, j2):
-                    self.watcher.remove(self.b[j])
-                for i in xrange(i1, i2):
-                    self.watcher.add(self.a[i], other.cloneFile(self.a[i]))
+                yield ('add', item)
 
 
 class Observer(object):
-    stat_cats = ['missing', 'obsolete', 'missingInFiles', 'report',
-                 'changed', 'unchanged', 'keys']
 
-    def __init__(self):
-        class intdict(defaultdict):
-            def __init__(self):
-                defaultdict.__init__(self, int)
-
-        self.summary = defaultdict(intdict)
-        self.details = Tree(dict)
-        self.filter = None
+    def __init__(self, filter=None, file_stats=False):
+        self.summary = defaultdict(lambda: defaultdict(int))
+        self.details = Tree(list)
+        self.filter = filter
+        self.file_stats = None
+        if file_stats:
+            self.file_stats = defaultdict(lambda: defaultdict(dict))
 
     # support pickling
     def __getstate__(self):
-        return dict(summary=self.getSummary(), details=self.details)
+        state = dict(summary=self._dictify(self.summary), details=self.details)
+        if self.file_stats is not None:
+            state['file_stats'] = self._dictify(self.file_stats)
+        return state
 
     def __setstate__(self, state):
-        class intdict(defaultdict):
-            def __init__(self):
-                defaultdict.__init__(self, int)
-
-        self.summary = defaultdict(intdict)
+        self.summary = defaultdict(lambda: defaultdict(int))
         if 'summary' in state:
             for loc, stats in state['summary'].iteritems():
                 self.summary[loc].update(stats)
+        self.file_stats = None
+        if 'file_stats' in state:
+            self.file_stats = defaultdict(lambda: defaultdict(dict))
+            for k, d in state['file_stats'].iteritems():
+                self.file_stats[k].update(d)
         self.details = state['details']
         self.filter = None
 
-    def getSummary(self):
+    def _dictify(self, d):
         plaindict = {}
-        for k, v in self.summary.iteritems():
+        for k, v in d.iteritems():
             plaindict[k] = dict(v)
         return plaindict
 
     def toJSON(self):
-        return dict(summary=self.getSummary(), details=self.details.toJSON())
+        # Don't export file stats, even if we collected them.
+        # Those are not part of the data we use toJSON for.
+        return {
+            'summary': self._dictify(self.summary),
+            'details': self.details.toJSON()
+        }
+
+    def updateStats(self, file, stats):
+        # in multi-project scenarios, this file might not be ours,
+        # check that.
+        # Pass in a dummy entity key '' to avoid getting in to
+        # generic file filters. If we have stats for those,
+        # we want to aggregate the counts
+        if (self.filter is not None and
+                self.filter(file, entity='') == 'ignore'):
+            return
+        for category, value in stats.iteritems():
+            self.summary[file.locale][category] += value
+        if self.file_stats is None:
+            return
+        if 'missingInFiles' in stats:
+            # keep track of how many strings are in a missing file
+            # we got the {'missingFile': 'error'} from the notify pass
+            self.details[file].append({'count': stats['missingInFiles']})
+            # missingInFiles should just be "missing" in file stats
+            self.file_stats[file.locale][file.localpath]['missing'] = \
+                stats['missingInFiles']
+            return  # there are no other stats for missing files
+        self.file_stats[file.locale][file.localpath].update(stats)
 
     def notify(self, category, file, data):
-        rv = "error"
-        if category in self.stat_cats:
-            # these get called post reporting just for stats
-            # return "error" to forward them to other other_observers
-            self.summary[file.locale][category] += data
-            # keep track of how many strings are in a missing file
-            # we got the {'missingFile': 'error'} from the first pass
-            if category == 'missingInFiles':
-                self.details[file]['strings'] = data
-            return "error"
+        rv = 'error'
         if category in ['missingFile', 'obsoleteFile']:
             if self.filter is not None:
                 rv = self.filter(file)
             if rv != "ignore":
-                self.details[file][category] = rv
+                self.details[file].append({category: rv})
             return rv
         if category in ['missingEntity', 'obsoleteEntity']:
             if self.filter is not None:
                 rv = self.filter(file, data)
             if rv == "ignore":
                 return rv
-            v = self.details[file]
-            try:
-                v[category].append(data)
-            except KeyError:
-                v[category] = [data]
+            self.details[file].append({category: data})
             return rv
-        if category == 'error':
-            try:
-                self.details[file][category].append(data)
-            except KeyError:
-                self.details[file][category] = [data]
-            self.summary[file.locale]['errors'] += 1
-        elif category == 'warning':
-            try:
-                self.details[file][category].append(data)
-            except KeyError:
-                self.details[file][category] = [data]
-            self.summary[file.locale]['warnings'] += 1
+        if category in ('error', 'warning'):
+            self.details[file].append({category: data})
+            self.summary[file.locale][category + 's'] += 1
         return rv
 
     def toExhibit(self):
         items = []
         for locale in sorted(self.summary.iterkeys()):
             summary = self.summary[locale]
             if locale is not None:
                 item = {'id': 'xxx/' + locale,
@@ -271,37 +247,44 @@ class Observer(object):
                 item = {'id': 'xxx',
                         'label': 'xxx',
                         'locale': 'xxx'}
             item['type'] = 'Build'
             total = sum([summary[k]
                          for k in ('changed', 'unchanged', 'report', 'missing',
                                    'missingInFiles')
                          if k in summary])
+            total_w = sum([summary[k]
+                           for k in ('changed_w', 'unchanged_w', 'missing_w')
+                           if k in summary])
             rate = (('changed' in summary and summary['changed'] * 100) or
                     0) / total
             item.update((k, summary.get(k, 0))
                         for k in ('changed', 'unchanged'))
             item.update((k, summary[k])
                         for k in ('report', 'errors', 'warnings')
                         if k in summary)
             item['missing'] = summary.get('missing', 0) + \
                 summary.get('missingInFiles', 0)
             item['completion'] = rate
             item['total'] = total
+            item.update((k, summary.get(k, 0))
+                        for k in ('changed_w', 'unchanged_w', 'missing_w'))
+            item['total_w'] = total_w
             result = 'success'
             if item.get('warnings', 0):
                 result = 'warning'
             if item.get('errors', 0) or item.get('missing', 0):
                 result = 'failure'
             item['result'] = result
             items.append(item)
         data = {
             "properties": dict.fromkeys(
                 ("completion", "errors", "warnings", "missing", "report",
+                 "missing_w", "changed_w", "unchanged_w",
                  "unchanged", "changed", "obsolete"),
                 {"valueType": "number"}),
             "types": {
                 "Build": {"pluralLabel": "Builds"}
             }}
         data['items'] = items
         return dumps(data, indent=2)
 
@@ -311,36 +294,29 @@ class Observer(object):
         if type == "json":
             return dumps(self.toJSON())
 
         def tostr(t):
             if t[1] == 'key':
                 return '  ' * t[0] + '/'.join(t[2])
             o = []
             indent = '  ' * (t[0] + 1)
-            if 'error' in t[2]:
-                o += [indent + 'ERROR: ' + e for e in t[2]['error']]
-            if 'warning' in t[2]:
-                o += [indent + 'WARNING: ' + e for e in t[2]['warning']]
-            if 'missingEntity' in t[2] or 'obsoleteEntity' in t[2]:
-                missingEntities = ('missingEntity' in t[2] and
-                                   t[2]['missingEntity']) or []
-                obsoleteEntities = ('obsoleteEntity' in t[2] and
-                                    t[2]['obsoleteEntity']) or []
-                entities = missingEntities + obsoleteEntities
-                entities.sort()
-                for entity in entities:
-                    op = '+'
-                    if entity in obsoleteEntities:
-                        op = '-'
-                    o.append(indent + op + entity)
-            elif 'missingFile' in t[2]:
-                o.append(indent + '// add and localize this file')
-            elif 'obsoleteFile' in t[2]:
-                o.append(indent + '// remove this file')
+            for item in t[2]:
+                if 'error' in item:
+                    o += [indent + 'ERROR: ' + item['error']]
+                elif 'warning' in item:
+                    o += [indent + 'WARNING: ' + item['warning']]
+                elif 'missingEntity' in item:
+                    o += [indent + '+' + item['missingEntity']]
+                elif 'obsoleteEntity' in item:
+                    o += [indent + '-' + item['obsoleteEntity']]
+                elif 'missingFile' in item:
+                    o.append(indent + '// add and localize this file')
+                elif 'obsoleteFile' in item:
+                    o.append(indent + '// remove this file')
             return '\n'.join(o)
 
         out = []
         for locale, summary in sorted(self.summary.iteritems()):
             if locale is not None:
                 out.append(locale + ':')
             out += [k + ': ' + str(v) for k, v in sorted(summary.iteritems())]
             total = sum([summary[k]
@@ -357,276 +333,310 @@ class Observer(object):
     def __str__(self):
         return 'observer'
 
 
 class ContentComparer:
     keyRE = re.compile('[kK]ey')
     nl = re.compile('\n', re.M)
 
-    def __init__(self):
+    def __init__(self, observers, stat_observers=None):
         '''Create a ContentComparer.
         observer is usually a instance of Observer. The return values
         of the notify method are used to control the handling of missing
         entities.
         '''
-        self.reference = dict()
-        self.observer = Observer()
-        self.other_observers = []
-        self.merge_stage = None
+        self.observers = observers
+        if stat_observers is None:
+            stat_observers = []
+        self.stat_observers = stat_observers
 
-    def add_observer(self, obs):
-        '''Add a non-filtering observer.
-        Results from the notify calls are ignored.
-        '''
-        self.other_observers.append(obs)
-
-    def set_merge_stage(self, merge_stage):
-        self.merge_stage = merge_stage
-
-    def merge(self, ref_entities, ref_map, ref_file, l10n_file, missing,
-              skips, ctx, canMerge, encoding):
-        outfile = os.path.join(self.merge_stage, l10n_file.module,
-                               l10n_file.file)
-        outdir = os.path.dirname(outfile)
+    def create_merge_dir(self, merge_file):
+        outdir = mozpath.dirname(merge_file)
         if not os.path.isdir(outdir):
             os.makedirs(outdir)
-        if not canMerge:
-            shutil.copyfile(ref_file.fullpath, outfile)
-            print "copied reference to " + outfile
+
+    def merge(self, ref_entities, ref_map, ref_file, l10n_file, merge_file,
+              missing, skips, ctx, capabilities, encoding):
+
+        if capabilities == parser.CAN_NONE:
             return
+
+        if capabilities & parser.CAN_COPY and (skips or missing):
+            self.create_merge_dir(merge_file)
+            shutil.copyfile(ref_file.fullpath, merge_file)
+            print "copied reference to " + merge_file
+            return
+
+        if not (capabilities & parser.CAN_SKIP):
+            return
+
+        # Start with None in case the merge file doesn't need to be created.
+        f = None
+
         if skips:
             # skips come in ordered by key name, we need them in file order
             skips.sort(key=lambda s: s.span[0])
-        trailing = (['\n'] +
-                    [ref_entities[ref_map[key]].all for key in missing] +
-                    [ref_entities[ref_map[skip.key]].all for skip in skips
-                     if not isinstance(skip, parser.Junk)])
-        if skips:
-            # we need to skip a few errornous blocks in the input, copy by hand
-            f = codecs.open(outfile, 'wb', encoding)
+
+            # we need to skip a few erroneous blocks in the input, copy by hand
+            self.create_merge_dir(merge_file)
+            f = codecs.open(merge_file, 'wb', encoding)
             offset = 0
             for skip in skips:
                 chunk = skip.span
                 f.write(ctx.contents[offset:chunk[0]])
                 offset = chunk[1]
             f.write(ctx.contents[offset:])
-        else:
-            shutil.copyfile(l10n_file.fullpath, outfile)
-            f = codecs.open(outfile, 'ab', encoding)
-        print "adding to " + outfile
+
+        if not (capabilities & parser.CAN_MERGE):
+            return
+
+        if skips or missing:
+            if f is None:
+                self.create_merge_dir(merge_file)
+                shutil.copyfile(l10n_file.fullpath, merge_file)
+                f = codecs.open(merge_file, 'ab', encoding)
 
-        def ensureNewline(s):
-            if not s.endswith('\n'):
-                return s + '\n'
-            return s
+            trailing = (['\n'] +
+                        [ref_entities[ref_map[key]].all for key in missing] +
+                        [ref_entities[ref_map[skip.key]].all for skip in skips
+                         if not isinstance(skip, parser.Junk)])
 
-        f.write(''.join(map(ensureNewline, trailing)))
-        f.close()
+            def ensureNewline(s):
+                if not s.endswith('\n'):
+                    return s + '\n'
+                return s
+
+            print "adding to " + merge_file
+            f.write(''.join(map(ensureNewline, trailing)))
+
+        if f is not None:
+            f.close()
 
     def notify(self, category, file, data):
         """Check observer for the found data, and if it's
-        not to ignore, notify other_observers.
+        not to ignore, notify stat_observers.
         """
-        rv = self.observer.notify(category, file, data)
-        if rv == 'ignore':
-            return rv
-        for obs in self.other_observers:
-            # non-filtering other_observers, ignore results
+        rvs = set(
+            observer.notify(category, file, data)
+            for observer in self.observers
+            )
+        if all(rv == 'ignore' for rv in rvs):
+            return 'ignore'
+        rvs.discard('ignore')
+        for obs in self.stat_observers:
+            # non-filtering stat_observers, ignore results
             obs.notify(category, file, data)
-        return rv
+        if 'error' in rvs:
+            return 'error'
+        assert len(rvs) == 1
+        return rvs.pop()
+
+    def updateStats(self, file, stats):
+        """Forward the gathered statistics for the given file
+        to all observers and stat_observers.
+        """
+        for observer in self.observers + self.stat_observers:
+            observer.updateStats(file, stats)
 
     def remove(self, obsolete):
         self.notify('obsoleteFile', obsolete, None)
         pass
 
-    def compare(self, ref_file, l10n):
+    def compare(self, ref_file, l10n, merge_file, extra_tests=None):
         try:
             p = parser.getParser(ref_file.file)
         except UserWarning:
             # no comparison, XXX report?
             return
-        if ref_file not in self.reference:
-            # we didn't parse this before
-            try:
-                p.readContents(ref_file.getContents())
-            except Exception, e:
-                self.notify('error', ref_file, str(e))
-                return
-            self.reference[ref_file] = p.parse()
-        ref = self.reference[ref_file]
-        ref_list = ref[1].keys()
-        ref_list.sort()
+        try:
+            p.readContents(ref_file.getContents())
+        except Exception, e:
+            self.notify('error', ref_file, str(e))
+            return
+        ref_entities, ref_map = p.parse()
         try:
             p.readContents(l10n.getContents())
             l10n_entities, l10n_map = p.parse()
             l10n_ctx = p.ctx
         except Exception, e:
             self.notify('error', l10n, str(e))
             return
 
-        l10n_list = l10n_map.keys()
-        l10n_list.sort()
         ar = AddRemove()
-        ar.set_left(ref_list)
-        ar.set_right(l10n_list)
+        ar.set_left(e.key for e in ref_entities)
+        ar.set_right(e.key for e in l10n_entities)
         report = missing = obsolete = changed = unchanged = keys = 0
+        missing_w = changed_w = unchanged_w = 0  # word stats
         missings = []
         skips = []
-        checker = getChecker(l10n, reference=ref[0])
-        for action, item_or_pair in ar:
+        checker = getChecker(l10n, extra_tests=extra_tests)
+        if checker and checker.needs_reference:
+            checker.set_reference(ref_entities)
+        for msg in p.findDuplicates(ref_entities):
+            self.notify('warning', l10n, msg)
+        for msg in p.findDuplicates(l10n_entities):
+            self.notify('error', l10n, msg)
+        for action, entity_id in ar:
             if action == 'delete':
                 # missing entity
-                _rv = self.notify('missingEntity', l10n, item_or_pair)
+                if isinstance(ref_entities[ref_map[entity_id]], parser.Junk):
+                    self.notify('warning', l10n, 'Parser error in en-US')
+                    continue
+                _rv = self.notify('missingEntity', l10n, entity_id)
                 if _rv == "ignore":
                     continue
                 if _rv == "error":
                     # only add to missing entities for l10n-merge on error,
                     # not report
-                    missings.append(item_or_pair)
+                    missings.append(entity_id)
                     missing += 1
+                    refent = ref_entities[ref_map[entity_id]]
+                    missing_w += refent.count_words()
                 else:
                     # just report
                     report += 1
             elif action == 'add':
                 # obsolete entity or junk
-                if isinstance(l10n_entities[l10n_map[item_or_pair]],
+                if isinstance(l10n_entities[l10n_map[entity_id]],
                               parser.Junk):
-                    junk = l10n_entities[l10n_map[item_or_pair]]
+                    junk = l10n_entities[l10n_map[entity_id]]
                     params = (junk.val,) + junk.position() + junk.position(-1)
                     self.notify('error', l10n,
-                                'Unparsed content "%s" from line %d colum %d'
+                                'Unparsed content "%s" from line %d column %d'
                                 ' to line %d column %d' % params)
-                    if self.merge_stage is not None:
+                    if merge_file is not None:
                         skips.append(junk)
                 elif self.notify('obsoleteEntity', l10n,
-                                 item_or_pair) != 'ignore':
+                                 entity_id) != 'ignore':
                     obsolete += 1
             else:
                 # entity found in both ref and l10n, check for changed
-                entity = item_or_pair[0]
-                refent = ref[0][ref[1][entity]]
-                l10nent = l10n_entities[l10n_map[entity]]
-                if self.keyRE.search(entity):
+                refent = ref_entities[ref_map[entity_id]]
+                l10nent = l10n_entities[l10n_map[entity_id]]
+                if self.keyRE.search(entity_id):
                     keys += 1
                 else:
-                    if refent.val == l10nent.val:
+                    if refent.equals(l10nent):
                         self.doUnchanged(l10nent)
                         unchanged += 1
+                        unchanged_w += refent.count_words()
                     else:
                         self.doChanged(ref_file, refent, l10nent)
                         changed += 1
+                        changed_w += refent.count_words()
                         # run checks:
                 if checker:
                     for tp, pos, msg, cat in checker.check(refent, l10nent):
-                        # compute real src position, if first line,
-                        # col needs adjustment
-                        if isinstance(pos, tuple):
-                            _l, col = l10nent.value_position()
-                            # line, column
-                            if pos[0] == 1:
-                                col = col + pos[1]
-                            else:
-                                col = pos[1]
-                                _l += pos[0] - 1
-                        else:
-                            _l, col = l10nent.value_position(pos)
+                        line, col = l10nent.value_position(pos)
                         # skip error entities when merging
-                        if tp == 'error' and self.merge_stage is not None:
+                        if tp == 'error' and merge_file is not None:
                             skips.append(l10nent)
                         self.notify(tp, l10n,
                                     u"%s at line %d, column %d for %s" %
-                                    (msg, _l, col, refent.key))
+                                    (msg, line, col, refent.key))
                 pass
-        if missing:
-            self.notify('missing', l10n, missing)
-        if self.merge_stage is not None and (missings or skips):
+
+        if merge_file is not None:
             self.merge(
-                ref[0], ref[1], ref_file,
-                l10n, missings, skips, l10n_ctx,
-                p.canMerge, p.encoding)
-        if report:
-            self.notify('report', l10n, report)
-        if obsolete:
-            self.notify('obsolete', l10n, obsolete)
-        if changed:
-            self.notify('changed', l10n, changed)
-        if unchanged:
-            self.notify('unchanged', l10n, unchanged)
-        if keys:
-            self.notify('keys', l10n, keys)
+                ref_entities, ref_map, ref_file,
+                l10n, merge_file, missings, skips, l10n_ctx,
+                p.capabilities, p.encoding)
+
+        stats = {}
+        for cat, value in (
+                ('missing', missing),
+                ('missing_w', missing_w),
+                ('report', report),
+                ('obsolete', obsolete),
+                ('changed', changed),
+                ('changed_w', changed_w),
+                ('unchanged', unchanged),
+                ('unchanged_w', unchanged_w),
+                ('keys', keys)):
+            if value:
+                stats[cat] = value
+        self.updateStats(l10n, stats)
         pass
 
     def add(self, orig, missing):
         if self.notify('missingFile', missing, None) == "ignore":
             # filter said that we don't need this file, don't count it
             return
         f = orig
         try:
             p = parser.getParser(f.file)
         except UserWarning:
             return
         try:
             p.readContents(f.getContents())
             entities, map = p.parse()
-        except Exception, e:
-            self.notify('error', f, str(e))
+        except Exception, ex:
+            self.notify('error', f, str(ex))
             return
-        self.notify('missingInFiles', missing, len(map))
+        # strip parse errors
+        entities = [e for e in entities if not isinstance(e, parser.Junk)]
+        self.updateStats(missing, {'missingInFiles': len(entities)})
+        missing_w = 0
+        for e in entities:
+            missing_w += e.count_words()
+        self.updateStats(missing, {'missing_w': missing_w})
 
     def doUnchanged(self, entity):
         # overload this if needed
         pass
 
     def doChanged(self, file, ref_entity, l10n_entity):
         # overload this if needed
         pass
 
 
-def compareApp(app, other_observer=None, merge_stage=None, clobber=False):
-    '''Compare locales set in app.
-
-    Optional arguments are:
-    - other_observer. A object implementing
-        notify(category, _file, data)
-      The return values of that callback are ignored.
-    - merge_stage. A directory to be used for staging the output of
-      l10n-merge.
-    - clobber. Clobber the module subdirectories of the merge dir as we go.
-      Use wisely, as it might cause data loss.
-    '''
-    comparer = ContentComparer()
-    if other_observer is not None:
-        comparer.add_observer(other_observer)
-    comparer.observer.filter = app.filter
-    for module, reference, locales in app:
-        dir_comp = DirectoryCompare(reference)
-        dir_comp.setWatcher(comparer)
-        for _, localization in locales:
-            if merge_stage is not None:
-                locale_merge = merge_stage.format(ab_CD=localization.locale)
-                comparer.set_merge_stage(locale_merge)
-                if clobber:
-                    # if clobber, remove the stage for the module if it exists
-                    clobberdir = os.path.join(locale_merge, module)
+def compareProjects(project_configs, stat_observer=None,
+                    file_stats=False,
+                    merge_stage=None, clobber_merge=False):
+    locales = set()
+    observers = []
+    for project in project_configs:
+        observers.append(
+            Observer(filter=project.filter, file_stats=file_stats))
+        locales.update(project.locales)
+    if stat_observer is not None:
+        stat_observers = [stat_observer]
+    else:
+        stat_observers = None
+    comparer = ContentComparer(observers, stat_observers=stat_observers)
+    for locale in sorted(locales):
+        files = paths.ProjectFiles(locale, project_configs,
+                                   mergebase=merge_stage)
+        root = mozpath.commonprefix([m['l10n'].prefix for m in files.matchers])
+        if merge_stage is not None:
+            if clobber_merge:
+                mergematchers = set(_m.get('merge') for _m in files.matchers)
+                mergematchers.discard(None)
+                for matcher in mergematchers:
+                    clobberdir = matcher.prefix
                     if os.path.exists(clobberdir):
                         shutil.rmtree(clobberdir)
                         print "clobbered " + clobberdir
-            dir_comp.compareWith(localization)
-    return comparer.observer
-
-
-def compareDirs(reference, locale, other_observer=None, merge_stage=None):
-    '''Compare reference and locale dir.
-
-    Optional arguments are:
-    - other_observer. A object implementing
-        notify(category, _file, data)
-      The return values of that callback are ignored.
-    '''
-    comparer = ContentComparer()
-    if other_observer is not None:
-        comparer.add_observer(other_observer)
-    comparer.set_merge_stage(merge_stage)
-    dir_comp = DirectoryCompare(paths.EnumerateDir(reference))
-    dir_comp.setWatcher(comparer)
-    dir_comp.compareWith(paths.EnumerateDir(locale))
-    return comparer.observer
+        for l10npath, refpath, mergepath, extra_tests in files:
+            # module and file path are needed for legacy filter.py support
+            module = None
+            fpath = mozpath.relpath(l10npath, root)
+            for _m in files.matchers:
+                if _m['l10n'].match(l10npath):
+                    if _m['module']:
+                        # legacy ini support, set module, and resolve
+                        # local path against the matcher prefix,
+                        # which includes the module
+                        module = _m['module']
+                        fpath = mozpath.relpath(l10npath, _m['l10n'].prefix)
+                    break
+            reffile = paths.File(refpath, fpath or refpath, module=module)
+            l10n = paths.File(l10npath, fpath or l10npath,
+                              module=module, locale=locale)
+            if not os.path.exists(l10npath):
+                comparer.add(reffile, l10n)
+                continue
+            if not os.path.exists(refpath):
+                comparer.remove(l10n)
+                continue
+            comparer.compare(reffile, l10n, mergepath, extra_tests)
+    return observers
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/mozpath.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import posixpath
+import os
+import re
+
+'''
+Like os.path, with a reduced set of functions, and with normalized path
+separators (always use forward slashes).
+Also contains a few additional utilities not found in os.path.
+'''
+
+
+def normsep(path):
+    '''
+    Normalize path separators, by using forward slashes instead of whatever
+    os.sep is.
+    '''
+    if os.sep != '/':
+        path = path.replace(os.sep, '/')
+    if os.altsep and os.altsep != '/':
+        path = path.replace(os.altsep, '/')
+    return path
+
+
+def relpath(path, start):
+    rel = normsep(os.path.relpath(path, start))
+    return '' if rel == '.' else rel
+
+
+def realpath(path):
+    return normsep(os.path.realpath(path))
+
+
+def abspath(path):
+    return normsep(os.path.abspath(path))
+
+
+def join(*paths):
+    return normsep(os.path.join(*paths))
+
+
+def normpath(path):
+    return posixpath.normpath(normsep(path))
+
+
+def dirname(path):
+    return posixpath.dirname(normsep(path))
+
+
+def commonprefix(paths):
+    return posixpath.commonprefix([normsep(path) for path in paths])
+
+
+def basename(path):
+    return os.path.basename(path)
+
+
+def splitext(path):
+    return posixpath.splitext(normsep(path))
+
+
+def split(path):
+    '''
+    Return the normalized path as a list of its components.
+        split('foo/bar/baz') returns ['foo', 'bar', 'baz']
+    '''
+    return normsep(path).split('/')
+
+
+def basedir(path, bases):
+    '''
+    Given a list of directories (bases), return which one contains the given
+    path. If several matches are found, the deepest base directory is returned.
+        basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar']) returns 'foo/bar'
+        ('foo' and 'foo/bar' both match, but 'foo/bar' is the deepest match)
+    '''
+    path = normsep(path)
+    bases = [normsep(b) for b in bases]
+    if path in bases:
+        return path
+    for b in sorted(bases, reverse=True):
+        if b == '' or path.startswith(b + '/'):
+            return b
+
+
+re_cache = {}
+
+
+def match(path, pattern):
+    '''
+    Return whether the given path matches the given pattern.
+    An asterisk can be used to match any string, including the null string, in
+    one part of the path:
+        'foo' matches '*', 'f*' or 'fo*o'
+    However, an asterisk matching a subdirectory may not match the null string:
+        'foo/bar' does *not* match 'foo/*/bar'
+    If the pattern matches one of the ancestor directories of the path, the
+    path is considered matching:
+        'foo/bar' matches 'foo'
+    Two adjacent asterisks can be used to match files and zero or more
+    directories and subdirectories.
+        'foo/bar' matches 'foo/**/bar', or '**/bar'
+    '''
+    if not pattern:
+        return True
+    if pattern not in re_cache:
+        p = re.escape(pattern)
+        p = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\1(?:.+/)?', p)
+        p = re.sub(r'(^|\\\/)\\\*\\\*$', r'(?:\1.+)?', p)
+        p = p.replace(r'\*', '[^/]*') + '(?:/.*)?$'
+        re_cache[pattern] = re.compile(p)
+    return re_cache[pattern].match(path) is not None
+
+
+def rebase(oldbase, base, relativepath):
+    '''
+    Return relativepath relative to base instead of oldbase.
+    '''
+    if base == oldbase:
+        return relativepath
+    if len(base) < len(oldbase):
+        assert basedir(oldbase, [base]) == base
+        relbase = relpath(oldbase, base)
+        result = join(relbase, relativepath)
+    else:
+        assert basedir(base, [oldbase]) == oldbase
+        relbase = relpath(base, oldbase)
+        result = relpath(relativepath, relbase)
+    result = normpath(result)
+    if relativepath.endswith('/') and not result.endswith('/'):
+        result += '/'
+    return result
--- a/third_party/python/compare-locales/compare_locales/parser.py
+++ b/third_party/python/compare-locales/compare_locales/parser.py
@@ -1,46 +1,62 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import re
 import bisect
 import codecs
+from collections import Counter
 import logging
 
+from fluent.syntax import FluentParser as FTLParser
+from fluent.syntax import ast as ftl
+
 __constructors = []
 
 
+# The allowed capabilities for the Parsers.  They define the exact strategy
+# used by ContentComparer.merge.
+
+# Don't perform any merging
+CAN_NONE = 0
+# Copy the entire reference file
+CAN_COPY = 1
+# Remove broken entities from localization
+CAN_SKIP = 2
+# Add missing and broken entities from the reference to localization
+CAN_MERGE = 4
+
+
 class EntityBase(object):
     '''
     Abstraction layer for a localizable entity.
     Currently supported are grammars of the form:
 
     1: pre white space
     2: entity definition
     3: entity key (name)
     4: entity value
     5: post white space
                                                  <--[1]
     <!ENTITY key "value">
 
     <-------[2]--------->
     '''
-    def __init__(self, ctx, pp, pre_comment,
+    def __init__(self, ctx, pre_comment,
                  span, pre_ws_span, def_span,
                  key_span, val_span, post_span):
         self.ctx = ctx
         self.span = span
         self.pre_ws_span = pre_ws_span
         self.def_span = def_span
         self.key_span = key_span
         self.val_span = val_span
         self.post_span = post_span
-        self.pp = pp
         self.pre_comment = pre_comment
         pass
 
     def position(self, offset=0):
         """Get the 1-based line and column of the character
         with given offset into the Entity.
 
         If offset is negative, return the end of the Entity.
@@ -72,52 +88,62 @@ class EntityBase(object):
         return self.ctx.contents[self.pre_ws_span[0]:self.pre_ws_span[1]]
 
     def get_def(self):
         return self.ctx.contents[self.def_span[0]:self.def_span[1]]
 
     def get_key(self):
         return self.ctx.contents[self.key_span[0]:self.key_span[1]]
 
-    def get_val(self):
-        return self.pp(self.ctx.contents[self.val_span[0]:self.val_span[1]])
-
     def get_raw_val(self):
         return self.ctx.contents[self.val_span[0]:self.val_span[1]]
 
     def get_post(self):
         return self.ctx.contents[self.post_span[0]:self.post_span[1]]
 
     # getters
 
     all = property(get_all)
     pre_ws = property(get_pre_ws)
     definition = property(get_def)
     key = property(get_key)
-    val = property(get_val)
+    val = property(get_raw_val)
     raw_val = property(get_raw_val)
     post = property(get_post)
 
     def __repr__(self):
         return self.key
 
+    re_br = re.compile('<br\s*/?>', re.U)
+    re_sgml = re.compile('</?\w+.*?>', re.U | re.M)
+
+    def count_words(self):
+        """Count the words in an English string.
+        Replace a couple of XML markup constructs to make that safer, too.
+        """
+        value = self.re_br.sub(u'\n', self.val)
+        value = self.re_sgml.sub(u'', value)
+        return len(value.split())
+
+    def equals(self, other):
+        return self.key == other.key and self.val == other.val
+
 
 class Entity(EntityBase):
     pass
 
 
 class Comment(EntityBase):
     def __init__(self, ctx, span, pre_ws_span, def_span,
                  post_span):
         self.ctx = ctx
         self.span = span
         self.pre_ws_span = pre_ws_span
         self.def_span = def_span
         self.post_span = post_span
-        self.pp = lambda v: v
 
     @property
     def key(self):
         return None
 
     @property
     def val(self):
         return None
@@ -169,24 +195,23 @@ class Whitespace(EntityBase):
     '''Entity-like object representing an empty file with whitespace,
     if allowed
     '''
     def __init__(self, ctx, span):
         self.ctx = ctx
         self.key_span = self.val_span = self.span = span
         self.def_span = self.pre_ws_span = (span[0], span[0])
         self.post_span = (span[1], span[1])
-        self.pp = lambda v: v
 
     def __repr__(self):
         return self.raw_val
 
 
-class Parser:
-    canMerge = True
+class Parser(object):
+    capabilities = CAN_SKIP | CAN_MERGE
     tail = re.compile('\s+\Z')
 
     class Context(object):
         "Fixture for content and line numbers"
         def __init__(self, contents):
             self.contents = contents
             self._lines = None
 
@@ -228,33 +253,30 @@ class Parser:
     def parse(self):
         l = []
         m = {}
         for e in self:
             m[e.key] = len(l)
             l.append(e)
         return (l, m)
 
-    def postProcessValue(self, val):
-        return val
-
     def __iter__(self):
         return self.walk(onlyEntities=True)
 
     def walk(self, onlyEntities=False):
         if not self.ctx:
             # loading file failed, or we just didn't load anything
             return
         ctx = self.ctx
         contents = ctx.contents
         offset = 0
         entity, offset = self.getEntity(ctx, offset)
         while entity:
             if (not onlyEntities or
-                    type(entity) is Entity or
+                    isinstance(entity, Entity) or
                     type(entity) is Junk):
                 yield entity
             entity, offset = self.getEntity(ctx, offset)
         if len(contents) > offset:
             yield Junk(ctx, (offset, len(contents)))
 
     def getEntity(self, ctx, offset):
         m = self.reKey.match(ctx.contents, offset)
@@ -279,21 +301,28 @@ class Parser:
             if self.tail.match(ctx.contents, offset):
                 white_end = len(ctx.contents)
                 return (Whitespace(ctx, (offset, white_end)), white_end)
             else:
                 return (None, offset)
         return (Junk(ctx, (offset, junkend)), junkend)
 
     def createEntity(self, ctx, m):
-        pre_comment = unicode(self.last_comment) if self.last_comment else ''
-        self.last_comment = ''
-        return Entity(ctx, self.postProcessValue, pre_comment,
+        pre_comment = self.last_comment
+        self.last_comment = None
+        return Entity(ctx, pre_comment,
                       *[m.span(i) for i in xrange(6)])
 
+    @classmethod
+    def findDuplicates(cls, entities):
+        found = Counter(entity.key for entity in entities)
+        for entity_id, cnt in found.items():
+            if cnt > 1:
+                yield '{} occurs {} times'.format(entity_id, cnt)
+
 
 def getParser(path):
     for item in __constructors:
         if re.search(item[0], path):
             return item[1]
     raise UserWarning("Cannot find Parser")
 
 
@@ -306,16 +335,32 @@ def getParser(path):
 # 6: post comment (and white space) in the same line (dtd only)
 #                                            <--[1]
 # <!-- pre comments -->                      <--[2]
 # <!ENTITY key "value"> <!-- comment -->
 #
 # <-------[3]---------><------[6]------>
 
 
+class DTDEntity(Entity):
+    def value_position(self, offset=0):
+        # DTDChecker already returns tuples of (line, col) positions
+        if isinstance(offset, tuple):
+            line_pos, col_pos = offset
+            line, col = super(DTDEntity, self).value_position()
+            if line_pos == 1:
+                col = col + col_pos
+            else:
+                col = col_pos
+                line += line_pos - 1
+            return line, col
+        else:
+            return super(DTDEntity, self).value_position(offset)
+
+
 class DTDParser(Parser):
     # http://www.w3.org/TR/2006/REC-xml11-20060816/#NT-NameStartChar
     # ":" | [A-Z] | "_" | [a-z] |
     # [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF]
     # | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] |
     # [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] |
     # [#x10000-#xEFFFF]
     CharMinusDash = u'\x09\x0A\x0D\u0020-\u002C\u002E-\uD7FF\uE000-\uFFFD'
@@ -352,38 +397,51 @@ class DTDParser(Parser):
         '''
         if offset is 0 and self.reHeader.match(ctx.contents):
             offset += 1
         entity, inneroffset = Parser.getEntity(self, ctx, offset)
         if (entity and isinstance(entity, Junk)) or entity is None:
             m = self.rePE.match(ctx.contents, offset)
             if m:
                 inneroffset = m.end()
-                self.last_comment = ''
-                entity = Entity(ctx, self.postProcessValue, '',
-                                *[m.span(i) for i in xrange(6)])
+                self.last_comment = None
+                entity = DTDEntity(ctx, '', *[m.span(i) for i in xrange(6)])
         return (entity, inneroffset)
 
     def createEntity(self, ctx, m):
         valspan = m.span('val')
         valspan = (valspan[0]+1, valspan[1]-1)
-        pre_comment = unicode(self.last_comment) if self.last_comment else ''
-        self.last_comment = ''
-        return Entity(ctx, self.postProcessValue, pre_comment,
-                      m.span(),
-                      m.span('pre'),
-                      m.span('entity'), m.span('key'), valspan,
-                      m.span('post'))
+        pre_comment = self.last_comment
+        self.last_comment = None
+        return DTDEntity(ctx, pre_comment,
+                         m.span(),
+                         m.span('pre'),
+                         m.span('entity'), m.span('key'), valspan,
+                         m.span('post'))
 
 
-class PropertiesParser(Parser):
+class PropertiesEntity(Entity):
     escape = re.compile(r'\\((?P<uni>u[0-9a-fA-F]{1,4})|'
                         '(?P<nl>\n\s*)|(?P<single>.))', re.M)
     known_escapes = {'n': '\n', 'r': '\r', 't': '\t', '\\': '\\'}
 
+    @property
+    def val(self):
+        def unescape(m):
+            found = m.groupdict()
+            if found['uni']:
+                return unichr(int(found['uni'][1:], 16))
+            if found['nl']:
+                return ''
+            return self.known_escapes.get(found['single'], found['single'])
+
+        return self.escape.sub(unescape, self.raw_val)
+
+
+class PropertiesParser(Parser):
     def __init__(self):
         self.reKey = re.compile('^(\s*)'
                                 '([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', re.M)
         self.reComment = re.compile('(\s*)(((?:[#!][^\n]*\n?)+))', re.M)
         self._escapedEnd = re.compile(r'\\+$')
         self._trailingWS = re.compile(r'\s*(?:\n|\Z)', re.M)
         Parser.__init__(self)
 
@@ -419,61 +477,48 @@ class PropertiesParser(Parser):
                 if len(_e.group()) % 2 == 0:
                     break
                 startline = offset
             # strip trailing whitespace
             ws = self._trailingWS.search(contents, startline)
             if ws:
                 endval = ws.start()
                 offset = ws.end()
-            pre_comment = (unicode(self.last_comment) if self.last_comment
-                           else '')
-            self.last_comment = ''
-            entity = Entity(ctx, self.postProcessValue, pre_comment,
-                            (m.start(), offset),   # full span
-                            m.span(1),  # leading whitespan
-                            (m.start(2), offset),   # entity def span
-                            m.span(2),   # key span
-                            (m.end(), endval),   # value span
-                            (offset, offset))  # post comment span, empty
+            pre_comment = self.last_comment
+            self.last_comment = None
+            entity = PropertiesEntity(
+                ctx, pre_comment,
+                (m.start(), offset),   # full span
+                m.span(1),  # leading whitespan
+                (m.start(2), offset),   # entity def span
+                m.span(2),   # key span
+                (m.end(), endval),   # value span
+                (offset, offset))  # post comment span, empty
             return (entity, offset)
         return self.getTrailing(ctx, offset, self.reKey, self.reComment)
 
-    def postProcessValue(self, val):
-
-        def unescape(m):
-            found = m.groupdict()
-            if found['uni']:
-                return unichr(int(found['uni'][1:], 16))
-            if found['nl']:
-                return ''
-            return self.known_escapes.get(found['single'], found['single'])
-        val = self.escape.sub(unescape, val)
-        return val
-
 
 class DefinesInstruction(EntityBase):
     '''Entity-like object representing processing instructions in inc files
     '''
     def __init__(self, ctx, span, pre_ws_span, def_span, val_span, post_span):
         self.ctx = ctx
         self.span = span
         self.pre_ws_span = pre_ws_span
         self.def_span = def_span
         self.key_span = self.val_span = val_span
         self.post_span = post_span
-        self.pp = lambda v: v
 
     def __repr__(self):
         return self.raw_val
 
 
 class DefinesParser(Parser):
     # can't merge, #unfilter needs to be the last item, which we don't support
-    canMerge = False
+    capabilities = CAN_COPY
     tail = re.compile(r'(?!)')  # never match
 
     def __init__(self):
         self.reComment = re.compile(
             '((?:[ \t]*\n)*)'
             '((?:^# .*?(?:\n|\Z))+)'
             '((?:[ \t]*(?:\n|\Z))*)', re.M)
         self.reKey = re.compile('((?:[ \t]*\n)*)'
@@ -511,17 +556,16 @@ class IniSection(EntityBase):
     '''
     def __init__(self, ctx, span, pre_ws_span, def_span, val_span, post_span):
         self.ctx = ctx
         self.span = span
         self.pre_ws_span = pre_ws_span
         self.def_span = def_span
         self.key_span = self.val_span = val_span
         self.post_span = post_span
-        self.pp = lambda v: v
 
     def __repr__(self):
         return self.raw_val
 
 
 class IniParser(Parser):
     '''
     Parse files of the form:
@@ -561,12 +605,113 @@ class IniParser(Parser):
         m = self.reKey.match(contents, offset)
         if m:
             offset = m.end()
             return (self.createEntity(ctx, m), offset)
         return self.getTrailing(ctx, offset,
                                 self.reComment, self.reSection, self.reKey)
 
 
+class FluentAttribute(EntityBase):
+    ignored_fields = ['span']
+
+    def __init__(self, entity, attr_node):
+        self.ctx = entity.ctx
+        self.attr = attr_node
+        self.key_span = (attr_node.id.span.start, attr_node.id.span.end)
+        self.val_span = (attr_node.value.span.start, attr_node.value.span.end)
+
+    def equals(self, other):
+        if not isinstance(other, FluentAttribute):
+            return False
+        return self.attr.equals(
+            other.attr, ignored_fields=self.ignored_fields)
+
+
+class FluentEntity(Entity):
+    # Fields ignored when comparing two entities.
+    ignored_fields = ['comment', 'span', 'tags']
+
+    def __init__(self, ctx, entry):
+        start = entry.span.start
+        end = entry.span.end
+
+        self.ctx = ctx
+        self.span = (start, end)
+
+        self.key_span = (entry.id.span.start, entry.id.span.end)
+
+        if entry.value is not None:
+            self.val_span = (entry.value.span.start, entry.value.span.end)
+        else:
+            self.val_span = (0, 0)
+
+        self.entry = entry
+
+    _word_count = None
+
+    def count_words(self):
+        if self._word_count is None:
+            self._word_count = 0
+
+            def count_words(node):
+                if isinstance(node, ftl.TextElement):
+                    self._word_count += len(node.value.split())
+                return node
+
+            self.entry.traverse(count_words)
+
+        return self._word_count
+
+    def equals(self, other):
+        return self.entry.equals(
+            other.entry, ignored_fields=self.ignored_fields)
+
+    # Positions yielded by FluentChecker.check are absolute offsets from the
+    # beginning of the file.  This is different from the base Checker behavior
+    # which yields offsets from the beginning of the current entity's value.
+    def position(self, pos=None):
+        if pos is None:
+            pos = self.entry.span.start
+        return self.ctx.lines(pos)[0]
+
+    # FluentEntities don't differentiate between entity and value positions
+    # because all positions are absolute from the beginning of the file.
+    def value_position(self, pos=None):
+        return self.position(pos)
+
+    @property
+    def attributes(self):
+        for attr_node in self.entry.attributes:
+            yield FluentAttribute(self, attr_node)
+
+
+class FluentParser(Parser):
+    capabilities = CAN_SKIP
+
+    def __init__(self):
+        super(FluentParser, self).__init__()
+        self.ftl_parser = FTLParser()
+
+    def walk(self, onlyEntities=False):
+        if not self.ctx:
+            # loading file failed, or we just didn't load anything
+            return
+        resource = self.ftl_parser.parse(self.ctx.contents)
+        for entry in resource.body:
+            if isinstance(entry, ftl.Message):
+                yield FluentEntity(self.ctx, entry)
+            elif isinstance(entry, ftl.Junk):
+                start = entry.span.start
+                end = entry.span.end
+                # strip leading whitespace
+                start += re.match('\s*', entry.content).end()
+                # strip trailing whitespace
+                ws, we = re.search('\s*$', entry.content).span()
+                end -= we - ws
+                yield Junk(self.ctx, (start, end))
+
+
 __constructors = [('\\.dtd$', DTDParser()),
                   ('\\.properties$', PropertiesParser()),
                   ('\\.ini$', IniParser()),
-                  ('\\.inc$', DefinesParser())]
+                  ('\\.inc$', DefinesParser()),
+                  ('\\.ftl$', FluentParser())]
--- a/third_party/python/compare-locales/compare_locales/paths.py
+++ b/third_party/python/compare-locales/compare_locales/paths.py
@@ -1,69 +1,566 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-import os.path
 import os
+import re
 from ConfigParser import ConfigParser, NoSectionError, NoOptionError
-from urlparse import urlparse, urljoin
-from urllib import pathname2url, url2pathname
-from urllib2 import urlopen
 from collections import defaultdict
-from compare_locales import util
+import errno
+import itertools
+import logging
+from compare_locales import util, mozpath
+import pytoml as toml
+
+
+class Matcher(object):
+    '''Path pattern matcher
+    Supports path matching similar to mozpath.match(), but does
+    not match trailing file paths without trailing wildcards.
+    Also gets a prefix, which is the path before the first wildcard,
+    which is good for filesystem iterations, and allows replacing
+    its own matches in a path with those of a different Matcher. compare-locales
+    uses that to transform l10n and en-US paths back and forth.
+    '''
+
+    def __init__(self, pattern):
+        '''Create regular expression similar to mozpath.match().
+        '''
+        prefix = pattern.split("*", 1)[0]
+        p = re.escape(pattern)
+        p = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\1(.+/)?', p)
+        p = re.sub(r'(^|\\\/)\\\*\\\*$', r'(\1.+)?', p)
+        p = p.replace(r'\*', '([^/]*)') + '$'
+        r = re.escape(pattern)
+        r = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\\\\0', r)
+        r = re.sub(r'(^|\\\/)\\\*\\\*$', r'\\\\0', r)
+        r = r.replace(r'\*', r'\\0')
+        backref = itertools.count(1)
+        r = re.sub(r'\\0', lambda m: '\\%s' % backref.next(), r)
+        r = re.sub(r'\\(.)', r'\1', r)
+        self.prefix = prefix
+        self.regex = re.compile(p)
+        self.placable = r
+
+    def match(self, path):
+        '''
+        True if the given path matches the file pattern.
+        '''
+        return self.regex.match(path) is not None
+
+    def sub(self, other, path):
+        '''
+        Substitute the wildcard matches from this pattern into the
+        pattern of the other Matcher object.
+        '''
+        if not self.match(path):
+            return None
+        return self.regex.sub(other.placable, path)
+
+
+class ProjectConfig(object):
+    '''Abstraction of l10n project configuration data.
+    '''
+
+    def __init__(self):
+        self.filter_py = None  # legacy filter code
+        # {
+        #  'l10n': pattern,
+        #  'reference': pattern,  # optional
+        #  'locales': [],  # optional
+        #  'test': [],  # optional
+        # }
+        self.paths = []
+        self.rules = []
+        self.locales = []
+        self.environ = {}
+        self.children = []
+        self._cache = None
+
+    variable = re.compile('{\s*([\w]+)\s*}')
+
+    def expand(self, path, env=None):
+        if env is None:
+            env = {}
+
+        def _expand(m):
+            _var = m.group(1)
+            for _env in (env, self.environ):
+                if _var in _env:
+                    return self.expand(_env[_var], env)
+            return '{{{}}}'.format(_var)
+        return self.variable.sub(_expand, path)
+
+    def lazy_expand(self, pattern):
+        def lazy_l10n_expanded_pattern(env):
+            return Matcher(self.expand(pattern, env))
+        return lazy_l10n_expanded_pattern
+
+    def add_global_environment(self, **kwargs):
+        self.add_environment(**kwargs)
+        for child in self.children:
+            child.add_global_environment(**kwargs)
+
+    def add_environment(self, **kwargs):
+        self.environ.update(kwargs)
+
+    def add_paths(self, *paths):
+        '''Add path dictionaries to this config.
+        The dictionaries must have an `l10n` key. For monolingual files,
+        `reference` is also required.
+        An optional key `test` is allowed to enable additional tests for this
+        path pattern.
+        '''
+
+        for d in paths:
+            rv = {
+                'l10n': self.lazy_expand(d['l10n']),
+                'module': d.get('module')
+            }
+            if 'reference' in d:
+                rv['reference'] = Matcher(d['reference'])
+            if 'test' in d:
+                rv['test'] = d['test']
+            if 'locales' in d:
+                rv['locales'] = d['locales'][:]
+            self.paths.append(rv)
+
+    def set_filter_py(self, filter):
+        '''Set legacy filter.py code.
+        Assert that no rules are set.
+        Also, normalize output already here.
+        '''
+        assert not self.rules
+
+        def filter_(module, path, entity=None):
+            try:
+                rv = filter(module, path, entity=entity)
+            except:
+                return 'error'
+            rv = {
+                True: 'error',
+                False: 'ignore',
+                'report': 'warning'
+            }.get(rv, rv)
+            assert rv in ('error', 'ignore', 'warning', None)
+            return rv
+        self.filter_py = filter_
+
+    def add_rules(self, *rules):
+        '''Add rules to filter on.
+        Assert that there's no legacy filter.py code hooked up.
+        '''
+        assert self.filter_py is None
+        for rule in rules:
+            self.rules.extend(self._compile_rule(rule))
+
+    def add_child(self, child):
+        self.children.append(child)
+
+    def set_locales(self, locales, deep=False):
+        self.locales = locales
+        for child in self.children:
+            if not child.locales or deep:
+                child.set_locales(locales, deep=True)
+            else:
+                locs = [loc for loc in locales if loc in child.locales]
+                child.set_locales(locs)
+
+    @property
+    def configs(self):
+        'Recursively get all configs in this project and its children'
+        yield self
+        for child in self.children:
+            for config in child.configs:
+                yield config
+
+    def filter(self, l10n_file, entity=None):
+        '''Filter a localization file or entities within, according to
+        this configuration file.'''
+        if self.filter_py is not None:
+            return self.filter_py(l10n_file.module, l10n_file.file,
+                                  entity=entity)
+        rv = self._filter(l10n_file, entity=entity)
+        if rv is None:
+            return 'ignore'
+        return rv
+
+    class FilterCache(object):
+        def __init__(self, locale):
+            self.locale = locale
+            self.rules = []
+            self.l10n_paths = []
+
+    def cache(self, locale):
+        if self._cache and self._cache.locale == locale:
+            return self._cache
+        self._cache = self.FilterCache(locale)
+        for paths in self.paths:
+            self._cache.l10n_paths.append(paths['l10n']({
+                "locale": locale
+            }))
+        for rule in self.rules:
+            cached_rule = rule.copy()
+            cached_rule['path'] = rule['path']({
+                "locale": locale
+            })
+            self._cache.rules.append(cached_rule)
+        return self._cache
+
+    def _filter(self, l10n_file, entity=None):
+        actions = set(
+            child._filter(l10n_file, entity=entity)
+            for child in self.children)
+        if 'error' in actions:
+            # return early if we know we'll error
+            return 'error'
+
+        cached = self.cache(l10n_file.locale)
+        if any(p.match(l10n_file.fullpath) for p in cached.l10n_paths):
+            action = 'error'
+            for rule in reversed(cached.rules):
+                if not rule['path'].match(l10n_file.fullpath):
+                    continue
+                if ('key' in rule) ^ (entity is not None):
+                    # key/file mismatch, not a matching rule
+                    continue
+                if 'key' in rule and not rule['key'].match(entity):
+                    continue
+                action = rule['action']
+                break
+            actions.add(action)
+        if 'error' in actions:
+            return 'error'
+        if 'warning' in actions:
+            return 'warning'
+        if 'ignore' in actions:
+            return 'ignore'
+
+    def _compile_rule(self, rule):
+        assert 'path' in rule
+        if isinstance(rule['path'], list):
+            for path in rule['path']:
+                _rule = rule.copy()
+                _rule['path'] = self.lazy_expand(path)
+                for __rule in self._compile_rule(_rule):
+                    yield __rule
+            return
+        if isinstance(rule['path'], basestring):
+            rule['path'] = self.lazy_expand(rule['path'])
+        if 'key' not in rule:
+            yield rule
+            return
+        if not isinstance(rule['key'], basestring):
+            for key in rule['key']:
+                _rule = rule.copy()
+                _rule['key'] = key
+                for __rule in self._compile_rule(_rule):
+                    yield __rule
+            return
+        rule = rule.copy()
+        key = rule['key']
+        if key.startswith('re:'):
+            key = key[3:]
+        else:
+            key = re.escape(key) + '$'
+        rule['key'] = re.compile(key)
+        yield rule
+
+
+class ProjectFiles(object):
+    '''Iterable object to get all files and tests for a locale and a
+    list of ProjectConfigs.
+    '''
+    def __init__(self, locale, projects, mergebase=None):
+        self.locale = locale
+        self.matchers = []
+        self.mergebase = mergebase
+        configs = []
+        for project in projects:
+            configs.extend(project.configs)
+        for pc in configs:
+            if locale not in pc.locales:
+                continue
+            for paths in pc.paths:
+                if 'locales' in paths and locale not in paths['locales']:
+                    continue
+                m = {
+                    'l10n': paths['l10n']({
+                        "locale": locale
+                    }),
+                    'module': paths.get('module'),
+                }
+                if 'reference' in paths:
+                    m['reference'] = paths['reference']
+                if self.mergebase is not None:
+                    m['merge'] = paths['l10n']({
+                        "locale": locale,
+                        "l10n_base": self.mergebase
+                    })
+                m['test'] = set(paths.get('test', []))
+                if 'locales' in paths:
+                    m['locales'] = paths['locales'][:]
+                self.matchers.append(m)
+        self.matchers.reverse()  # we always iterate last first
+        # Remove duplicate patterns, comparing each matcher
+        # against all other matchers.
+        # Avoid n^2 comparisons by only scanning the upper triangle
+        # of a n x n matrix of all possible combinations.
+        # Using enumerate and keeping track of indexes, as we can't
+        # modify the list while iterating over it.
+        drops = set()  # duplicate matchers to remove
+        for i, m in enumerate(self.matchers[:-1]):
+            if i in drops:
+                continue  # we're dropping this anyway, don't search again
+            for i_, m_ in enumerate(self.matchers[(i+1):]):
+                if (mozpath.realpath(m['l10n'].prefix) !=
+                        mozpath.realpath(m_['l10n'].prefix)):
+                    # ok, not the same thing, continue
+                    continue
+                # check that we're comparing the same thing
+                if 'reference' in m:
+                    if (mozpath.realpath(m['reference'].prefix) !=
+                            mozpath.realpath(m_.get('reference').prefix)):
+                        raise RuntimeError('Mismatch in reference for ' +
+                                           mozpath.realpath(m['l10n'].prefix))
+                drops.add(i_ + i + 1)
+                m['test'] |= m_['test']
+        drops = sorted(drops, reverse=True)
+        for i in drops:
+            del self.matchers[i]
+
+    def __iter__(self):
+        known = {}
+        for matchers in self.matchers:
+            matcher = matchers['l10n']
+            for path in self._files(matcher):
+                if path not in known:
+                    known[path] = {'test': matchers.get('test')}
+                    if 'reference' in matchers:
+                        known[path]['reference'] = matcher.sub(
+                            matchers['reference'], path)
+                    if 'merge' in matchers:
+                        known[path]['merge'] = matcher.sub(
+                            matchers['merge'], path)
+            if 'reference' not in matchers:
+                continue
+            matcher = matchers['reference']
+            for path in self._files(matcher):
+                l10npath = matcher.sub(matchers['l10n'], path)
+                if l10npath not in known:
+                    known[l10npath] = {
+                        'reference': path,
+                        'test': matchers.get('test')
+                    }
+                    if 'merge' in matchers:
+                        known[l10npath]['merge'] = \
+                            matcher.sub(matchers['merge'], path)
+        for path, d in sorted(known.items()):
+            yield (path, d.get('reference'), d.get('merge'), d['test'])
+
+    def _files(self, matcher):
+        '''Base implementation of getting all files in a hierarchy
+        using the file system.
+        Subclasses might replace this method to support different IO
+        patterns.
+        '''
+        base = matcher.prefix
+        if os.path.isfile(base):
+            if matcher.match(base):
+                yield base
+            return
+        for d, dirs, files in os.walk(base):
+            for f in files:
+                p = mozpath.join(d, f)
+                if matcher.match(p):
+                    yield p
+
+    def match(self, path):
+        '''Return the tuple of l10n_path, reference, mergepath, tests
+        if the given path matches any config, otherwise None.
+
+        This routine doesn't check that the files actually exist.
+        '''
+        for matchers in self.matchers:
+            matcher = matchers['l10n']
+            if matcher.match(path):
+                ref = merge = None
+                if 'reference' in matchers:
+                    ref = matcher.sub(matchers['reference'], path)
+                if 'merge' in matchers:
+                    merge = matcher.sub(matchers['merge'], path)
+                return path, ref, merge, matchers.get('test')
+            if 'reference' not in matchers:
+                continue
+            matcher = matchers['reference']
+            if matcher.match(path):
+                merge = None
+                l10n = matcher.sub(matchers['l10n'], path)
+                if 'merge' in matchers:
+                    merge = matcher.sub(matchers['merge'], path)
+                return l10n, path, merge, matchers.get('test')
+
+
+class ConfigNotFound(EnvironmentError):
+    def __init__(self, path):
+        super(ConfigNotFound, self).__init__(
+            errno.ENOENT,
+            'Configuration file not found',
+            path)
+
+
+class TOMLParser(object):
+    @classmethod
+    def parse(cls, path, env=None, ignore_missing_includes=False):
+        parser = cls(path, env=env,
+                     ignore_missing_includes=ignore_missing_includes)
+        parser.load()
+        parser.processEnv()
+        parser.processPaths()
+        parser.processFilters()
+        parser.processIncludes()
+        parser.processLocales()
+        return parser.asConfig()
+
+    def __init__(self, path, env=None, ignore_missing_includes=False):
+        self.path = path
+        self.env = env if env is not None else {}
+        self.ignore_missing_includes = ignore_missing_includes
+        self.data = None
+        self.pc = ProjectConfig()
+        self.pc.PATH = path
+
+    def load(self):
+        try:
+            with open(self.path, 'rb') as fin:
+                self.data = toml.load(fin)
+        except:
+            raise ConfigNotFound(self.path)
+
+    def processEnv(self):
+        assert self.data is not None
+        self.pc.add_environment(**self.data.get('env', {}))
+
+    def processLocales(self):
+        assert self.data is not None
+        if 'locales' in self.data:
+            self.pc.set_locales(self.data['locales'])
+
+    def processPaths(self):
+        assert self.data is not None
+        for data in self.data.get('paths', []):
+            l10n = data['l10n']
+            if not l10n.startswith('{'):
+                # l10n isn't relative to a variable, expand
+                l10n = self.resolvepath(l10n)
+            paths = {
+                "l10n": l10n,
+            }
+            if 'locales' in data:
+                paths['locales'] = data['locales']
+            if 'reference' in data:
+                paths['reference'] = self.resolvepath(data['reference'])
+            self.pc.add_paths(paths)
+
+    def processFilters(self):
+        assert self.data is not None
+        for data in self.data.get('filters', []):
+            paths = data['path']
+            if isinstance(paths, basestring):
+                paths = [paths]
+            # expand if path isn't relative to a variable
+            paths = [
+                self.resolvepath(path) if not path.startswith('{')
+                else path
+                for path in paths
+            ]
+            rule = {
+                "path": paths,
+                "action": data['action']
+            }
+            if 'key' in data:
+                rule['key'] = data['key']
+            self.pc.add_rules(rule)
+
+    def processIncludes(self):
+        assert self.data is not None
+        if 'includes' not in self.data:
+            return
+        for include in self.data['includes']:
+            p = include['path']
+            p = self.resolvepath(p)
+            try:
+                child = self.parse(
+                    p, env=self.env,
+                    ignore_missing_includes=self.ignore_missing_includes
+                )
+            except ConfigNotFound as e:
+                if not self.ignore_missing_includes:
+                    raise
+                (logging
+                    .getLogger('compare-locales.io')
+                    .error('%s: %s', e.strerror, e.filename))
+                continue
+            self.pc.add_child(child)
+
+    def resolvepath(self, path):
+        path = self.pc.expand(path, env=self.env)
+        path = mozpath.join(
+            mozpath.dirname(self.path),
+            self.data.get('basepath', '.'),
+            path)
+        return mozpath.normpath(path)
+
+    def asConfig(self):
+        return self.pc
 
 
 class L10nConfigParser(object):
     '''Helper class to gather application information from ini files.
 
     This class is working on synchronous open to read files or web data.
     Subclass this and overwrite loadConfigs and addChild if you need async.
     '''
     def __init__(self, inipath, **kwargs):
         """Constructor for L10nConfigParsers
 
         inipath -- l10n.ini path
         Optional keyword arguments are fowarded to the inner ConfigParser as
         defaults.
         """
-        if os.path.isabs(inipath):
-            self.inipath = 'file:%s' % pathname2url(inipath)
-        else:
-            pwdurl = 'file:%s/' % pathname2url(os.getcwd())
-            self.inipath = urljoin(pwdurl, inipath)
+        self.inipath = mozpath.normpath(inipath)
         # l10n.ini files can import other l10n.ini files, store the
         # corresponding L10nConfigParsers
         self.children = []
         # we really only care about the l10n directories described in l10n.ini
         self.dirs = []
         # optional defaults to be passed to the inner ConfigParser (unused?)
         self.defaults = kwargs
 
     def getDepth(self, cp):
         '''Get the depth for the comparison from the parsed l10n.ini.
-
-        Overloadable to get the source depth for fennec and friends.
         '''
         try:
             depth = cp.get('general', 'depth')
         except:
             depth = '.'
         return depth
 
     def getFilters(self):
         '''Get the test functions from this ConfigParser and all children.
 
         Only works with synchronous loads, used by compare-locales, which
         is local anyway.
         '''
-        filterurl = urljoin(self.inipath, 'filter.py')
+        filter_path = mozpath.join(mozpath.dirname(self.inipath), 'filter.py')
         try:
             l = {}
-            execfile(url2pathname(urlparse(filterurl).path), {}, l)
+            execfile(filter_path, {}, l)
             if 'test' in l and callable(l['test']):
                 filters = [l['test']]
             else:
                 filters = []
         except:
             filters = []
 
         for c in self.children:
@@ -73,334 +570,197 @@ class L10nConfigParser(object):
 
     def loadConfigs(self):
         """Entry point to load the l10n.ini file this Parser refers to.
 
         This implementation uses synchronous loads, subclasses might overload
         this behaviour. If you do, make sure to pass a file-like object
         to onLoadConfig.
         """
-        self.onLoadConfig(urlopen(self.inipath))
-
-    def onLoadConfig(self, inifile):
-        """Parse a file-like object for the loaded l10n.ini file."""
         cp = ConfigParser(self.defaults)
-        cp.readfp(inifile)
+        cp.read(self.inipath)
         depth = self.getDepth(cp)
-        self.baseurl = urljoin(self.inipath, depth)
+        self.base = mozpath.join(mozpath.dirname(self.inipath), depth)
         # create child loaders for any other l10n.ini files to be included
         try:
             for title, path in cp.items('includes'):
                 # skip default items
                 if title in self.defaults:
                     continue
                 # add child config parser
                 self.addChild(title, path, cp)
         except NoSectionError:
             pass
         # try to load the "dirs" defined in the "compare" section
         try:
             self.dirs.extend(cp.get('compare', 'dirs').split())
         except (NoOptionError, NoSectionError):
             pass
-        # try getting a top level compare dir, as used for fennec
-        try:
-            self.tld = cp.get('compare', 'tld')
-            # remove tld from comparison dirs
-            if self.tld in self.dirs:
-                self.dirs.remove(self.tld)
-        except (NoOptionError, NoSectionError):
-            self.tld = None
         # try to set "all_path" and "all_url"
         try:
-            self.all_path = cp.get('general', 'all')
-            self.all_url = urljoin(self.baseurl, self.all_path)
+            self.all_path = mozpath.join(self.base, cp.get('general', 'all'))
         except (NoOptionError, NoSectionError):
             self.all_path = None
-            self.all_url = None
         return cp
 
     def addChild(self, title, path, orig_cp):
         """Create a child L10nConfigParser and load it.
 
         title -- indicates the module's name
         path -- indicates the path to the module's l10n.ini file
         orig_cp -- the configuration parser of this l10n.ini
         """
-        cp = L10nConfigParser(urljoin(self.baseurl, path), **self.defaults)
+        cp = L10nConfigParser(mozpath.join(self.base, path), **self.defaults)
         cp.loadConfigs()
         self.children.append(cp)
 
-    def getTLDPathsTuple(self, basepath):
-        """Given the basepath, return the path fragments to be used for
-        self.tld. For build runs, this is (basepath, self.tld), for
-        source runs, just (basepath,).
-
-        @see overwritten method in SourceTreeConfigParser.
-        """
-        return (basepath, self.tld)
-
     def dirsIter(self):
         """Iterate over all dirs and our base path for this l10n.ini"""
-        url = urlparse(self.baseurl)
-        basepath = url2pathname(url.path)
-        if self.tld is not None:
-            yield self.tld, self.getTLDPathsTuple(basepath)
         for dir in self.dirs:
-            yield dir, (basepath, dir)
+            yield dir, (self.base, dir)
 
     def directories(self):
         """Iterate over all dirs and base paths for this l10n.ini as well
         as the included ones.
         """
         for t in self.dirsIter():
             yield t
         for child in self.children:
             for t in child.directories():
                 yield t
 
     def allLocales(self):
         """Return a list of all the locales of this project"""
-        return util.parseLocales(urlopen(self.all_url).read())
+        return util.parseLocales(open(self.all_path).read())
 
 
 class SourceTreeConfigParser(L10nConfigParser):
     '''Subclassing L10nConfigParser to work with just the repos
     checked out next to each other instead of intermingled like
     we do for real builds.
     '''
 
-    def __init__(self, inipath, basepath, redirects):
+    def __init__(self, inipath, base, redirects):
         '''Add additional arguments base and redirects.
 
         base is used to resolve local paths via branchnames.
         redirects is used in unified repository, mapping upstream
         repos to local clones.
         '''
         L10nConfigParser.__init__(self, inipath)
-        self.basepath = basepath
+        self.base = base
         self.redirects = redirects
-        self.tld = None
-
-    def getDepth(self, cp):
-        '''Get the depth for the comparison from the parsed l10n.ini.
-
-        Overloaded to get the source depth for fennec and friends.
-        '''
-        try:
-            depth = cp.get('general', 'source-depth')
-        except:
-            try:
-                depth = cp.get('general', 'depth')
-            except:
-                depth = '.'
-        return depth
 
     def addChild(self, title, path, orig_cp):
         # check if there's a section with details for this include
         # we might have to check a different repo, or even VCS
         # for example, projects like "mail" indicate in
         # an "include_" section where to find the l10n.ini for "toolkit"
         details = 'include_' + title
         if orig_cp.has_section(details):
             branch = orig_cp.get(details, 'mozilla')
             branch = self.redirects.get(branch, branch)
             inipath = orig_cp.get(details, 'l10n.ini')
-            path = self.basepath + '/' + branch + '/' + inipath
+            path = mozpath.join(self.base, branch, inipath)
         else:
-            path = urljoin(self.baseurl, path)
-        cp = SourceTreeConfigParser(path, self.basepath, self.redirects,
+            path = mozpath.join(self.base, path)
+        cp = SourceTreeConfigParser(path, self.base, self.redirects,
                                     **self.defaults)
         cp.loadConfigs()
         self.children.append(cp)
 
-    def getTLDPathsTuple(self, basepath):
-        """Overwrite L10nConfigParser's getTLDPathsTuple to just return
-        the basepath.
-        """
-        return (basepath, )
-
 
 class File(object):
 
     def __init__(self, fullpath, file, module=None, locale=None):
         self.fullpath = fullpath
         self.file = file
         self.module = module
         self.locale = locale
         pass
 
     def getContents(self):
         # open with universal line ending support and read
         return open(self.fullpath, 'rU').read()
 
-    def __hash__(self):
+    @property
+    def localpath(self):
         f = self.file
         if self.module:
-            f = self.module + '/' + f
-        return hash(f)
+            f = mozpath.join(self.module, f)
+        return f
+
+    def __hash__(self):
+        return hash(self.localpath)
 
     def __str__(self):
         return self.fullpath
 
     def __cmp__(self, other):
         if not isinstance(other, File):
             raise NotImplementedError
         rv = cmp(self.module, other.module)
         if rv != 0:
             return rv
         return cmp(self.file, other.file)
 
 
-class EnumerateDir(object):
-    ignore_dirs = ['CVS', '.svn', '.hg', '.git']
-
-    def __init__(self, basepath, module='', locale=None, ignore_subdirs=[]):
-        self.basepath = basepath
-        self.module = module
-        self.locale = locale
-        self.ignore_subdirs = ignore_subdirs
-        pass
-
-    def cloneFile(self, other):
-        '''
-        Return a File object that this enumerator would return, if it had it.
-        '''
-        return File(os.path.join(self.basepath, other.file), other.file,
-                    self.module, self.locale)
-
-    def __iter__(self):
-        # our local dirs are given as a tuple of path segments, starting off
-        # with an empty sequence for the basepath.
-        dirs = [()]
-        while dirs:
-            dir = dirs.pop(0)
-            fulldir = os.path.join(self.basepath, *dir)
-            try:
-                entries = os.listdir(fulldir)
-            except OSError:
-                # we probably just started off in a non-existing dir, ignore
-                continue
-            entries.sort()
-            for entry in entries:
-                leaf = os.path.join(fulldir, entry)
-                if os.path.isdir(leaf):
-                    if entry not in self.ignore_dirs and \
-                        leaf not in [os.path.join(self.basepath, d)
-                                     for d in self.ignore_subdirs]:
-                        dirs.append(dir + (entry,))
-                    continue
-                yield File(leaf, '/'.join(dir + (entry,)),
-                           self.module, self.locale)
-
-
-class LocalesWrap(object):
-
-    def __init__(self, base, module, locales, ignore_subdirs=[]):
-        self.base = base
-        self.module = module
-        self.locales = locales
-        self.ignore_subdirs = ignore_subdirs
-
-    def __iter__(self):
-        for locale in self.locales:
-            path = os.path.join(self.base, locale, self.module)
-            yield (locale, EnumerateDir(path, self.module, locale,
-                                        self.ignore_subdirs))
-
-
 class EnumerateApp(object):
     reference = 'en-US'
 
     def __init__(self, inipath, l10nbase, locales=None):
         self.setupConfigParser(inipath)
         self.modules = defaultdict(dict)
-        self.l10nbase = os.path.abspath(l10nbase)
+        self.l10nbase = mozpath.abspath(l10nbase)
         self.filters = []
-        drive, tail = os.path.splitdrive(inipath)
         self.addFilters(*self.config.getFilters())
         self.locales = locales or self.config.allLocales()
         self.locales.sort()
 
     def setupConfigParser(self, inipath):
         self.config = L10nConfigParser(inipath)
         self.config.loadConfigs()
 
     def addFilters(self, *args):
         self.filters += args
 
-    value_map = {None: None, 'error': 0, 'ignore': 1, 'report': 2}
+    def asConfig(self):
+        config = ProjectConfig()
+        self._config_for_ini(config, self.config)
+        filters = self.config.getFilters()
+        if filters:
+            config.set_filter_py(filters[0])
+        config.locales += self.locales
+        return config
 
-    def filter(self, l10n_file, entity=None):
-        '''Go through all added filters, and,
-        - map "error" -> 0, "ignore" -> 1, "report" -> 2
-        - if filter.test returns a bool, map that to
-            False -> "ignore" (1), True -> "error" (0)
-        - take the max of all reported
-        '''
-        rv = 0
-        for f in reversed(self.filters):
-            try:
-                _r = f(l10n_file.module, l10n_file.file, entity)
-            except:
-                # XXX error handling
-                continue
-            if isinstance(_r, bool):
-                _r = [1, 0][_r]
-            else:
-                # map string return value to int, default to 'error',
-                # None is None
-                _r = self.value_map.get(_r, 0)
-            if _r is not None:
-                rv = max(rv, _r)
-        return ['error', 'ignore', 'report'][rv]
-
-    def __iter__(self):
-        '''
-        Iterate over all modules, return en-US directory enumerator, and an
-        iterator over all locales in each iteration. Per locale, the locale
-        code and an directory enumerator will be given.
-        '''
-        dirmap = dict(self.config.directories())
-        mods = dirmap.keys()
-        mods.sort()
-        for mod in mods:
-            if self.reference == 'en-US':
-                base = os.path.join(*(dirmap[mod] + ('locales', 'en-US')))
-            else:
-                base = os.path.join(self.l10nbase, self.reference, mod)
-            yield (mod, EnumerateDir(base, mod, self.reference),
-                   LocalesWrap(self.l10nbase, mod, self.locales,
-                   [m[len(mod)+1:] for m in mods if m.startswith(mod+'/')]))
+    def _config_for_ini(self, projectconfig, aConfig):
+        for k, (basepath, module) in aConfig.dirsIter():
+            paths = {
+                'module': module,
+                'reference': mozpath.normpath('%s/%s/locales/en-US/**' %
+                                              (basepath, module)),
+                'l10n': mozpath.normpath('{l10n_base}/{locale}/%s/**' %
+                                         module)
+            }
+            if module == 'mobile/android/base':
+                paths['test'] = ['android-dtd']
+            projectconfig.add_paths(paths)
+            projectconfig.add_global_environment(l10n_base=self.l10nbase)
+        for child in aConfig.children:
+            self._config_for_ini(projectconfig, child)
 
 
 class EnumerateSourceTreeApp(EnumerateApp):
     '''Subclass EnumerateApp to work on side-by-side checked out
     repos, and to pay no attention to how the source would actually
     be checked out for building.
-
-    It's supporting applications like Fennec, too, which have
-    'locales/en-US/...' in their root dir, but claim to be 'mobile'.
     '''
 
     def __init__(self, inipath, basepath, l10nbase, redirects,
                  locales=None):
         self.basepath = basepath
         self.redirects = redirects
         EnumerateApp.__init__(self, inipath, l10nbase, locales)
 
     def setupConfigParser(self, inipath):
         self.config = SourceTreeConfigParser(inipath, self.basepath,
                                              self.redirects)
         self.config.loadConfigs()
-
-
-def get_base_path(mod, loc):
-    'statics for path patterns and conversion'
-    __l10n = 'l10n/%(loc)s/%(mod)s'
-    __en_US = 'mozilla/%(mod)s/locales/en-US'
-    if loc == 'en-US':
-        return __en_US % {'mod': mod}
-    return __l10n % {'mod': mod, 'loc': loc}
-
-
-def get_path(mod, loc, leaf):
-    return get_base_path(mod, loc) + '/' + leaf
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_apps.py
@@ -0,0 +1,116 @@
+import unittest
+import os
+import tempfile
+import shutil
+
+from compare_locales import mozpath
+from compare_locales.paths import EnumerateApp, ProjectFiles
+
+MAIL_INI = '''\
+[general]
+depth = ../..
+all = mail/locales/all-locales
+
+[compare]
+dirs = mail
+
+[includes]
+# non-central apps might want to use %(topsrcdir)s here, or other vars
+# RFE: that needs to be supported by compare-locales, too, though
+toolkit = mozilla/toolkit/locales/l10n.ini
+
+[include_toolkit]
+type = hg
+mozilla = mozilla-central
+repo = http://hg.mozilla.org/
+l10n.ini = toolkit/locales/l10n.ini
+'''
+
+
+MAIL_ALL_LOCALES = '''af
+de
+fr
+'''
+
+MAIL_FILTER_PY = '''
+def test(mod, path, entity = None):
+    if mod == 'toolkit' and path == 'ignored_path':
+        return 'ignore'
+    return 'error'
+'''
+
+TOOLKIT_INI = '''[general]
+depth = ../..
+
+[compare]
+dirs = toolkit
+'''
+
+
+class TestApp(unittest.TestCase):
+    def setUp(self):
+        self.stage = tempfile.mkdtemp()
+        mail = mozpath.join(self.stage, 'comm', 'mail', 'locales')
+        toolkit = mozpath.join(
+            self.stage, 'comm', 'mozilla', 'toolkit', 'locales')
+        l10n = mozpath.join(self.stage, 'l10n-central', 'de', 'toolkit')
+        os.makedirs(mozpath.join(mail, 'en-US'))
+        os.makedirs(mozpath.join(toolkit, 'en-US'))
+        os.makedirs(l10n)
+        with open(mozpath.join(mail, 'l10n.ini'), 'w') as f:
+            f.write(MAIL_INI)
+        with open(mozpath.join(mail, 'all-locales'), 'w') as f:
+            f.write(MAIL_ALL_LOCALES)
+        with open(mozpath.join(mail, 'filter.py'), 'w') as f:
+            f.write(MAIL_FILTER_PY)
+        with open(mozpath.join(toolkit, 'l10n.ini'), 'w') as f:
+            f.write(TOOLKIT_INI)
+        with open(mozpath.join(mail, 'en-US', 'mail.ftl'), 'w') as f:
+            f.write('')
+        with open(mozpath.join(toolkit, 'en-US', 'platform.ftl'), 'w') as f:
+            f.write('')
+        with open(mozpath.join(l10n, 'localized.ftl'), 'w') as f:
+            f.write('')
+
+    def tearDown(self):
+        shutil.rmtree(self.stage)
+
+    def test_app(self):
+        'Test parsing a App'
+        app = EnumerateApp(
+            mozpath.join(self.stage, 'comm', 'mail', 'locales', 'l10n.ini'),
+            mozpath.join(self.stage, 'l10n-central'))
+        self.assertListEqual(app.locales, ['af', 'de', 'fr'])
+        self.assertEqual(len(app.config.children), 1)
+        projectconfig = app.asConfig()
+        self.assertListEqual(projectconfig.locales, ['af', 'de', 'fr'])
+        files = ProjectFiles('de', [projectconfig])
+        files = list(files)
+        self.assertEqual(len(files), 3)
+
+        l10nfile, reffile, mergefile, test = files[0]
+        self.assertListEqual(mozpath.split(l10nfile)[-3:],
+                             ['de', 'mail', 'mail.ftl'])
+        self.assertListEqual(mozpath.split(reffile)[-4:],
+                             ['mail', 'locales', 'en-US', 'mail.ftl'])
+        self.assertIsNone(mergefile)
+        self.assertSetEqual(test, set())
+
+        l10nfile, reffile, mergefile, test = files[1]
+        self.assertListEqual(mozpath.split(l10nfile)[-3:],
+                             ['de', 'toolkit', 'localized.ftl'])
+        self.assertListEqual(
+            mozpath.split(reffile)[-6:],
+            ['comm', 'mozilla', 'toolkit',
+             'locales', 'en-US', 'localized.ftl'])
+        self.assertIsNone(mergefile)
+        self.assertSetEqual(test, set())
+
+        l10nfile, reffile, mergefile, test = files[2]
+        self.assertListEqual(mozpath.split(l10nfile)[-3:],
+                             ['de', 'toolkit', 'platform.ftl'])
+        self.assertListEqual(
+            mozpath.split(reffile)[-6:],
+            ['comm', 'mozilla', 'toolkit', 'locales', 'en-US', 'platform.ftl'])
+        self.assertIsNone(mergefile)
+        self.assertSetEqual(test, set())
--- a/third_party/python/compare-locales/compare_locales/tests/test_checks.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_checks.py
@@ -1,42 +1,38 @@
 # -*- coding: utf-8 -*-
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import unittest
 
 from compare_locales.checks import getChecker
-from compare_locales.parser import getParser, Parser, Entity
+from compare_locales.parser import getParser, Parser, DTDEntity
 from compare_locales.paths import File
 
 
 class BaseHelper(unittest.TestCase):
     file = None
     refContent = None
 
     def setUp(self):
         p = getParser(self.file.file)
         p.readContents(self.refContent)
         self.refList, self.refMap = p.parse()
 
-    def _test(self, content, refWarnOrErrors, with_ref_file=False):
+    def _test(self, content, refWarnOrErrors):
         p = getParser(self.file.file)
         p.readContents(content)
         l10n = [e for e in p]
         assert len(l10n) == 1
         l10n = l10n[0]
-        if with_ref_file:
-            kwargs = {
-                'reference': self.refList
-            }
-        else:
-            kwargs = {}
-        checker = getChecker(self.file, **kwargs)
+        checker = getChecker(self.file)
+        if checker.needs_reference:
+            checker.set_reference(self.refList)
         ref = self.refList[self.refMap[l10n.key]]
         found = tuple(checker.check(ref, l10n))
         self.assertEqual(found, refWarnOrErrors)
 
 
 class TestProperties(BaseHelper):
     file = File('foo.properties', 'foo.properties')
     refContent = '''some = value
@@ -179,90 +175,85 @@ class TestEntitiesInDTDs(BaseHelper):
     file = File('foo.dtd', 'foo.dtd')
     refContent = '''<!ENTITY short "This is &brandShortName;">
 <!ENTITY shorter "This is &brandShorterName;">
 <!ENTITY ent.start "Using &brandShorterName; start to">
 <!ENTITY ent.end " end">
 '''
 
     def testOK(self):
-        self._test('''<!ENTITY ent.start "Mit &brandShorterName;">''', tuple(),
-                   with_ref_file=True)
+        self._test('''<!ENTITY ent.start "Mit &brandShorterName;">''', tuple())
 
     def testMismatch(self):
         self._test('''<!ENTITY ent.start "Mit &brandShortName;">''',
                    (('warning', (0, 0),
                      'Entity brandShortName referenced, '
                      'but brandShorterName used in context',
-                     'xmlparse'),),
-                   with_ref_file=True)
+                     'xmlparse'),))
 
     def testAcross(self):
         self._test('''<!ENTITY ent.end "Mit &brandShorterName;">''',
-                   tuple(),
-                   with_ref_file=True)
+                   tuple())
 
     def testAcrossWithMismatch(self):
         '''If we could tell that ent.start and ent.end are one string,
         we should warn. Sadly, we can't, so this goes without warning.'''
         self._test('''<!ENTITY ent.end "Mit &brandShortName;">''',
-                   tuple(),
-                   with_ref_file=True)
+                   tuple())
 
     def testUnknownWithRef(self):
         self._test('''<!ENTITY ent.start "Mit &foopy;">''',
                    (('warning',
                      (0, 0),
                      'Referencing unknown entity `foopy` '
                      '(brandShorterName used in context, '
                      'brandShortName known)',
-                     'xmlparse'),),
-                   with_ref_file=True)
+                     'xmlparse'),))
 
     def testUnknown(self):
         self._test('''<!ENTITY ent.end "Mit &foopy;">''',
                    (('warning',
                      (0, 0),
                      'Referencing unknown entity `foopy`'
                      ' (brandShortName, brandShorterName known)',
-                     'xmlparse'),),
-                   with_ref_file=True)
+                     'xmlparse'),))
 
 
 class TestAndroid(unittest.TestCase):
     """Test Android checker
 
     Make sure we're hitting our extra rules only if
     we're passing in a DTD file in the embedding/android module.
     """
     apos_msg = u"Apostrophes in Android DTDs need escaping with \\' or " + \
                u"\\u0027, or use \u2019, or put string in quotes."
     quot_msg = u"Quotes in Android DTDs need escaping with \\\" or " + \
                u"\\u0022, or put string in apostrophes."
 
     def getEntity(self, v):
         ctx = Parser.Context(v)
-        return Entity(ctx, lambda s: s, '', (0, len(v)), (), (), (),
-                      (0, len(v)), ())
+        return DTDEntity(
+            ctx, '', (0, len(v)), (), (), (), (0, len(v)), ())
 
     def getDTDEntity(self, v):
         v = v.replace('"', '&quot;')
         ctx = Parser.Context('<!ENTITY foo "%s">' % v)
-        return Entity(ctx,
-                      lambda s: s, '',
-                      (0, len(v) + 16), (), (), (9, 12),
-                      (14, len(v) + 14), ())
+        return DTDEntity(
+            ctx,
+            '',
+            (0, len(v) + 16), (), (), (9, 12),
+            (14, len(v) + 14), ())
 
     def test_android_dtd(self):
         """Testing the actual android checks. The logic is involved,
         so this is a lot of nitty gritty detail tests.
         """
         f = File("embedding/android/strings.dtd", "strings.dtd",
                  "embedding/android")
-        checker = getChecker(f)
+        checker = getChecker(f, extra_tests=['android-dtd'])
         # good string
         ref = self.getDTDEntity("plain string")
         l10n = self.getDTDEntity("plain localized string")
         self.assertEqual(tuple(checker.check(ref, l10n)),
                          ())
         # dtd warning
         l10n = self.getDTDEntity("plain localized string &ref;")
         self.assertEqual(tuple(checker.check(ref, l10n)),
@@ -328,17 +319,17 @@ class TestAndroid(unittest.TestCase):
         l10n = self.getDTDEntity(u"\u9690"*14+"\u006"+"  "+"\u0064")
         self.assertEqual(tuple(checker.check(ref, l10n)),
                          (('error', 14, 'truncated \\uXXXX escape',
                            'android'),))
 
     def test_android_prop(self):
         f = File("embedding/android/strings.properties", "strings.properties",
                  "embedding/android")
-        checker = getChecker(f)
+        checker = getChecker(f, extra_tests=['android-dtd'])
         # good plain string
         ref = self.getEntity("plain string")
         l10n = self.getEntity("plain localized string")
         self.assertEqual(tuple(checker.check(ref, l10n)),
                          ())
         # no dtd warning
         ref = self.getEntity("plain string")
         l10n = self.getEntity("plain localized string &ref;")
@@ -376,17 +367,18 @@ class TestAndroid(unittest.TestCase):
         self.assertEqual(tuple(checker.check(ref, l10n)),
                          ())
 
     def test_entities_across_dtd(self):
         f = File("browser/strings.dtd", "strings.dtd", "browser")
         p = getParser(f.file)
         p.readContents('<!ENTITY other "some &good.ref;">')
         ref = p.parse()
-        checker = getChecker(f, reference=ref[0])
+        checker = getChecker(f)
+        checker.set_reference(ref[0])
         # good string
         ref = self.getDTDEntity("plain string")
         l10n = self.getDTDEntity("plain localized string")
         self.assertEqual(tuple(checker.check(ref, l10n)),
                          ())
         # dtd warning
         ref = self.getDTDEntity("plain string")
         l10n = self.getDTDEntity("plain localized string &ref;")
--- a/third_party/python/compare-locales/compare_locales/tests/test_compare.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_compare.py
@@ -1,15 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import unittest
 
-from compare_locales import compare
+from compare_locales import compare, paths
+from cPickle import loads, dumps
 
 
 class TestTree(unittest.TestCase):
     '''Test the Tree utility class
 
     Tree value classes need to be in-place editable
     '''
 
@@ -32,24 +33,20 @@ class TestTree(unittest.TestCase):
                 (1, 'value', {'leaf': 1}),
                 (0, 'key', ('two', 'other')),
                 (1, 'value', {'leaf': 2})
             ]
         )
         self.assertDictEqual(
             tree.toJSON(),
             {
-                'children': [
-                    ('one/entry',
-                     {'value': {'leaf': 1}}
-                     ),
-                    ('two/other',
-                     {'value': {'leaf': 2}}
-                     )
-                ]
+                'one/entry':
+                    {'leaf': 1},
+                'two/other':
+                    {'leaf': 2}
             }
         )
         self.assertMultiLineEqual(
             str(tree),
             '''\
 one/entry
     {'leaf': 1}
 two/other
@@ -69,22 +66,199 @@ two/other
                 (2, 'value', {'leaf': 1}),
                 (1, 'key', ('other',)),
                 (2, 'value', {'leaf': 2})
             ]
         )
         self.assertDictEqual(
             tree.toJSON(),
             {
-                'children': [
-                    ('one', {
-                        'children': [
-                            ('entry',
-                             {'value': {'leaf': 1}}
-                             ),
-                            ('other',
-                             {'value': {'leaf': 2}}
-                             )
-                        ]
-                    })
-                ]
+                'one': {
+                    'entry':
+                        {'leaf': 1},
+                    'other':
+                        {'leaf': 2}
+                }
             }
         )
+
+
+class TestObserver(unittest.TestCase):
+    def test_simple(self):
+        obs = compare.Observer()
+        f = paths.File('/some/real/sub/path', 'sub/path', locale='de')
+        obs.notify('missingEntity', f, 'one')
+        obs.notify('missingEntity', f, 'two')
+        obs.updateStats(f, {'missing': 15})
+        self.assertDictEqual(obs.toJSON(), {
+            'summary': {
+                'de': {
+                    'missing': 15
+                }
+            },
+            'details': {
+                'de/sub/path':
+                    [{'missingEntity': 'one'},
+                     {'missingEntity': 'two'}]
+            }
+        })
+        clone = loads(dumps(obs))
+        self.assertDictEqual(clone.summary, obs.summary)
+        self.assertDictEqual(clone.details.toJSON(), obs.details.toJSON())
+        self.assertIsNone(clone.file_stats)
+
+    def test_module(self):
+        obs = compare.Observer(file_stats=True)
+        f = paths.File('/some/real/sub/path', 'path',
+                       module='sub', locale='de')
+        obs.notify('missingEntity', f, 'one')
+        obs.notify('obsoleteEntity', f, 'bar')
+        obs.notify('missingEntity', f, 'two')
+        obs.updateStats(f, {'missing': 15})
+        self.assertDictEqual(obs.toJSON(), {
+            'summary': {
+                'de': {
+                    'missing': 15
+                }
+            },
+            'details': {
+                'de/sub/path':
+                    [
+                     {'missingEntity': 'one'},
+                     {'obsoleteEntity': 'bar'},
+                     {'missingEntity': 'two'},
+                    ]
+            }
+        })
+        self.assertDictEqual(obs.file_stats, {
+            'de': {
+                'sub/path': {
+                    'missing': 15
+                }
+            }
+        })
+        self.assertEqual(obs.serialize(), '''\
+de/sub/path
+    +one
+    -bar
+    +two
+de:
+missing: 15
+0% of entries changed''')
+        clone = loads(dumps(obs))
+        self.assertDictEqual(clone.summary, obs.summary)
+        self.assertDictEqual(clone.details.toJSON(), obs.details.toJSON())
+        self.assertDictEqual(clone.file_stats, obs.file_stats)
+
+    def test_file_stats(self):
+        obs = compare.Observer(file_stats=True)
+        f = paths.File('/some/real/sub/path', 'sub/path', locale='de')
+        obs.notify('missingEntity', f, 'one')
+        obs.notify('missingEntity', f, 'two')
+        obs.updateStats(f, {'missing': 15})
+        self.assertDictEqual(obs.toJSON(), {
+            'summary': {
+                'de': {
+                    'missing': 15
+                }
+            },
+            'details': {
+                'de/sub/path':
+                    [
+                     {'missingEntity': 'one'},
+                     {'missingEntity': 'two'},
+                    ]
+            }
+        })
+        self.assertDictEqual(obs.file_stats, {
+            'de': {
+                'sub/path': {
+                    'missing': 15
+                }
+            }
+        })
+        clone = loads(dumps(obs))
+        self.assertDictEqual(clone.summary, obs.summary)
+        self.assertDictEqual(clone.details.toJSON(), obs.details.toJSON())
+        self.assertDictEqual(clone.file_stats, obs.file_stats)
+
+
+class TestAddRemove(unittest.TestCase):
+
+    def _test(self, left, right, ref_actions):
+        ar = compare.AddRemove()
+        ar.set_left(left)
+        ar.set_right(right)
+        actions = list(ar)
+        self.assertListEqual(actions, ref_actions)
+
+    def test_equal(self):
+        self._test(['z', 'a', 'p'], ['z', 'a', 'p'], [
+                ('equal', 'z'),
+                ('equal', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_add_start(self):
+        self._test(['a', 'p'], ['z', 'a', 'p'], [
+                ('add', 'z'),
+                ('equal', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_add_middle(self):
+        self._test(['z', 'p'], ['z', 'a', 'p'], [
+                ('equal', 'z'),
+                ('add', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_add_end(self):
+        self._test(['z', 'a'], ['z', 'a', 'p'], [
+                ('equal', 'z'),
+                ('equal', 'a'),
+                ('add', 'p'),
+            ])
+
+    def test_delete_start(self):
+        self._test(['z', 'a', 'p'], ['a', 'p'], [
+                ('delete', 'z'),
+                ('equal', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_delete_middle(self):
+        self._test(['z', 'a', 'p'], ['z', 'p'], [
+                ('equal', 'z'),
+                ('delete', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_delete_end(self):
+        self._test(['z', 'a', 'p'], ['z', 'a'], [
+                ('equal', 'z'),
+                ('equal', 'a'),
+                ('delete', 'p'),
+            ])
+
+    def test_replace_start(self):
+        self._test(['b', 'a', 'p'], ['z', 'a', 'p'], [
+                ('add', 'z'),
+                ('delete', 'b'),
+                ('equal', 'a'),
+                ('equal', 'p'),
+            ])
+
+    def test_replace_middle(self):
+        self._test(['z', 'b', 'p'], ['z', 'a', 'p'], [
+                ('equal', 'z'),
+                ('add', 'a'),
+                ('delete', 'b'),
+                ('equal', 'p'),
+            ])
+
+    def test_replace_end(self):
+        self._test(['z', 'a', 'b'], ['z', 'a', 'p'], [
+                ('equal', 'z'),
+                ('equal', 'a'),
+                ('add', 'p'),
+                ('delete', 'b'),
+            ])
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_defines.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+
+
+mpl2 = '''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+'''
+
+
+class TestDefinesParser(ParserTestMixin, unittest.TestCase):
+
+    filename = 'defines.inc'
+
+    def testBrowser(self):
+        self._test(mpl2 + '''#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+# If non-English locales wish to credit multiple contributors, uncomment this
+# variable definition and use the format specified.
+# #define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
+
+#unfilter emptyLines
+
+''', (
+            ('Comment', mpl2),
+            ('DefinesInstruction', 'filter emptyLines'),
+            ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+            ('Comment', '#define'),
+            ('DefinesInstruction', 'unfilter emptyLines')))
+
+    def testBrowserWithContributors(self):
+        self._test(mpl2 + '''#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+# If non-English locales wish to credit multiple contributors, uncomment this
+# variable definition and use the format specified.
+#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
+
+#unfilter emptyLines
+
+''', (
+            ('Comment', mpl2),
+            ('DefinesInstruction', 'filter emptyLines'),
+            ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+            ('Comment', 'non-English'),
+            ('MOZ_LANGPACK_CONTRIBUTORS',
+             '<em:contributor>Joe Solon</em:contributor>'),
+            ('DefinesInstruction', 'unfilter emptyLines')))
+
+    def testCommentWithNonAsciiCharacters(self):
+        self._test(mpl2 + '''#filter emptyLines
+
+# e.g. #define seamonkey_l10n <DT><A HREF="urn:foo">SeaMonkey v češtině</a>
+#define seamonkey_l10n_long
+
+#unfilter emptyLines
+
+''', (
+            ('Comment', mpl2),
+            ('DefinesInstruction', 'filter emptyLines'),
+            ('Comment', u'češtině'),
+            ('seamonkey_l10n_long', ''),
+            ('DefinesInstruction', 'unfilter emptyLines')))
+
+    def testToolkit(self):
+        self._test('''#define MOZ_LANG_TITLE English (US)
+''', (
+            ('MOZ_LANG_TITLE', 'English (US)'),))
+
+    def testToolkitEmpty(self):
+        self._test('', tuple())
+
+    def test_empty_file(self):
+        '''Test that empty files generate errors
+
+        defines.inc files are interesting that way, as their
+        content is added to the generated file.
+        '''
+        self._test('\n', (('Junk', '\n'),))
+        self._test('\n\n', (('Junk', '\n\n'),))
+        self._test(' \n\n', (('Junk', ' \n\n'),))
+
+
+if __name__ == '__main__':
+    unittest.main()
--- a/third_party/python/compare-locales/compare_locales/tests/test_dtd.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_dtd.py
@@ -127,11 +127,24 @@ escaped value">
     def test_post(self):
         self.parser.readContents('<!ENTITY a "a"><!ENTITY b "b">')
         a, b = list(self.parser)
         self.assertEqual(a.post, '')
         self.parser.readContents('<!ENTITY a "a"> <!ENTITY b "b">')
         a, b = list(self.parser)
         self.assertEqual(a.post, ' ')
 
+    def test_word_count(self):
+        self.parser.readContents('''\
+<!ENTITY a "one">
+<!ENTITY b "one<br>two">
+<!ENTITY c "one<span>word</span>">
+<!ENTITY d "one <a href='foo'>two</a> three">
+''')
+        a, b, c, d = list(self.parser)
+        self.assertEqual(a.count_words(), 1)
+        self.assertEqual(b.count_words(), 2)
+        self.assertEqual(c.count_words(), 1)
+        self.assertEqual(d.count_words(), 3)
+
 
 if __name__ == '__main__':
     unittest.main()
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_ftl.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+
+
+class TestFluentParser(ParserTestMixin, unittest.TestCase):
+    maxDiff = None
+    filename = 'foo.ftl'
+
+    def test_equality_same(self):
+        source = 'progress = Progress: { NUMBER($num, style: "percent") }.'
+
+        self.parser.readContents(source)
+        [ent1] = list(self.parser)
+
+        self.parser.readContents(source)
+        [ent2] = list(self.parser)
+
+        self.assertTrue(ent1.equals(ent2))
+
+    def test_equality_different_whitespace(self):
+        source1 = 'foo = { $arg }'
+        source2 = 'foo = {    $arg    }'
+
+        self.parser.readContents(source1)
+        [ent1] = list(self.parser)
+
+        self.parser.readContents(source2)
+        [ent2] = list(self.parser)
+
+        self.assertTrue(ent1.equals(ent2))
+
+    def test_word_count(self):
+        self.parser.readContents('''\
+a = One
+b = One two three
+c = One { $arg } two
+d =
+    One { $arg ->
+       *[x] Two three
+        [y] Four
+    } five.
+e
+    .attr = One
+f
+    .attr1 = One
+    .attr2 = Two
+g = One two
+    .attr = Three
+h =
+    One { $arg ->
+       *[x] Two three
+        [y] Four
+    } five.
+    .attr1 =
+        Six { $arg ->
+           *[x] Seven eight
+            [y] Nine
+        } ten.
+''')
+
+        a, b, c, d, e, f, g, h = list(self.parser)
+        self.assertEqual(a.count_words(), 1)
+        self.assertEqual(b.count_words(), 3)
+        self.assertEqual(c.count_words(), 2)
+        self.assertEqual(d.count_words(), 5)
+        self.assertEqual(e.count_words(), 1)
+        self.assertEqual(f.count_words(), 2)
+        self.assertEqual(g.count_words(), 3)
+        self.assertEqual(h.count_words(), 10)
+
+    def test_simple_message(self):
+        self.parser.readContents('a = A')
+
+        [a] = list(self.parser)
+        self.assertEqual(a.key, 'a')
+        self.assertEqual(a.val, 'A')
+        self.assertEqual(a.all, 'a = A')
+        attributes = list(a.attributes)
+        self.assertEqual(len(attributes), 0)
+
+    def test_complex_message(self):
+        self.parser.readContents('abc = A { $arg } B { msg } C')
+
+        [abc] = list(self.parser)
+        self.assertEqual(abc.key, 'abc')
+        self.assertEqual(abc.val, 'A { $arg } B { msg } C')
+        self.assertEqual(abc.all, 'abc = A { $arg } B { msg } C')
+
+    def test_multiline_message(self):
+        self.parser.readContents('''\
+abc =
+    A
+    B
+    C
+''')
+
+        [abc] = list(self.parser)
+        self.assertEqual(abc.key, 'abc')
+        self.assertEqual(abc.val, '\n    A\n    B\n    C')
+        self.assertEqual(abc.all, 'abc =\n    A\n    B\n    C')
+
+    def test_message_with_attribute(self):
+        self.parser.readContents('''\
+abc = ABC
+    .attr = Attr
+''')
+
+        [abc] = list(self.parser)
+        self.assertEqual(abc.key, 'abc')
+        self.assertEqual(abc.val, 'ABC')
+        self.assertEqual(abc.all, 'abc = ABC\n    .attr = Attr')
+
+    def test_message_with_attribute_and_no_value(self):
+        self.parser.readContents('''\
+abc
+    .attr = Attr
+''')
+
+        [abc] = list(self.parser)
+        self.assertEqual(abc.key, 'abc')
+        self.assertEqual(abc.val, '')
+        self.assertEqual(abc.all, 'abc\n    .attr = Attr')
+        attributes = list(abc.attributes)
+        self.assertEqual(len(attributes), 1)
+        attr = attributes[0]
+        self.assertEqual(attr.key, 'attr')
+        self.assertEqual(attr.val, 'Attr')
--- a/third_party/python/compare-locales/compare_locales/tests/test_ini.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_ini.py
@@ -130,10 +130,11 @@ Good=other string
             ('Good', 'other string')))
 
     def test_empty_file(self):
         self._test('', tuple())
         self._test('\n', (('Whitespace', '\n'),))
         self._test('\n\n', (('Whitespace', '\n\n'),))
         self._test(' \n\n', (('Whitespace', ' \n\n'),))
 
+
 if __name__ == '__main__':
     unittest.main()
--- a/third_party/python/compare-locales/compare_locales/tests/test_merge.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_merge.py
@@ -4,266 +4,839 @@
 
 import unittest
 import os
 from tempfile import mkdtemp
 import shutil
 
 from compare_locales.parser import getParser
 from compare_locales.paths import File
-from compare_locales.compare import ContentComparer
+from compare_locales.compare import ContentComparer, Observer
+from compare_locales import mozpath
 
 
 class ContentMixin(object):
     extension = None  # OVERLOAD
 
     def reference(self, content):
-        self.ref = os.path.join(self.tmp, "en-reference" + self.extension)
+        self.ref = mozpath.join(self.tmp, "en-reference" + self.extension)
         open(self.ref, "w").write(content)
 
     def localized(self, content):
-        self.l10n = os.path.join(self.tmp, "l10n" + self.extension)
+        self.l10n = mozpath.join(self.tmp, "l10n" + self.extension)
         open(self.l10n, "w").write(content)
 
 
 class TestProperties(unittest.TestCase, ContentMixin):
     extension = '.properties'
 
     def setUp(self):
         self.maxDiff = None
         self.tmp = mkdtemp()
-        os.mkdir(os.path.join(self.tmp, "merge"))
+        os.mkdir(mozpath.join(self.tmp, "merge"))
 
     def tearDown(self):
         shutil.rmtree(self.tmp)
         del self.tmp
 
     def testGood(self):
         self.assertTrue(os.path.isdir(self.tmp))
-        self.reference("""foo = fooVal
-bar = barVal
+        self.reference("""foo = fooVal word
+bar = barVal word
 eff = effVal""")
         self.localized("""foo = lFoo
 bar = lBar
-eff = lEff
+eff = lEff word
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.properties", ""),
-                   File(self.l10n, "l10n.properties", ""))
+                   File(self.l10n, "l10n.properties", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.properties"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 3
+                    'changed': 3,
+                    'changed_w': 5
                 }},
              'details': {}
              }
         )
-        self.assert_(not os.path.exists(os.path.join(cc.merge_stage,
+        self.assert_(not os.path.exists(mozpath.join(self.tmp, "merge",
                                                      'l10n.properties')))
 
     def testMissing(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""foo = fooVal
 bar = barVal
 eff = effVal""")
         self.localized("""bar = lBar
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.properties", ""),
-                   File(self.l10n, "l10n.properties", ""))
+                   File(self.l10n, "l10n.properties", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.properties"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 1, 'missing': 2
+                    'changed': 1,
+                    'changed_w': 1,
+                    'missing': 2,
+                    'missing_w': 2
                 }},
              'details': {
-                 'children': [
-                     ('l10n.properties',
-                         {'value': {'missingEntity': [u'eff', u'foo']}}
-                      )
-                 ]}
-             }
-        )
-        mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
+                 'l10n.properties': [
+                     {'missingEntity': u'foo'},
+                     {'missingEntity': u'eff'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
         self.assertTrue(os.path.isfile(mergefile))
         p = getParser(mergefile)
         p.readFile(mergefile)
         [m, n] = p.parse()
-        self.assertEqual(map(lambda e: e.key,  m), ["bar", "eff", "foo"])
+        self.assertEqual(map(lambda e: e.key,  m), ["bar", "foo", "eff"])
 
     def testError(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""foo = fooVal
 bar = %d barVal
 eff = effVal""")
         self.localized("""\
 bar = %S lBar
 eff = leffVal
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.properties", ""),
-                   File(self.l10n, "l10n.properties", ""))
+                   File(self.l10n, "l10n.properties", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.properties"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 2, 'errors': 1, 'missing': 1
+                    'changed': 2,
+                    'changed_w': 3,
+                    'errors': 1,
+                    'missing': 1,
+                    'missing_w': 1
                 }},
              'details': {
-                 'children': [
-                     ('l10n.properties',
-                         {'value': {
-                          'error': [u'argument 1 `S` should be `d` '
-                                    u'at line 1, column 7 for bar'],
-                          'missingEntity': [u'foo']}}
-                      )
-                 ]}
-             }
-        )
-        mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
+                 'l10n.properties': [
+                     {'missingEntity': u'foo'},
+                     {'error': u'argument 1 `S` should be `d` '
+                               u'at line 1, column 7 for bar'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
         self.assertTrue(os.path.isfile(mergefile))
         p = getParser(mergefile)
         p.readFile(mergefile)
         [m, n] = p.parse()
         self.assertEqual([e.key for e in m], ["eff", "foo", "bar"])
         self.assertEqual(m[n['bar']].val, '%d barVal')
 
     def testObsolete(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""foo = fooVal
 eff = effVal""")
         self.localized("""foo = fooVal
 other = obsolete
 eff = leffVal
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.properties", ""),
-                   File(self.l10n, "l10n.properties", ""))
+                   File(self.l10n, "l10n.properties", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.properties"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 1, 'obsolete': 1, 'unchanged': 1
+                    'changed': 1,
+                    'changed_w': 1,
+                    'obsolete': 1,
+                    'unchanged': 1,
+                    'unchanged_w': 1
                 }},
              'details': {
-                 'children': [
-                     ('l10n.properties',
-                         {'value': {'obsoleteEntity': [u'other']}})]},
-             }
-        )
+                 'l10n.properties': [
+                     {'obsoleteEntity': u'other'}]
+                }
+             })
+
+    def test_duplicate(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""foo = fooVal
+bar = barVal
+eff = effVal
+foo = other val for foo""")
+        self.localized("""foo = localized
+bar = lBar
+eff = localized eff
+bar = duplicated bar
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.properties", ""),
+                   File(self.l10n, "l10n.properties", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.properties"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'errors': 1,
+                    'warnings': 1,
+                    'changed': 3,
+                    'changed_w': 6
+                }},
+             'details': {
+                 'l10n.properties': [
+                     {'warning': u'foo occurs 2 times'},
+                     {'error': u'bar occurs 2 times'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+        self.assertFalse(os.path.isfile(mergefile))
 
 
 class TestDTD(unittest.TestCase, ContentMixin):
     extension = '.dtd'
 
     def setUp(self):
         self.maxDiff = None
         self.tmp = mkdtemp()
-        os.mkdir(os.path.join(self.tmp, "merge"))
+        os.mkdir(mozpath.join(self.tmp, "merge"))
 
     def tearDown(self):
         shutil.rmtree(self.tmp)
         del self.tmp
 
     def testGood(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""<!ENTITY foo 'fooVal'>
 <!ENTITY bar 'barVal'>
 <!ENTITY eff 'effVal'>""")
         self.localized("""<!ENTITY foo 'lFoo'>
 <!ENTITY bar 'lBar'>
 <!ENTITY eff 'lEff'>
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.dtd", ""),
-                   File(self.l10n, "l10n.dtd", ""))
+                   File(self.l10n, "l10n.dtd", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.dtd"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 3
+                    'changed': 3,
+                    'changed_w': 3
                 }},
              'details': {}
              }
         )
         self.assert_(
-            not os.path.exists(os.path.join(cc.merge_stage, 'l10n.dtd')))
+            not os.path.exists(mozpath.join(self.tmp, "merge", 'l10n.dtd')))
 
     def testMissing(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""<!ENTITY foo 'fooVal'>
 <!ENTITY bar 'barVal'>
 <!ENTITY eff 'effVal'>""")
         self.localized("""<!ENTITY bar 'lBar'>
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.dtd", ""),
-                   File(self.l10n, "l10n.dtd", ""))
+                   File(self.l10n, "l10n.dtd", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.dtd"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'changed': 1, 'missing': 2
+                    'changed': 1,
+                    'changed_w': 1,
+                    'missing': 2,
+                    'missing_w': 2
                 }},
              'details': {
-                 'children': [
-                     ('l10n.dtd',
-                         {'value': {'missingEntity': [u'eff', u'foo']}}
-                      )
-                 ]}
-             }
-        )
-        mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
+                 'l10n.dtd': [
+                     {'missingEntity': u'foo'},
+                     {'missingEntity': u'eff'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
         self.assertTrue(os.path.isfile(mergefile))
         p = getParser(mergefile)
         p.readFile(mergefile)
         [m, n] = p.parse()
-        self.assertEqual(map(lambda e: e.key,  m), ["bar", "eff", "foo"])
+        self.assertEqual(map(lambda e: e.key,  m), ["bar", "foo", "eff"])
 
     def testJunk(self):
         self.assertTrue(os.path.isdir(self.tmp))
         self.reference("""<!ENTITY foo 'fooVal'>
 <!ENTITY bar 'barVal'>
 <!ENTITY eff 'effVal'>""")
         self.localized("""<!ENTITY foo 'fooVal'>
 <!ENTY bar 'gimmick'>
 <!ENTITY eff 'effVal'>
 """)
-        cc = ContentComparer()
-        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+        cc = ContentComparer([Observer()])
         cc.compare(File(self.ref, "en-reference.dtd", ""),
-                   File(self.l10n, "l10n.dtd", ""))
+                   File(self.l10n, "l10n.dtd", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.dtd"))
         self.assertDictEqual(
-            cc.observer.toJSON(),
+            cc.observers[0].toJSON(),
             {'summary':
                 {None: {
-                    'errors': 1, 'missing': 1, 'unchanged': 2
+                    'errors': 1,
+                    'missing': 1,
+                    'missing_w': 1,
+                    'unchanged': 2,
+                    'unchanged_w': 2
                 }},
              'details': {
-                 'children': [
-                     ('l10n.dtd',
-                         {'value': {
-                             'error': [u'Unparsed content "<!ENTY bar '
-                                       u'\'gimmick\'>" '
-                                       u'from line 2 colum 1 to '
-                                       u'line 2 column 22'],
-                             'missingEntity': [u'bar']}}
-                      )
-                 ]}
-             }
-        )
-        mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
+                 'l10n.dtd': [
+                     {'error': u'Unparsed content "<!ENTY bar '
+                               u'\'gimmick\'>" '
+                               u'from line 2 column 1 to '
+                               u'line 2 column 22'},
+                     {'missingEntity': u'bar'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
         self.assertTrue(os.path.isfile(mergefile))
         p = getParser(mergefile)
         p.readFile(mergefile)
         [m, n] = p.parse()
         self.assertEqual(map(lambda e: e.key,  m), ["foo", "eff", "bar"])
 
+    def test_reference_junk(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""<!ENTITY foo 'fooVal'>
+<!ENT bar 'bad val'>
+<!ENTITY eff 'effVal'>""")
+        self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTITY eff 'effVal'>
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.dtd", ""),
+                   File(self.l10n, "l10n.dtd", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.dtd"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'warnings': 1,
+                    'unchanged': 2,
+                    'unchanged_w': 2
+                }},
+             'details': {
+                 'l10n.dtd': [
+                     {'warning': 'Parser error in en-US'}]
+                }
+             })
+
+    def test_reference_xml_error(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'bad &val'>
+<!ENTITY eff 'effVal'>""")
+        self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'good val'>
+<!ENTITY eff 'effVal'>
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.dtd", ""),
+                   File(self.l10n, "l10n.dtd", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.dtd"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'warnings': 1,
+                    'unchanged': 2,
+                    'unchanged_w': 2,
+                    'changed': 1,
+                    'changed_w': 2
+                }},
+             'details': {
+                 'l10n.dtd': [
+                     {'warning': u"can't parse en-US value at line 1, "
+                                 u"column 0 for bar"}]
+                }
+             })
+
+
+class TestFluent(unittest.TestCase):
+    maxDiff = None  # we got big dictionaries to compare
+
+    def reference(self, content):
+        self.ref = os.path.join(self.tmp, "en-reference.ftl")
+        open(self.ref, "w").write(content)
+
+    def localized(self, content):
+        self.l10n = os.path.join(self.tmp, "l10n.ftl")
+        open(self.l10n, "w").write(content)
+
+    def setUp(self):
+        self.tmp = mkdtemp()
+        os.mkdir(os.path.join(self.tmp, "merge"))
+        self.ref = self.l10n = None
+
+    def tearDown(self):
+        shutil.rmtree(self.tmp)
+        del self.tmp
+        del self.ref
+        del self.l10n
+
+    def testGood(self):
+        self.reference("""\
+foo = fooVal
+bar = barVal
+eff = effVal
+""")
+        self.localized("""\
+foo = lFoo
+bar = lBar
+eff = lEff
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'changed': 3,
+                    'changed_w': 3
+                }},
+             'details': {}
+             }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assert_(not os.path.exists(mergepath))
+
+    def testMissing(self):
+        self.reference("""\
+foo = fooVal
+bar = barVal
+eff = effVal
+""")
+        self.localized("""\
+foo = lFoo
+eff = lEff
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {
+                    'l10n.ftl': [
+                        {'missingEntity': u'bar'}
+                    ],
+                },
+                'summary': {
+                    None: {
+                        'changed': 2,
+                        'changed_w': 2,
+                        'missing': 1,
+                        'missing_w': 1
+                    }
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assert_(not os.path.exists(mergepath))
+
+    def testBroken(self):
+        self.reference("""\
+foo = fooVal
+bar = barVal
+eff = effVal
+""")
+        self.localized("""\
+-- Invalid Comment
+foo = lFoo
+bar lBar
+eff = lEff {
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {
+                    'l10n.ftl': [
+                        {'error': u'Unparsed content "-- Invalid Comment" '
+                                  u'from line 1 column 1 '
+                                  u'to line 1 column 19'},
+                        {'error': u'Unparsed content "bar lBar" '
+                                  u'from line 3 column 1 '
+                                  u'to line 3 column 9'},
+                        {'error': u'Unparsed content "eff = lEff {" '
+                                  u'from line 4 column 1 '
+                                  u'to line 4 column 13'},
+                        {'missingEntity': u'bar'},
+                        {'missingEntity': u'eff'},
+                    ],
+                },
+                'summary': {
+                    None: {
+                        'changed': 1,
+                        'changed_w': 1,
+                        'missing': 2,
+                        'missing_w': 2,
+                        'errors': 3
+                    }
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assert_(os.path.exists(mergepath))
+
+        p = getParser(mergepath)
+        p.readFile(mergepath)
+        merged_entities, merged_map = p.parse()
+        self.assertEqual([e.key for e in merged_entities], ["foo"])
+        merged_foo = merged_entities[merged_map['foo']]
+
+        # foo should be l10n
+        p.readFile(self.l10n)
+        l10n_entities, l10n_map = p.parse()
+        l10n_foo = l10n_entities[l10n_map['foo']]
+        self.assertTrue(merged_foo.equals(l10n_foo))
+
+    def testMismatchingAttributes(self):
+        self.reference("""
+foo = Foo
+bar = Bar
+  .tender = Attribute value
+eff = Eff
+""")
+        self.localized("""\
+foo = lFoo
+  .obsolete = attr
+bar = lBar
+eff = lEff
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {
+                    'l10n.ftl': [
+                            {
+                                'error':
+                                    u'Obsolete attribute: '
+                                    'obsolete at line 2, column 3 for foo'
+                            },
+                            {
+                                'error':
+                                    u'Missing attribute: tender at line 3,'
+                                    ' column 1 for bar',
+                            },
+                    ],
+                },
+                'summary': {
+                    None: {'changed': 3, 'changed_w': 5, 'errors': 2}
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assert_(os.path.exists(mergepath))
+
+        p = getParser(mergepath)
+        p.readFile(mergepath)
+        merged_entities, merged_map = p.parse()
+        self.assertEqual([e.key for e in merged_entities], ["eff"])
+        merged_eff = merged_entities[merged_map['eff']]
+
+        # eff should be l10n
+        p.readFile(self.l10n)
+        l10n_entities, l10n_map = p.parse()
+        l10n_eff = l10n_entities[l10n_map['eff']]
+        self.assertTrue(merged_eff.equals(l10n_eff))
+
+    def testMismatchingValues(self):
+        self.reference("""
+foo = Foo
+  .foottr = something
+bar
+  .tender = Attribute value
+""")
+        self.localized("""\
+foo
+  .foottr = attr
+bar = lBar
+  .tender = localized
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {
+                    'l10n.ftl': [
+                        {
+                            'error':
+                                u'Missing value at line 1, column 1 for foo'
+                        },
+                        {
+                            'error':
+                                u'Obsolete value at line 3, column 7 for bar',
+                        },
+                    ]
+                },
+                'summary': {
+                    None: {'changed': 2, 'changed_w': 4, 'errors': 2}
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertTrue(os.path.exists(mergepath))
+
+        p = getParser(mergepath)
+        p.readFile(mergepath)
+        merged_entities, _ = p.parse()
+        self.assertEqual([e.key for e in merged_entities], [])
+
+    def testMissingSection(self):
+        self.reference("""\
+foo = fooVal
+
+[[ Section ]]
+bar = barVal
+""")
+        self.localized("""\
+foo = lFoo
+bar = lBar
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {},
+                'summary': {
+                    None: {
+                        'changed': 2,
+                        'changed_w': 2,
+                    }
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.exists(mergepath))
+
+    def testMissingAttachedComment(self):
+        self.reference("""\
+foo = fooVal
+
+// Attached Comment
+bar = barVal
+""")
+        self.localized("""\
+foo = lFoo
+bar = barVal
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {},
+                'summary': {
+                    None: {
+                        'changed': 1,
+                        'changed_w': 1,
+                        'unchanged': 1,
+                        'unchanged_w': 1,
+                    }
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.exists(mergepath))
+
+    def testObsoleteStandaloneComment(self):
+        self.reference("""\
+foo = fooVal
+bar = barVal
+""")
+        self.localized("""\
+foo = lFoo
+
+// Standalone Comment
+
+bar = lBar
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {
+                'details': {},
+                'summary': {
+                    None: {
+                        'changed': 2,
+                        'changed_w': 2,
+                    }
+                }
+            }
+        )
+
+        # validate merge results
+        mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.exists(mergepath))
+
+    def test_duplicate(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""foo = fooVal
+bar = barVal
+eff = effVal
+foo = other val for foo""")
+        self.localized("""foo = localized
+bar = lBar
+eff = localized eff
+bar = duplicated bar
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'errors': 1,
+                    'warnings': 1,
+                    'changed': 3,
+                    'changed_w': 6
+                }},
+             'details': {
+                 'l10n.ftl': [
+                     {'warning': u'foo occurs 2 times'},
+                     {'error': u'bar occurs 2 times'}]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.isfile(mergefile))
+
+    def test_duplicate_attributes(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""foo = fooVal
+    .attr = good""")
+        self.localized("""foo = localized
+    .attr = not
+    .attr = so
+    .attr = good
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'warnings': 1,
+                    'changed': 1,
+                    'changed_w': 2
+                }},
+             'details': {
+                 'l10n.ftl': [
+                     {'warning':
+                      u'Attribute "attr" occurs 3 times '
+                      u'at line 4, column 5 for foo'
+                      }]
+                }
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.isfile(mergefile))
+
+    def test_unmatched_tags(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""foo = fooVal
+    #yes
+""")
+        self.localized("""foo = fooVal
+    #no
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'unchanged': 1,
+                    'unchanged_w': 1
+                }},
+             'details': {}
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.isfile(mergefile))
+
+    def test_matching_tags(self):
+        self.assertTrue(os.path.isdir(self.tmp))
+        self.reference("""foo = fooVal
+    #yes
+""")
+        self.localized("""foo = fooVal
+    #yes
+""")
+        cc = ContentComparer([Observer()])
+        cc.compare(File(self.ref, "en-reference.ftl", ""),
+                   File(self.l10n, "l10n.ftl", ""),
+                   mozpath.join(self.tmp, "merge", "l10n.ftl"))
+        self.assertDictEqual(
+            cc.observers[0].toJSON(),
+            {'summary':
+                {None: {
+                    'unchanged': 1,
+                    'unchanged_w': 1
+                }},
+             'details': {}
+             })
+        mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+        self.assertFalse(os.path.isfile(mergefile))
+
+
 if __name__ == '__main__':
     unittest.main()
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_mozpath.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from compare_locales.mozpath import (
+    relpath,
+    join,
+    normpath,
+    dirname,
+    commonprefix,
+    basename,
+    split,
+    splitext,
+    basedir,
+    match,
+    rebase,
+)
+import unittest
+import os
+
+
+class TestPath(unittest.TestCase):
+    SEP = os.sep
+
+    def test_relpath(self):
+        self.assertEqual(relpath('foo', 'foo'), '')
+        self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '')
+        self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar')
+        self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'),
+                         'bar/baz')
+        self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'),
+                         '..')
+        self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'),
+                         '../bar')
+        self.assertEqual(relpath('foo/', 'foo'), '')
+        self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')
+
+    def test_join(self):
+        self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz')
+        self.assertEqual(join('foo', '', 'bar'), 'foo/bar')
+        self.assertEqual(join('', 'foo', 'bar'), 'foo/bar')
+        self.assertEqual(join('', 'foo', '/bar'), '/bar')
+
+    def test_normpath(self):
+        self.assertEqual(normpath(self.SEP.join(('foo', 'bar', 'baz',
+                                                 '..', 'qux'))), 'foo/bar/qux')
+
+    def test_dirname(self):
+        self.assertEqual(dirname('foo/bar/baz'), 'foo/bar')
+        self.assertEqual(dirname('foo/bar'), 'foo')
+        self.assertEqual(dirname('foo'), '')
+        self.assertEqual(dirname('foo/bar/'), 'foo/bar')
+
+    def test_commonprefix(self):
+        self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+                                       'foo/qux', 'foo/baz/qux']), 'foo/')
+        self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+                                       'foo/qux', 'baz/qux']), '')
+
+    def test_basename(self):
+        self.assertEqual(basename('foo/bar/baz'), 'baz')
+        self.assertEqual(basename('foo/bar'), 'bar')
+        self.assertEqual(basename('foo'), 'foo')
+        self.assertEqual(basename('foo/bar/'), '')
+
+    def test_split(self):
+        self.assertEqual(split(self.SEP.join(('foo', 'bar', 'baz'))),
+                         ['foo', 'bar', 'baz'])
+
+    def test_splitext(self):
+        self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))),
+                         ('foo/bar/baz', '.qux'))
+
+    def test_basedir(self):
+        foobarbaz = self.SEP.join(('foo', 'bar', 'baz'))
+        self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo')
+        self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']),
+                         'foo/bar')
+        self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']),
+                         'foo/bar')
+        self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo')
+        self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '')
+
+    def test_match(self):
+        self.assertTrue(match('foo', ''))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo'))
+        self.assertTrue(match('foo', '*'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/baz.qux'))
+        self.assertFalse(match('foo/bar/baz.qux', 'foo/baz'))
+        self.assertFalse(match('foo/bar/baz.qux', 'bar'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '*/*/*'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'))
+        self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '**'))
+        self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+        self.assertTrue(match('foo/bar/baz.qux', '**/*.qux'))
+        self.assertFalse(match('foo/bar/baz.qux', '**.qux'))
+        self.assertFalse(match('foo/bar', 'foo/*/bar'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/**'))
+        self.assertFalse(match('foo/nobar/baz.qux', 'foo/**/bar/**'))
+        self.assertTrue(match('foo/bar', 'foo/**/bar/**'))
+
+    def test_rebase(self):
+        self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz')
+        self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz')
+        self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz')
+
+
+if os.altsep:
+    class TestAltPath(TestPath):
+        SEP = os.altsep
+
+    class TestReverseAltPath(TestPath):
+        def setUp(self):
+            sep = os.sep  # swap os.sep/os.altsep to exercise the alternate form
+            os.sep = os.altsep
+            os.altsep = sep
+
+        def tearDown(self):
+            self.setUp()  # swapping again restores the original separators
+
+    class TestAltReverseAltPath(TestReverseAltPath):
+        SEP = os.altsep
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_parser.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales import parser
+
+
+class TestParserContext(unittest.TestCase):
+    def test_lines(self):
+        "Test that Parser.Context.lines returns 1-based tuples"
+        ctx = parser.Parser.Context('''first line
+second line
+third line
+''')
+        self.assertEqual(
+            ctx.lines(0, 1),
+            [(1, 1), (1, 2)]
+        )
+        self.assertEqual(
+            ctx.lines(len('first line')),
+            [(1, len('first line') + 1)]
+        )
+        self.assertEqual(
+            ctx.lines(len('first line') + 1),
+            [(2, 1)]
+        )
+        self.assertEqual(
+            ctx.lines(len(ctx.contents)),
+            [(4, 1)]
+        )
+
+    def test_empty_parser(self):
+        p = parser.Parser()
+        entities, _map = p.parse()
+        self.assertListEqual(
+            entities,
+            []
+        )
+        self.assertDictEqual(
+            _map,
+            {}
+        )
new file mode 100644
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_paths.py
@@ -0,0 +1,473 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.paths import ProjectConfig, File, ProjectFiles, Matcher
+from compare_locales import mozpath
+
+
+class TestMatcher(unittest.TestCase):
+
+    def test_matcher(self):
+        one = Matcher('foo/*')
+        self.assertTrue(one.match('foo/baz'))
+        self.assertFalse(one.match('foo/baz/qux'))
+        other = Matcher('bar/*')
+        self.assertTrue(other.match('bar/baz'))
+        self.assertFalse(other.match('bar/baz/qux'))
+        self.assertEqual(one.sub(other, 'foo/baz'), 'bar/baz')
+        self.assertIsNone(one.sub(other, 'bar/baz'))
+        one = Matcher('foo/**')
+        self.assertTrue(one.match('foo/baz'))
+        self.assertTrue(one.match('foo/baz/qux'))
+        other = Matcher('bar/**')
+        self.assertTrue(other.match('bar/baz'))
+        self.assertTrue(other.match('bar/baz/qux'))
+        self.assertEqual(one.sub(other, 'foo/baz'), 'bar/baz')
+        self.assertEqual(one.sub(other, 'foo/baz/qux'), 'bar/baz/qux')
+        one = Matcher('foo/*/one/**')
+        self.assertTrue(one.match('foo/baz/one/qux'))
+        self.assertFalse(one.match('foo/baz/bez/one/qux'))
+        other = Matcher('bar/*/other/**')
+        self.assertTrue(other.match('bar/baz/other/qux'))
+        self.assertFalse(other.match('bar/baz/bez/other/qux'))
+        self.assertEqual(one.sub(other, 'foo/baz/one/qux'),
+                         'bar/baz/other/qux')
+        self.assertEqual(one.sub(other, 'foo/baz/one/qux/zzz'),
+                         'bar/baz/other/qux/zzz')
+        self.assertIsNone(one.sub(other, 'foo/baz/bez/one/qux'))
+
+
+class SetupMixin(object):
+    def setUp(self):
+        self.cfg = ProjectConfig()
+        self.file = File(
+            '/tmp/somedir/de/browser/one/two/file.ftl',
+            'file.ftl',
+            module='browser', locale='de')
+        self.other_file = File(
+            '/tmp/somedir/de/toolkit/two/one/file.ftl',
+            'file.ftl',
+            module='toolkit', locale='de')
+
+
+class TestConfigLegacy(SetupMixin, unittest.TestCase):
+
+    def test_filter_py_true(self):
+        'Test filter.py just return bool(True)'
+        def filter(mod, path, entity=None):
+            return True
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'error')
+
+    def test_filter_py_false(self):
+        'Test filter.py just return bool(False)'
+        def filter(mod, path, entity=None):
+            return False
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_filter_py_error(self):
+        'Test filter.py just return str("error")'
+        def filter(mod, path, entity=None):
+            return 'error'
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'error')
+
+    def test_filter_py_ignore(self):
+        'Test filter.py just return str("ignore")'
+        def filter(mod, path, entity=None):
+            return 'ignore'
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_filter_py_report(self):
+        'Test filter.py just return str("report") and match to "warning"'
+        def filter(mod, path, entity=None):
+            return 'report'
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'warning')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'warning')
+
+    def test_filter_py_module(self):
+        'Test filter.py to return str("error") for browser or "ignore"'
+        def filter(mod, path, entity=None):
+            return 'error' if mod == 'browser' else 'ignore'
+        self.cfg.set_filter_py(filter)
+        with self.assertRaises(AssertionError):
+            self.cfg.add_rules({})
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file, entity='one_entity')
+        self.assertEqual(rv, 'ignore')
+
+
+class TestConfigRules(SetupMixin, unittest.TestCase):
+
+    def test_filter_empty(self):
+        'Test that an empty config works'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/browser/**'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, entity='one_entity')
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file, entity='one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_single_file_rule(self):
+        'Test a single rule for just a single file, no key'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/browser/**'
+        })
+        self.cfg.add_rules({
+            'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.file, 'one_entity')
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_single_key_rule(self):
+        'Test a single rule with file and key'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/browser/**'
+        })
+        self.cfg.add_rules({
+            'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+            'key': 'one_entity',
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_single_non_matching_key_rule(self):
+        'Test a single key rule with regex special chars that should not match'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/**'
+        })
+        self.cfg.add_rules({
+            'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+            'key': '.ne_entit.',
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file, 'one_entity')
+        self.assertEqual(rv, 'error')
+
+    def test_single_matching_re_key_rule(self):
+        'Test a single key with regular expression'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/**'
+        })
+        self.cfg.add_rules({
+            'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+            'key': 're:.ne_entit.$',
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_double_file_rule(self):
+        'Test path shortcut, one for each of our files'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/**'
+        })
+        self.cfg.add_rules({
+            'path': [
+                '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+                '/tmp/somedir/{locale}/toolkit/two/one/file.ftl',
+            ],
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+
+    def test_double_file_key_rule(self):
+        'Test path and key shortcut, one key matching, one not'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/**'
+        })
+        self.cfg.add_rules({
+            'path': [
+                '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+                '/tmp/somedir/{locale}/toolkit/two/one/file.ftl',
+            ],
+            'key': [
+                'one_entity',
+                'other_entity',
+            ],
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'error')
+        rv = self.cfg.filter(self.other_file, 'one_entity')
+        self.assertEqual(rv, 'ignore')
+
+    def test_single_wildcard_rule(self):
+        'Test single wildcard'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/browser/**'
+        })
+        self.cfg.add_rules({
+            'path': [
+                '/tmp/somedir/{locale}/browser/one/*/*',
+            ],
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+
+    def test_double_wildcard_rule(self):
+        'Test double wildcard'
+        self.cfg.add_paths({
+            'l10n': '/tmp/somedir/{locale}/**'
+        })
+        self.cfg.add_rules({
+            'path': [
+                '/tmp/somedir/{locale}/**',
+            ],
+            'action': 'ignore'
+        })
+        rv = self.cfg.filter(self.file)
+        self.assertEqual(rv, 'ignore')
+        rv = self.cfg.filter(self.other_file)
+        self.assertEqual(rv, 'ignore')
+
+
+class MockProjectFiles(ProjectFiles):
+    def __init__(self, mocks, locale, projects, mergebase=None):
+        (super(MockProjectFiles, self)
+            .__init__(locale, projects, mergebase=mergebase))
+        self.mocks = mocks
+
+    def _files(self, matcher):
+        base = matcher.prefix
+        for path in self.mocks.get(base, []):
+            p = mozpath.join(base, path)
+            if matcher.match(p):
+                yield p
+
+
+class TestProjectPaths(unittest.TestCase):
+    def test_l10n_path(self):
+        cfg = ProjectConfig()
+        cfg.locales.append('de')
+        cfg.add_paths({
+            'l10n': '{l10n_base}/{locale}/*'
+        })
+        cfg.add_environment(l10n_base='/tmp')
+        mocks = {
+            '/tmp/de/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+            '/tmp/fr/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+        }
+        files = MockProjectFiles(mocks, 'de', [cfg])
+        self.assertListEqual(
+            list(files), [('/tmp/de/good.ftl', None, None, set())])
+        self.assertTupleEqual(
+            files.match('/tmp/de/something.ftl'),
+            ('/tmp/de/something.ftl', None, None, set()))
+        self.assertIsNone(files.match('/tmp/fr/something.ftl'))
+        files = MockProjectFiles(mocks, 'de', [cfg], mergebase='merging')
+        self.assertListEqual(
+            list(files),
+            [('/tmp/de/good.ftl', None, 'merging/de/good.ftl', set())])
+        self.assertTupleEqual(
+            files.match('/tmp/de/something.ftl'),
+            ('/tmp/de/something.ftl', None, 'merging/de/something.ftl', set()))
+        # 'fr' is not in the locale list, should return no files
+        files = MockProjectFiles(mocks, 'fr', [cfg])
+        self.assertListEqual(list(files), [])
+
+    def test_reference_path(self):
+        cfg = ProjectConfig()
+        cfg.locales.append('de')
+        cfg.add_paths({
+            'l10n': '{l10n_base}/{locale}/*',
+            'reference': '/tmp/reference/*'
+        })
+        cfg.add_environment(l10n_base='/tmp/l10n')
+        mocks = {
+            '/tmp/l10n/de/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+            '/tmp/l10n/fr/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+            '/tmp/reference/': [
+                'ref.ftl',
+                'not/subdir/bad.ftl'
+            ],
+        }
+        files = MockProjectFiles(mocks, 'de', [cfg])
+        self.assertListEqual(
+            list(files),
+            [
+                ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl', None,
+                 set()),
+                ('/tmp/l10n/de/ref.ftl', '/tmp/reference/ref.ftl', None,
+                 set()),
+            ])
+        self.assertTupleEqual(
+            files.match('/tmp/l10n/de/good.ftl'),
+            ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl', None,
+             set()),
+            )
+        self.assertTupleEqual(
+            files.match('/tmp/reference/good.ftl'),
+            ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl', None,
+             set()),
+            )
+        self.assertIsNone(files.match('/tmp/l10n/de/subdir/bad.ftl'))
+        self.assertIsNone(files.match('/tmp/reference/subdir/bad.ftl'))
+        files = MockProjectFiles(mocks, 'de', [cfg], mergebase='merging')
+        self.assertListEqual(
+            list(files),
+            [
+                ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl',
+                 'merging/de/good.ftl', set()),
+                ('/tmp/l10n/de/ref.ftl', '/tmp/reference/ref.ftl',
+                 'merging/de/ref.ftl', set()),
+            ])
+        self.assertTupleEqual(
+            files.match('/tmp/l10n/de/good.ftl'),
+            ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl',
+             'merging/de/good.ftl', set()),
+            )
+        self.assertTupleEqual(
+            files.match('/tmp/reference/good.ftl'),
+            ('/tmp/l10n/de/good.ftl', '/tmp/reference/good.ftl',
+             'merging/de/good.ftl', set()),
+            )
+        # 'fr' is not in the locale list, should return no files
+        files = MockProjectFiles(mocks, 'fr', [cfg])
+        self.assertListEqual(list(files), [])
+
+    def test_partial_l10n(self):
+        cfg = ProjectConfig()
+        cfg.locales.extend(['de', 'fr'])
+        cfg.add_paths({
+            'l10n': '/tmp/{locale}/major/*'
+        }, {
+            'l10n': '/tmp/{locale}/minor/*',
+            'locales': ['de']
+        })
+        mocks = {
+            '/tmp/de/major/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+            '/tmp/de/minor/': [
+                'good.ftl',
+            ],
+            '/tmp/fr/major/': [
+                'good.ftl',
+                'not/subdir/bad.ftl'
+            ],
+            '/tmp/fr/minor/': [
+                'good.ftl',
+            ],
+        }
+        files = MockProjectFiles(mocks, 'de', [cfg])
+        self.assertListEqual(
+            list(files),
+            [
+                ('/tmp/de/major/good.ftl', None, None, set()),
+                ('/tmp/de/minor/good.ftl', None, None, set()),
+            ])
+        self.assertTupleEqual(
+            files.match('/tmp/de/major/some.ftl'),
+            ('/tmp/de/major/some.ftl', None, None, set()))
+        self.assertIsNone(files.match('/tmp/de/other/some.ftl'))
+        # 'fr' is not in the locale list of minor, should only return major
+        files = MockProjectFiles(mocks, 'fr', [cfg])
+        self.assertListEqual(
+            list(files),
+            [
+                ('/tmp/fr/major/good.ftl', None, None, set()),
+            ])
+        self.assertIsNone(files.match('/tmp/fr/minor/some.ftl'))
+
+
+class TestProjectConfig(unittest.TestCase):
+    def test_expand_paths(self):
+        pc = ProjectConfig()
+        pc.add_environment(one="first_path")
+        self.assertEqual(pc.expand('foo'), 'foo')
+        self.assertEqual(pc.expand('foo{one}bar'), 'foofirst_pathbar')
+        pc.add_environment(l10n_base='../tmp/localizations')
+        self.assertEqual(
+            pc.expand('{l}dir', {'l': '{l10n_base}/{locale}/'}),
+            '../tmp/localizations/{locale}/dir')
+        self.assertEqual(
+            pc.expand('{l}dir', {
+                'l': '{l10n_base}/{locale}/',
+                'l10n_base': '../merge-base'
+            }),
+            '../merge-base/{locale}/dir')
+
+    def test_children(self):
+        pc = ProjectConfig()
+        child = ProjectConfig()
+        pc.add_child(child)
+        self.assertListEqual([pc, child], list(pc.configs))
--- a/third_party/python/compare-locales/compare_locales/tests/test_properties.py
+++ b/third_party/python/compare-locales/compare_locales/tests/test_properties.py
@@ -141,10 +141,11 @@ escaped value
         one, two = list(self.parser)
         self.assertEqual(one.position(), (1, 1))
         self.assertEqual(one.value_position(), (1, 7))
         self.assertEqual(two.position(), (2, 1))
         self.assertEqual(two.value_position(), (2, 7))
         self.assertEqual(two.value_position(-1), (3, 14))
         self.assertEqual(two.value_position(10), (3, 3))
 
+
 if __name__ == '__main__':
     unittest.main()
deleted file mode 100644
--- a/third_party/python/compare-locales/compare_locales/tests/test_webapps.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import unittest
-
-from compare_locales import webapps
-
-
-class TestFileComparison(unittest.TestCase):
-
-    def mock_FileComparison(self, mock_listdir):
-        class Target(webapps.FileComparison):
-            def _listdir(self):
-                return mock_listdir()
-        return Target('.', 'en-US')
-
-    def test_just_reference(self):
-        def _listdir():
-            return ['my_app.en-US.properties']
-        filecomp = self.mock_FileComparison(_listdir)
-        filecomp.files()
-        self.assertEqual(filecomp.locales(), [])
-        self.assertEqual(filecomp._reference.keys(), ['my_app'])
-        file_ = filecomp._reference['my_app']
-        self.assertEqual(file_.file, 'locales/my_app.en-US.properties')
-
-    def test_just_locales(self):
-        def _listdir():
-            return ['my_app.ar.properties',
-                    'my_app.sr-Latn.properties',
-                    'my_app.sv-SE.properties',
-                    'my_app.po_SI.properties']
-        filecomp = self.mock_FileComparison(_listdir)
-        filecomp.files()
-        self.assertEqual(filecomp.locales(),
-                         ['ar', 'sr-Latn', 'sv-SE'])
-        self.assertEqual(filecomp._files['ar'].keys(), ['my_app'])
-        file_ = filecomp._files['ar']['my_app']
-        self.assertEqual(file_.file, 'locales/my_app.ar.properties')
deleted file mode 100644
--- a/third_party/python/compare-locales/compare_locales/webapps.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''gaia-style web apps support
-
-This variant supports manifest.webapp localization as well as
-.properties files with a naming scheme of locales/foo.*.properties.
-'''
-
-from collections import defaultdict
-import json
-import os
-import os.path
-import re
-
-from compare_locales.paths import File, EnumerateDir
-from compare_locales.compare import AddRemove, ContentComparer
-
-
-class WebAppCompare(object):
-    '''For a given directory, analyze
-    /manifest.webapp
-    /locales/*.*.properties
-
-    Deduce the present locale codes.
-    '''
-    ignore_dirs = EnumerateDir.ignore_dirs
-    reference_locale = 'en-US'
-
-    def __init__(self, basedir):
-        '''Constructor
-        :param basedir: Directory of the web app to inspect
-        '''
-        self.basedir = basedir
-        self.manifest = Manifest(basedir, self.reference_locale)
-        self.files = FileComparison(basedir, self.reference_locale)
-        self.watcher = None
-
-    def compare(self, locales):
-        '''Compare the manifest.webapp and the locales/*.*.properties
-        '''
-        if not locales:
-            locales = self.locales()
-        self.manifest.compare(locales)
-        self.files.compare(locales)
-
-    def setWatcher(self, watcher):
-        self.watcher = watcher
-        self.manifest.watcher = watcher
-        self.files.watcher = watcher
-
-    def locales(self):
-        '''Inspect files on disk to find present languages.
-        :rtype: List of locales, sorted, including reference.
-        '''
-        locales = set(self.manifest.strings.keys())
-        locales.update(self.files.locales())
-        locales = list(sorted(locales))
-        return locales
-
-
-class Manifest(object):
-    '''Class that helps with parsing and inspection of manifest.webapp.
-    '''
-
-    def __init__(self, basedir, reference_locale):
-        self.file = File(os.path.join(basedir, 'manifest.webapp'),
-                         'manifest.webapp')
-        self.reference_locale = reference_locale
-        self._strings = None
-        self.watcher = None
-
-    @property
-    def strings(self):
-        if self._strings is None:
-            self._strings = self.load_and_parse()
-        return self._strings
-
-    def load_and_parse(self):
-        try:
-            manifest = json.load(open(self.file.fullpath))
-        except (ValueError, IOError), e:
-            if self.watcher:
-                self.watcher.notify('error', self.file, str(e))
-            return {}
-        return self.extract_manifest_strings(manifest)
-
-    def extract_manifest_strings(self, manifest_fragment):
-        '''Extract localizable strings from a manifest dict.
-        This method is recursive, and returns a two-level dict,
-        first level being locale codes, second level being generated
-        key and localized value. Keys are generated by concatenating
-        each level in the json with a ".".
-        '''
-        rv = defaultdict(dict)
-        localizable = manifest_fragment.pop('locales', {})
-        if localizable:
-            for locale, keyvalue in localizable.iteritems():
-                for key, value in keyvalue.iteritems():
-                    key = '.'.join(['locales', 'AB_CD', key])
-                    rv[locale][key] = value
-        for key, sub_manifest in manifest_fragment.iteritems():
-            if not isinstance(sub_manifest, dict):
-                continue
-            subdict = self.extract_manifest_strings(sub_manifest)
-            if subdict:
-                for locale, keyvalue in subdict:
-                    rv[locale].update((key + '.' + subkey, value)
-                                      for subkey, value
-                                      in keyvalue.iteritems())
-        return rv
-
-    def compare(self, locales):
-        strings = self.strings
-        if not strings:
-            return
-        # create a copy so that we can mock around with it
-        strings = strings.copy()
-        reference = strings.pop(self.reference_locale)
-        for locale in locales:
-            if locale == self.reference_locale:
-                continue
-            self.compare_strings(reference,
-                                 strings.get(locale, {}),
-                                 locale)
-
-    def compare_strings(self, reference, l10n, locale):
-        add_remove = AddRemove()
-        add_remove.set_left(sorted(reference.keys()))
-        add_remove.set_right(sorted(l10n.keys()))
-        missing = obsolete = changed = unchanged = 0
-        for op, item_or_pair in add_remove:
-            if op == 'equal':
-                if reference[item_or_pair[0]] == l10n[item_or_pair[1]]:
-                    unchanged += 1
-                else:
-                    changed += 1
-            else:
-                key = item_or_pair.replace('.AB_CD.',
-                                           '.%s.' % locale)
-                if op == 'add':
-                    # obsolete entry
-                    obsolete += 1
-                    self.watcher.notify('obsoleteEntity', self.file, key)
-                else:
-                    # missing entry
-                    missing += 1
-                    self.watcher.notify('missingEntity', self.file, key)
-
-
-class FileComparison(object):
-    '''Compare the locales/*.*.properties files inside a webapp.
-    '''
-    prop = re.compile('(?P<base>.*)\\.'
-                      '(?P<locale>[a-zA-Z]+(?:-[a-zA-Z]+)*)'
-                      '\\.properties$')
-
-    def __init__(self, basedir, reference_locale):
-        self.basedir = basedir
-        self.reference_locale = reference_locale
-        self.watcher = None
-        self._reference = self._files = None
-
-    def locales(self):
-        '''Get the locales present in the webapp
-        '''
-        self.files()
-        locales = self._files.keys()
-        locales.sort()
-        return locales
-
-    def compare(self, locales):
-        self.files()
-        for locale in locales:
-            l10n = self._files[locale]
-            filecmp = AddRemove()
-            filecmp.set_left(sorted(self._reference.keys()))
-            filecmp.set_right(sorted(l10n.keys()))
-            for op, item_or_pair in filecmp:
-                if op == 'equal':
-                    self.watcher.compare(self._reference[item_or_pair[0]],
-                                         l10n[item_or_pair[1]])
-                elif op == 'add':
-                    # obsolete file
-                    self.watcher.remove(l10n[item_or_pair])
-                else:
-                    # missing file
-                    _path = '.'.join([item_or_pair, locale, 'properties'])
-                    missingFile = File(
-                        os.path.join(self.basedir, 'locales', _path),
-                        'locales/' + _path)
-                    self.watcher.add(self._reference[item_or_pair],
-                                     missingFile)
-
-    def files(self):
-        '''Read the list of locales from disk.
-        '''
-        if self._reference:
-            return
-        self._reference = {}
-        self._files = defaultdict(dict)
-        path_list = self._listdir()
-        for path in path_list:
-            match = self.prop.match(path)
-            if match is None:
-                continue
-            locale = match.group('locale')
-            if locale == self.reference_locale:
-                target = self._reference
-            else:
-                target = self._files[locale]
-            fullpath = os.path.join(self.basedir, 'locales', path)
-            target[match.group('base')] = File(fullpath, 'locales/' + path)
-
-    def _listdir(self):
-        'Monkey-patch this for testing.'
-        return os.listdir(os.path.join(self.basedir, 'locales'))
-
-
-def compare_web_app(basedir, locales, other_observer=None):
-    '''Compare gaia-style web app.
-
-    Optional arguments are:
-    - other_observer. A object implementing
-        notify(category, _file, data)
-      The return values of that callback are ignored.
-    '''
-    comparer = ContentComparer()
-    if other_observer is not None:
-        comparer.add_observer(other_observer)
-    webapp_comp = WebAppCompare(basedir)
-    webapp_comp.setWatcher(comparer)
-    webapp_comp.compare(locales)
-    return comparer.observer
new file mode 100644
--- /dev/null
+++ b/third_party/python/fluent/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.1
+Name: fluent
+Version: 0.4.2
+Summary: Localization library for expressive translations.
+Home-page: https://github.com/projectfluent/python-fluent
+Author: Mozilla
+Author-email: l10n-drivers@mozilla.org
+License: APL 2
+Description: UNKNOWN
+Keywords: fluent,localization,l10n
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.5
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/third_party/python/fluent/fluent/migrate/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf8
+
+from .context import MergeContext                      # noqa: F401
+from .transforms import (                              # noqa: F401
+    Source, COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT
+)
+from .helpers import (                                 # noqa: F401
+    LITERAL, EXTERNAL_ARGUMENT, MESSAGE_REFERENCE
+)
+from .changesets import convert_blame_to_changesets    # noqa: F401
new file mode 100644
--- /dev/null
+++ b/third_party/python/fluent/fluent/migrate/changesets.py
@@ -0,0 +1,58 @@
+# coding=utf8
+
+import time
+
+
+def by_first_commit(item):
+    """Order two changesets by their first commit date."""
+    return item['first_commit']
+
+
+def convert_blame_to_changesets(blame_json):
+    """Convert a blame dict into a list of changesets.
+
+    The blame information in `blame_json` should be a dict of the following
+    structure:
+
+        {
+            'authors': [
+                'A.N. Author <author@example.com>',
+            ],
+            'blame': {
+                'path/one': {
+                    'key1': [0, 1346095921.0],
+                },
+            }
+        }
+
+    It will be transformed into a list of changesets which can be fed into
+    `MergeContext.serialize_changeset`:
+
+        [
+            {
+                'author': 'A.N. Author <author@example.com>',
+                'first_commit': 1346095921.0,
+                'changes': {
+                    ('path/one', 'key1'),
+                }
+            },
+        ]
+
+    """
+    now = time.time()
+    changesets = [
+        {
+            'author': author,
+            'first_commit': now,
+            'changes': set()
+        } for author in blame_json['authors']
+    ]
+
+    for path, keys_info in blame_json['blame'].items():
+        for key, (author_index, timestamp) in keys_info.items():
+            changeset = changesets[author_index]
+            changeset['changes'].add((path, key))
+            if timestamp < changeset['first_commit']:
+                changeset['first_commit'] = timestamp
+
+    return sorted(changesets, key=by_first_commit)
new file mode 100644
--- /dev/null
+++ b/third_party/python/fluent/fluent/migrate/cldr.py
@@ -0,0 +1,55 @@
+# coding=utf8
+
+import pkgutil
+import json
+
+
+def in_canonical_order(item):
+    return canonical_order.index(item)
+
+
+cldr_plurals = json.loads(
+    pkgutil.get_data('fluent.migrate', 'cldr_data/plurals.json').decode('utf-8')
+)
+
+rules = cldr_plurals['supplemental']['plurals-type-cardinal']
+canonical_order = ('zero', 'one', 'two', 'few', 'many', 'other')
+
+categories = {}
+for lang, rules in rules.items():
+    categories[lang] = tuple(sorted(map(
+        lambda key: key.replace('pluralRule-count-', ''),
+        rules.keys()
+    ), key=in_canonical_order))
+
+
+def get_plural_categories(lang):
+    """Return a tuple of CLDR plural categories for `lang`.
+
+    If an exact match for `lang` is not available, recursively fall back to
+    a language code with the last subtag stripped. That is, if `ja-JP-mac` is
+    not defined in CLDR, the code will try `ja-JP` and then `ja`.
+
+    If no matches are found, a `RuntimeError` is raised.
+
+    >>> get_plural_categories('sl')
+    ('one', 'two', 'few', 'other')
+    >>> get_plural_categories('ga-IE')
+    ('one', 'few', 'two', 'few', 'other')
+    >>> get_plural_categories('ja-JP-mac')
+    ('other')
+
+    """
+
+    langs_categories = categories.get(lang, None)
+
+    if langs_categories is None:
+        # Remove the trailing subtag.
+        fallback_lang, _, _ = lang.rpartition('-')
+
+        if fallback_lang == '':
+            raise RuntimeError('Unknown language: {}'.format(lang))
+
+        return get_plural_categories(fallback_lang)
+
+    return langs_categories
new file mode 100644
--- /dev/null
+++ b/third_party/python/fluent/fluent/migrate/cldr_data/plurals.json
@@ -0,0 +1,857 @@
+{
+  "supplemental": {
+    "version": {
+      "_number": "$Revision: 12805 $",
+      "_unicodeVersion": "9.0.0",
+      "_cldrVersion": "30"
+    },
+    "plurals-type-cardinal": {
+      "af": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ak": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "am": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ar": {
+        "pluralRule-count-zero": "n = 0 @integer 0 @decimal 0.0, 0.00, 0.000, 0.0000",
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-few": "n % 100 = 3..10 @integer 3~10, 103~110, 1003, … @decimal 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 103.0, 1003.0, …",
+        "pluralRule-count-many": "n % 100 = 11..99 @integer 11~26, 111, 1011, … @decimal 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 111.0, 1011.0, …",
+        "pluralRule-count-other": " @integer 100~102, 200~202, 300~302, 400~402, 500~502, 600, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ars": {
+        "pluralRule-count-zero": "n = 0 @integer 0 @decimal 0.0, 0.00, 0.000, 0.0000",
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-few": "n % 100 = 3..10 @integer 3~10, 103~110, 1003, … @decimal 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 103.0, 1003.0, …",
+        "pluralRule-count-many": "n % 100 = 11..99 @integer 11~26, 111, 1011, … @decimal 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 111.0, 1011.0, …",
+        "pluralRule-count-other": " @integer 100~102, 200~202, 300~302, 400~402, 500~502, 600, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "as": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "asa": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ast": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "az": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "be": {
+        "pluralRule-count-one": "n % 10 = 1 and n % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 1.0, 21.0, 31.0, 41.0, 51.0, 61.0, 71.0, 81.0, 101.0, 1001.0, …",
+        "pluralRule-count-few": "n % 10 = 2..4 and n % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 2.0, 3.0, 4.0, 22.0, 23.0, 24.0, 32.0, 33.0, 102.0, 1002.0, …",
+        "pluralRule-count-many": "n % 10 = 0 or n % 10 = 5..9 or n % 100 = 11..14 @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": "   @decimal 0.1~0.9, 1.1~1.7, 10.1, 100.1, 1000.1, …"
+      },
+      "bem": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bez": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bg": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bh": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bm": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bn": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bo": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "br": {
+        "pluralRule-count-one": "n % 10 = 1 and n % 100 != 11,71,91 @integer 1, 21, 31, 41, 51, 61, 81, 101, 1001, … @decimal 1.0, 21.0, 31.0, 41.0, 51.0, 61.0, 81.0, 101.0, 1001.0, …",
+        "pluralRule-count-two": "n % 10 = 2 and n % 100 != 12,72,92 @integer 2, 22, 32, 42, 52, 62, 82, 102, 1002, … @decimal 2.0, 22.0, 32.0, 42.0, 52.0, 62.0, 82.0, 102.0, 1002.0, …",
+        "pluralRule-count-few": "n % 10 = 3..4,9 and n % 100 != 10..19,70..79,90..99 @integer 3, 4, 9, 23, 24, 29, 33, 34, 39, 43, 44, 49, 103, 1003, … @decimal 3.0, 4.0, 9.0, 23.0, 24.0, 29.0, 33.0, 34.0, 103.0, 1003.0, …",
+        "pluralRule-count-many": "n != 0 and n % 1000000 = 0 @integer 1000000, … @decimal 1000000.0, 1000000.00, 1000000.000, …",
+        "pluralRule-count-other": " @integer 0, 5~8, 10~20, 100, 1000, 10000, 100000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, …"
+      },
+      "brx": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "bs": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 0.2~0.4, 1.2~1.4, 2.2~2.4, 3.2~3.4, 4.2~4.4, 5.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ca": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ce": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "cgg": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "chr": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ckb": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "cs": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-few": "i = 2..4 and v = 0 @integer 2~4",
+        "pluralRule-count-many": "v != 0   @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, …"
+      },
+      "cy": {
+        "pluralRule-count-zero": "n = 0 @integer 0 @decimal 0.0, 0.00, 0.000, 0.0000",
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-few": "n = 3 @integer 3 @decimal 3.0, 3.00, 3.000, 3.0000",
+        "pluralRule-count-many": "n = 6 @integer 6 @decimal 6.0, 6.00, 6.000, 6.0000",
+        "pluralRule-count-other": " @integer 4, 5, 7~20, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "da": {
+        "pluralRule-count-one": "n = 1 or t != 0 and i = 0,1 @integer 1 @decimal 0.1~1.6",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 2.0~3.4, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "de": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "dsb": {
+        "pluralRule-count-one": "v = 0 and i % 100 = 1 or f % 100 = 1 @integer 1, 101, 201, 301, 401, 501, 601, 701, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-two": "v = 0 and i % 100 = 2 or f % 100 = 2 @integer 2, 102, 202, 302, 402, 502, 602, 702, 1002, … @decimal 0.2, 1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-few": "v = 0 and i % 100 = 3..4 or f % 100 = 3..4 @integer 3, 4, 103, 104, 203, 204, 303, 304, 403, 404, 503, 504, 603, 604, 703, 704, 1003, … @decimal 0.3, 0.4, 1.3, 1.4, 2.3, 2.4, 3.3, 3.4, 4.3, 4.4, 5.3, 5.4, 6.3, 6.4, 7.3, 7.4, 10.3, 100.3, 1000.3, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "dv": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "dz": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ee": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "el": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "en": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "eo": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "es": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "et": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "eu": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fa": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ff": {
+        "pluralRule-count-one": "i = 0,1 @integer 0, 1 @decimal 0.0~1.5",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 2.0~3.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fi": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fil": {
+        "pluralRule-count-one": "v = 0 and i = 1,2,3 or v = 0 and i % 10 != 4,6,9 or v != 0 and f % 10 != 4,6,9 @integer 0~3, 5, 7, 8, 10~13, 15, 17, 18, 20, 21, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.3, 0.5, 0.7, 0.8, 1.0~1.3, 1.5, 1.7, 1.8, 2.0, 2.1, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 4, 6, 9, 14, 16, 19, 24, 26, 104, 1004, … @decimal 0.4, 0.6, 0.9, 1.4, 1.6, 1.9, 2.4, 2.6, 10.4, 100.4, 1000.4, …"
+      },
+      "fo": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fr": {
+        "pluralRule-count-one": "i = 0,1 @integer 0, 1 @decimal 0.0~1.5",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 2.0~3.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fur": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "fy": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ga": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-few": "n = 3..6 @integer 3~6 @decimal 3.0, 4.0, 5.0, 6.0, 3.00, 4.00, 5.00, 6.00, 3.000, 4.000, 5.000, 6.000, 3.0000, 4.0000, 5.0000, 6.0000",
+        "pluralRule-count-many": "n = 7..10 @integer 7~10 @decimal 7.0, 8.0, 9.0, 10.0, 7.00, 8.00, 9.00, 10.00, 7.000, 8.000, 9.000, 10.000, 7.0000, 8.0000, 9.0000, 10.0000",
+        "pluralRule-count-other": " @integer 0, 11~25, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "gd": {
+        "pluralRule-count-one": "n = 1,11 @integer 1, 11 @decimal 1.0, 11.0, 1.00, 11.00, 1.000, 11.000, 1.0000",
+        "pluralRule-count-two": "n = 2,12 @integer 2, 12 @decimal 2.0, 12.0, 2.00, 12.00, 2.000, 12.000, 2.0000",
+        "pluralRule-count-few": "n = 3..10,13..19 @integer 3~10, 13~19 @decimal 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 3.00",
+        "pluralRule-count-other": " @integer 0, 20~34, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "gl": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "gsw": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "gu": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "guw": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "gv": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 @integer 1, 11, 21, 31, 41, 51, 61, 71, 101, 1001, …",
+        "pluralRule-count-two": "v = 0 and i % 10 = 2 @integer 2, 12, 22, 32, 42, 52, 62, 72, 102, 1002, …",
+        "pluralRule-count-few": "v = 0 and i % 100 = 0,20,40,60,80 @integer 0, 20, 40, 60, 80, 100, 120, 140, 1000, 10000, 100000, 1000000, …",
+        "pluralRule-count-many": "v != 0   @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 3~10, 13~19, 23, 103, 1003, …"
+      },
+      "ha": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "haw": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "he": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-two": "i = 2 and v = 0 @integer 2",
+        "pluralRule-count-many": "v = 0 and n != 0..10 and n % 10 = 0 @integer 20, 30, 40, 50, 60, 70, 80, 90, 100, 1000, 10000, 100000, 1000000, …",
+        "pluralRule-count-other": " @integer 0, 3~17, 101, 1001, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "hi": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "hr": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 0.2~0.4, 1.2~1.4, 2.2~2.4, 3.2~3.4, 4.2~4.4, 5.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "hsb": {
+        "pluralRule-count-one": "v = 0 and i % 100 = 1 or f % 100 = 1 @integer 1, 101, 201, 301, 401, 501, 601, 701, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-two": "v = 0 and i % 100 = 2 or f % 100 = 2 @integer 2, 102, 202, 302, 402, 502, 602, 702, 1002, … @decimal 0.2, 1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-few": "v = 0 and i % 100 = 3..4 or f % 100 = 3..4 @integer 3, 4, 103, 104, 203, 204, 303, 304, 403, 404, 503, 504, 603, 604, 703, 704, 1003, … @decimal 0.3, 0.4, 1.3, 1.4, 2.3, 2.4, 3.3, 3.4, 4.3, 4.4, 5.3, 5.4, 6.3, 6.4, 7.3, 7.4, 10.3, 100.3, 1000.3, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "hu": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "hy": {
+        "pluralRule-count-one": "i = 0,1 @integer 0, 1 @decimal 0.0~1.5",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 2.0~3.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "id": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ig": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ii": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "in": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "is": {
+        "pluralRule-count-one": "t = 0 and i % 10 = 1 and i % 100 != 11 or t != 0 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1~1.6, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "it": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "iu": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "iw": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-two": "i = 2 and v = 0 @integer 2",
+        "pluralRule-count-many": "v = 0 and n != 0..10 and n % 10 = 0 @integer 20, 30, 40, 50, 60, 70, 80, 90, 100, 1000, 10000, 100000, 1000000, …",
+        "pluralRule-count-other": " @integer 0, 3~17, 101, 1001, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ja": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "jbo": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "jgo": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ji": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "jmc": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "jv": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "jw": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ka": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kab": {
+        "pluralRule-count-one": "i = 0,1 @integer 0, 1 @decimal 0.0~1.5",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 2.0~3.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kaj": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kcg": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kde": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kea": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kk": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kkj": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kl": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "km": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kn": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ko": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ks": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ksb": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ksh": {
+        "pluralRule-count-zero": "n = 0 @integer 0 @decimal 0.0, 0.00, 0.000, 0.0000",
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ku": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "kw": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ky": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lag": {
+        "pluralRule-count-zero": "n = 0 @integer 0 @decimal 0.0, 0.00, 0.000, 0.0000",
+        "pluralRule-count-one": "i = 0,1 and n != 0 @integer 1 @decimal 0.1~1.6",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 2.0~3.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lb": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lg": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lkt": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ln": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lo": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lt": {
+        "pluralRule-count-one": "n % 10 = 1 and n % 100 != 11..19 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 1.0, 21.0, 31.0, 41.0, 51.0, 61.0, 71.0, 81.0, 101.0, 1001.0, …",
+        "pluralRule-count-few": "n % 10 = 2..9 and n % 100 != 11..19 @integer 2~9, 22~29, 102, 1002, … @decimal 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 22.0, 102.0, 1002.0, …",
+        "pluralRule-count-many": "f != 0   @decimal 0.1~0.9, 1.1~1.7, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-other": " @integer 0, 10~20, 30, 40, 50, 60, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "lv": {
+        "pluralRule-count-zero": "n % 10 = 0 or n % 100 = 11..19 or v = 2 and f % 100 = 11..19 @integer 0, 10~20, 30, 40, 50, 60, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-one": "n % 10 = 1 and n % 100 != 11 or v = 2 and f % 10 = 1 and f % 100 != 11 or v != 2 and f % 10 = 1 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.0, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-other": " @integer 2~9, 22~29, 102, 1002, … @decimal 0.2~0.9, 1.2~1.9, 10.2, 100.2, 1000.2, …"
+      },
+      "mas": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mg": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mgo": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mk": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 or f % 10 = 1 @integer 1, 11, 21, 31, 41, 51, 61, 71, 101, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-other": " @integer 0, 2~10, 12~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.2~1.0, 1.2~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ml": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mo": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-few": "v != 0 or n = 0 or n != 1 and n % 100 = 1..19 @integer 0, 2~16, 101, 1001, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 20~35, 100, 1000, 10000, 100000, 1000000, …"
+      },
+      "mr": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ms": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "mt": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-few": "n = 0 or n % 100 = 2..10 @integer 0, 2~10, 102~107, 1002, … @decimal 0.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 10.0, 102.0, 1002.0, …",
+        "pluralRule-count-many": "n % 100 = 11..19 @integer 11~19, 111~117, 1011, … @decimal 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 111.0, 1011.0, …",
+        "pluralRule-count-other": " @integer 20~35, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "my": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nah": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "naq": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nb": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nd": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ne": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nl": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nnh": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "no": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nqo": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nr": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nso": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ny": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "nyn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "om": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "or": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "os": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "pa": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "pap": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "pl": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, …",
+        "pluralRule-count-many": "v = 0 and i != 1 and i % 10 = 0..1 or v = 0 and i % 10 = 5..9 or v = 0 and i % 100 = 12..14 @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, …",
+        "pluralRule-count-other": "   @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "prg": {
+        "pluralRule-count-zero": "n % 10 = 0 or n % 100 = 11..19 or v = 2 and f % 100 = 11..19 @integer 0, 10~20, 30, 40, 50, 60, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-one": "n % 10 = 1 and n % 100 != 11 or v = 2 and f % 10 = 1 and f % 100 != 11 or v != 2 and f % 10 = 1 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.0, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-other": " @integer 2~9, 22~29, 102, 1002, … @decimal 0.2~0.9, 1.2~1.9, 10.2, 100.2, 1000.2, …"
+      },
+      "ps": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "pt": {
+        "pluralRule-count-one": "n = 0..2 and n != 2 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "pt-PT": {
+        "pluralRule-count-one": "n = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "rm": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ro": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-few": "v != 0 or n = 0 or n != 1 and n % 100 = 1..19 @integer 0, 2~16, 101, 1001, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 20~35, 100, 1000, 10000, 100000, 1000000, …"
+      },
+      "rof": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "root": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ru": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 and i % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, …",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, …",
+        "pluralRule-count-many": "v = 0 and i % 10 = 0 or v = 0 and i % 10 = 5..9 or v = 0 and i % 100 = 11..14 @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, …",
+        "pluralRule-count-other": "   @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "rwk": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sah": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "saq": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sdh": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "se": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "seh": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ses": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sg": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sh": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 0.2~0.4, 1.2~1.4, 2.2~2.4, 3.2~3.4, 4.2~4.4, 5.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "shi": {
+        "pluralRule-count-one": "i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04",
+        "pluralRule-count-few": "n = 2..10 @integer 2~10 @decimal 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 2.00, 3.00, 4.00, 5.00, 6.00, 7.00, 8.00",
+        "pluralRule-count-other": " @integer 11~26, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~1.9, 2.1~2.7, 10.1, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "si": {
+        "pluralRule-count-one": "n = 0,1 or i = 0 and f = 1 @integer 0, 1 @decimal 0.0, 0.1, 1.0, 0.00, 0.01, 1.00, 0.000, 0.001, 1.000, 0.0000, 0.0001, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.2~0.9, 1.1~1.8, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sk": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-few": "i = 2..4 and v = 0 @integer 2~4",
+        "pluralRule-count-many": "v != 0   @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, …"
+      },
+      "sl": {
+        "pluralRule-count-one": "v = 0 and i % 100 = 1 @integer 1, 101, 201, 301, 401, 501, 601, 701, 1001, …",
+        "pluralRule-count-two": "v = 0 and i % 100 = 2 @integer 2, 102, 202, 302, 402, 502, 602, 702, 1002, …",
+        "pluralRule-count-few": "v = 0 and i % 100 = 3..4 or v != 0 @integer 3, 4, 103, 104, 203, 204, 303, 304, 403, 404, 503, 504, 603, 604, 703, 704, 1003, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, …"
+      },
+      "sma": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "smi": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "smj": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "smn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sms": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-two": "n = 2 @integer 2 @decimal 2.0, 2.00, 2.000, 2.0000",
+        "pluralRule-count-other": " @integer 0, 3~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "so": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sq": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sr": {
+        "pluralRule-count-one": "v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 @integer 1, 21, 31, 41, 51, 61, 71, 81, 101, 1001, … @decimal 0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 10.1, 100.1, 1000.1, …",
+        "pluralRule-count-few": "v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 0.2~0.4, 1.2~1.4, 2.2~2.4, 3.2~3.4, 4.2~4.4, 5.2, 10.2, 100.2, 1000.2, …",
+        "pluralRule-count-other": " @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ss": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ssy": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "st": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sv": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "sw": {
+        "pluralRule-count-one": "i = 1 and v = 0 @integer 1",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "syr": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ta": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "te": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "teo": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "th": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ti": {
+        "pluralRule-count-one": "n = 0..1 @integer 0, 1 @decimal 0.0, 1.0, 0.00, 1.00, 0.000, 1.000, 0.0000, 1.0000",
+        "pluralRule-count-other": " @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 0.1~0.9, 1.1~1.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "tig": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "tk": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "tl": {
+        "pluralRule-count-one": "v = 0 and i = 1,2,3 or v = 0 and i % 10 != 4,6,9 or v != 0 and f % 10 != 4,6,9 @integer 0~3, 5, 7, 8, 10~13, 15, 17, 18, 20, 21, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.3, 0.5, 0.7, 0.8, 1.0~1.3, 1.5, 1.7, 1.8, 2.0, 2.1, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …",
+        "pluralRule-count-other": " @integer 4, 6, 9, 14, 16, 19, 24, 26, 104, 1004, … @decimal 0.4, 0.6, 0.9, 1.4, 1.6, 1.9, 2.4, 2.6, 10.4, 100.4, 1000.4, …"
+      },
+      "tn": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "to": {
+        "pluralRule-count-other": " @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "tr": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",
+        "pluralRule-count-other": " @integer 0, 2~16, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~0.9, 1.1~1.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …"
+      },
+      "ts": {
+        "pluralRule-count-one": "n = 1 @integer 1 @decimal 1.0, 1.00, 1.000, 1.0000",