Merge m-c to inbound, a=merge
author Wes Kocher <wkocher@mozilla.com>
Fri, 21 Apr 2017 17:35:24 -0700
changeset 566731 604ab17daeb86f7e7c110aa620b9c1b6c96b9cee
parent 566730 9b23450ae23c1776a8ad3a5f80377e14dd05a562 (current diff)
parent 566710 9550eedc0bd80d404dab4e42660c629cc705c16c (diff)
child 566732 f19ee0e2d1986a5514da46381c70ee5fb5f0ed1b
push id 55308
push user bmo:rajesh.kathiriya507@gmail.com
push date Sat, 22 Apr 2017 10:15:05 +0000
reviewers merge
milestone 55.0a1
Merge m-c to inbound, a=merge MozReview-Commit-ID: Cb0b59wJ0vy
browser/components/search/content/search.xml
toolkit/crashreporter/jsoncpp/AUTHORS
toolkit/crashreporter/jsoncpp/GIT-INFO
toolkit/crashreporter/jsoncpp/LICENSE
toolkit/crashreporter/jsoncpp/NEWS.txt
toolkit/crashreporter/jsoncpp/README.md
toolkit/crashreporter/jsoncpp/include/json/allocator.h
toolkit/crashreporter/jsoncpp/include/json/assertions.h
toolkit/crashreporter/jsoncpp/include/json/autolink.h
toolkit/crashreporter/jsoncpp/include/json/config.h
toolkit/crashreporter/jsoncpp/include/json/features.h
toolkit/crashreporter/jsoncpp/include/json/forwards.h
toolkit/crashreporter/jsoncpp/include/json/json.h
toolkit/crashreporter/jsoncpp/include/json/reader.h
toolkit/crashreporter/jsoncpp/include/json/value.h
toolkit/crashreporter/jsoncpp/include/json/version.h
toolkit/crashreporter/jsoncpp/include/json/writer.h
toolkit/crashreporter/jsoncpp/src/lib_json/json_reader.cpp
toolkit/crashreporter/jsoncpp/src/lib_json/json_tool.h
toolkit/crashreporter/jsoncpp/src/lib_json/json_value.cpp
toolkit/crashreporter/jsoncpp/src/lib_json/json_valueiterator.inl
toolkit/crashreporter/jsoncpp/src/lib_json/json_writer.cpp
toolkit/crashreporter/jsoncpp/src/lib_json/moz.build
toolkit/themes/linux/global/dirListing/remote.png
toolkit/themes/osx/global/dirListing/remote.png
toolkit/themes/windows/global/dirListing/local.png
toolkit/themes/windows/global/dirListing/remote.png
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -13,16 +13,18 @@ module.exports = {
     "mozilla/no-import-into-var-and-global": "error",
     "mozilla/no-useless-parameters": "error",
     "mozilla/no-useless-removeEventListener": "error",
     "mozilla/use-default-preference-values": "error",
     "mozilla/use-ownerGlobal": "error",
 
     // No (!foo in bar) or (!object instanceof Class)
     "no-unsafe-negation": "error",
+    // No eval() and no strings in the first param of setTimeout or setInterval
+    "no-implied-eval": "error",
   },
   "env": {
     "es6": true
   },
   "parserOptions": {
     "ecmaVersion": 8,
   },
 };
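
For context on the new root rule: `no-implied-eval` flags string arguments to `setTimeout`/`setInterval`, which the engine would implicitly evaluate like `eval()`. A minimal, hypothetical sketch of the flagged pattern and its fix (the `doCleanup` helper is invented for illustration; the real instance is the `languages.js` hunk further down):

```javascript
// Hypothetical sketch of what "no-implied-eval" reports and the compliant form.
function doCleanup() {
  console.log("cleanup ran");
}

// Flagged: the string is implicitly evaluated, just like eval("doCleanup()").
// setTimeout("doCleanup()", 0);

// Allowed: pass a function instead, as languages.js does below.
setTimeout(() => {
  doCleanup();
}, 0);
```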
--- a/browser/base/content/browser.css
+++ b/browser/base/content/browser.css
@@ -517,16 +517,20 @@ toolbar:not(#TabsToolbar) > #personal-bo
 #PopupAutoComplete > richlistbox > richlistitem > .ac-type-icon,
 #PopupAutoComplete > richlistbox > richlistitem > .ac-site-icon,
 #PopupAutoComplete > richlistbox > richlistitem > .ac-tags,
 #PopupAutoComplete > richlistbox > richlistitem > .ac-separator,
 #PopupAutoComplete > richlistbox > richlistitem > .ac-url {
   display: none;
 }
 
+#PopupAutoComplete[firstresultstyle="insecureWarning"] {
+  min-width: 200px;
+}
+
 #PopupAutoComplete > richlistbox > richlistitem[originaltype="insecureWarning"] {
   -moz-binding: url("chrome://global/content/bindings/autocomplete.xml#autocomplete-richlistitem-insecure-field");
   height: auto;
 }
 
 #PopupAutoComplete > richlistbox > richlistitem[originaltype="insecureWarning"] > .ac-site-icon {
   display: initial;
 }
--- a/browser/base/content/browser.xul
+++ b/browser/base/content/browser.xul
@@ -1066,37 +1066,36 @@
     </toolbarpalette>
   </toolbox>
 
   <hbox id="fullscr-toggler" hidden="true"/>
 
   <deck id="content-deck" flex="1">
     <hbox flex="1" id="browser">
       <vbox id="browser-border-start" hidden="true" layer="true"/>
-      <vbox id="sidebar-box" hidden="true" class="chromeclass-extrachrome">
-        <sidebarheader id="sidebar-header" align="center">
-          <label id="sidebar-title" persist="value" flex="1" crop="end" control="sidebar"/>
-          <image id="sidebar-throbber"/>
-          <toolbarbutton class="close-icon tabbable" tooltiptext="&sidebarCloseButton.tooltip;" oncommand="SidebarUI.hide();"/>
-        </sidebarheader>
-        <browser id="sidebar" flex="1" autoscroll="false" disablehistory="true" disablefullscreen="true"
-                  style="min-width: 14em; width: 18em; max-width: 36em;" tooltip="aHTMLTooltip"/>
-      </vbox>
-
-      <splitter id="sidebar-splitter" class="chromeclass-extrachrome sidebar-splitter" hidden="true"/>
       <vbox id="appcontent" flex="1">
         <notificationbox id="high-priority-global-notificationbox" notificationside="top"/>
         <tabbrowser id="content"
                     flex="1" contenttooltip="aHTMLTooltip"
                     tabcontainer="tabbrowser-tabs"
                     contentcontextmenu="contentAreaContextMenu"
                     autocompletepopup="PopupAutoComplete"
                     selectmenulist="ContentSelectDropdown"
                     datetimepicker="DateTimePickerPanel"/>
       </vbox>
+      <splitter id="sidebar-splitter" class="chromeclass-extrachrome sidebar-splitter" hidden="true"/>
+      <vbox id="sidebar-box" hidden="true" class="chromeclass-extrachrome">
+        <sidebarheader id="sidebar-header" align="center">
+          <label id="sidebar-title" persist="value" flex="1" crop="end" control="sidebar"/>
+          <image id="sidebar-throbber"/>
+          <toolbarbutton class="close-icon tabbable" tooltiptext="&sidebarCloseButton.tooltip;" oncommand="SidebarUI.hide();"/>
+        </sidebarheader>
+        <browser id="sidebar" flex="1" autoscroll="false" disablehistory="true" disablefullscreen="true"
+                  style="min-width: 14em; width: 18em; max-width: 36em;" tooltip="aHTMLTooltip"/>
+      </vbox>
       <vbox id="browser-border-end" hidden="true" layer="true"/>
     </hbox>
 #include ../../components/customizableui/content/customizeMode.inc.xul
   </deck>
 
   <html:div id="fullscreen-warning" class="pointerlockfswarning" hidden="true">
     <html:div class="pointerlockfswarning-domain-text">
       &fullscreenWarning.beforeDomain.label;
--- a/browser/base/content/test/general/browser_documentnavigation.js
+++ b/browser/base/content/test/general/browser_documentnavigation.js
@@ -140,35 +140,37 @@ add_task(function* () {
                                false, "back focus content page urlbar");
 });
 
 // Open the sidebar and navigate between the sidebar, content and top-level window
 add_task(function* () {
   let sidebar = document.getElementById("sidebar");
 
   let loadPromise = BrowserTestUtils.waitForEvent(sidebar, "load", true);
+  let focusPromise = BrowserTestUtils.waitForEvent(sidebar, "focus", true);
   SidebarUI.toggle("viewBookmarksSidebar");
   yield loadPromise;
-
+  yield focusPromise;
 
   gURLBar.focus();
+
+  yield* expectFocusOnF6(false, "html1", "html1",
+                                true, "focus with sidebar open content");
   yield* expectFocusOnF6(false, "bookmarksPanel",
                                 sidebar.contentDocument.getElementById("search-box").inputField,
                                 false, "focus with sidebar open sidebar");
-  yield* expectFocusOnF6(false, "html1", "html1",
-                                true, "focus with sidebar open content");
   yield* expectFocusOnF6(false, "main-window", gURLBar.inputField,
                                 false, "focus with sidebar urlbar");
 
   // Now go backwards
-  yield* expectFocusOnF6(true, "html1", "html1",
-                               true, "back focus with sidebar open content");
   yield* expectFocusOnF6(true, "bookmarksPanel",
                                sidebar.contentDocument.getElementById("search-box").inputField,
                                false, "back focus with sidebar open sidebar");
+  yield* expectFocusOnF6(true, "html1", "html1",
+                               true, "back focus with sidebar open content");
   yield* expectFocusOnF6(true, "main-window", gURLBar.inputField,
                                false, "back focus with sidebar urlbar");
 
   SidebarUI.toggle("viewBookmarksSidebar");
 });
 
 // Navigate when the downloads panel is open
 add_task(function* () {
--- a/browser/base/content/test/static/browser_all_files_referenced.js
+++ b/browser/base/content/test/static/browser_all_files_referenced.js
@@ -154,21 +154,16 @@ var whitelist = new Set([
   {file: "chrome://global/skin/arrow/arrow-rit-sharp.gif",
    platforms: ["linux", "win"]},
   {file: "chrome://global/skin/arrow/arrow-up-sharp.gif",
    platforms: ["linux", "win"]},
   {file: "chrome://global/skin/arrow/panelarrow-horizontal.svg",
    platforms: ["linux"]},
   {file: "chrome://global/skin/arrow/panelarrow-vertical.svg",
    platforms: ["linux"]},
-  // Bug 1348359
-  {file: "chrome://global/skin/dirListing/folder.png", platforms: ["linux"]},
-  {file: "chrome://global/skin/dirListing/local.png", platforms: ["linux", "win"]},
-  {file: "chrome://global/skin/dirListing/remote.png"},
-  {file: "chrome://global/skin/dirListing/up.png", platforms: ["linux"]},
   // Bug 1348362
   {file: "chrome://global/skin/icons/Close.gif", platforms: ["win"]},
   {file: "chrome://global/skin/icons/Error.png", platforms: ["linux", "macosx"]},
   {file: "chrome://global/skin/icons/Landscape.png", platforms: ["linux"]},
   {file: "chrome://global/skin/icons/Minimize.gif", platforms: ["win"]},
   {file: "chrome://global/skin/icons/Portrait.png", platforms: ["linux"]},
   {file: "chrome://global/skin/icons/Print-preview.png", platforms: ["linux"]},
   {file: "chrome://global/skin/icons/Question.png", platforms: ["linux"]},
--- a/browser/base/content/urlbarBindings.xml
+++ b/browser/base/content/urlbarBindings.xml
@@ -119,16 +119,22 @@ file, You can obtain one at http://mozil
         this._prefs = null;
         this.inputField.controllers.removeController(this._copyCutController);
         this.inputField.removeEventListener("paste", this);
         this.inputField.removeEventListener("mousedown", this);
         this.inputField.removeEventListener("mousemove", this);
         this.inputField.removeEventListener("mouseout", this);
         this.inputField.removeEventListener("overflow", this);
         this.inputField.removeEventListener("underflow", this);
+
+        // Null out the one-offs' popup and textbox so that it cleans up its
+        // internal state for both.  Most importantly, it removes the event
+        // listeners that it added to both.
+        this.popup.oneOffSearchButtons.popup = null;
+        this.popup.oneOffSearchButtons.textbox = null;
       ]]></destructor>
 
       <field name="_value">""</field>
       <field name="gotResultForCurrentQuery">false</field>
 
       <!--
         This is set around HandleHenter so it can be used in handleCommand.
         It is also used to track whether we must handle a delayed handleEnter,
--- a/browser/components/extensions/test/browser/browser_ext_omnibox.js
+++ b/browser/components/extensions/test/browser/browser_ext_omnibox.js
@@ -95,16 +95,21 @@ add_task(function* () {
 
   function* startInputSession() {
     gURLBar.focus();
     gURLBar.value = keyword;
     EventUtils.synthesizeKey(" ", {});
     yield expectEvent("on-input-started-fired");
     EventUtils.synthesizeKey("t", {});
     yield expectEvent("on-input-changed-fired", {text: "t"});
+    // Wait for the autocomplete search. Note that we cannot wait for the search
+    // to be complete, since the add-on doesn't communicate when it's done, so
+    // just check matches count.
+    yield BrowserTestUtils.waitForCondition(() => gURLBar.controller.matchCount >= 2,
+                                            "waiting urlbar search to complete");
     return "t";
   }
 
   function* testInputEvents() {
     gURLBar.focus();
 
     // Start an input session by typing in <keyword><space>.
     for (let letter of keyword) {
@@ -262,17 +267,16 @@ add_task(function* () {
   });
   yield testSuggestions({
     test: "test-suggestions-after-delay",
     skipHeuristic: true,
     suggestions,
   });
 
   // Start monitoring the console.
-  SimpleTest.waitForExplicitFinish();
   let waitForConsole = new Promise(resolve => {
     SimpleTest.monitorConsole(resolve, [{
       message: new RegExp(`The keyword provided is already registered: "${keyword}"`),
     }]);
   });
 
   // Try registering another extension with the same keyword
   let extension2 = ExtensionTestUtils.loadExtension({
--- a/browser/components/preferences/languages.js
+++ b/browser/components/preferences/languages.js
@@ -14,17 +14,19 @@ var gLanguagesDialog = {
     if (!this._availableLanguagesList.length)
       this._loadAvailableLanguages();
   },
 
   // Ugly hack used to trigger extra reflow in order to work around XUL bug 1194844;
   // see bug 1194346.
   forceReflow() {
     this._activeLanguages.style.fontKerning = "none";
-    setTimeout("gLanguagesDialog._activeLanguages.style.removeProperty('font-kerning')", 0);
+    setTimeout(() => {
+      this._activeLanguages.style.removeProperty("font-kerning")
+    }, 0);
   },
 
   get _activeLanguages() {
     return document.getElementById("activeLanguages");
   },
 
   get _availableLanguages() {
     return document.getElementById("availableLanguages");
--- a/browser/components/search/content/search.xml
+++ b/browser/components/search/content/search.xml
@@ -1238,20 +1238,16 @@
       <!-- The popup that contains the one-offs.  This is required, so it should
            never be null or undefined, except possibly before the one-offs are
            used. -->
       <property name="popup">
         <getter><![CDATA[
           return this._popup;
         ]]></getter>
         <setter><![CDATA[
-          if (this._popup == val) {
-            return val;
-          }
-
           let events = [
             "popupshowing",
             "popuphidden",
           ];
           if (this._popup) {
             for (let event of events) {
               this._popup.removeEventListener(event, this);
             }
@@ -1280,19 +1276,16 @@
            automatically keep the related one-offs UI up to date.  Otherwise you
            can leave it null/undefined, and in that case you should update the
            query property manually. -->
       <property name="textbox">
         <getter><![CDATA[
           return this._textbox;
         ]]></getter>
         <setter><![CDATA[
-          if (this._textbox == val) {
-            return val;
-          }
           if (this._textbox) {
             this._textbox.removeEventListener("input", this);
           }
           if (val) {
             val.addEventListener("input", this);
           }
           return this._textbox = val;
         ]]></setter>
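
The `popup` and `textbox` setters above share the same detach/attach pattern, which also handles a `null` assignment and is what the `urlbarBindings.xml` destructor now relies on. A standalone sketch of that pattern (the class and variable names are illustrative, not the binding's API):

```javascript
// Illustrative sketch: remove listeners from the old value, add them to the
// new one, then store it. Assigning null detaches everything.
const EVENTS = ["popupshowing", "popuphidden"];

class OneOffsLike {
  set popup(val) {
    if (this._popup) {
      for (let event of EVENTS) {
        this._popup.removeEventListener(event, this);
      }
    }
    if (val) {
      for (let event of EVENTS) {
        val.addEventListener(event, this);
      }
    }
    this._popup = val;
  }

  handleEvent(event) {
    console.log("one-offs saw", event.type);
  }
}

// Usage with a plain EventTarget standing in for the XUL popup:
const buttons = new OneOffsLike();
const fakePopup = new EventTarget();
buttons.popup = fakePopup;
fakePopup.dispatchEvent(new Event("popupshowing")); // "one-offs saw popupshowing"
buttons.popup = null; // listeners removed; later events are ignored
```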
--- a/browser/experiments/Experiments.jsm
+++ b/browser/experiments/Experiments.jsm
@@ -589,17 +589,17 @@ Experiments.Experiments.prototype = {
       yield this.disableExperiment(TELEMETRY_LOG.TERMINATION.SERVICE_DISABLED);
       if (this._timer) {
         this._timer.clear();
       }
     }
   }),
 
   _telemetryStatusChanged() {
-    this._toggleExperimentsEnabled(gExperimentsEnabled);
+    this._toggleExperimentsEnabled(gPrefs.get(PREF_ENABLED, false));
   },
 
   /**
    * Returns a promise that is resolved with an array of `ExperimentInfo` objects,
    * which provide info on the currently and recently active experiments.
    * The array is in chronological order.
    *
    * The experiment info is of the form:
--- a/browser/experiments/ExperimentsService.js
+++ b/browser/experiments/ExperimentsService.js
@@ -11,78 +11,78 @@ Cu.import("resource://gre/modules/Servic
 Cu.import("resource://gre/modules/Preferences.jsm");
 
 XPCOMUtils.defineLazyModuleGetter(this, "Experiments",
                                   "resource:///modules/experiments/Experiments.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "OS",
                                   "resource://gre/modules/osfile.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "CommonUtils",
                                   "resource://services-common/utils.js");
+XPCOMUtils.defineLazyModuleGetter(this, "TelemetryUtils",
+                                  "resource://gre/modules/TelemetryUtils.jsm");
+
 
 const PREF_EXPERIMENTS_ENABLED  = "experiments.enabled";
 const PREF_ACTIVE_EXPERIMENT    = "experiments.activeExperiment"; // whether we have an active experiment
-const PREF_TELEMETRY_ENABLED    = "toolkit.telemetry.enabled";
-const PREF_TELEMETRY_UNIFIED    = "toolkit.telemetry.unified";
 const DELAY_INIT_MS             = 30 * 1000;
 
-// Whether the FHR/Telemetry unification features are enabled.
-// Changing this pref requires a restart.
-const IS_UNIFIED_TELEMETRY = Preferences.get(PREF_TELEMETRY_UNIFIED, false);
-
 XPCOMUtils.defineLazyGetter(
   this, "gPrefs", () => {
     return new Preferences();
   });
 
 XPCOMUtils.defineLazyGetter(
-  this, "gExperimentsEnabled", () => {
-    // We can enable experiments if either unified Telemetry or FHR is on, and the user
-    // has opted into Telemetry.
-    return gPrefs.get(PREF_EXPERIMENTS_ENABLED, false) &&
-           IS_UNIFIED_TELEMETRY && gPrefs.get(PREF_TELEMETRY_ENABLED, false);
-  });
-
-XPCOMUtils.defineLazyGetter(
   this, "gActiveExperiment", () => {
     return gPrefs.get(PREF_ACTIVE_EXPERIMENT);
   });
 
 function ExperimentsService() {
   this._initialized = false;
   this._delayedInitTimer = null;
 }
 
 ExperimentsService.prototype = {
   classID: Components.ID("{f7800463-3b97-47f9-9341-b7617e6d8d49}"),
   QueryInterface: XPCOMUtils.generateQI([Ci.nsITimerCallback, Ci.nsIObserver]),
 
+  get _experimentsEnabled() {
+    // We can enable experiments if either unified Telemetry or FHR is on, and the user
+    // has opted into Telemetry.
+    return gPrefs.get(PREF_EXPERIMENTS_ENABLED, false) &&
+           TelemetryUtils.isTelemetryEnabled;
+  },
+
   notify(timer) {
-    if (!gExperimentsEnabled) {
+    if (!this._experimentsEnabled) {
       return;
     }
     if (OS.Constants.Path.profileDir === undefined) {
       throw Error("Update timer fired before profile was initialized?");
     }
     let instance = Experiments.instance();
     if (instance.isReady) {
-      instance.updateManifest();
+      instance.updateManifest().catch(error => {
+        // Don't throw, as this breaks tests. In any case the best we can do here
+        // is to log the failure.
+        Cu.reportError(error);
+      });
     }
   },
 
   _delayedInit() {
     if (!this._initialized) {
       this._initialized = true;
       Experiments.instance(); // for side effects
     }
   },
 
   observe(subject, topic, data) {
     switch (topic) {
       case "profile-after-change":
-        if (gExperimentsEnabled) {
+        if (this._experimentsEnabled) {
           Services.obs.addObserver(this, "quit-application");
           Services.obs.addObserver(this, "sessionstore-state-finalized");
           Services.obs.addObserver(this, "EM-loaded");
 
           if (gActiveExperiment) {
             this._initialized = true;
             Experiments.instance(); // for side effects
           }
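
The change from the module-level lazy getter `gExperimentsEnabled` to the `_experimentsEnabled` accessor matters because a lazy getter is computed once and then cached, so pref flips after startup were never observed; a plain accessor re-reads the prefs on every call. A simplified, assumption-level sketch of the difference (plain objects stand in for `gPrefs` and `XPCOMUtils.defineLazyGetter`):

```javascript
// Simplified sketch: cached-once lazy getter vs. a live accessor.
const prefs = { enabled: false }; // stand-in for gPrefs

const lazy = {};
let cached;
Object.defineProperty(lazy, "enabled", {
  get() {
    // defineLazyGetter-style: compute on first access, then reuse forever.
    if (cached === undefined) {
      cached = prefs.enabled;
    }
    return cached;
  },
});

const live = {
  // Like the new _experimentsEnabled getter: evaluated on every access.
  get enabled() {
    return prefs.enabled;
  },
};

console.log(lazy.enabled); // false (value captured and cached here)
prefs.enabled = true;      // e.g. Telemetry is re-enabled at runtime
console.log(lazy.enabled); // still false -- the cached value never updates
console.log(live.enabled); // true -- reflects the current pref
```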
--- a/browser/experiments/test/xpcshell/test_telemetry_disabled.js
+++ b/browser/experiments/test/xpcshell/test_telemetry_disabled.js
@@ -8,14 +8,21 @@ Cu.import("resource:///modules/experimen
 add_test(function test_experiments_activation() {
   do_get_profile();
   loadAddonManager();
 
   Services.prefs.setBoolPref(PREF_EXPERIMENTS_ENABLED, true);
   Services.prefs.setBoolPref(PREF_TELEMETRY_ENABLED, false);
 
   let experiments = Experiments.instance();
+
   Assert.ok(!experiments.enabled, "Experiments must be disabled if Telemetry is disabled.");
 
-  // TODO: Test that Experiments are turned back on when bug 1232648 lands.
+  // Patch updateManifest to not do anything when the pref is switched back to true,
+  // otherwise it attempts to connect to the server.
+  experiments.updateManifest = () => Promise.resolve();
+
+  Services.prefs.setBoolPref(PREF_TELEMETRY_ENABLED, true);
+
+  Assert.ok(experiments.enabled, "Experiments must be re-enabled if Telemetry is re-enabled");
 
   run_next_test();
 });
--- a/browser/extensions/formautofill/.eslintrc.js
+++ b/browser/extensions/formautofill/.eslintrc.js
@@ -1,17 +1,13 @@
 "use strict";
 
 module.exports = {
-  "extends": "../../.eslintrc.js",
-
   "globals": {
     "addMessageListener": false,
-    "Components": true,
-    "dump": true,
     "removeMessageListener": false,
     "sendAsyncMessage": false,
     "TextDecoder": false,
     "TextEncoder": false,
   },
 
   "rules": {
     // Rules from the mozilla plugin
@@ -29,57 +25,44 @@ module.exports = {
         "Number": "number",
         "String": "string",
         "bool": "boolean",
       },
       "requireParamDescription": false,
       "requireReturn": false,
       "requireReturnDescription": false,
     }],
-
-    // Braces only needed for multi-line arrow function blocks
-    // "arrow-body-style": ["error", "as-needed"],
-
+   
     // Forbid spaces inside the square brackets of array literals.
     "array-bracket-spacing": ["error", "never"],
 
     // Forbid spaces inside the curly brackets of object literals.
     "object-curly-spacing": ["error", "never"],
 
     // No space padding in parentheses
     "space-in-parens": ["error", "never"],
-
-    // Enforce one true brace style (opening brace on the same line) and avoid
-    // start and end braces on the same line.
-    "brace-style": ["error", "1tbs", {"allowSingleLine": true}],
-
+   
     // Commas at the end of the line not the start
     "comma-style": "error",
 
     // Require braces around blocks that start a new line
     "curly": ["error", "all"],
 
     // Require function* name()
     "generator-star-spacing": ["error", {"before": false, "after": true}],
 
     // Two space indent
     "indent": ["error", 2, {"SwitchCase": 1}],
 
-    // Space after colon not before in property declarations
-    "key-spacing": ["error", {"beforeColon": false, "afterColon": true, "mode": "minimum"}],
-
     // Always require parenthesis for new calls
     "new-parens": "error",
 
     // Use [] instead of Array()
     "no-array-constructor": "error",
-
-    // If an if block ends with a return no need for an else block
-    // "no-else-return": "error",
-
+   
     // Disallow empty statements. This will report an error for:
     // try { something(); } catch (e) {}
     // but will not report it for:
     // try { something(); } catch (e) { /* Silencing the error because ...*/ }
     // which is a valid use case.
     "no-empty": "error",
 
     // No spaces between function name and parentheses
@@ -92,34 +75,27 @@ module.exports = {
     "no-unused-vars": ["error", {"args": "none", "varsIgnorePattern": "^(Cc|Ci|Cr|Cu|EXPORTED_SYMBOLS)$"}],
 
     // No using variables before defined
     "no-use-before-define": "error",
 
     // Always require semicolon at end of statement
     "semi": ["error", "always"],
 
-    // Require spaces around operators, except for a|"off".
-    "space-infix-ops": ["error", {"int32Hint": true}],
-
     // Disallow using variables outside the blocks they are defined (especially
     // since only let and const are used, see "no-var").
     "block-scoped-var": "error",
 
     // Allow trailing commas for easy list extension.  Having them does not
     // impair readability, but also not required either.
     "comma-dangle": ["error", "always-multiline"],
 
     // Warn about cyclomatic complexity in functions.
     "complexity": ["error", {"max": 20}],
 
-    // Don't warn for inconsistent naming when capturing this (not so important
-    // with auto-binding fat arrow functions).
-    // "consistent-this": ["error", "self"],
-
     // Enforce dots on the next line with property name.
     "dot-location": ["error", "property"],
 
     // Encourage the use of dot notation whenever possible.
     "dot-notation": "error",
 
     // Maximum length of a line.
     // This should be 100 but too many lines were longer than that so set a
--- a/build/clang-plugin/Utils.h
+++ b/build/clang-plugin/Utils.h
@@ -234,16 +234,17 @@ inline bool isIgnoredPathForSprintfLiter
                                     End = llvm::sys::path::rend(FileName);
   for (; Begin != End; ++Begin) {
     if (Begin->compare_lower(StringRef("angle")) == 0 ||
         Begin->compare_lower(StringRef("chromium")) == 0 ||
         Begin->compare_lower(StringRef("crashreporter")) == 0 ||
         Begin->compare_lower(StringRef("google-breakpad")) == 0 ||
         Begin->compare_lower(StringRef("gflags")) == 0 ||
         Begin->compare_lower(StringRef("harfbuzz")) == 0 ||
+        Begin->compare_lower(StringRef("jsoncpp")) == 0 ||
         Begin->compare_lower(StringRef("libstagefright")) == 0 ||
         Begin->compare_lower(StringRef("mtransport")) == 0 ||
         Begin->compare_lower(StringRef("protobuf")) == 0 ||
         Begin->compare_lower(StringRef("skia")) == 0 ||
         Begin->compare_lower(StringRef("sfntly")) == 0 ||
         // Gtest uses snprintf as GTEST_SNPRINTF_ with sizeof
         Begin->compare_lower(StringRef("testing")) == 0) {
       return true;
--- a/build/moz.configure/rust.configure
+++ b/build/moz.configure/rust.configure
@@ -45,17 +45,17 @@ def cargo_info(cargo):
 
 @depends(rustc_info, cargo_info)
 @imports(_from='textwrap', _import='dedent')
 def rust_compiler(rustc_info, cargo_info):
     if not rustc_info:
         die(dedent('''\
         Rust compiler not found.
         To compile rust language sources, you must have 'rustc' in your path.
-        See https//www.rust-lang.org/ for more information.
+        See https://www.rust-lang.org/ for more information.
 
         You can install rust by running './mach bootstrap'
         or by directly running the installer from https://rustup.rs/
         '''))
     rustc_min_version = Version('1.15.1')
     cargo_min_version = Version('0.{}'.format(rustc_min_version.minor + 1))
 
     version = rustc_info.version
--- a/devtools/.eslintrc.js
+++ b/devtools/.eslintrc.js
@@ -228,16 +228,18 @@ module.exports = {
     // Disallow unnecessary semicolons.
     "no-extra-semi": "error",
     // Deprecated, will be removed in 1.0.
     "no-extra-strict": "off",
     // Disallow fallthrough of case statements, except if there is a comment.
     "no-fallthrough": "error",
     // Allow the use of leading or trailing decimal points in numeric literals.
     "no-floating-decimal": "off",
+    // disallow use of eval()-like methods
+    "no-implied-eval": "error",
     // Allow comments inline after code.
     "no-inline-comments": "off",
     // Disallow if as the only statement in an else block.
     "no-lonely-if": "error",
     // Allow mixing regular variable and require declarations (not a node env).
     "no-mixed-requires": "off",
     // Disallow mixed spaces and tabs for indentation.
     "no-mixed-spaces-and-tabs": "error",
@@ -422,18 +424,16 @@ module.exports = {
     // allow/disallow an empty newline after var statement
     "newline-after-var": "off",
     // disallow the use of alert, confirm, and prompt
     "no-alert": "off",
     // disallow comparisons to null without a type-checking operator
     "no-eq-null": "off",
     // disallow overwriting functions written as function declarations
     "no-func-assign": "off",
-    // disallow use of eval()-like methods
-    "no-implied-eval": "off",
     // disallow function or variable declarations in nested blocks
     "no-inner-declarations": "off",
     // disallow invalid regular expression strings in the RegExp constructor
     "no-invalid-regexp": "off",
     // disallow irregular whitespace outside of strings and comments
     "no-irregular-whitespace": "off",
     // disallow usage of __iterator__ property
     "no-iterator": "off",
--- a/devtools/client/dom/content/components/dom-tree.js
+++ b/devtools/client/dom/content/components/dom-tree.js
@@ -8,17 +8,17 @@
 // React & Redux
 const React = require("devtools/client/shared/vendor/react");
 const { connect } = require("devtools/client/shared/vendor/react-redux");
 
 const TreeView = React.createFactory(require("devtools/client/shared/components/tree/tree-view"));
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 const Grip = REPS.Grip;
 
 // DOM Panel
 const { GripProvider } = require("../grip-provider");
 const { DomDecorator } = require("../dom-decorator");
 
 // Shortcuts
 const PropTypes = React.PropTypes;
--- a/devtools/client/inspector/boxmodel/components/BoxModelMain.js
+++ b/devtools/client/inspector/boxmodel/components/BoxModelMain.js
@@ -10,17 +10,17 @@ const { findDOMNode } = require("devtool
 const { KeyCodes } = require("devtools/client/shared/keycodes");
 
 const { LocalizationHelper } = require("devtools/shared/l10n");
 
 const BoxModelEditable = createFactory(require("./BoxModelEditable"));
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 const Types = require("../types");
 
 const BOXMODEL_STRINGS_URI = "devtools/client/locales/boxmodel.properties";
 const BOXMODEL_L10N = new LocalizationHelper(BOXMODEL_STRINGS_URI);
 
 const SHARED_STRINGS_URI = "devtools/client/locales/shared.properties";
 const SHARED_L10N = new LocalizationHelper(SHARED_STRINGS_URI);
--- a/devtools/client/inspector/grids/components/GridItem.js
+++ b/devtools/client/inspector/grids/components/GridItem.js
@@ -1,20 +1,20 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
-const { addons, createClass, createFactory, DOM: dom, PropTypes } = require("devtools/client/shared/vendor/react");
+const { addons, createClass, DOM: dom, PropTypes } = require("devtools/client/shared/vendor/react");
 const { findDOMNode } = require("devtools/client/shared/vendor/react-dom");
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 const ElementNode = REPS.ElementNode;
 
 const Types = require("../types");
 
 module.exports = createClass({
 
   displayName: "GridItem",
 
--- a/devtools/client/jsonview/components/json-panel.js
+++ b/devtools/client/jsonview/components/json-panel.js
@@ -7,17 +7,17 @@
 "use strict";
 
 define(function (require, exports, module) {
   const { DOM: dom, createFactory, createClass, PropTypes } = require("devtools/client/shared/vendor/react");
   const TreeView = createFactory(require("devtools/client/shared/components/tree/tree-view"));
 
   const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
   const { createFactories } = require("devtools/client/shared/react-utils");
-  const Rep = createFactory(REPS.Rep);
+  const { Rep } = REPS;
 
   const { SearchBox } = createFactories(require("./search-box"));
   const { Toolbar, ToolbarButton } = createFactories(require("./reps/toolbar"));
 
   const { div } = dom;
   const AUTO_EXPAND_MAX_SIZE = 100 * 1024;
   const AUTO_EXPAND_MAX_LEVEL = 7;
 
--- a/devtools/client/netmonitor/src/components/headers-panel.js
+++ b/devtools/client/netmonitor/src/components/headers-panel.js
@@ -19,17 +19,17 @@ const {
 } = require("../utils/mdn-utils");
 const { writeHeaderText } = require("../utils/request-utils");
 
 // Components
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
 const MDNLink = createFactory(require("./mdn-link"));
 const PropertiesView = createFactory(require("./properties-view"));
 
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 const { button, div, input, textarea } = DOM;
 
 const EDIT_AND_RESEND = L10N.getStr("netmonitor.summary.editAndResend");
 const RAW_HEADERS = L10N.getStr("netmonitor.summary.rawHeaders");
 const RAW_HEADERS_REQUEST = L10N.getStr("netmonitor.summary.rawHeaders.requestHeaders");
 const RAW_HEADERS_RESPONSE = L10N.getStr("netmonitor.summary.rawHeaders.responseHeaders");
 const HEADERS_EMPTY_TEXT = L10N.getStr("headersEmptyText");
 const HEADERS_FILTER_TEXT = L10N.getStr("headersFilterText");
--- a/devtools/client/netmonitor/src/components/properties-view.js
+++ b/devtools/client/netmonitor/src/components/properties-view.js
@@ -9,17 +9,17 @@
 const {
   createClass,
   createFactory,
   DOM,
   PropTypes,
 } = require("devtools/client/shared/vendor/react");
 
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 const { FILTER_SEARCH_DELAY } = require("../constants");
 
 // Components
 const SearchBox = createFactory(require("devtools/client/shared/components/search-box"));
 const TreeView = createFactory(require("devtools/client/shared/components/tree/tree-view"));
 const TreeRow = createFactory(require("devtools/client/shared/components/tree/tree-row"));
 const SourceEditor = createFactory(require("./source-editor"));
--- a/devtools/client/webconsole/net/components/post-tab.js
+++ b/devtools/client/webconsole/net/components/post-tab.js
@@ -3,17 +3,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
 const React = require("devtools/client/shared/vendor/react");
 
 const TreeView = React.createFactory(require("devtools/client/shared/components/tree/tree-view"));
 
 const { REPS, MODE, parseURLEncodedText } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 // Network
 const NetInfoParams = React.createFactory(require("./net-info-params"));
 const NetInfoGroupList = React.createFactory(require("./net-info-group-list"));
 const Spinner = React.createFactory(require("./spinner"));
 const SizeLimit = React.createFactory(require("./size-limit"));
 const NetUtils = require("../utils/net");
 const Json = require("../utils/json");
--- a/devtools/client/webconsole/net/components/response-tab.js
+++ b/devtools/client/webconsole/net/components/response-tab.js
@@ -3,17 +3,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
 const React = require("devtools/client/shared/vendor/react");
 
 // Reps
 const TreeView = React.createFactory(require("devtools/client/shared/components/tree/tree-view"));
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 // Network
 const SizeLimit = React.createFactory(require("./size-limit"));
 const NetInfoGroupList = React.createFactory(require("./net-info-group-list"));
 const Spinner = React.createFactory(require("./spinner"));
 const Json = require("../utils/json");
 const NetUtils = require("../utils/net");
 
--- a/devtools/client/webconsole/new-console-output/components/grip-message-body.js
+++ b/devtools/client/webconsole/new-console-output/components/grip-message-body.js
@@ -16,42 +16,48 @@ if (typeof define === "undefined") {
 const {
   createFactory,
   PropTypes
 } = require("devtools/client/shared/vendor/react");
 
 const VariablesViewLink = createFactory(require("devtools/client/webconsole/new-console-output/components/variables-view-link"));
 
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const { createFactories } = require("devtools/client/shared/react-utils");
-const Rep = createFactory(REPS.Rep);
+const Rep = REPS.Rep;
 const Grip = REPS.Grip;
-const StringRep = createFactories(REPS.StringRep).rep;
+const StringRep = REPS.StringRep.rep;
 
 GripMessageBody.displayName = "GripMessageBody";
 
 GripMessageBody.propTypes = {
   grip: PropTypes.oneOfType([
     PropTypes.string,
     PropTypes.number,
     PropTypes.object,
   ]).isRequired,
   serviceContainer: PropTypes.shape({
     createElement: PropTypes.func.isRequired,
   }),
   userProvidedStyle: PropTypes.string,
   useQuotes: PropTypes.bool,
+  escapeWhitespace: PropTypes.bool,
 };
 
 GripMessageBody.defaultProps = {
   mode: MODE.LONG,
 };
 
 function GripMessageBody(props) {
-  const { grip, userProvidedStyle, serviceContainer, useQuotes } = props;
+  const {
+    grip,
+    userProvidedStyle,
+    serviceContainer,
+    useQuotes,
+    escapeWhitespace
+  } = props;
 
   let styleObject;
   if (userProvidedStyle && userProvidedStyle !== "") {
     styleObject = cleanupStyle(userProvidedStyle, serviceContainer.createElement);
   }
 
   let onDOMNodeMouseOver;
   let onDOMNodeMouseOut;
@@ -61,16 +67,17 @@ function GripMessageBody(props) {
   }
 
   return (
     // @TODO once there is a longString rep, also turn off quotes for those.
     typeof grip === "string"
       ? StringRep({
         object: grip,
         useQuotes: useQuotes,
+        escapeWhitespace: escapeWhitespace,
         mode: props.mode,
         style: styleObject
       })
       : Rep({
         object: grip,
         objectLink: VariablesViewLink,
         onDOMNodeMouseOver,
         onDOMNodeMouseOut,
--- a/devtools/client/webconsole/new-console-output/components/message-types/evaluation-result.js
+++ b/devtools/client/webconsole/new-console-output/components/message-types/evaluation-result.js
@@ -38,17 +38,22 @@ function EvaluationResult(props) {
     parameters,
     notes,
   } = message;
 
   let messageBody;
   if (message.messageText) {
     messageBody = message.messageText;
   } else {
-    messageBody = GripMessageBody({grip: parameters, serviceContainer, useQuotes: true});
+    messageBody = GripMessageBody({
+      grip: parameters,
+      serviceContainer,
+      useQuotes: true,
+      escapeWhitespace: false,
+    });
   }
 
   const topLevelClasses = ["cm-s-mozilla"];
 
   const childProps = {
     source,
     type,
     level,
--- a/devtools/client/webconsole/new-console-output/test/mochitest/browser_webconsole_string.js
+++ b/devtools/client/webconsole/new-console-output/test/mochitest/browser_webconsole_string.js
@@ -6,30 +6,31 @@
 "use strict";
 
 const TEST_URI = "http://example.com/browser/devtools/client/webconsole/new-console-output/test/mochitest/test-console.html";
 
 add_task(function* () {
   let hud = yield openNewTabAndConsole(TEST_URI);
 
   info("console.log with a string argument");
-  let receievedMessages = waitForMessages({
+  let receivedMessages = waitForMessages({
     hud,
     messages: [{
       // Test that the output does not include quotes.
       text: "stringLog",
     }],
   });
 
   yield ContentTask.spawn(gBrowser.selectedBrowser, {}, function () {
     content.wrappedJSObject.stringLog();
   });
 
-  yield receievedMessages;
+  yield receivedMessages;
 
   info("evaluating a string constant");
   let jsterm = hud.jsterm;
-  yield jsterm.execute("\"string constant\"");
+  yield jsterm.execute("\"string\\nconstant\"");
   let msg = yield waitFor(() => findMessage(hud, "constant"));
   let body = msg.querySelector(".message-body");
-  // On the other hand, a string constant result should be quoted.
-  ok(body.textContent.includes("\"string constant\""), "found expected text");
+  // On the other hand, a string constant result should be quoted, but
+  // newlines should be let through.
+  ok(body.textContent.includes("\"string\nconstant\""), "found expected text");
 });
--- a/devtools/docs/frontend/telemetry.md
+++ b/devtools/docs/frontend/telemetry.md
@@ -2,26 +2,54 @@
 
 We use telemetry to get metrics of usage of the different features and panels in DevTools. This will help us take better, informed decisions when prioritising our work.
 
 ## Adding metrics to a tool
 
 The process to add metrics to a tool roughly consists in:
 
 1. Adding the probe to Firefox
-2. Using the probe in DevTools code
-3. Getting approval from the data team
+2. Using Histograms.json probes in DevTools code
+3. Using Scalars.yaml probes in DevTools code
+4. Getting approval from the data team
 
 ### 1. Adding the probe to Firefox
 
-The first step involves creating entries for the probe in the file that contains declarations for all data that Firefox might report to Mozilla.
+The first step involves creating entries for the probe in one of the files that contain declarations for all data that Firefox might report to Mozilla.
+
+These files are:
+- `toolkit/components/telemetry/Histograms.json`
+- `toolkit/components/telemetry/Scalars.yaml`
+
+
+Scalars allow collection of simple values, like counts, booleans and strings, and are to be used whenever possible instead of histograms.
+
+Histograms allow collection of multiple different values, but aggregate them into a number of buckets. Each bucket has a value range and a count of how many values we recorded.
+
+Both scalars & histograms allow recording by keys. This allows for more flexible, two-level data collection.
+
+#### Why the different file formats?
 
-This file is at `toolkit/components/telemetry/Histograms.json`.
+The data team chose YAML for `Scalars.yaml` because it is easy to write and provides a number of features not available in JSON including comments, extensible data types, relational anchors, strings without quotation marks, and mapping types preserving key order.
+
+While we previously used JSON for similar purposes in `Histograms.json`, we have used YAML here because it allows for comments and is generally easier to write.
+
+Once the YAML format has proven itself, the data team may move the histograms over to YAML as well.
+
+If it's the first time you add one of these, it's advised to follow the style of existing entries.
 
-If it's the first time you add one of these, it's advised to follow the style of existing entries. Our entries are prepended with `DEVTOOLS_`. For example:
+New data types have been added over the years, so it's quite feasible that some of our probes are not the most suitable nowadays.
+
+There's more information about types (and telemetry in general) on [this page](https://developer.mozilla.org/en-US/docs/Mozilla/Performance/Adding_a_new_Telemetry_probe) and [this other page](https://gecko.readthedocs.io/en/latest/toolkit/components/telemetry/telemetry/collection/index.html).
+
+And of course, in case of doubt, ask!
+
+### Adding probes to `Histograms.json`
+
+Our entries are prefixed with `DEVTOOLS_`. For example:
 
 ```javascript
   "DEVTOOLS_DOM_OPENED_COUNT": {
     "alert_emails": ["dev-developer-tools@lists.mozilla.org"],
     "expires_in_version": "never",
     "kind": "count",
     "bug_numbers": [1343501],
     "description": "Number of times the DevTools DOM Inspector has been opened.",
@@ -35,23 +63,51 @@ If it's the first time you add one of th
     "high": 10000000,
     "n_buckets": 100,
     "description": "How long has the DOM inspector been active (seconds)"
   },
 ```
 
 There are different types of probes you can use. These are specified by the `kind` field. Normally we use `count` for counting how many times the tools are opened, and `exponential` for how many times a panel is active.
 
-New data types have been added over the years, so it's quite feasible that some of our probes are not the most suitable nowadays.
+### Adding probes to `Scalars.yaml`
 
-There's more information about types (and telemetry in general) on [this page](https://developer.mozilla.org/en-US/docs/Mozilla/Performance/Adding_a_new_Telemetry_probe) and [this other page](https://gecko.readthedocs.io/en/latest/toolkit/components/telemetry/telemetry/collection/index.html).
+Our entries are prefixed with `devtools.`. For example:
 
-And of course, in case of doubt, ask!
+```yaml
+devtools.toolbar.eyedropper:
+  opened:
+    bug_numbers:
+      - 1247985
+      - 1352115
+    description: Number of times the DevTools Eyedropper has been opened via the inspector toolbar.
+    expires: never
+    kind: uint
+    notification_emails:
+      - dev-developer-tools@lists.mozilla.org
+    release_channel_collection: opt-out
+    record_in_processes:
+      - 'main'
 
-### 2. Using the probe in DevTools code
+devtools.copy.unique.css.selector:
+  opened:
+    bug_numbers:
+      - 1323700
+      - 1352115
+    description: Number of times the DevTools copy unique CSS selector has been used.
+    expires: "57"
+    kind: uint
+    notification_emails:
+      - dev-developer-tools@lists.mozilla.org
+    release_channel_collection: opt-out
+    record_in_processes:
+      - 'main'
+```
+
+### 2. Using Histograms.json probes in DevTools code
 
 Once the probe has been declared in the `Histograms.json` file, you'll need to actually use it in our code.
 
 First, you need to give it an id in `devtools/client/shared/telemetry.js`. Similarly to the `Histograms.json` case, you'll want to follow the style of existing entries. For example:
 
 ```javascript
 dom: {
   histogram: "DEVTOOLS_DOM_OPENED_COUNT",
@@ -62,17 +118,17 @@ dom: {
 ... would correspond to the probes we declared in the previous section.
 
 Then, include that module on each tool that requires telemetry:
 
 ```javascript
 let Telemetry = require("devtools/client/shared/telemetry");
 ```
 
-Create telemetry instance on the tool constructor:
+Create a telemetry instance on the tool constructor:
 
 ```javascript
 this._telemetry = new Telemetry();
 ```
 
 And use the instance to report e.g. tool opening...
 
 ```javascript
@@ -82,16 +138,58 @@ this._telemetry.toolOpened("mytoolname")
 ... or closing:
 
 ```javascript
 this._telemetry.toolClosed("mytoolname");
 ```
 
 Note that `mytoolname` is the id we declared in the `telemetry.js` module.
 
+### 3. Using Scalars.yaml probes in DevTools code
+
+Once the probe has been declared in the `Scalars.yaml` file, you'll need to actually use it in our code.
+
+First, you need to give it an id in `devtools/client/shared/telemetry.js`. You will want to follow the style of existing lowercase histogram entries. For example:
+
+```javascript
+toolbareyedropper: {
+  scalar: "devtools.toolbar.eyedropper.opened", // Note that the scalar is lowercase
+},
+copyuniquecssselector: {
+  scalar: "devtools.copy.unique.css.selector.opened",
+},
+```
+
+... would correspond to the probes we declared in the previous section.
+
+Then, include that module on each tool that requires telemetry:
+
+```javascript
+let Telemetry = require("devtools/client/shared/telemetry");
+```
+
+Create a telemetry instance on the tool constructor:
+
+```javascript
+this._telemetry = new Telemetry();
+```
+
+And use the instance to report e.g. tool opening...
+
+```javascript
+this._telemetry.toolOpened("mytoolname");
+```
+
+Notes:
+
+  - `mytoolname` is the id we declared in the `Scalars.yaml` module.
+  - Because we are not logging how long a tool is open in `Scalars.yaml`, we don't need
+`toolClosed`. Of course, if there were an accompanying `timerHistogram` field defined
+in `telemetry.js` and `Histograms.json`, then `toolClosed` should also be added.
+
 #### Note on top level panels
 
 The code for the tabs uses their ids to automatically report telemetry when you switch between panels, so you don't need to explicitly call `toolOpened` and `toolClosed` on top level panels.
 
 You will still need to call those functions on subpanels, or tools such as about:debugging which are not opened as tabs.
 
 #### Testing
 
@@ -111,27 +209,27 @@ Would report an error to stdout:
 ```
 Warning: An attempt was made to write to the mytoolnmae histogram, which is not defined in Histograms.json
 ```
 
 So watch out for errors.
 
 #### Compile it!
 
-It's strongly recommended that you do a full Firefox build if you have edited `Histograms.json`, as it is processed at build time, and various checks will be run on it to guarantee it is valid.
+It's strongly recommended that you do a full Firefox build if you have edited either `Histograms.json` or `Scalars.yaml`, as they are processed at build time, and various checks will be run on them to guarantee they are valid.
 
 ```
 ./mach build
 ```
 
 If you use `mach build faster` or artifact builds, the checks will not be performed, and your try builds will fail ("bust") when the checks are run there.
 
 Save yourself some time and run the checks locally.
 
-### 3. Getting approval from the data team
+### 4. Getting approval from the data team
 
 This is required before the changes make their way into `mozilla-central`.
 
 To get approval, attach your patch to the bug in Bugzilla, and set two flags:
 
 * a `feedback?` flag for bsmedberg (or someone else from the data team)
 * a `needinfo?` flag to clarkbw (our product manager, so he vouches that we're using the data)
 
@@ -157,9 +255,8 @@ If you want to get better understanding 
 
 The easiest way to get started is to *fork* an existing report and modify it to get used to the syntax, as SQL for massive data tables is very different from SQL for a humble blog engine, and you'll find some new operators that might look unfamiliar.
 
 It's also recommended to take small steps and run the queries often to detect errors before they're too complicated to solve, particularly if you're not experienced with this (yet).
 
 Slow queries will be interrupted by the system, so don't worry about "fetching too much data" or "using too many resources". There's built-in protection to avoid your code eating up the Telemetry database.
 
 Funnily, if you're based in Europe, you might be in luck, as the website tends to be more responsive during European working hours than it is at Pacific working hours, as seemingly there's less people in Europe interacting with it.
-
--- a/devtools/server/actors/highlighters/utils/markup.js
+++ b/devtools/server/actors/highlighters/utils/markup.js
@@ -1,17 +1,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
 const { Cc, Ci, Cu } = require("chrome");
 const { getCurrentZoom, getWindowDimensions, getViewportDimensions,
-  getRootBindingParent } = require("devtools/shared/layout/utils");
+  getRootBindingParent, loadSheet } = require("devtools/shared/layout/utils");
 const { on, emit } = require("sdk/event/core");
 
 const lazyContainer = {};
 
 loader.lazyRequireGetter(lazyContainer, "CssLogic",
   "devtools/server/css-logic", true);
 exports.getComputedStyle = (node) =>
   lazyContainer.CssLogic.getComputedStyle(node);
@@ -99,27 +99,24 @@ ClassList.prototype = {
 function isXUL(window) {
   return window.document.documentElement.namespaceURI === XUL_NS;
 }
 exports.isXUL = isXUL;
 
 /**
  * Inject a helper stylesheet in the window.
  */
-var installedHelperSheets = new WeakMap();
+var installedHelperSheets = new WeakSet();
 
-function installHelperSheet(win, source, type = "agent") {
+function installHelperSheet(win, url = STYLESHEET_URI, type = "agent") {
   if (installedHelperSheets.has(win.document)) {
     return;
   }
-  let {Style} = require("sdk/stylesheet/style");
-  let {attach} = require("sdk/content/mod");
-  let style = Style({source, type});
-  attach(style, win);
-  installedHelperSheets.set(win.document, style);
+  loadSheet(win, url, type);
+  installedHelperSheets.add(win.document);
 }
 exports.installHelperSheet = installHelperSheet;
 
 /**
  * Returns true if a DOM node is "valid", where "valid" means that the node isn't a dead
  * object wrapper, is still attached to a document, and is of a given type.
  * @param {DOMNode} node
  * @param {Number} nodeType Optional, defaults to ELEMENT_NODE
@@ -273,25 +270,17 @@ CanvasFrameAnonymousContentHelper.protot
     if (isXUL(this.highlighterEnv.window)) {
       return;
     }
 
     // For now highlighters.css is injected in content as a ua sheet because
     // <style scoped> doesn't work inside anonymous content (see bug 1086532).
     // If it did, highlighters.css would be injected as an anonymous content
     // node using CanvasFrameAnonymousContentHelper instead.
-    if (!installedHelperSheets.has(doc)) {
-      installedHelperSheets.set(doc, true);
-      let source = "@import url('" + STYLESHEET_URI + "');";
-      let url = "data:text/css;charset=utf-8," + encodeURIComponent(source);
-      let winUtils = this.highlighterEnv.window
-                         .QueryInterface(Ci.nsIInterfaceRequestor)
-                         .getInterface(Ci.nsIDOMWindowUtils);
-      winUtils.loadSheetUsingURIString(url, winUtils.AGENT_SHEET);
-    }
+    installHelperSheet(this.highlighterEnv.window);
 
     let node = this.nodeBuilder();
 
     // It was stated that hidden documents don't accept
     // `insertAnonymousContent` calls yet. That doesn't seems the case anymore,
     // at least on desktop. Therefore, removing the code that was dealing with
     // that scenario, fixes when we're adding anonymous content in a tab that
     // is not the active one (see bug 1260043 and bug 1260044)
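
Both this file and `inspector.js` below switch from a `WeakMap` to a `WeakSet` for tracking installed sheets: only membership ("was a sheet installed for this document/window?") is needed now that `loadSheet` owns the sheet itself. A short sketch of that idea (the `markInstalled` helper is illustrative only):

```javascript
// Sketch: a WeakSet is enough when only membership matters, and entries are
// dropped automatically once the document is garbage-collected.
const installed = new WeakSet();

function markInstalled(doc) {
  if (installed.has(doc)) {
    return false; // sheet already installed for this document
  }
  installed.add(doc);
  return true;
}
```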
--- a/devtools/server/actors/inspector.js
+++ b/devtools/server/actors/inspector.js
@@ -67,17 +67,18 @@ const {
   HighlighterEnvironment
 } = require("devtools/server/actors/highlighters");
 const {EyeDropper} = require("devtools/server/actors/highlighters/eye-dropper");
 const {
   isAnonymous,
   isNativeAnonymous,
   isXBLAnonymous,
   isShadowAnonymous,
-  getFrameElement
+  getFrameElement,
+  loadSheet
 } = require("devtools/shared/layout/utils");
 const {getLayoutChangesObserver, releaseLayoutChangesObserver} = require("devtools/server/actors/reflow");
 const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants");
 const {colorUtils} = require("devtools/shared/css/color");
 
 const {EventParsers} = require("devtools/server/event-parsers");
 const {nodeSpec, nodeListSpec, walkerSpec, inspectorSpec} = require("devtools/shared/specs/inspector");
 
@@ -122,17 +123,17 @@ const PSEUDO_SELECTORS = [
   [":empty", 0],
   [":target", 0],
   [":enabled", 0],
   [":disabled", 0],
   [":checked", 1],
   ["::selection", 0]
 ];
 
-var HELPER_SHEET = `
+var HELPER_SHEET = `data:text/css;charset=utf-8,
   .__fx-devtools-hide-shortcut__ {
     visibility: hidden !important;
   }
 
   :-moz-devtools-highlighted {
     outline: 2px dashed #F06!important;
     outline-offset: -2px !important;
   }
@@ -1860,25 +1861,22 @@ var WalkerActor = protocol.ActorClassWit
     DOMUtils.addPseudoClassLock(node.rawNode, pseudo, enabled);
     this._activePseudoClassLocks.add(node);
     this._queuePseudoClassMutation(node);
     return true;
   },
 
   _installHelperSheet: function (node) {
     if (!this.installedHelpers) {
-      this.installedHelpers = new WeakMap();
+      this.installedHelpers = new WeakSet();
     }
     let win = node.rawNode.ownerGlobal;
     if (!this.installedHelpers.has(win)) {
-      let { Style } = require("sdk/stylesheet/style");
-      let { attach } = require("sdk/content/mod");
-      let style = Style({source: HELPER_SHEET, type: "agent" });
-      attach(style, win);
-      this.installedHelpers.set(win, style);
+      loadSheet(win, HELPER_SHEET, "agent");
+      this.installedHelpers.add(win);
     }
   },
 
   hideNode: function (node) {
     if (isNodeDead(node)) {
       return;
     }
 
--- a/devtools/server/actors/object.js
+++ b/devtools/server/actors/object.js
@@ -101,18 +101,22 @@ ObjectActor.prototype = {
       if (g.class == "Promise") {
         g.promiseState = this._createPromiseState();
       }
 
       // FF40+: Allow to know how many properties an object has
       // to lazily display them when there is a bunch.
       // Throws on some MouseEvent object in tests.
       try {
-        // Bug 1163520: Assert on internal functions
-        if (!["Function", "Proxy"].includes(g.class)) {
+        if (TYPED_ARRAY_CLASSES.indexOf(g.class) != -1) {
+          // Bug 1348761: getOwnPropertyNames is unecessary slow on TypedArrays
+          let length = DevToolsUtils.getProperty(this.obj, "length");
+          g.ownPropertyLength = length;
+        } else if (!["Function", "Proxy"].includes(g.class)) {
+          // Bug 1163520: Assert on internal functions
           g.ownPropertyLength = this.obj.getOwnPropertyNames().length;
         }
       } catch (e) {}
 
       let raw = this.obj.unsafeDereference();
 
       // If Cu is not defined, we are running on a worker thread, where xrays
       // don't exist.
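
The typed-array branch added above avoids `getOwnPropertyNames`, which materializes one string per index, when only the element count is needed. A quick illustration of the cost difference (runnable in any recent JS shell):

```javascript
// Illustration of the bug 1348761 optimization: one property read vs. building
// about a million index-name strings just to count them.
const ta = new Uint8Array(1024 * 1024);

console.log(ta.length);                             // 1048576, a single read
console.log(Object.getOwnPropertyNames(ta).length); // 1048576, allocates every index name
```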
--- a/devtools/server/performance/profiler.js
+++ b/devtools/server/performance/profiler.js
@@ -248,17 +248,19 @@ const ProfilerManager = (function () {
     },
 
     /**
      * Returns an array of objects that describes the shared libraries
      * which are currently loaded into our process. Can be called while the
      * profiler is stopped.
      */
     get sharedLibraries() {
-      return nsIProfilerModule.sharedLibraries;
+      return {
+        sharedLibraries: nsIProfilerModule.sharedLibraries
+      };
     },
 
     /**
      * Number of profiler instances.
      *
      * @return {number}
      */
     get length() {
--- a/devtools/server/tests/unit/test_profiler_sharedlibraries.js
+++ b/devtools/server/tests/unit/test_profiler_sharedlibraries.js
@@ -16,17 +16,18 @@ function run_test() {
       });
     });
   });
 
   do_test_pending();
 }
 
 function test_sharedlibraries(client, actor, callback) {
-  client.request({ to: actor, type: "sharedLibraries" }, libs => {
+  client.request({ to: actor, type: "sharedLibraries" }, response => {
+    const libs = response.sharedLibraries;
     do_check_eq(typeof libs, "object");
     do_check_true(Array.isArray(libs));
     do_check_eq(typeof libs, "object");
     do_check_true(libs.length >= 1);
     do_check_eq(typeof libs[0], "object");
     do_check_eq(typeof libs[0].name, "string");
     do_check_eq(typeof libs[0].path, "string");
     do_check_eq(typeof libs[0].debugName, "string");
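With the actor change above, the sharedLibraries reply now nests the array under a sharedLibraries property instead of being the bare array. A rough sketch of the packet a client unpacks (field names taken from the assertions in this test):

    // { from: <actorID>, sharedLibraries: [ { name, path, debugName, ... }, ... ] }
    client.request({ to: actor, type: "sharedLibraries" }, response => {
      const libs = response.sharedLibraries;
      // libs is a plain Array; libs[0].name, libs[0].path, libs[0].debugName, ...
    });
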
--- a/devtools/shared/layout/utils.js
+++ b/devtools/shared/layout/utils.js
@@ -2,16 +2,22 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
 const { Ci, Cc } = require("chrome");
 const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants");
 
+const SHEET_TYPE = {
+  "agent": "AGENT_SHEET",
+  "user": "USER_SHEET",
+  "author": "AUTHOR_SHEET"
+};
+
 loader.lazyRequireGetter(this, "setIgnoreLayoutChanges", "devtools/server/actors/reflow", true);
 exports.setIgnoreLayoutChanges = (...args) =>
   this.setIgnoreLayoutChanges(...args);
 
 /**
  * Returns the `DOMWindowUtils` for the window given.
  *
  * @param {DOMWindow} win
@@ -710,8 +716,54 @@ function getWindowFor(node) {
       return node.defaultView;
     }
     return node.ownerDocument.defaultView;
   } else if (node instanceof Ci.nsIDOMWindow) {
     return node;
   }
   return null;
 }
+
+/**
+ * Synchronously loads a style sheet from `url` and adds it to the list of
+ * additional style sheets of the document.
+ * The added sheet takes effect immediately, and only on the document of the
+ * given `window`.
+ *
+ * @param {DOMWindow} window
+ * @param {String} url
+ * @param {String} [type="author"]
+ */
+function loadSheet(window, url, type = "author") {
+  if (!(type in SHEET_TYPE)) {
+    type = "author";
+  }
+
+  let windowUtils = utilsFor(window);
+  try {
+    windowUtils.loadSheetUsingURIString(url, windowUtils[SHEET_TYPE[type]]);
+  } catch (e) {
+    // The method fails if the url is already loaded.
+  }
+}
+exports.loadSheet = loadSheet;
+
+/**
+ * Remove the style sheet at `url` from the list of additional style sheets
+ * of the document. The removal takes effect immediately.
+ *
+ * @param {DOMWindow} window
+ * @param {String} url
+ * @param {String} [type="author"]
+ */
+function removeSheet(window, url, type = "author") {
+  if (!(type in SHEET_TYPE)) {
+    type = "author";
+  }
+
+  let windowUtils = utilsFor(window);
+  try {
+    windowUtils.removeSheetUsingURIString(url, windowUtils[SHEET_TYPE[type]]);
+  } catch (e) {
+    // The method fails if the url is already removed.
+  }
+}
+exports.removeSheet = removeSheet;
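A short usage sketch for the two new helpers, assuming `win` is a DOMWindow the caller already holds and the sheet is a data: URI like HELPER_SHEET above; unknown type values fall back to "author":

    const { loadSheet, removeSheet } = require("devtools/shared/layout/utils");

    const SHEET = "data:text/css;charset=utf-8," +
                  encodeURIComponent(".demo { outline: 1px solid red !important; }");

    loadSheet(win, SHEET, "agent");   // takes effect immediately on win's document
    // ... later, once the styling is no longer needed:
    removeSheet(win, SHEET, "agent"); // errors (e.g. sheet already gone) are swallowed
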
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -1007,17 +1007,23 @@ WebGLContext::SetDimensions(int32_t sign
     bool forceEnabled = gfxPrefs::WebGLForceEnabled();
     ScopedGfxFeatureReporter reporter("WebGL", forceEnabled);
 
     MOZ_ASSERT(!gl);
     std::vector<FailureReason> failReasons;
     if (!CreateAndInitGL(forceEnabled, &failReasons)) {
         nsCString text("WebGL creation failed: ");
         for (const auto& cur : failReasons) {
-            Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID, cur.key);
+            // Don't accumulate with an empty key; report a placeholder failure ID instead.
+            if (cur.key.IsEmpty()) {
+                Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID,
+                                      NS_LITERAL_CSTRING("FEATURE_FAILURE_REASON_UNKNOWN"));
+            } else {
+                Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID, cur.key);
+            }
 
             text.AppendASCII("\n* ");
             text.Append(cur.info);
         }
         failureId = NS_LITERAL_CSTRING("FEATURE_FAILURE_REASON");
         ThrowEvent_WebGLContextCreationError(text);
         return NS_ERROR_FAILURE;
     }
--- a/dom/events/IMEStateManager.cpp
+++ b/dom/events/IMEStateManager.cpp
@@ -141,26 +141,32 @@ StaticRefPtr<nsPresContext> IMEStateMana
 nsIWidget* IMEStateManager::sFocusedIMEWidget = nullptr;
 nsIWidget* IMEStateManager::sActiveInputContextWidget = nullptr;
 StaticRefPtr<TabParent> IMEStateManager::sActiveTabParent;
 StaticRefPtr<IMEContentObserver> IMEStateManager::sActiveIMEContentObserver;
 TextCompositionArray* IMEStateManager::sTextCompositions = nullptr;
 bool IMEStateManager::sInstalledMenuKeyboardListener = false;
 bool IMEStateManager::sIsGettingNewIMEState = false;
 bool IMEStateManager::sCheckForIMEUnawareWebApps = false;
+bool IMEStateManager::sInputModeSupported = false;
 bool IMEStateManager::sRemoteHasFocus = false;
 
 // static
 void
 IMEStateManager::Init()
 {
   Preferences::AddBoolVarCache(
     &sCheckForIMEUnawareWebApps,
     "intl.ime.hack.on_ime_unaware_apps.fire_key_events_for_composition",
     false);
+
+  Preferences::AddBoolVarCache(
+    &sInputModeSupported,
+    "dom.forms.inputmode",
+    false);
 }
 
 // static
 void
 IMEStateManager::Shutdown()
 {
   MOZ_LOG(sISMLog, LogLevel::Info,
     ("Shutdown(), sTextCompositions=0x%p, sTextCompositions->Length()=%" PRIuSIZE,
@@ -995,17 +1001,17 @@ IMEStateManager::SetIMEState(const IMESt
         }
       }
       content->GetAttr(kNameSpaceID_None, nsGkAtoms::type,
                        context.mHTMLInputType);
     } else {
       context.mHTMLInputType.Assign(nsGkAtoms::textarea->GetUTF16String());
     }
 
-    if (Preferences::GetBool("dom.forms.inputmode", false) ||
+    if (sInputModeSupported ||
         nsContentUtils::IsChromeDoc(aContent->OwnerDoc())) {
       aContent->GetAttr(kNameSpaceID_None, nsGkAtoms::inputmode,
                         context.mHTMLInputInputmode);
     }
 
     aContent->GetAttr(kNameSpaceID_None, nsGkAtoms::moz_action_hint,
                       context.mActionHint);
 
--- a/dom/events/IMEStateManager.h
+++ b/dom/events/IMEStateManager.h
@@ -280,16 +280,17 @@ protected:
   // When you get an item of this array and use it, please be careful.
   // The instances in this array can be destroyed automatically if you do
   // something to cause committing or canceling the composition.
   static TextCompositionArray* sTextCompositions;
 
   static bool           sInstalledMenuKeyboardListener;
   static bool           sIsGettingNewIMEState;
   static bool           sCheckForIMEUnawareWebApps;
+  static bool           sInputModeSupported;
   static bool           sRemoteHasFocus;
 
   class MOZ_STACK_CLASS GettingNewIMEStateBlocker final
   {
   public:
     GettingNewIMEStateBlocker()
       : mOldValue(IMEStateManager::sIsGettingNewIMEState)
     {
--- a/dom/media/test/eme.js
+++ b/dom/media/test/eme.js
@@ -126,32 +126,31 @@ function UpdateSessionFunc(test, token, 
       if (key) {
         Log(token, "found key " + key + " for key id " + idHex);
         outKeys.push({
           "kty":"oct",
           "kid":id64,
           "k":HexToBase64(key)
         });
       } else {
-        bail(token + " couldn't find key for key id " + idHex)("No such key");
+        reject(`${token} couldn't find key for key id ${idHex}`);
       }
     }
 
     var update = JSON.stringify({
       "keys" : outKeys,
       "type" : msg.type
     });
     Log(token, "sending update message to CDM: " + update);
 
     ev.target.update(StringToArrayBuffer(update)).then(function() {
       Log(token, "MediaKeySession update ok!");
       resolve(ev.target);
     }).catch(function(reason) {
-      bail(token + " MediaKeySession update failed")(reason);
-      reject();
+      reject(`${token} MediaKeySession update failed: ${reason}`);
     });
   }
 }
 
 function MaybeCrossOriginURI(test, uri)
 {
   if (test.crossOrigin) {
     return "http://test2.mochi.test:8888/tests/dom/media/test/allowed.sjs?" + uri;
@@ -187,18 +186,22 @@ function AppendTrack(test, ms, track, to
       req.open("GET", fragmentFile);
       req.responseType = "arraybuffer";
 
       req.addEventListener("load", function() {
         Log(token, track.name + ": fetch of " + fragmentFile + " complete, appending");
         sb.appendBuffer(new Uint8Array(req.response));
       });
 
-      req.addEventListener("error", function(){info(token + " error fetching " + fragmentFile);});
-      req.addEventListener("abort", function(){info(token + " aborted fetching " + fragmentFile);});
+      req.addEventListener("error", function() {
+        reject(`${token} - ${track.name}: error fetching ${fragmentFile}`);
+      });
+      req.addEventListener("abort", function() {
+        reject(`${token} - ${track.name}: aborted fetching ${fragmentFile}`);
+      });
 
       Log(token, track.name + ": addNextFragment() fetching next fragment " + fragmentFile);
       req.send(null);
     }
 
     Log(token, track.name + ": addSourceBuffer(" + track.type + ")");
     sb = ms.addSourceBuffer(track.type);
     sb.addEventListener("updateend", function() {
@@ -233,39 +236,30 @@ function LoadTest(test, elem, token, loa
     ok(false, token + " test does not have a tracks list");
     return Promise.reject();
   }
 
   var ms = new MediaSource();
   elem.src = URL.createObjectURL(ms);
 
   return new Promise(function (resolve, reject) {
-    var firstOpen = true;
     ms.addEventListener("sourceopen", function () {
-      if (!firstOpen) {
-        Log(token, "sourceopen again?");
-        return;
-      }
-
-      firstOpen = false;
       Log(token, "sourceopen");
-      return Promise.all(test.tracks.map(function(track) {
+      Promise.all(test.tracks.map(function(track) {
         return AppendTrack(test, ms, track, token, loadParams);
       })).then(function() {
         if (loadParams && loadParams.noEndOfStream) {
           Log(token, "Tracks loaded");
         } else {
           Log(token, "Tracks loaded, calling MediaSource.endOfStream()");
           ms.endOfStream();
         }
         resolve();
-      }).catch(function() {
-        Log(token, "error while loading tracks");
-      });
-    })
+      }).catch(reject);
+    }, {once: true});
   });
 }
 
 function EMEPromise() {
   var self = this;
   self.promise = new Promise(function(resolve, reject) {
     self.resolve = resolve;
     self.reject = reject;
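The LoadTest() rewrite above replaces the hand-rolled firstOpen flag with the listener's { once: true } option and routes every failure path through reject. The same pattern, stripped of the EME specifics (appendTrack is a placeholder for the real per-track loader):

    function loadAllTracks(ms, tracks, appendTrack) {
      return new Promise((resolve, reject) => {
        ms.addEventListener("sourceopen", () => {
          Promise.all(tracks.map(track => appendTrack(ms, track)))
            .then(resolve)
            .catch(reject);  // any fetch/append failure rejects the whole load
        }, { once: true });  // the handler runs for the first sourceopen only
      });
    }
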
--- a/gfx/layers/d3d11/CompositorD3D11.cpp
+++ b/gfx/layers/d3d11/CompositorD3D11.cpp
@@ -1410,23 +1410,16 @@ CompositorD3D11::NormalDrawingDone()
 void
 CompositorD3D11::EndFrame()
 {
   if (!mDefaultRT) {
     Compositor::EndFrame();
     return;
   }
 
-  if (mDevice->GetDeviceRemovedReason() != S_OK) {
-    gfxCriticalNote << "GFX: D3D11 skip EndFrame with device-removed.";
-    Compositor::EndFrame();
-    mCurrentRT = nullptr;
-    return;
-  }
-
   LayoutDeviceIntSize oldSize = mSize;
   EnsureSize();
   if (mSize.width <= 0 || mSize.height <= 0) {
     Compositor::EndFrame();
     return;
   }
 
   RefPtr<ID3D11Query> query;
--- a/gfx/vr/openvr/moz.build
+++ b/gfx/vr/openvr/moz.build
@@ -18,17 +18,17 @@ if CONFIG['OS_ARCH'] == 'WINNT':
 # will also need to define these:
 #
 #   LINUX64
 #   LINUX
 #   OSX
 #   POSIX
 
 LOCAL_INCLUDES += [
-    '/toolkit/crashreporter/jsoncpp/include',
+    '/toolkit/components/jsoncpp/include',
 ]
 
 USE_LIBS += [
     'jsoncpp',
 ]
 
 EXPORTS += [
     'headers/openvr.h',
--- a/image/imgIRequest.idl
+++ b/image/imgIRequest.idl
@@ -203,10 +203,28 @@ interface imgIRequest : nsIRequest
 
   /**
    * Tell the image it can forget about a request that the image animate.
    *
    * @see Image::DecrementAnimationConsumers for documentation of the
    * underlying call.
    */
   void decrementAnimationConsumers();
+
+  /**
+   * Request a loading priority boost for the requested category. Each
+   * category of request increases the priority only once.
+   *
+   * CATEGORY_FRAME_INIT: increase priority when the imgRequest is associated
+   * with an nsImageFrame.
+   *
+   * CATEGORY_SIZE_QUERY: increase priority when size decoding is necessary to
+   * determine the layout size of the associated nsImageFrame.
+   *
+   * CATEGORY_DISPLAY: increase priority when the image is about to be displayed
+   * in the viewport.
+   */
+  const uint32_t CATEGORY_FRAME_INIT = 1 << 0;
+  const uint32_t CATEGORY_SIZE_QUERY = 1 << 1;
+  const uint32_t CATEGORY_DISPLAY    = 1 << 2;
+  void boostPriority(in uint32_t aCategory);
 };
 
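Because imgIRequest is scriptable, privileged JS could request a boost through the new constants. A hypothetical call site (the `request` variable and the surrounding consumer are assumptions, not part of this patch; Ci is the usual Components.interfaces shorthand):

    // `request` is an imgIRequest obtained elsewhere in chrome code.
    request.boostPriority(Ci.imgIRequest.CATEGORY_SIZE_QUERY); // size needed for layout
    request.boostPriority(Ci.imgIRequest.CATEGORY_DISPLAY);    // about to be shown
    // Repeating a category is harmless: each one is honoured at most once.
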
--- a/image/imgRequest.cpp
+++ b/image/imgRequest.cpp
@@ -536,20 +536,58 @@ imgRequest::AdjustPriority(imgRequestPro
   // to increase the priority of requests that have a lot of proxies.  the key
   // concern though is that image loads remain lower priority than other pieces
   // of content such as link clicks, CSS, and JS.
   //
   if (!mFirstProxy || proxy != mFirstProxy) {
     return;
   }
 
+  AdjustPriorityInternal(delta);
+}
+
+void
+imgRequest::AdjustPriorityInternal(int32_t aDelta)
+{
   nsCOMPtr<nsISupportsPriority> p = do_QueryInterface(mChannel);
   if (p) {
-    p->AdjustPriority(delta);
+    p->AdjustPriority(aDelta);
+  }
+}
+
+void
+imgRequest::BoostPriority(uint32_t aCategory)
+{
+  uint32_t newRequestedCategory =
+    (mBoostCategoriesRequested & aCategory) ^ aCategory;
+  if (!newRequestedCategory) {
+    // The priority boost for each category can only be applied once.
+    return;
   }
+
+  MOZ_LOG(gImgLog, LogLevel::Debug,
+         ("[this=%p] imgRequest::BoostPriority for category %x",
+          this, newRequestedCategory));
+
+  int32_t delta = 0;
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_FRAME_INIT) {
+    --delta;
+  }
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_SIZE_QUERY) {
+    --delta;
+  }
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_DISPLAY) {
+    delta += nsISupportsPriority::PRIORITY_HIGH;
+  }
+
+  AdjustPriorityInternal(delta);
+  mBoostCategoriesRequested |= newRequestedCategory;
 }
 
 bool
 imgRequest::HasTransferredData() const
 {
   MutexAutoLock lock(mMutex);
   return mGotData;
 }
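The expression (mBoostCategoriesRequested & aCategory) ^ aCategory keeps only the category bits that have not been applied yet, which is what makes repeated boosts a no-op. A small worked example of the bit trick (values mirror the CATEGORY_* constants, 1 << 0 through 1 << 2):

    // requested = categories already applied; asked = categories requested now
    function newCategories(requested, asked) {
      return (requested & asked) ^ asked; // bits in `asked` not yet in `requested`
    }

    newCategories(0b000, 0b001); // 0b001: first FRAME_INIT request is applied
    newCategories(0b001, 0b001); // 0b000: repeating FRAME_INIT is ignored
    newCategories(0b001, 0b101); // 0b100: only the new DISPLAY bit is applied
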
--- a/image/imgRequest.h
+++ b/image/imgRequest.h
@@ -166,16 +166,18 @@ public:
   /// @return the priority of the underlying network request, or
   /// PRIORITY_NORMAL if it doesn't support nsISupportsPriority.
   int32_t Priority() const;
 
   /// Adjust the priority of the underlying network request by @aDelta on behalf
   /// of @aProxy.
   void AdjustPriority(imgRequestProxy* aProxy, int32_t aDelta);
 
+  void BoostPriority(uint32_t aCategory);
+
   /// Returns a weak pointer to the underlying request.
   nsIRequest* GetRequest() const { return mRequest; }
 
   nsITimedChannel* GetTimedChannel() const { return mTimedChannel; }
 
   nsresult GetSecurityInfo(nsISupports** aSecurityInfoOut);
 
   imgCacheValidator* GetValidator() const { return mValidator; }
@@ -218,16 +220,18 @@ private:
   void Cancel(nsresult aStatus);
 
   // Update the cache entry size based on the image container.
   void UpdateCacheEntrySize();
 
   /// Returns true if StartDecoding() was called.
   bool IsDecodeRequested() const;
 
+  void AdjustPriorityInternal(int32_t aDelta);
+
   // Weak reference to parent loader; this request cannot outlive its owner.
   imgLoader* mLoader;
   nsCOMPtr<nsIRequest> mRequest;
   // The original URI we were loaded with. This is the same as the URI we are
   // keyed on in the cache. We store a string here to avoid off main thread
   // refcounting issues with nsStandardURL.
   RefPtr<ImageURL> mURI;
   // The URI of the resource we ended up loading after all redirects, etc.
@@ -270,16 +274,19 @@ private:
   // default, imgIRequest::CORS_NONE.
   int32_t mCORSMode;
 
   // The Referrer Policy (defined in ReferrerPolicy.h) used for this image.
   ReferrerPolicy mReferrerPolicy;
 
   nsresult mImageErrorCode;
 
+  // The categories of prioritization strategy that have been requested.
+  uint32_t mBoostCategoriesRequested = 0;
+
   mutable mozilla::Mutex mMutex;
 
   // Member variables protected by mMutex. Note that *all* flags in our bitfield
   // are protected by mMutex; if you're adding a new flag that isn't protected, it
   // must not be a part of this bitfield.
   RefPtr<ProgressTracker> mProgressTracker;
   RefPtr<Image> mImage;
   bool mIsMultiPartChannel : 1;
--- a/image/imgRequestProxy.cpp
+++ b/image/imgRequestProxy.cpp
@@ -713,16 +713,24 @@ imgRequestProxy::GetCORSMode(int32_t* aC
     return NS_ERROR_FAILURE;
   }
 
   *aCorsMode = GetOwner()->GetCORSMode();
 
   return NS_OK;
 }
 
+NS_IMETHODIMP
+imgRequestProxy::BoostPriority(uint32_t aCategory)
+{
+  NS_ENSURE_STATE(GetOwner() && !mCanceled);
+  GetOwner()->BoostPriority(aCategory);
+  return NS_OK;
+}
+
 /** nsISupportsPriority methods **/
 
 NS_IMETHODIMP
 imgRequestProxy::GetPriority(int32_t* priority)
 {
   NS_ENSURE_STATE(GetOwner());
   *priority = GetOwner()->Priority();
   return NS_OK;
--- a/image/test/browser/browser.ini
+++ b/image/test/browser/browser.ini
@@ -1,14 +1,15 @@
 [DEFAULT]
 support-files =
   animated.gif
   animated2.gif
   big.png
   head.js
   image.html
   imageX2.html
+  browser_docshell_type_editor/**
 
 [browser_bug666317.js]
 skip-if = true || e10s # Bug 1207012 - Permaorange from an uncaught exception that isn't actually turning the suite orange until it hits beta, Bug 948194 - Decoded Images seem to not be discarded on memory-pressure notification with e10s enabled
 [browser_image.js]
 skip-if = true # Bug 987616
 [browser_docshell_type_editor.js]
--- a/image/test/browser/browser_docshell_type_editor.js
+++ b/image/test/browser/browser_docshell_type_editor.js
@@ -1,63 +1,87 @@
 
 "use strict";
 
 const Ci = Components.interfaces;
 const SIMPLE_HTML = "data:text/html,<html><head></head><body></body></html>";
 
+/**
+ * Returns the directory where the chrome.manifest file for the test can be found.
+ *
+ * @return nsILocalFile of the manifest directory
+ */
+function getManifestDir() {
+  let path = getTestFilePath("browser_docshell_type_editor");
+  let file = Components.classes["@mozilla.org/file/local;1"]
+                       .createInstance(Components.interfaces.nsILocalFile);
+  file.initWithPath(path);
+  return file;
+}
+
 // The following URI is *not* accessible to content, hence loading that URI
 // from an unprivileged site should be blocked. If docshell is of appType
 // APP_TYPE_EDITOR however the load should be allowed.
-// >> chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png
+// >> chrome://test1/skin/privileged.png
 
 add_task(function* () {
   info("docshell of appType APP_TYPE_EDITOR can access privileged images.");
 
+  // Load a temporary manifest adding a route to a privileged image
+  let manifestDir = getManifestDir();
+  Components.manager.addBootstrappedManifestLocation(manifestDir);
+
   yield BrowserTestUtils.withNewTab({
     gBrowser,
     url: SIMPLE_HTML
   }, function* (browser) {
     yield ContentTask.spawn(browser, null, function* () {
       let rootDocShell = docShell.QueryInterface(Ci.nsIDocShellTreeItem)
                                  .rootTreeItem
                                  .QueryInterface(Ci.nsIInterfaceRequestor)
                                  .getInterface(Ci.nsIDocShell);
       let defaultAppType = rootDocShell.appType;
 
       rootDocShell.appType = Ci.nsIDocShell.APP_TYPE_EDITOR;
 
       is(rootDocShell.appType, Ci.nsIDocShell.APP_TYPE_EDITOR,
         "sanity check: appType after update should be type editor");
 
+
       return new Promise(resolve => {
         let doc = content.document;
         let image = doc.createElement("img");
         image.onload = function() {
           ok(true, "APP_TYPE_EDITOR is allowed to load privileged image");
           // restore appType of rootDocShell before moving on to the next test
           rootDocShell.appType = defaultAppType;
           resolve();
         }
         image.onerror = function() {
           ok(false, "APP_TYPE_EDITOR is allowed to load privileged image");
           // restore appType of rootDocShell before moving on to the next test
           rootDocShell.appType = defaultAppType;
           resolve();
         }
         doc.body.appendChild(image);
-        image.src = "chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png";
+        image.src = "chrome://test1/skin/privileged.png";
       });
     });
   });
+
+  Components.manager.removeBootstrappedManifestLocation(manifestDir);
 });
 
 add_task(function* () {
   info("docshell of appType APP_TYPE_UNKNOWN can *not* access privileged images.");
 
+  // Load a temporary manifest adding a route to a privileged image
+  let manifestDir = getManifestDir();
+  Components.manager.addBootstrappedManifestLocation(manifestDir);
+
   yield BrowserTestUtils.withNewTab({
     gBrowser,
     url: SIMPLE_HTML
   }, function* (browser) {
     yield ContentTask.spawn(browser, null, function* () {
       let rootDocShell = docShell.QueryInterface(Ci.nsIDocShellTreeItem)
                                  .rootTreeItem
                                  .QueryInterface(Ci.nsIInterfaceRequestor)
@@ -80,13 +104,15 @@ add_task(function* () {
         }
         image.onerror = function() {
           ok(true, "APP_TYPE_UNKNOWN is *not* allowed to acces privileged image");
           // restore appType of rootDocShell before moving on to the next test
           rootDocShell.appType = defaultAppType;
           resolve();
         }
         doc.body.appendChild(image);
-        image.src = "chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png";
+        image.src = "chrome://test1/skin/privileged.png";
       });
     });
   });
+
+  Components.manager.removeBootstrappedManifestLocation(manifestDir);
 });
new file mode 100644
--- /dev/null
+++ b/image/test/browser/browser_docshell_type_editor/chrome.manifest
@@ -0,0 +1,1 @@
+skin test1 test img/
\ No newline at end of file
new file mode 100644
index 0000000000000000000000000000000000000000..2bf7b7e828b39c161a4e3218c281b78b242e7f9d
GIT binary patch
literal 90
zc%17D@N?(olHy`uVBq!ia0vp^EFjFm1|(O0oL2{=<UL&+LnOkJ|J2WYXwNK^z`|*e
n<>c}&K0V=pUc(wz2D>W$$mL64`T~_Qc)I$ztaD0eVqgFOl-w6?
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -9227,51 +9227,16 @@ BytecodeEmitter::isRestParameter(ParseNo
             return paramName && name == paramName;
         }
     }
 
     return false;
 }
 
 bool
-BytecodeEmitter::emitOptimizeSpread(ParseNode* arg0, JumpList* jmp, bool* emitted)
-{
-    // Emit a pereparation code to optimize the spread call with a rest
-    // parameter:
-    //
-    //   function f(...args) {
-    //     g(...args);
-    //   }
-    //
-    // If the spread operand is a rest parameter and it's optimizable array,
-    // skip spread operation and pass it directly to spread call operation.
-    // See the comment in OptimizeSpreadCall in Interpreter.cpp for the
-    // optimizable conditons.
-    if (!isRestParameter(arg0)) {
-        *emitted = false;
-        return true;
-    }
-
-    if (!emitTree(arg0))
-        return false;
-
-    if (!emit1(JSOP_OPTIMIZE_SPREADCALL))
-        return false;
-
-    if (!emitJump(JSOP_IFNE, jmp))
-        return false;
-
-    if (!emit1(JSOP_POP))
-        return false;
-
-    *emitted = true;
-    return true;
-}
-
-bool
 BytecodeEmitter::emitCallOrNew(ParseNode* pn, ValueUsage valueUsage /* = ValueUsage::WantValue */)
 {
     bool callop = pn->isKind(PNK_CALL) || pn->isKind(PNK_TAGGED_TEMPLATE);
     /*
      * Emit callable invocation or operator new (constructor call) code.
      * First, emit code for the left operand to evaluate the callable or
      * constructable object expression.
      *
@@ -9418,28 +9383,53 @@ BytecodeEmitter::emitCallOrNew(ParseNode
             } else {
                 // Repush the callee as new.target
                 if (!emitDupAt(argc + 1))
                     return false;
             }
         }
     } else {
         ParseNode* args = pn2->pn_next;
-        JumpList jmp;
-        bool optCodeEmitted = false;
-        if (argc == 1) {
-            if (!emitOptimizeSpread(args->pn_kid, &jmp, &optCodeEmitted))
+        bool emitOptCode = (argc == 1) && isRestParameter(args->pn_kid);
+        IfThenElseEmitter ifNotOptimizable(this);
+
+        if (emitOptCode) {
+            // Emit preparation code to optimize the spread call with a rest
+            // parameter:
+            //
+            //   function f(...args) {
+            //     g(...args);
+            //   }
+            //
+            // If the spread operand is a rest parameter and an optimizable
+            // array, skip the spread operation and pass it directly to the
+            // spread call operation.  See the comment in OptimizeSpreadCall in
+            // Interpreter.cpp for the optimizable conditions.
+
+            if (!emitTree(args->pn_kid))
+                return false;
+
+            if (!emit1(JSOP_OPTIMIZE_SPREADCALL))
+                return false;
+
+            if (!emit1(JSOP_NOT))
+                return false;
+
+            if (!ifNotOptimizable.emitIf())
+                return false;
+
+            if (!emit1(JSOP_POP))
                 return false;
         }
 
         if (!emitArray(args, argc, JSOP_SPREADCALLARRAY))
             return false;
 
-        if (optCodeEmitted) {
-            if (!emitJumpTargetAndPatch(jmp))
+        if (emitOptCode) {
+            if (!ifNotOptimizable.emitEnd())
                 return false;
         }
 
         if (isNewOp) {
             if (pn->isKind(PNK_SUPERCALL)) {
                 if (!emit1(JSOP_NEWTARGET))
                     return false;
             } else {
--- a/js/src/frontend/BytecodeEmitter.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -748,17 +748,16 @@ struct MOZ_STACK_CLASS BytecodeEmitter
                                        ValueUsage valueUsage = ValueUsage::WantValue);
 
     MOZ_NEVER_INLINE MOZ_MUST_USE bool emitIncOrDec(ParseNode* pn);
 
     MOZ_MUST_USE bool emitConditionalExpression(ConditionalExpression& conditional,
                                                 ValueUsage valueUsage = ValueUsage::WantValue);
 
     bool isRestParameter(ParseNode* pn);
-    MOZ_MUST_USE bool emitOptimizeSpread(ParseNode* arg0, JumpList* jmp, bool* emitted);
 
     MOZ_MUST_USE bool emitCallOrNew(ParseNode* pn, ValueUsage valueUsage = ValueUsage::WantValue);
     MOZ_MUST_USE bool emitSelfHostedCallFunction(ParseNode* pn);
     MOZ_MUST_USE bool emitSelfHostedResumeGenerator(ParseNode* pn);
     MOZ_MUST_USE bool emitSelfHostedForceInterpreter(ParseNode* pn);
     MOZ_MUST_USE bool emitSelfHostedAllowContentIter(ParseNode* pn);
     MOZ_MUST_USE bool emitSelfHostedDefineDataProperty(ParseNode* pn);
     MOZ_MUST_USE bool emitSelfHostedHasOwn(ParseNode* pn);
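For context, the rewritten emitter only prepares the fast path when the single spread argument is syntactically the caller's own rest parameter; whether the fast path is actually taken is still decided at runtime by JSOP_OPTIMIZE_SPREADCALL. A sketch of the shapes involved (not an exhaustive list of the runtime conditions):

    // Candidate for the optimized spread call: the rest parameter is spread directly.
    function f(...args) {
      return g(...args);
    }

    // Not a candidate: `arr` is not a rest parameter of the calling function,
    // so isRestParameter() rejects it and the normal array path is emitted.
    function h(arr) {
      return g(...arr);
    }
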
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -2311,16 +2311,26 @@ IonBuilder::inspectOpcode(JSOp op)
 #endif
         return Ok();
       }
 
       case JSOP_IS_CONSTRUCTING:
         pushConstant(MagicValue(JS_IS_CONSTRUCTING));
         return Ok();
 
+      case JSOP_OPTIMIZE_SPREADCALL:
+      {
+        // Assuming optimization isn't available doesn't affect correctness.
+        // TODO: Investigate dynamic checks.
+        MDefinition* arr = current->peek(-1);
+        arr->setImplicitlyUsedUnchecked();
+        pushConstant(BooleanValue(false));
+        return Ok();
+      }
+
       default:
         break;
     }
 
     // Track a simpler message, since the actionable abort message is a
     // static string, and the internal opcode name isn't an actionable
     // thing anyways.
     trackActionableAbort("Unsupported bytecode");
--- a/js/src/jit/IonControlFlow.cpp
+++ b/js/src/jit/IonControlFlow.cpp
@@ -331,17 +331,16 @@ ControlFlowGenerator::snoopControlFlow(J
         return processAndOr(op);
 
       case JSOP_LABEL:
         return processLabel();
 
       case JSOP_TRY:
         return processTry();
 
-      case JSOP_OPTIMIZE_SPREADCALL:
       case JSOP_THROWMSG:
         // Not implemented yet.
         return ControlStatus::Abort;
 
       default:
         break;
     }
     return ControlStatus::None;
--- a/js/xpconnect/src/XPCLocale.cpp
+++ b/js/xpconnect/src/XPCLocale.cpp
@@ -231,17 +231,24 @@ private:
 #ifdef DEBUG
   PRThread* mThread;
 #endif
 };
 
 bool
 xpc_LocalizeContext(JSContext* cx)
 {
-  JS_SetLocaleCallbacks(cx, new XPCLocaleCallbacks());
+  // We want to assign the locale callbacks only the first time we
+  // localize the context.
+  // All subsequent calls to this function are the result of language changes
+  // and should not assign them again.
+  const JSLocaleCallbacks* lc = JS_GetLocaleCallbacks(cx);
+  if (!lc) {
+    JS_SetLocaleCallbacks(cx, new XPCLocaleCallbacks());
+  }
 
   // Set the default locale.
 
   // Check a pref to see if we should use US English locale regardless
   // of the system locale.
   if (Preferences::GetBool("javascript.use_us_english_locale", false)) {
     return JS_SetDefaultLocale(cx, "en-US");
   }
--- a/layout/style/ServoBindingList.h
+++ b/layout/style/ServoBindingList.h
@@ -158,16 +158,19 @@ SERVO_BINDING_FUNC(Servo_Property_IsDisc
 SERVO_BINDING_FUNC(Servo_AnimationValues_Interpolate,
                    RawServoAnimationValueStrong,
                    RawServoAnimationValueBorrowed from,
                    RawServoAnimationValueBorrowed to,
                    double progress)
 SERVO_BINDING_FUNC(Servo_AnimationValues_IsInterpolable, bool,
                    RawServoAnimationValueBorrowed from,
                    RawServoAnimationValueBorrowed to)
+SERVO_BINDING_FUNC(Servo_AnimationValues_ComputeDistance, double,
+                   RawServoAnimationValueBorrowed from,
+                   RawServoAnimationValueBorrowed to)
 SERVO_BINDING_FUNC(Servo_AnimationValue_Serialize, void,
                    RawServoAnimationValueBorrowed value,
                    nsCSSPropertyID property,
                    nsAString* buffer)
 SERVO_BINDING_FUNC(Servo_AnimationValue_GetOpacity, float,
                    RawServoAnimationValueBorrowed value)
 SERVO_BINDING_FUNC(Servo_AnimationValue_GetTransform, void,
                    RawServoAnimationValueBorrowed value,
--- a/layout/style/test/mochitest.ini
+++ b/layout/style/test/mochitest.ini
@@ -219,16 +219,17 @@ skip-if = stylo # bug 1339656
 [test_ident_escaping.html]
 [test_inherit_computation.html]
 skip-if = toolkit == 'android'
 [test_inherit_storage.html]
 [test_initial_computation.html]
 skip-if = toolkit == 'android'
 [test_initial_storage.html]
 [test_keyframes_rules.html]
+[test_keyframes_vendor_prefix.html]
 [test_load_events_on_stylesheets.html]
 [test_logical_properties.html]
 [test_media_queries.html]
 skip-if = android_version == '18' #debug-only failure; timed out #Android 4.3 aws only; bug 1030419
 [test_media_queries_dynamic.html]
 [test_media_queries_dynamic_xbl.html]
 [test_media_query_list.html]
 [test_moz_device_pixel_ratio.html]
new file mode 100644
--- /dev/null
+++ b/layout/style/test/test_keyframes_vendor_prefix.html
@@ -0,0 +1,193 @@
+<!DOCTYPE html>
+<meta charset=utf-8>
+<title>
+Test for interaction between prefixed and non-prefixed @keyframes rules with
+the same name
+</title>
+<script src='/resources/testharness.js'></script>
+<script src='/resources/testharnessreport.js'></script>
+<div id='log'></div>
+<script>
+/**
+ * Appends a style element to the document head.
+ *
+ * @param t  The testharness.js Test object. If provided, this will be used
+ *           to register a cleanup callback to remove the style element
+ *           when the test finishes.
+ *
+ * @param rules  A dictionary object with selector names and rules to set on
+ *               the style sheet.
+ */
+function addStyle(t, rules) {
+  var extraStyle = document.createElement('style');
+  document.head.appendChild(extraStyle);
+  if (rules) {
+    var sheet = extraStyle.sheet;
+    for (var selector in rules) {
+      sheet.insertRule(selector + '{' + rules[selector] + '}',
+                       sheet.cssRules.length);
+    }
+  }
+
+  if (t && typeof t.add_cleanup === 'function') {
+    t.add_cleanup(function() {
+      extraStyle.remove();
+    });
+  }
+}
+
+/**
+ * Appends a div to the document body.
+ *
+ * @param t  The testharness.js Test object. If provided, this will be used
+ *           to register a cleanup callback to remove the div when the test
+ *           finishes.
+ *
+ * @param attrs  A dictionary object with attribute names and values to set on
+ *               the div.
+ */
+function addDiv(t, attrs) {
+  var div = document.createElement('div');
+  if (attrs) {
+    for (var attrName in attrs) {
+      div.setAttribute(attrName, attrs[attrName]);
+    }
+  }
+  document.body.appendChild(div);
+  if (t && typeof t.add_cleanup === 'function') {
+    t.add_cleanup(function() {
+      div.remove();
+    });
+  }
+  return div;
+}
+
+var isStylo = false;
+// 'layout.css.servo.enabled' is not yet defined on gecko, so we need a try
+// block.
+try {
+  isStylo = SpecialPowers.getBoolPref('layout.css.servo.enabled');
+} catch(e) {
+}
+
+test(function(t) {
+  addStyle(t,
+    { 'dummy': '', // XXX bug 1336863 hackaround: a single insertRule is broken
+                   // on stylo.
+      '@-webkit-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+      '@-moz-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-moz- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+      '@-WEBKIT-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-WEBKIT- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+      '@-MOZ-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-MOZ- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+      '@-webkit-KEYFRAMES anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit- prefix KEYFRAMES');
+
+test(function(t) {
+  if (!isStylo) {
+    // FIXME: Bug 1312918: later prefixed rule incorrectly overrides earlier
+    // non-prefixed on gecko.
+    return;
+  }
+
+  addStyle(t,
+    { '@keyframes anim':         'from,to { color: rgb(0, 255, 0); }',
+      '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit-keyframes should not override earlier non-prefix keyframes');
+
+test(function(t) {
+  if (!isStylo) {
+    // FIXME: Bug 1312918: later prefixed rule incorrectly overrides earlier
+    // non-prefixed on gecko.
+    return;
+  }
+
+  addStyle(t,
+    { '@keyframes anim':      'from,to { color: rgb(0, 255, 0); }',
+      '@-moz-keyframes anim': 'from,to { color: rgb(255, 0, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-moz-keyframes should not override earlier non-prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { '@-moz-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+      '@keyframes anim':      'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'non-prefix keyframes should override earlier -moz-keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+      '@keyframes anim':         'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'non-prefix keyframes should override earlier -webkit-keyframes');
+
+test(function(t) {
+  addStyle(t,
+    { '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+      '@-moz-keyframes anim':    'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+
+  addStyle(t,
+    { '@-moz-keyframes anim2':    'from,to { color: rgb(255, 0, 0); }',
+      '@-webkit-keyframes anim2': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim2 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'last prefixed keyframes should override earlier prefixed keyframes');
+</script>
--- a/mozglue/build/WindowsDllBlocklist.cpp
+++ b/mozglue/build/WindowsDllBlocklist.cpp
@@ -229,16 +229,19 @@ static const DllBlockInfo sWindowsDllBlo
   // smci*.dll - goobzo crashware (bug 1339908)
   { "smci32.dll", ALL_VERSIONS },
   { "smci64.dll", ALL_VERSIONS },
 
   // Crashes with Internet Download Manager, bug 1333486
   { "idmcchandler7.dll", ALL_VERSIONS },
   { "idmcchandler7_64.dll", ALL_VERSIONS },
 
+  // Nahimic 2 breaks application update (bug 1356637)
+  { "nahimic2devprops.dll", ALL_VERSIONS },
+
   { nullptr, 0 }
 };
 
 #ifndef STATUS_DLL_NOT_FOUND
 #define STATUS_DLL_NOT_FOUND ((DWORD)0xC0000135L)
 #endif
 
 // define this for very verbose dll load debug spew
--- a/netwerk/base/nsChannelClassifier.cpp
+++ b/netwerk/base/nsChannelClassifier.cpp
@@ -43,46 +43,144 @@
 namespace mozilla {
 namespace net {
 
 //
 // MOZ_LOG=nsChannelClassifier:5
 //
 static LazyLogModule gChannelClassifierLog("nsChannelClassifier");
 
-// Whether channels should be annotated as being on the tracking protection
-// list.
-static bool sAnnotateChannelEnabled = false;
-// Whether the priority of the channels annotated as being on the tracking
-// protection list should be lowered.
-static bool sLowerNetworkPriority = false;
-static bool sIsInited = false;
 
 #undef LOG
 #define LOG(args)     MOZ_LOG(gChannelClassifierLog, LogLevel::Debug, args)
 #define LOG_ENABLED() MOZ_LOG_TEST(gChannelClassifierLog, LogLevel::Debug)
 
+#define URLCLASSIFIER_SKIP_HOSTNAMES       "urlclassifier.skipHostnames"
+#define URLCLASSIFIER_TRACKING_WHITELIST   "urlclassifier.trackingWhitelistTable"
+
+// Put CachedPrefs in an anonymous namespace to avoid any collisions from
+// outside this file.
+namespace {
+
+/**
+ * Reading preferences every time a channel is classified is not fast, so we
+ * cache the preference values here and reuse them.
+ */
+class CachedPrefs final
+{
+public:
+  static CachedPrefs* GetInstance();
+
+  void Init();
+  bool IsAllowListExample() { return sAllowListExample; }
+  bool IsLowerNetworkPriority() { return sLowerNetworkPriority; }
+  bool IsAnnotateChannelEnabled() { return sAnnotateChannelEnabled; }
+  nsCString GetTrackingWhiteList() { return mTrackingWhitelist; }
+  void SetTrackingWhiteList(const nsACString& aList) { mTrackingWhitelist = aList; }
+  nsCString GetSkipHostnames() { return mSkipHostnames; }
+  void SetSkipHostnames(const nsACString& aHostnames) { mSkipHostnames = aHostnames; }
+
+private:
+  friend class StaticAutoPtr<CachedPrefs>;
+  CachedPrefs();
+  ~CachedPrefs();
+
+  static void OnPrefsChange(const char* aPrefName, void* );
+
+  // Whether channels should be annotated as being on the tracking protection
+  // list.
+  static bool sAnnotateChannelEnabled;
+  // Whether the priority of the channels annotated as being on the tracking
+  // protection list should be lowered.
+  static bool sLowerNetworkPriority;
+  static bool sAllowListExample;
+
+  nsCString mTrackingWhitelist;
+  nsCString mSkipHostnames;
+
+  static StaticAutoPtr<CachedPrefs> sInstance;
+};
+
+bool CachedPrefs::sAllowListExample = false;
+bool CachedPrefs::sLowerNetworkPriority = false;
+bool CachedPrefs::sAnnotateChannelEnabled = false;
+
+StaticAutoPtr<CachedPrefs> CachedPrefs::sInstance;
+
+// static
+void
+CachedPrefs::OnPrefsChange(const char* aPref, void* aClosure)
+{
+  CachedPrefs* prefs = static_cast<CachedPrefs*> (aClosure);
+
+  if (!strcmp(aPref, URLCLASSIFIER_SKIP_HOSTNAMES)) {
+    nsCString skipHostnames = Preferences::GetCString(URLCLASSIFIER_SKIP_HOSTNAMES);
+    ToLowerCase(skipHostnames);
+    prefs->SetSkipHostnames(skipHostnames);
+  } else if (!strcmp(aPref, URLCLASSIFIER_TRACKING_WHITELIST)) {
+    nsCString trackingWhitelist = Preferences::GetCString(URLCLASSIFIER_TRACKING_WHITELIST);
+    prefs->SetTrackingWhiteList(trackingWhitelist);
+  }
+}
+
+void
+CachedPrefs::Init()
+{
+  Preferences::AddBoolVarCache(&sAnnotateChannelEnabled,
+                               "privacy.trackingprotection.annotate_channels");
+  Preferences::AddBoolVarCache(&sLowerNetworkPriority,
+                               "privacy.trackingprotection.lower_network_priority");
+  Preferences::AddBoolVarCache(&sAllowListExample,
+                               "channelclassifier.allowlist_example");
+  Preferences::RegisterCallbackAndCall(CachedPrefs::OnPrefsChange,
+                                       URLCLASSIFIER_SKIP_HOSTNAMES, this);
+  Preferences::RegisterCallbackAndCall(CachedPrefs::OnPrefsChange,
+                                       URLCLASSIFIER_TRACKING_WHITELIST, this);
+
+}
+
+// static
+CachedPrefs*
+CachedPrefs::GetInstance()
+{
+  if (!sInstance) {
+    sInstance = new CachedPrefs();
+    sInstance->Init();
+    ClearOnShutdown(&sInstance);
+  }
+  MOZ_ASSERT(sInstance);
+  return sInstance;
+}
+
+CachedPrefs::CachedPrefs()
+{
+  MOZ_COUNT_CTOR(CachedPrefs);
+}
+
+CachedPrefs::~CachedPrefs()
+{
+  MOZ_COUNT_DTOR(CachedPrefs);
+
+  Preferences::UnregisterCallback(CachedPrefs::OnPrefsChange, URLCLASSIFIER_SKIP_HOSTNAMES, this);
+  Preferences::UnregisterCallback(CachedPrefs::OnPrefsChange, URLCLASSIFIER_TRACKING_WHITELIST, this);
+}
+} // anonymous namespace
+
 NS_IMPL_ISUPPORTS(nsChannelClassifier,
                   nsIURIClassifierCallback,
                   nsIObserver)
 
 nsChannelClassifier::nsChannelClassifier(nsIChannel *aChannel)
   : mIsAllowListed(false),
     mSuspendedChannel(false),
     mChannel(aChannel),
     mTrackingProtectionEnabled(Nothing())
 {
   MOZ_ASSERT(mChannel);
-  if (!sIsInited) {
-    sIsInited = true;
-    Preferences::AddBoolVarCache(&sAnnotateChannelEnabled,
-                                 "privacy.trackingprotection.annotate_channels");
-    Preferences::AddBoolVarCache(&sLowerNetworkPriority,
-                                 "privacy.trackingprotection.lower_network_priority");
-  }
 }
 
 nsresult
 nsChannelClassifier::ShouldEnableTrackingProtection(bool *result)
 {
   nsresult rv = ShouldEnableTrackingProtectionInternal(mChannel, result);
   mTrackingProtectionEnabled = Some(*result);
   return rv;
@@ -151,18 +249,17 @@ nsChannelClassifier::ShouldEnableTrackin
 
     if (AddonMayLoad(aChannel, chanURI)) {
         return NS_OK;
     }
 
     nsCOMPtr<nsIIOService> ios = do_GetService(NS_IOSERVICE_CONTRACTID, &rv);
     NS_ENSURE_SUCCESS(rv, rv);
 
-    const char ALLOWLIST_EXAMPLE_PREF[] = "channelclassifier.allowlist_example";
-    if (!topWinURI && Preferences::GetBool(ALLOWLIST_EXAMPLE_PREF, false)) {
+    if (!topWinURI && CachedPrefs::GetInstance()->IsAllowListExample()) {
       LOG(("nsChannelClassifier[%p]: Allowlisting test domain\n", this));
       rv = ios->NewURI(NS_LITERAL_CSTRING("http://allowlisted.example.com"),
                        nullptr, nullptr, getter_AddRefs(topWinURI));
       NS_ENSURE_SUCCESS(rv, rv);
     }
 
     // Take the host/port portion so we can allowlist by site. Also ignore the
     // scheme, since users who put sites on the allowlist probably don't expect
@@ -360,24 +457,21 @@ nsChannelClassifier::StartInternal()
     if (hasFlags) return NS_ERROR_UNEXPECTED;
 
     rv = NS_URIChainHasFlags(uri,
                              nsIProtocolHandler::URI_IS_LOCAL_RESOURCE,
                              &hasFlags);
     NS_ENSURE_SUCCESS(rv, rv);
     if (hasFlags) return NS_ERROR_UNEXPECTED;
 
-    // Skip whitelisted hostnames.
-    nsAutoCString whitelisted;
-    Preferences::GetCString("urlclassifier.skipHostnames", &whitelisted);
-    if (!whitelisted.IsEmpty()) {
-      ToLowerCase(whitelisted);
+    nsCString skipHostnames = CachedPrefs::GetInstance()->GetSkipHostnames();
+    if (!skipHostnames.IsEmpty()) {
       LOG(("nsChannelClassifier[%p]:StartInternal whitelisted hostnames = %s",
-           this, whitelisted.get()));
-      if (IsHostnameWhitelisted(uri, whitelisted)) {
+           this, skipHostnames.get()));
+      if (IsHostnameWhitelisted(uri, skipHostnames)) {
         return NS_ERROR_UNEXPECTED;
       }
     }
 
     nsCOMPtr<nsIURIClassifier> uriClassifier =
         do_GetService(NS_URICLASSIFIERSERVICE_CONTRACTID, &rv);
     if (rv == NS_ERROR_FACTORY_NOT_REGISTERED ||
         rv == NS_ERROR_NOT_AVAILABLE) {
@@ -406,17 +500,19 @@ nsChannelClassifier::StartInternal()
       nsCOMPtr<nsIURI> principalURI;
       principal->GetURI(getter_AddRefs(principalURI));
       LOG(("nsChannelClassifier[%p]: Classifying principal %s on channel with "
            "uri %s", this, principalURI->GetSpecOrDefault().get(),
            uri->GetSpecOrDefault().get()));
     }
     // The classify is running in parent process, no need to give a valid event
     // target
-    rv = uriClassifier->Classify(principal, nullptr, sAnnotateChannelEnabled | trackingProtectionEnabled,
+    rv = uriClassifier->Classify(principal, nullptr,
+                                 CachedPrefs::GetInstance()->IsAnnotateChannelEnabled() ||
+                                   trackingProtectionEnabled,
                                  this, &expectCallback);
     if (NS_FAILED(rv)) {
         return rv;
     }
 
     if (expectCallback) {
         // Suspend the channel, it will be resumed when we get the classifier
         // callback.
@@ -728,20 +824,18 @@ nsChannelClassifier::IsTrackerWhiteliste
                                           const nsACString& aProvider,
                                           const nsACString& aPrefix)
 {
   nsresult rv;
   nsCOMPtr<nsIURIClassifier> uriClassifier =
     do_GetService(NS_URICLASSIFIERSERVICE_CONTRACTID, &rv);
   NS_ENSURE_SUCCESS(rv, rv);
 
-  nsAutoCString tables;
-  Preferences::GetCString("urlclassifier.trackingWhitelistTable", &tables);
-
-  if (tables.IsEmpty()) {
+  nsCString trackingWhitelist = CachedPrefs::GetInstance()->GetTrackingWhiteList();
+  if (trackingWhitelist.IsEmpty()) {
     LOG(("nsChannelClassifier[%p]:IsTrackerWhitelisted whitelist disabled",
          this));
     return NS_ERROR_TRACKING_URI;
   }
 
   nsCOMPtr<nsIHttpChannelInternal> chan = do_QueryInterface(mChannel, &rv);
   NS_ENSURE_SUCCESS(rv, rv);
 
@@ -775,17 +869,17 @@ nsChannelClassifier::IsTrackerWhiteliste
   nsCOMPtr<nsIURI> whitelistURI;
   rv = NS_NewURI(getter_AddRefs(whitelistURI), whitelistEntry);
   NS_ENSURE_SUCCESS(rv, rv);
 
   RefPtr<IsTrackerWhitelistedCallback> cb =
     new IsTrackerWhitelistedCallback(this, aList, aProvider, aPrefix,
                                      whitelistEntry);
 
-  return uriClassifier->AsyncClassifyLocalWithTables(whitelistURI, tables, cb);
+  return uriClassifier->AsyncClassifyLocalWithTables(whitelistURI, trackingWhitelist, cb);
 }
 
 NS_IMETHODIMP
 nsChannelClassifier::OnClassifyComplete(nsresult aErrorCode,
                                         const nsACString& aList,
                                         const nsACString& aProvider,
                                         const nsACString& aPrefix)
 {
@@ -818,31 +912,31 @@ nsChannelClassifier::OnClassifyCompleteI
       MarkEntryClassified(aErrorCode);
 
       // The value of |mTrackingProtectionEnabled| should be assigned at
       // |ShouldEnableTrackingProtection| before.
       MOZ_ASSERT(mTrackingProtectionEnabled, "Should contain a value.");
 
       if (aErrorCode == NS_ERROR_TRACKING_URI &&
           !mTrackingProtectionEnabled.valueOr(false)) {
-        if (sAnnotateChannelEnabled) {
+        if (CachedPrefs::GetInstance()->IsAnnotateChannelEnabled()) {
           nsCOMPtr<nsIParentChannel> parentChannel;
           NS_QueryNotificationCallbacks(mChannel, parentChannel);
           if (parentChannel) {
             // This channel is a parent-process proxy for a child process
             // request. We should notify the child process as well.
             parentChannel->NotifyTrackingResource();
           }
           RefPtr<HttpBaseChannel> httpChannel = do_QueryObject(mChannel);
           if (httpChannel) {
             httpChannel->SetIsTrackingResource();
           }
         }
 
-        if (sLowerNetworkPriority) {
+        if (CachedPrefs::GetInstance()->IsLowerNetworkPriority()) {
           if (LOG_ENABLED()) {
             nsCOMPtr<nsIURI> uri;
             mChannel->GetURI(getter_AddRefs(uri));
             LOG(("nsChannelClassifier[%p]: lower the priority of channel %p"
                  ", since %s is a tracker", this, mChannel.get(),
                  uri->GetSpecOrDefault().get()));
           }
           nsCOMPtr<nsISupportsPriority> p = do_QueryInterface(mChannel);
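CachedPrefs is the C++ form of the usual cache-and-observe pattern: read the preference once, then refresh the cached value from a change callback instead of hitting the pref service on every channel. A rough JS analogue for one of the string prefs above (the observer wiring is a sketch, not code from this patch):

    const { Services } = Components.utils.import("resource://gre/modules/Services.jsm", {});

    const SKIP_HOSTNAMES_PREF = "urlclassifier.skipHostnames";

    function readSkipHostnames() {
      try {
        return Services.prefs.getCharPref(SKIP_HOSTNAMES_PREF).toLowerCase();
      } catch (e) {
        return ""; // pref not set
      }
    }

    let cachedSkipHostnames = readSkipHostnames();
    Services.prefs.addObserver(SKIP_HOSTNAMES_PREF, {
      observe() {
        cachedSkipHostnames = readSkipHostnames(); // refresh once per change
      }
    }, false);
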
--- a/netwerk/streamconv/converters/nsIndexedToHTML.cpp
+++ b/netwerk/streamconv/converters/nsIndexedToHTML.cpp
@@ -427,17 +427,16 @@ nsIndexedToHTML::DoOnStartRequest(nsIReq
 
     buffer.AppendLiteral("<link rel=\"icon\" type=\"image/png\" href=\"");
     nsCOMPtr<nsIURI> innerUri = NS_GetInnermostURI(uri);
     if (!innerUri)
         return NS_ERROR_UNEXPECTED;
     nsCOMPtr<nsIFileURL> fileURL(do_QueryInterface(innerUri));
     //XXX bug 388553: can't use skinnable icons here due to security restrictions
     if (fileURL) {
-        //buffer.AppendLiteral("chrome://global/skin/dirListing/local.png");
         buffer.AppendLiteral("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB"
                              "AAAAAQCAYAAAAf8%2F9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i"
                              "ZSBJbWFnZVJlYWR5ccllPAAAAjFJREFUeNqsU8uOElEQPffR"
                              "3XQ3ONASdBJCSBxHos5%2B3Bg3rvkCv8PElS78gPkO%2FATj"
                              "QoUdO2ftrJiRh6aneTb9sOpC4weMN6lcuFV16pxDIfI8x12O"
                              "YIDhcPiu2Wx%2B%2FHF5CW1Z6Jyegt%2FTNEWSJIjjGFEUIQ"
                              "xDrFYrWFSzXC4%2FdLvd95pRKpXKy%2BpRFZ7nwaWo1%2BsG"
                              "nQG2260BKJfLKJVKGI1GEEJw7ateryd0v993W63WEwjgxfn5"
@@ -449,17 +448,16 @@ nsIndexedToHTML::DoOnStartRequest(nsIReq
                              "xfqFUJT36AYBZGd6PJkFCZnnlBxMp38gqIgLpZB0y4Nph18l"
                              "yWh5FFbrOSxbl3V4G%2BVB7T4ajYYxTyuLtO%2BCvWGgJE1M"
                              "c7JNsJEhvgw%2FQV4fo%2F24nbEsX2u1d5sVyn8sJO0ZAQiI"
                              "YnFh%2BxrfLz%2Fj29cBS%2FO14zg3i8XigW3ZkErDtmKoeM"
                              "%2BAJGRMnXeEPGKf0nCD1ydvkDzU9Jbc6OpR7WIw6L8lQ%2B"
                              "4pQ1%2FlPF0RGM9Ns91Wmptk0GfB4EJkt77vXYj%2F8m%2B8"
                              "y%2FkrwABHbz2H9V68DQAAAABJRU5ErkJggg%3D%3D");
     } else {
-        //buffer.AppendLiteral("chrome://global/skin/dirListing/remote.png");
         buffer.AppendLiteral("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB"
                              "AAAAAQCAYAAAAf8%2F9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i"
                              "ZSBJbWFnZVJlYWR5ccllPAAAAeBJREFUeNqcU81O20AQ%2Ft"
                              "Z2AgQSYQRqL1UPVG2hAUQkxLEStz4DrXpLpD5Drz31Cajax%"
                              "2Bghhx6qHIJURBTxIwQRwopCBbZjHMcOTrzermPipsSt1Iw0"
                              "3p3ZmW%2B%2B2R0TxhgOD34wjCHZlQ0iDYz9yvEfhxMTCYhE"
                              "QDIZhkxKd2sqzX2TOD2vBQCQhpPefng1ZP2dVPlLLdpL8SEM"
                              "cxng%2Fbs0RIHhtgs4twxOh%2BHjZxvzDx%2F3GQQiDFISiR"
--- a/services/fxaccounts/FxAccountsConfig.jsm
+++ b/services/fxaccounts/FxAccountsConfig.jsm
@@ -20,16 +20,17 @@ XPCOMUtils.defineLazyModuleGetter(this, 
 
 const CONFIG_PREFS = [
   "identity.fxaccounts.auth.uri",
   "identity.fxaccounts.remote.oauth.uri",
   "identity.fxaccounts.remote.profile.uri",
   "identity.sync.tokenserver.uri",
   "identity.fxaccounts.remote.webchannel.uri",
   "identity.fxaccounts.settings.uri",
+  "identity.fxaccounts.settings.devices.uri",
   "identity.fxaccounts.remote.signup.uri",
   "identity.fxaccounts.remote.signin.uri",
   "identity.fxaccounts.remote.force_auth.uri",
 ];
 
 this.FxAccountsConfig = {
 
   // Returns a promise that resolves with the URI of the remote UI flows.
@@ -148,16 +149,17 @@ this.FxAccountsConfig = {
       Services.prefs.setCharPref("identity.sync.tokenserver.uri", config.sync_tokenserver_base_url + "/1.0/sync/1.5");
       // Update the prefs that are based off of the autoconfig url
 
       let contextParam = encodeURIComponent(
         Services.prefs.getCharPref("identity.fxaccounts.contextParam"));
 
       Services.prefs.setCharPref("identity.fxaccounts.remote.webchannel.uri", rootURL);
       Services.prefs.setCharPref("identity.fxaccounts.settings.uri", rootURL + "/settings?service=sync&context=" + contextParam);
+      Services.prefs.setCharPref("identity.fxaccounts.settings.devices.uri", rootURL + "/settings/clients?service=sync&context=" + contextParam);
       Services.prefs.setCharPref("identity.fxaccounts.remote.signup.uri", rootURL + "/signup?service=sync&context=" + contextParam);
       Services.prefs.setCharPref("identity.fxaccounts.remote.signin.uri", rootURL + "/signin?service=sync&context=" + contextParam);
       Services.prefs.setCharPref("identity.fxaccounts.remote.force_auth.uri", rootURL + "/force_auth?service=sync&context=" + contextParam);
 
       let whitelistValue = Services.prefs.getCharPref("webchannel.allowObject.urlWhitelist");
       if (!whitelistValue.includes(rootURL)) {
         whitelistValue = `${rootURL} ${whitelistValue}`;
         Services.prefs.setCharPref("webchannel.allowObject.urlWhitelist", whitelistValue);
--- a/services/sync/modules/bookmark_repair.js
+++ b/services/sync/modules/bookmark_repair.js
@@ -634,17 +634,17 @@ class BookmarkRepairResponder extends Co
       // anything in that case, so bail now.
       return { toUpload, toDelete };
     }
 
     // which of these items exist on the server?
     let itemSource = engine.itemSource();
     itemSource.ids = repairable.map(item => item.syncId);
     log.trace(`checking the server for items`, itemSource.ids);
-    let itemsResponse = itemSource.get();
+    let itemsResponse = await itemSource.get();
     // If the response failed, don't bother trying to parse the output.
     // Throwing here means we abort the repair, which isn't ideal for transient
     // errors (eg, no network, 500 service outage etc), but we don't currently
     // have a sane/safe way to try again later (we'd need to implement a kind
     // of timeout, otherwise we might end up retrying forever and never remove
     // our request command.) Bug 1347805.
     if (!itemsResponse.success) {
       throw new Error(`request for server IDs failed: ${itemsResponse.status}`);
--- a/services/sync/modules/bookmark_validator.js
+++ b/services/sync/modules/bookmark_validator.js
@@ -808,40 +808,40 @@ class BookmarkValidator {
       }
       if (structuralDifferences.length) {
         problemData.structuralDifferences.push({ id, differences: structuralDifferences });
       }
     }
     return inspectionInfo;
   }
 
-  _getServerState(engine) {
+  async _getServerState(engine) {
 // XXXXX - todo - we need to capture last-modified of the server here and
 // ensure the repairer only applys with if-unmodified-since that date.
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let items = [];
     collection.recordHandler = function(item) {
       item.decrypt(collectionKey);
       items.push(item.cleartext);
     };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
     if (!resp.success) {
       throw resp;
     }
     return items;
   }
 
   async validate(engine) {
     let start = Date.now();
     let clientTree = await PlacesUtils.promiseBookmarksTree("", {
       includeItemIds: true
     });
-    let serverState = this._getServerState(engine);
+    let serverState = await this._getServerState(engine);
     let serverRecordCount = serverState.length;
     let result = await this.compareServerWithClient(serverState, clientTree);
     let end = Date.now();
     let duration = end - start;
     return {
       duration,
       version: this.version,
       problems: result.problemData,
--- a/services/sync/modules/collection_validator.js
+++ b/services/sync/modules/collection_validator.js
@@ -54,26 +54,26 @@ class CollectionValidator {
     this.idProp = idProp;
   }
 
   // Should a custom ProblemData type be needed, return it here.
   emptyProblemData() {
     return new CollectionProblemData();
   }
 
-  getServerItems(engine) {
+  async getServerItems(engine) {
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let items = [];
     collection.recordHandler = function(item) {
       item.decrypt(collectionKey);
       items.push(item.cleartext);
     };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
     if (!resp.success) {
       throw resp;
     }
     return items;
   }
 
   // Should return a promise that resolves to an array of client items.
   getClientItems() {
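
Editor's note: because getServerItems (and _getServerState in the bookmark validator) now await collection.getBatched(), their callers must await them as well. A minimal sketch of the calling pattern, assuming a validator and engine already wired up as elsewhere in this patch; inspectServer is a hypothetical name.

// Hedged sketch: consuming the now-async getServerItems().
async function inspectServer(validator, engine) {
  // getServerItems() throws the response if resp.success is false,
  // exactly as in the hunk above, so callers only see decrypted items.
  let items = await validator.getServerItems(engine);
  return items.length;
}
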
--- a/services/sync/modules/engines.js
+++ b/services/sync/modules/engines.js
@@ -963,17 +963,17 @@ SyncEngine.prototype = {
     tombstone.deleted = true;
     return tombstone;
   },
 
   // Any setup that needs to happen at the beginning of each sync.
   _syncStartup() {
 
     // Determine if we need to wipe on outdated versions
-    let metaGlobal = this.service.recordManager.get(this.metaURL);
+    let metaGlobal = Async.promiseSpinningly(this.service.recordManager.get(this.metaURL));
     let engines = metaGlobal.payload.engines || {};
     let engineData = engines[this.name] || {};
 
     let needsWipe = false;
 
     // Assume missing versions are 0 and wipe the server
     if ((engineData.version || 0) < this.version) {
       this._log.debug("Old engine data: " + [engineData.version, this.version]);
@@ -1228,17 +1228,17 @@ SyncEngine.prototype = {
       if (applyBatch.length == self.applyIncomingBatchSize) {
         doApplyBatch.call(self);
       }
       self._store._sleep(0);
     };
 
     // Only bother getting data from the server if there's new things
     if (this.lastModified == null || this.lastModified > this.lastSync) {
-      let resp = newitems.getBatched();
+      let resp = Async.promiseSpinningly(newitems.getBatched());
       doApplyBatchAndPersistFailed.call(this);
       if (!resp.success) {
         resp.failureCode = ENGINE_DOWNLOAD_FAIL;
         throw resp;
       }
 
       if (aborting) {
         throw aborting;
@@ -1251,17 +1251,17 @@ SyncEngine.prototype = {
 
       // Sort and limit so that on mobile we only get the last X records.
       guidColl.limit = this.downloadLimit;
       guidColl.newer = this.lastSync;
 
       // index: Orders by the sortindex descending (highest weight first).
       guidColl.sort  = "index";
 
-      let guids = guidColl.get();
+      let guids = Async.promiseSpinningly(guidColl.get());
       if (!guids.success)
         throw guids;
 
       // Figure out which guids weren't just fetched then remove any guids that
       // were already waiting and prepend the new ones
       let extra = Utils.arraySub(guids.obj, handled);
       if (extra.length > 0) {
         fetchBatch = Utils.arrayUnion(extra, fetchBatch);
@@ -1284,17 +1284,17 @@ SyncEngine.prototype = {
     while (fetchBatch.length && !aborting) {
       // Reuse the original query, but get rid of the restricting params
       // and batch remaining records.
       newitems.limit = 0;
       newitems.newer = 0;
       newitems.ids = fetchBatch.slice(0, batchSize);
 
       // Reuse the existing record handler set earlier
-      let resp = newitems.get();
+      let resp = Async.promiseSpinningly(newitems.get());
       if (!resp.success) {
         resp.failureCode = ENGINE_DOWNLOAD_FAIL;
         throw resp;
       }
 
       // This batch was successfully applied. Not using
       // doApplyBatchAndPersistFailed() here to avoid writing toFetch twice.
       fetchBatch = fetchBatch.slice(batchSize);
@@ -1755,17 +1755,17 @@ SyncEngine.prototype = {
   // Save the current snapshot so as to calculate changes at next sync
   _syncFinish() {
     this._log.trace("Finishing up sync");
     this._tracker.resetScore();
 
     let doDelete = Utils.bind2(this, function(key, val) {
       let coll = new Collection(this.engineURL, this._recordObj, this.service);
       coll[key] = val;
-      coll.delete();
+      Async.promiseSpinningly(coll.delete());
     });
 
     for (let [key, val] of Object.entries(this._delete)) {
       // Remove the key for future uses
       delete this._delete[key];
 
       // Send a simple delete for the property
       if (key != "ids" || val.length <= 100)
@@ -1821,17 +1821,17 @@ SyncEngine.prototype = {
     test.recordHandler = function recordHandler(record) {
       record.decrypt(key);
       canDecrypt = true;
     };
 
     // Any failure fetching/decrypting will just result in false
     try {
       this._log.trace("Trying to decrypt a record from the server..");
-      test.get();
+      Async.promiseSpinningly(test.get());
     } catch (ex) {
       if (Async.isShutdownException(ex)) {
         throw ex;
       }
       this._log.debug("Failed test decrypt", ex);
     }
 
     return canDecrypt;
@@ -1839,24 +1839,24 @@ SyncEngine.prototype = {
 
   _resetClient() {
     this.resetLastSync();
     this.previousFailed = [];
     this.toFetch = [];
   },
 
   wipeServer() {
-    let response = this.service.resource(this.engineURL).delete();
+    let response = Async.promiseSpinningly(this.service.resource(this.engineURL).delete());
     if (response.status != 200 && response.status != 404) {
       throw response;
     }
     this._resetClient();
   },
 
-  removeClientData() {
+  async removeClientData() {
     // Implement this method in engines that store client specific data
     // on the server.
   },
 
   /*
    * Decide on (and partially effect) an error-handling strategy.
    *
    * Asks the Service to respond to an HMAC error, which might result in keys
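
Editor's note: the engines.js changes keep the engine code synchronous by wrapping the now promise-returning network calls in Async.promiseSpinningly. A minimal sketch of that bridging pattern; fetchSynchronously is a hypothetical name and collection stands in for any Collection/Resource object used above.

// Hedged sketch of the promiseSpinningly bridge used throughout this patch:
// a synchronous caller blocks on a promise-returning request.
function fetchSynchronously(collection) {
  let resp = Async.promiseSpinningly(collection.get());
  if (!resp.success) {
    throw resp; // same error convention as the engine code above
  }
  return resp;
}
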
--- a/services/sync/modules/engines/clients.js
+++ b/services/sync/modules/engines/clients.js
@@ -504,19 +504,19 @@ ClientEngine.prototype = {
     this._store.wipe();
     const logRemoveError = err => this._log.warn("Could not delete json file", err);
     Async.promiseSpinningly(
       Utils.jsonRemove("commands", this).catch(logRemoveError)
         .then(Utils.jsonRemove("commands-syncing", this).catch(logRemoveError))
     );
   },
 
-  removeClientData: function removeClientData() {
+  async removeClientData() {
     let res = this.service.resource(this.engineURL + "/" + this.localID);
-    res.delete();
+    await res.delete();
   },
 
   // Override the default behavior to delete bad records from the server.
   handleHMACMismatch: function handleHMACMismatch(item, mayRetry) {
     this._log.debug("Handling HMAC mismatch for " + item.id);
 
     let base = SyncEngine.prototype.handleHMACMismatch.call(this, item, mayRetry);
     if (base != SyncEngine.kRecoveryStrategy.error)
--- a/services/sync/modules/engines/tabs.js
+++ b/services/sync/modules/engines/tabs.js
@@ -68,19 +68,19 @@ TabEngine.prototype = {
 
   _resetClient() {
     SyncEngine.prototype._resetClient.call(this);
     this._store.wipe();
     this._tracker.modified = true;
     this.hasSyncedThisSession = false;
   },
 
-  removeClientData() {
+  async removeClientData() {
     let url = this.engineURL + "/" + this.service.clientsEngine.localID;
-    this.service.resource(url).delete();
+    await this.service.resource(url).delete();
   },
 
   /**
    * Return a Set of open URLs.
    */
   getOpenURLs() {
     let urls = new Set();
     for (let entry of this._store.getAllTabs()) {
--- a/services/sync/modules/record.js
+++ b/services/sync/modules/record.js
@@ -37,30 +37,30 @@ WBORecord.prototype = {
   get sortindex() {
     if (this.data.sortindex)
       return this.data.sortindex;
     return 0;
   },
 
   // Get thyself from your URI, then deserialize.
   // Set thine 'response' field.
-  fetch: function fetch(resource) {
+  async fetch(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
 
-    let r = resource.get();
+    let r = await resource.get();
     if (r.success) {
       this.deserialize(r);   // Warning! Muffles exceptions!
     }
     this.response = r;
     return this;
   },
 
-  upload: function upload(resource) {
+  upload(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
 
     return resource.put(this);
   },
 
   // Take a base URI string, with trailing slash, and return the URI of this
@@ -218,22 +218,22 @@ this.RecordManager = function RecordMana
 
   this._log = Log.repository.getLogger(this._logName);
   this._records = {};
 }
 RecordManager.prototype = {
   _recordType: CryptoWrapper,
   _logName: "Sync.RecordManager",
 
-  import: function RecordMgr_import(url) {
+  async import(url) {
     this._log.trace("Importing record: " + (url.spec ? url.spec : url));
     try {
       // Clear out the last response with empty object if GET fails
       this.response = {};
-      this.response = this.service.resource(url).get();
+      this.response = await this.service.resource(url).get();
 
       // Don't parse and save the record on failure
       if (!this.response.success)
         return null;
 
       let record = new this._recordType(url);
       record.deserialize(this.response);
 
@@ -242,21 +242,21 @@ RecordManager.prototype = {
       if (Async.isShutdownException(ex)) {
         throw ex;
       }
       this._log.debug("Failed to import record", ex);
       return null;
     }
   },
 
-  get: function RecordMgr_get(url) {
+  get(url) {
     // Use a url string as the key to the hash
     let spec = url.spec ? url.spec : url;
     if (spec in this._records)
-      return this._records[spec];
+      return Promise.resolve(this._records[spec]);
     return this.import(url);
   },
 
   set: function RecordMgr_set(url, record) {
     let spec = url.spec ? url.spec : url;
     return this._records[spec] = record;
   },
 
@@ -698,17 +698,17 @@ Collection.prototype = {
   },
 
   // Similar to get(), but will page through the items `batchSize` at a time,
   // deferring calling the record handler until we've gotten them all.
   //
   // Returns the last response processed, and doesn't run the record handler
   // on any items if a non-success status is received while downloading the
   // records (or if a network error occurs).
-  getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
+  async getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
     let totalLimit = Number(this.limit) || Infinity;
     if (batchSize <= 0 || batchSize >= totalLimit) {
       // Invalid batch sizes should arguably be an error, but they're easy to handle
       return this.get();
     }
 
     if (!this.full) {
       throw new Error("getBatched is unimplemented for guid-only GETs");
@@ -728,17 +728,17 @@ Collection.prototype = {
       do {
         this._onProgress = _onProgress;
         this._onComplete = _onComplete;
         if (batchSize + recordBuffer.length > totalLimit) {
           this.limit = totalLimit - recordBuffer.length;
         }
         this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset });
         // Actually perform the request
-        resp = this.get();
+        resp = await this.get();
         if (!resp.success) {
           break;
         }
 
         // Initialize last modified, or check that something broken isn't happening.
         let lastModified = resp.headers["x-last-modified"];
         if (!lastModifiedTime) {
           lastModifiedTime = lastModified;
@@ -991,17 +991,18 @@ PostQueue.prototype = {
       this.bytesAlreadyBatched = 0;
       this.numAlreadyBatched = 0;
     } else {
       this.bytesAlreadyBatched += queued.length;
       this.numAlreadyBatched += this.numQueued;
     }
     this.queued = "";
     this.numQueued = 0;
-    let response = this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null));
+    let response = Async.promiseSpinningly(
+                    this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null)));
 
     if (!response.success) {
       this.log.trace("Server error response during a batch", response);
       // not clear what we should do here - we expect the consumer of this to
       // abort by throwing in the postCallback below.
       this.postCallback(response, !finalBatchPost);
       return;
     }
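
Editor's note: RecordManager.get now always returns a promise (a cached record is wrapped in Promise.resolve, otherwise import() is used), so callers either await it or spin on it as service.js does below. A small sketch of both call styles; the function names are hypothetical, recordManager and metaURL are assumed to exist as elsewhere in this patch.

// Hedged sketch: the two call styles this patch uses for RecordManager.get().
async function readMeta(recordManager, metaURL) {
  return recordManager.get(metaURL);                            // async callers just await
}
function readMetaSync(recordManager, metaURL) {
  return Async.promiseSpinningly(recordManager.get(metaURL));   // legacy synchronous callers
}
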
--- a/services/sync/modules/resource.js
+++ b/services/sync/modules/resource.js
@@ -128,17 +128,18 @@ AsyncResource.prototype = {
   },
 
   // ** {{{ AsyncResource._createRequest }}} **
   //
   // This method returns a new IO Channel for requests to be made
   // through. It is never called directly, only {{{_doRequest}}} uses it
   // to obtain a request channel.
   //
-  _createRequest: function Res__createRequest(method) {
+  _createRequest(method) {
+    this.method = method;
     let channel = NetUtil.newChannel({uri: this.spec, loadUsingSystemPrincipal: true})
                          .QueryInterface(Ci.nsIRequest)
                          .QueryInterface(Ci.nsIHttpChannel);
 
     channel.loadFlags |= DEFAULT_LOAD_FLAGS;
 
     // Setup a callback to handle channel notifications.
     let listener = new ChannelNotificationListener(this.headerNames);
@@ -167,65 +168,65 @@ AsyncResource.prototype = {
         this._log.trace("HTTP Header " + key + ": ***** (suppressed)");
       else
         this._log.trace("HTTP Header " + key + ": " + headers[key]);
       channel.setRequestHeader(key, headers[key], false);
     }
     return channel;
   },
 
-  _onProgress: function Res__onProgress(channel) {},
+  _onProgress(channel) {},
 
-  _doRequest: function _doRequest(action, data, callback) {
+  _doRequest(action, data) {
     this._log.trace("In _doRequest.");
-    this._callback = callback;
-    let channel = this._createRequest(action);
+    return new Promise((resolve, reject) => {
+      this._deferred = { resolve, reject };
+      let channel = this._createRequest(action);
 
-    if ("undefined" != typeof(data))
-      this._data = data;
+      if ("undefined" != typeof(data))
+        this._data = data;
 
-    // PUT and POST are treated differently because they have payload data.
-    if ("PUT" == action || "POST" == action) {
-      // Convert non-string bodies into JSON
-      if (this._data.constructor.toString() != String)
-        this._data = JSON.stringify(this._data);
-
-      this._log.debug(action + " Length: " + this._data.length);
-      this._log.trace(action + " Body: " + this._data);
+      // PUT and POST are treated differently because they have payload data.
+      if ("PUT" == action || "POST" == action) {
+        // Convert non-string bodies into JSON
+        if (this._data.constructor.toString() != String)
+          this._data = JSON.stringify(this._data);
 
-      let type = ("content-type" in this._headers) ?
-        this._headers["content-type"] : "text/plain";
+        this._log.debug(action + " Length: " + this._data.length);
+        this._log.trace(action + " Body: " + this._data);
 
-      let stream = Cc["@mozilla.org/io/string-input-stream;1"].
-        createInstance(Ci.nsIStringInputStream);
-      stream.setData(this._data, this._data.length);
+        let type = ("content-type" in this._headers) ?
+          this._headers["content-type"] : "text/plain";
 
-      channel.QueryInterface(Ci.nsIUploadChannel);
-      channel.setUploadStream(stream, type, this._data.length);
-    }
+        let stream = Cc["@mozilla.org/io/string-input-stream;1"].
+          createInstance(Ci.nsIStringInputStream);
+        stream.setData(this._data, this._data.length);
 
-    // Setup a channel listener so that the actual network operation
-    // is performed asynchronously.
-    let listener = new ChannelListener(this._onComplete, this._onProgress,
-                                       this._log, this.ABORT_TIMEOUT);
-    channel.requestMethod = action;
-    try {
+        channel.QueryInterface(Ci.nsIUploadChannel);
+        channel.setUploadStream(stream, type, this._data.length);
+      }
+
+      // Set up a channel listener so that the actual network operation
+      // is performed asynchronously.
+      let listener = new ChannelListener(this._onComplete, this._onProgress,
+                                         this._log, this.ABORT_TIMEOUT);
+      channel.requestMethod = action;
       channel.asyncOpen2(listener);
-    } catch (ex) {
-      // asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port.
-      this._log.warn("Caught an error in asyncOpen2", ex);
-      CommonUtils.nextTick(callback.bind(this, ex));
-    }
+    });
   },
 
-  _onComplete: function _onComplete(error, data, channel) {
-    this._log.trace("In _onComplete. Error is " + error + ".");
+  _onComplete(ex, data, channel) {
+    this._log.trace("In _onComplete. Error is " + ex + ".");
 
-    if (error) {
-      this._callback(error);
+    if (ex) {
+      if (!Async.isShutdownException(ex)) {
+        this._log.warn("${action} request to ${url} failed: ${ex}",
+                       { action: this.method, url: this.uri.spec, ex});
+      }
+      this._deferred.reject(ex);
       return;
     }
 
     this._data = data;
     let action = channel.requestMethod;
 
     this._log.trace("Channel: " + channel);
     this._log.trace("Action: " + action);
@@ -319,127 +320,39 @@ AsyncResource.prototype = {
         // Stringify to avoid possibly printing non-printable characters.
         this._log.debug("Parse fail: Response body starts: \"" +
                         JSON.stringify((ret + "").slice(0, 100)) +
                         "\".");
         throw ex;
       }
     }.bind(this));
 
-    this._callback(null, ret);
+    this._deferred.resolve(ret);
   },
 
-  get: function get(callback) {
-    this._doRequest("GET", undefined, callback);
+  get() {
+    return this._doRequest("GET", undefined);
   },
 
-  put: function put(data, callback) {
-    if (typeof data == "function")
-      [data, callback] = [undefined, data];
-    this._doRequest("PUT", data, callback);
+  put(data) {
+    return this._doRequest("PUT", data);
   },
 
-  post: function post(data, callback) {
-    if (typeof data == "function")
-      [data, callback] = [undefined, data];
-    this._doRequest("POST", data, callback);
+  post(data) {
+    return this._doRequest("POST", data);
   },
 
-  delete: function delete_(callback) {
-    this._doRequest("DELETE", undefined, callback);
+  delete() {
+    return this._doRequest("DELETE", undefined);
   }
 };
 
-
-/*
- * Represent a remote network resource, identified by a URI, with a
- * synchronous API.
- *
- * 'Resource' is not recommended for new code. Use the asynchronous API of
- * 'AsyncResource' instead.
- */
-this.Resource = function Resource(uri) {
-  AsyncResource.call(this, uri);
-}
-Resource.prototype = {
-
-  __proto__: AsyncResource.prototype,
-
-  _logName: "Sync.Resource",
-
-  // ** {{{ Resource._request }}} **
-  //
-  // Perform a particular HTTP request on the resource. This method
-  // is never called directly, but is used by the high-level
-  // {{{get}}}, {{{put}}}, {{{post}}} and {{delete}} methods.
-  _request: function Res__request(action, data) {
-    let cb = Async.makeSyncCallback();
-    function callback(error, ret) {
-      if (error)
-        cb.throw(error);
-      else
-        cb(ret);
-    }
-
-    // The channel listener might get a failure code
-    try {
-      this._doRequest(action, data, callback);
-      return Async.waitForSyncCallback(cb);
-    } catch (ex) {
-      if (Async.isShutdownException(ex)) {
-        throw ex;
-      }
-      this._log.warn("${action} request to ${url} failed: ${ex}",
-                     { action, url: this.uri.spec, ex });
-      // Combine the channel stack with this request stack.  Need to create
-      // a new error object for that.
-      let error = Error(ex.message);
-      error.result = ex.result;
-      let chanStack = [];
-      if (ex.stack)
-        chanStack = ex.stack.trim().split(/\n/).slice(1);
-      let requestStack = error.stack.split(/\n/).slice(1);
-
-      // Strip out the args for the last 2 frames because they're usually HUGE!
-      for (let i = 0; i <= 1; i++)
-        requestStack[i] = requestStack[i].replace(/\(".*"\)@/, "(...)@");
-
-      error.stack = chanStack.concat(requestStack).join("\n");
-      throw error;
-    }
-  },
-
-  // ** {{{ Resource.get }}} **
-  //
-  // Perform an asynchronous HTTP GET for this resource.
-  get: function Res_get() {
-    return this._request("GET");
-  },
-
-  // ** {{{ Resource.put }}} **
-  //
-  // Perform a HTTP PUT for this resource.
-  put: function Res_put(data) {
-    return this._request("PUT", data);
-  },
-
-  // ** {{{ Resource.post }}} **
-  //
-  // Perform a HTTP POST for this resource.
-  post: function Res_post(data) {
-    return this._request("POST", data);
-  },
-
-  // ** {{{ Resource.delete }}} **
-  //
-  // Perform a HTTP DELETE for this resource.
-  delete: function Res_delete() {
-    return this._request("DELETE");
-  }
-};
+// TODO: We still export both "Resource" and "AsyncResource" as the same
+// object, but we should decide on one and unify all references.
+this.Resource = AsyncResource;
 
 // = ChannelListener =
 //
 // This object implements the {{{nsIStreamListener}}} interface
 // and is called as the network operation proceeds.
 function ChannelListener(onComplete, onProgress, logger, timeout) {
   this._onComplete = onComplete;
   this._onProgress = onProgress;
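
Editor's note: after this change, get(), put(), post() and delete() on (Async)Resource return promises instead of taking callbacks, and the old synchronous Resource wrapper is reduced to an alias. A minimal usage sketch, assuming a Service-style resource() factory as used in service.js below; deleteCollection is a hypothetical name.

// Hedged sketch: the promise-based Resource API introduced above.
async function deleteCollection(service, url) {
  let response = await service.resource(url).delete();
  if (response.status != 200 && response.status != 404) {
    throw response; // same status handling as wipeServer() below
  }
  return response.headers["x-weave-timestamp"];
}
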
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -201,17 +201,17 @@ Sync11Service.prototype = {
                    "or signaling to other clients.");
 
     // Set the last handled time so that we don't act again.
     this.lastHMACEvent = now;
 
     // Fetch keys.
     let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
     try {
-      let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+      let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
 
       // Save out the ciphertext for when we reupload. If there's a bug in
       // CollectionKeyManager, this will prevent us from uploading junk.
       let cipherText = cryptoKeys.ciphertext;
 
       if (!cryptoResp.success) {
         this._log.warn("Failed to download keys.");
         return false;
@@ -460,17 +460,17 @@ Sync11Service.prototype = {
    * inside engine sync.
    */
   _fetchInfo(url) {
     let infoURL = url || this.infoURL;
 
     this._log.trace("In _fetchInfo: " + infoURL);
     let info;
     try {
-      info = this.resource(infoURL).get();
+      info = Async.promiseSpinningly(this.resource(infoURL).get());
     } catch (ex) {
       this.errorHandler.checkServerError(ex);
       throw ex;
     }
 
     // Always check for errors; this is also where we look for X-Weave-Alert.
     this.errorHandler.checkServerError(info);
     if (!info.success) {
@@ -513,17 +513,17 @@ Sync11Service.prototype = {
         // Don't always set to CREDENTIALS_CHANGED -- we will probably take care of this.
 
         // Fetch storage/crypto/keys.
         let cryptoKeys;
 
         if (infoCollections && (CRYPTO_COLLECTION in infoCollections)) {
           try {
             cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-            let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+            let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
 
             if (cryptoResp.success) {
               this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
               return true;
             } else if (cryptoResp.status == 404) {
               // On failure, ask to generate new keys and upload them.
               // Fall through to the behavior below.
               this._log.warn("Got 404 for crypto/keys, but 'crypto' in info/collections. Regenerating.");
@@ -608,17 +608,17 @@ Sync11Service.prototype = {
       // This is a little weird, if we don't get a node we pretend
       // to succeed, since that probably means we just don't have storage.
       if (this.clusterURL == "" && !this._clusterManager.setCluster()) {
         this.status.sync = NO_SYNC_NODE_FOUND;
         return true;
       }
 
       // Fetch collection info on every startup.
-      let test = this.resource(this.infoURL).get();
+      let test = Async.promiseSpinningly(this.resource(this.infoURL).get());
 
       switch (test.status) {
         case 200:
           // The user is authenticated.
 
           // We have no way of verifying the passphrase right now,
           // so wait until remoteSetup to do so.
           // Just make the most trivial checks.
@@ -712,17 +712,17 @@ Sync11Service.prototype = {
                       "is stale after successful upload.");
       throw new Error("Symmetric key upload failed.");
     }
 
     // Doesn't matter if the timestamp is ahead.
 
     // Download and install them.
     let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-    let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+    let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
     if (cryptoResp.status != 200) {
       this._log.warn("Failed to download keys.");
       throw new Error("Symmetric key download failed.");
     }
     let keysChanged = this.handleFetchedKeys(this.identity.syncKeyBundle,
                                              cryptoKeys, true);
     if (keysChanged) {
       this._log.info("Downloaded keys differed, as expected.");
@@ -734,17 +734,21 @@ Sync11Service.prototype = {
     Svc.Obs.notify("weave:engine:stop-tracking");
     this.status.resetSync();
 
     // Deletion doesn't make sense if we aren't set up yet!
     if (this.clusterURL != "") {
       // Clear client-specific data from the server, including disabled engines.
       for (let engine of [this.clientsEngine].concat(this.engineManager.getAll())) {
         try {
-          engine.removeClientData();
+          // Note: the additional Promise.resolve here handles the fact that
+          // some third-party engines may not return a promise. We can
+          // probably remove this once WebExtensions become mandatory, since
+          // no third-party engines will be allowed to exist then.
+          Async.promiseSpinningly(Promise.resolve().then(() => engine.removeClientData()));
         } catch (ex) {
           this._log.warn(`Deleting client data for ${engine.name} failed`, ex);
         }
       }
       this._log.debug("Finished deleting client data.");
     } else {
       this._log.debug("Skipping client data removal: no cluster URL.");
     }
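
Editor's note: the Promise.resolve wrapper above is how the patch tolerates removeClientData implementations that have not been converted to async yet. A hedged sketch of that defensive pattern; removeClientDataSafely is a hypothetical name and legacyEngine stands in for any third-party engine.

// Hedged sketch: normalising a possibly non-promise return value before spinning on it.
function removeClientDataSafely(legacyEngine) {
  // Promise.resolve() makes a plain return value awaitable, so promiseSpinningly
  // works whether or not removeClientData() returns a promise.
  Async.promiseSpinningly(Promise.resolve().then(() => legacyEngine.removeClientData()));
}
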
@@ -775,17 +779,17 @@ Sync11Service.prototype = {
 
     try {
       this.identity.finalize();
       this.status.__authManager = null;
       this.identity = Status._authManager;
       this._clusterManager = this.identity.createClusterManager(this);
       Svc.Obs.notify("weave:service:start-over:finish");
     } catch (err) {
-      this._log.error("startOver failed to re-initialize the identity manager: " + err);
+      this._log.error("startOver failed to re-initialize the identity manager", err);
       // Still send the observer notification so the current state is
       // reflected in the UI.
       Svc.Obs.notify("weave:service:start-over:finish");
     }
   },
 
   login: function login() {
     function onNotify() {
@@ -842,17 +846,17 @@ Sync11Service.prototype = {
   // supporting the api.
   _fetchServerConfiguration() {
     // This is similar to _fetchInfo, but with different error handling.
 
     let infoURL = this.userBaseURL + "info/configuration";
     this._log.debug("Fetching server configuration", infoURL);
     let configResponse;
     try {
-      configResponse = this.resource(infoURL).get();
+      configResponse = Async.promiseSpinningly(this.resource(infoURL).get());
     } catch (ex) {
       // This is probably a network or similar error.
       this._log.warn("Failed to fetch info/configuration", ex);
       this.errorHandler.checkServerError(ex);
       return false;
     }
 
     if (configResponse.status == 404) {
@@ -872,32 +876,32 @@ Sync11Service.prototype = {
   // Stuff we need to do after login, before we can really do
   // anything (e.g. key setup).
   _remoteSetup: function _remoteSetup(infoResponse) {
     if (!this._fetchServerConfiguration()) {
       return false;
     }
 
     this._log.debug("Fetching global metadata record");
-    let meta = this.recordManager.get(this.metaURL);
+    let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
 
     // Checking modified time of the meta record.
     if (infoResponse &&
         (infoResponse.obj.meta != this.metaModified) &&
         (!meta || !meta.isNew)) {
 
       // Delete the cached meta record...
       this._log.debug("Clearing cached meta record. metaModified is " +
           JSON.stringify(this.metaModified) + ", setting to " +
           JSON.stringify(infoResponse.obj.meta));
 
       this.recordManager.del(this.metaURL);
 
       // ... fetch the current record from the server, and COPY THE FLAGS.
-      let newMeta = this.recordManager.get(this.metaURL);
+      let newMeta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
 
       // If we got a 401, we do not want to create a new meta/global - we
       // should be able to get the existing meta after we get a new node.
       if (this.recordManager.response.status == 401) {
         this._log.debug("Fetching meta/global record on the server returned 401.");
         this.errorHandler.checkServerError(this.recordManager.response);
         return false;
       }
@@ -1087,17 +1091,17 @@ Sync11Service.prototype = {
       // so don't attempt to get it in that case.
       if (this.clusterURL) {
         this.identity.prefetchMigrationSentinel(this);
       }
 
       // Now let's update our declined engines (but only if we have a metaURL;
       // if Sync failed due to no node we will not have one)
       if (this.metaURL) {
-        let meta = this.recordManager.get(this.metaURL);
+        let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
         if (!meta) {
           this._log.warn("No meta/global; can't update declined state.");
           return;
         }
 
         let declinedEngines = new DeclinedEngines(this);
         let didChange = declinedEngines.updateDeclined(meta, this.engineManager);
         if (!didChange) {
@@ -1129,17 +1133,17 @@ Sync11Service.prototype = {
    * Upload meta/global, throwing the response on failure
    * @param {WBORecord} meta meta/global record
    * @throws the response object if the request was not a success
    */
   uploadMetaGlobal(meta) {
     this._log.debug("Uploading meta/global", meta);
     let res = this.resource(this.metaURL);
     res.setHeader("X-If-Unmodified-Since", meta.modified);
-    let response = res.put(meta);
+    let response = Async.promiseSpinningly(res.put(meta));
     if (!response.success) {
       throw response;
     }
     // From https://docs.services.mozilla.com/storage/apis-1.5.html:
     // "Successful responses will return the new last-modified time for the collection."
     meta.modified = response.obj;
     this.recordManager.set(this.metaURL, meta);
   },
@@ -1149,17 +1153,17 @@ Sync11Service.prototype = {
    * @param {WBORecord} cryptoKeys crypto/keys record
    * @param {Number} lastModified known last modified timestamp (in decimal seconds),
    *                 will be used to set the X-If-Unmodified-Since header
    */
   _uploadCryptoKeys(cryptoKeys, lastModified) {
     this._log.debug(`Uploading crypto/keys (lastModified: ${lastModified})`);
     let res = this.resource(this.cryptoKeysURL);
     res.setHeader("X-If-Unmodified-Since", lastModified);
-    return res.put(cryptoKeys);
+    return Async.promiseSpinningly(res.put(cryptoKeys));
   },
 
   _freshStart: function _freshStart() {
     this._log.info("Fresh start. Resetting client.");
     this.resetClient();
     this.collectionKeys.clear();
 
     // Wipe the server.
@@ -1192,17 +1196,17 @@ Sync11Service.prototype = {
   wipeServer: function wipeServer(collections) {
     let response;
     let histogram = Services.telemetry.getHistogramById("WEAVE_WIPE_SERVER_SUCCEEDED");
     if (!collections) {
       // Strip the trailing slash.
       let res = this.resource(this.storageURL.slice(0, -1));
       res.setHeader("X-Confirm-Delete", "1");
       try {
-        response = res.delete();
+        response = Async.promiseSpinningly(res.delete());
       } catch (ex) {
         this._log.debug("Failed to wipe server", ex);
         histogram.add(false);
         throw ex;
       }
       if (response.status != 200 && response.status != 404) {
         this._log.debug("Aborting wipeServer. Server responded with " +
                         response.status + " response for " + this.storageURL);
@@ -1212,17 +1216,17 @@ Sync11Service.prototype = {
       histogram.add(true);
       return response.headers["x-weave-timestamp"];
     }
 
     let timestamp;
     for (let name of collections) {
       let url = this.storageURL + name;
       try {
-        response = this.resource(url).delete();
+        response = Async.promiseSpinningly(this.resource(url).delete());
       } catch (ex) {
         this._log.debug("Failed to wipe '" + name + "' collection", ex);
         histogram.add(false);
         throw ex;
       }
 
       if (response.status != 200 && response.status != 404) {
         this._log.debug("Aborting wipeServer. Server responded with " +
--- a/services/sync/modules/stages/enginesync.js
+++ b/services/sync/modules/stages/enginesync.js
@@ -175,17 +175,17 @@ EngineSynchronizer.prototype = {
       if (!this.service.clusterURL) {
         this._log.debug("Aborting sync, no cluster URL: " +
                         "not uploading new meta/global.");
         this.onComplete(null);
         return;
       }
 
       // Upload meta/global if any engines changed anything.
-      let meta = this.service.recordManager.get(this.service.metaURL);
+      let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
       if (meta.isNew || meta.changed) {
         this._log.info("meta/global changed locally: reuploading.");
         try {
           this.service.uploadMetaGlobal(meta);
           delete meta.isNew;
           delete meta.changed;
         } catch (error) {
           this._log.error("Unable to upload meta/global. Leaving marked as new.");
@@ -337,16 +337,16 @@ EngineSynchronizer.prototype = {
     engineManager.decline(toDecline);
     engineManager.undecline(toUndecline);
 
     Svc.Prefs.resetBranch("engineStatusChanged.");
     this.service._ignorePrefObserver = false;
   },
 
   _updateEnabledEngines() {
-    let meta = this.service.recordManager.get(this.service.metaURL);
+    let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
     let numClients = this.service.scheduler.numClients;
     let engineManager = this.service.engineManager;
 
     this._updateEnabledFromMeta(meta, numClients, engineManager);
   },
 };
 Object.freeze(EngineSynchronizer.prototype);
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -95,23 +95,24 @@ const EHTestsCommon = {
 
 
   generateCredentialsChangedFailure() {
     // Make sync fail due to changed credentials. We simply re-encrypt
     // the keys with a different Sync Key, without changing the local one.
     let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
     let keys = Service.collectionKeys.asWBO();
     keys.encrypt(newSyncKeyBundle);
-    keys.upload(Service.resource(Service.cryptoKeysURL));
+    return keys.upload(Service.resource(Service.cryptoKeysURL));
   },
 
   async setUp(server) {
     await configureIdentity({ username: "johndoe" }, server);
     return EHTestsCommon.generateAndUploadKeys()
   },
 
-  generateAndUploadKeys() {
+  async generateAndUploadKeys() {
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+    let response = await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+    return response.success;
   }
 };
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -718,17 +718,17 @@ add_task(async function test_misreconcil
     }
   };
 
   _("Applying record.");
   engine._processIncoming({
     getBatched() {
       return this.get();
     },
-    get() {
+    async get() {
       this.recordHandler(encrypted);
       return {success: true}
     },
   });
 
   // Ensure that afterwards, toolbar is still there.
   // As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
   // the real GUID, instead using a generated one. Sync does the translation.
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -87,21 +87,21 @@ add_task(async function test_bad_hmac() 
   }
 
   function check_client_deleted(id) {
     let coll = user.collection("clients");
     let wbo  = coll.wbo(id);
     return !wbo || !wbo.payload;
   }
 
-  function uploadNewKeys() {
+  async function uploadNewKeys() {
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
   }
 
   try {
     await configureIdentity({username: "foo"}, server);
     Service.login();
 
     generateNewKeys(Service.collectionKeys);
 
@@ -121,17 +121,17 @@ add_task(async function test_bad_hmac() 
 
     _("Change our keys and our client ID, reupload keys.");
     let oldLocalID  = engine.localID;     // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     _("Sync.");
     engine._sync();
 
     _("Old record " + oldLocalID + " was deleted, new one uploaded.");
     check_clients_count(1);
     check_client_deleted(oldLocalID);
 
@@ -156,24 +156,24 @@ add_task(async function test_bad_hmac() 
     user.collection("clients")._wbos = {};
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(0);
 
-    uploadNewKeys();
+    await uploadNewKeys();
 
     // Sync once to upload a record.
     engine._sync();
     check_clients_count(1);
 
     // Generate and upload new keys, so the old client record is wrong.
-    uploadNewKeys();
+    await uploadNewKeys();
 
     // Create a new client record and new keys. Now our keys are wrong, as well
     // as the object on the server. We'll download the new keys and also delete
     // the bad client record.
     oldLocalID  = engine.localID;         // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     generateNewKeys(Service.collectionKeys);
@@ -312,17 +312,17 @@ add_task(async function test_sync() {
     engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH;
     let lastweek = engine.lastRecordUpload;
     clientWBO().payload = undefined;
     engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > lastweek);
 
     _("Remove client record.");
-    engine.removeClientData();
+    await engine.removeClientData();
     equal(clientWBO().payload, undefined);
 
     _("Time travel one day back, no record uploaded.");
     engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH;
     let yesterday = engine.lastRecordUpload;
     engine._sync();
     equal(clientWBO().payload, undefined);
     equal(engine.lastRecordUpload, yesterday);
--- a/services/sync/tests/unit/test_collection_getBatched.js
+++ b/services/sync/tests/unit/test_collection_getBatched.js
@@ -21,17 +21,17 @@ function recordRange(lim, offset, total)
 function get_test_collection_info({ totalRecords, batchSize, lastModified,
                                     throwAfter = Infinity,
                                     interruptedAfter = Infinity }) {
   let coll = new Collection("http://example.com/test/", WBORecord, Service);
   coll.full = true;
   let requests = [];
   let responses = [];
   let sawRecord = false;
-  coll.get = function() {
+  coll.get = async function() {
     ok(!sawRecord); // make sure we call record handler after all requests.
     let limit = +this.limit;
     let offset = 0;
     if (this.offset) {
       equal(this.offset.slice(0, 6), "foobar");
       offset = +this.offset.slice(6);
     }
     requests.push({
@@ -71,26 +71,26 @@ function get_test_collection_info({ tota
     // ensure records are coming in in the right order
     equal(record.id, String(records.length));
     equal(record.payload, "test:" + records.length);
     records.push(record);
   };
   return { records, responses, requests, coll };
 }
 
-add_test(function test_success() {
+add_task(async function test_success() {
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, responses, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
   });
-  let response = coll.getBatched(batchSize);
+  let response = await coll.getBatched(batchSize);
 
   equal(requests.length, Math.ceil(totalRecords / batchSize));
 
   // records are mostly checked in recordHandler, we just care about the length
   equal(records.length, totalRecords);
 
   // ensure we're returning the last response
   equal(responses[responses.length - 1], response);
@@ -111,85 +111,79 @@ add_test(function test_success() {
     expectedOffset += batchSize;
   }
 
   // ensure we cleaned up anything that would break further
   // use of this collection.
   ok(!coll._headers["x-if-unmodified-since"]);
   ok(!coll.offset);
   ok(!coll.limit || (coll.limit == Infinity));
-
-  run_next_test();
 });
 
-add_test(function test_total_limit() {
+add_task(async function test_total_limit() {
   _("getBatched respects the (initial) value of the limit property");
   const totalRecords = 100;
   const recordLimit = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
   });
   coll.limit = recordLimit;
-  coll.getBatched(batchSize);
+  await coll.getBatched(batchSize);
 
   equal(requests.length, Math.ceil(recordLimit / batchSize));
   equal(records.length, recordLimit);
 
   for (let i = 0; i < requests.length; ++i) {
     let req = requests[i];
     if (i !== requests.length - 1) {
       equal(req.limit, batchSize);
     } else {
       equal(req.limit, recordLimit % batchSize);
     }
   }
 
   equal(coll._limit, recordLimit);
-
-  run_next_test();
 });
 
-add_test(function test_412() {
+add_task(async function test_412() {
   _("We shouldn't record records if we get a 412 in the middle of a batch");
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, responses, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
     interruptedAfter: 3
   });
-  let response = coll.getBatched(batchSize);
+  let response = await coll.getBatched(batchSize);
 
   equal(requests.length, 3);
   equal(records.length, 0); // record handler shouldn't be called for anything
 
   // ensure we're returning the last response
   equal(responses[responses.length - 1], response);
 
   ok(!response.success);
   equal(response.status, 412);
-  run_next_test();
 });
 
-add_test(function test_get_throws() {
+add_task(async function test_get_throws() {
   _("We shouldn't record records if get() throws for some reason");
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
     throwAfter: 3
   });
 
-  throws(() => coll.getBatched(batchSize), "Some Network Error");
+  await Assert.rejects(coll.getBatched(batchSize), "Some Network Error");
 
   equal(requests.length, 3);
   equal(records.length, 0);
-  run_next_test();
 });
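
Editor's note: the test changes above follow one pattern: add_test plus run_next_test becomes add_task with an async function, and synchronous throws() checks become await Assert.rejects(...). A minimal sketch of that conversion, assuming the xpcshell helpers used in this file (add_task, ok, get_test_collection_info); the test name is hypothetical.

// Hedged sketch: the add_task conversion applied throughout these tests.
add_task(async function test_example_batched_get() {
  let { coll } = get_test_collection_info({
    totalRecords: 4, batchSize: 2, lastModified: "111111",
  });
  // No run_next_test() needed; the task ends when the awaited promise settles.
  let response = await coll.getBatched(2);
  ok(response.success);
});
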
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -66,25 +66,25 @@ add_task(async function test_locally_cha
     }
 
     _("Setting meta.");
 
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION};
-    m.upload(Service.resource(Service.metaURL));
+    await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(johndoe.collection("meta").wbo("global")));
 
     // Upload keys.
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     // Check that login works.
     do_check_true(Service.login());
     do_check_true(Service.isLoggedIn);
 
     // Sync should upload records.
     await sync_and_validate_telem();
 
@@ -118,17 +118,17 @@ add_task(async function test_locally_cha
     }
 
     history.timestamp = Date.now() / 1000;
     let old_key_time = johndoe.modified("crypto");
     _("Old key time: " + old_key_time);
 
     // Check that we can decrypt one.
     let rec = new CryptoWrapper("history", "record-no--0");
-    rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
+    await rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
     _(JSON.stringify(rec));
     do_check_true(!!rec.decrypt(liveKeys));
 
     do_check_eq(hmacErrorCount, 0);
 
     // Fill local key cache with bad data.
     corrupt_local_keys();
     _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -115,17 +115,17 @@ add_task(async function test_credentials
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let ping = await sync_and_validate_telem(true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
 
@@ -392,17 +392,17 @@ add_task(async function test_sync_syncAn
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
@@ -445,17 +445,17 @@ add_task(async function test_sync_syncAn
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
@@ -574,17 +574,17 @@ add_task(async function test_sync_prolon
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
 
   let ping = await sync_and_validate_telem(true);
   equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
   deepEqual(ping.failureReason, {
@@ -663,17 +663,17 @@ add_task(async function test_sync_non_ne
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(errorHandler.didReportProlongedError);
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -52,22 +52,22 @@ function sync_httpd_setup() {
   return httpd_setup(handlers);
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe"}, server);
   new FakeCryptoService();
 }
 
-function generateAndUploadKeys(server) {
+async function generateAndUploadKeys(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
-  return serverKeys.upload(res).success;
+  return (await serverKeys.upload(res)).success;
 }
 
 
 add_task(async function test_backoff500() {
   enableValidationPrefs();
 
   _("Test: HTTP 500 sets backoff status.");
   let server = sync_httpd_setup();
@@ -76,17 +76,17 @@ add_task(async function test_backoff500(
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 500};
 
   try {
     do_check_false(Status.enforceBackoff);
 
     // Forcibly create and upload keys here -- otherwise we don't get to the 500!
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
     do_check_true(Status.enforceBackoff);
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetBackoff();
@@ -111,17 +111,17 @@ add_task(async function test_backoff503(
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function(subject) {
     backoffInterval = subject;
   });
 
   try {
     do_check_false(Status.enforceBackoff);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_true(Status.enforceBackoff);
     do_check_eq(backoffInterval, BACKOFF);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
     do_check_eq(Status.sync, SERVER_MAINTENANCE);
@@ -145,17 +145,17 @@ add_task(async function test_overQuota()
   engine.exception = {status: 400,
                       toString() {
                         return "14";
                       }};
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, OVER_QUOTA);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
@@ -225,17 +225,17 @@ add_task(async function test_engine_netw
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
                                           Cr.NS_ERROR_UNKNOWN_HOST);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
@@ -254,17 +254,17 @@ add_task(async function test_resource_ti
   engine.enabled = true;
   // Resource throws this when it encounters a timeout.
   engine.exception = Components.Exception("Aborting due to channel inactivity.",
                                           Cr.NS_ERROR_NET_TIMEOUT);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
--- a/services/sync/tests/unit/test_postqueue.js
+++ b/services/sync/tests/unit/test_postqueue.js
@@ -17,17 +17,17 @@ function makePostQueue(config, lastModTi
     posts: [],
   }
   let poster = (data, headers, batch, commit) => {
     let thisPost = { nbytes: data.length, batch, commit };
     if (headers.length) {
       thisPost.headers = headers;
     }
     stats.posts.push(thisPost);
-    return responseGenerator.next().value;
+    return Promise.resolve(responseGenerator.next().value);
   }
 
   let done = () => {}
   let pq = new PostQueue(poster, lastModTime, config, getTestLogger(), done);
   return { pq, stats };
 }
 
 add_test(function test_simple() {
--- a/services/sync/tests/unit/test_records_wbo.js
+++ b/services/sync/tests/unit/test_records_wbo.js
@@ -2,18 +2,19 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/resource.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
 
-function test_toJSON() {
+add_test(function test_toJSON() {
   _("Create a record, for now without a TTL.");
   let wbo = new WBORecord("coll", "a_record");
   wbo.modified = 12345;
   wbo.sortindex = 42;
   wbo.payload = {};
 
   _("Verify that the JSON representation contains the WBO properties, but not TTL.");
   let json = JSON.parse(JSON.stringify(wbo));
@@ -21,64 +22,57 @@ function test_toJSON() {
   do_check_eq(json.sortindex, 42);
   do_check_eq(json.payload, "{}");
   do_check_false("ttl" in json);
 
   _("Set a TTL, make sure it's present in the JSON representation.");
   wbo.ttl = 30 * 60;
   json = JSON.parse(JSON.stringify(wbo));
   do_check_eq(json.ttl, 30 * 60);
-}
+  run_next_test();
+});
 
 
-function test_fetch() {
+add_task(async function test_fetch() {
   let record = {id: "asdf-1234-asdf-1234",
                 modified: 2454725.98283,
                 payload: JSON.stringify({cheese: "roquefort"})};
   let record2 = {id: "record2",
                  modified: 2454725.98284,
                  payload: JSON.stringify({cheese: "gruyere"})};
   let coll = [{id: "record2",
                modified: 2454725.98284,
                payload: JSON.stringify({cheese: "gruyere"})}];
 
   _("Setting up server.");
   let server = httpd_setup({
     "/record":  httpd_handler(200, "OK", JSON.stringify(record)),
     "/record2": httpd_handler(200, "OK", JSON.stringify(record2)),
     "/coll":    httpd_handler(200, "OK", JSON.stringify(coll))
   });
-  do_test_pending();
 
   try {
     _("Fetching a WBO record");
     let rec = new WBORecord("coll", "record");
-    rec.fetch(Service.resource(server.baseURI + "/record"));
+    await rec.fetch(Service.resource(server.baseURI + "/record"));
     do_check_eq(rec.id, "asdf-1234-asdf-1234"); // NOT "record"!
 
     do_check_eq(rec.modified, 2454725.98283);
     do_check_eq(typeof(rec.payload), "object");
     do_check_eq(rec.payload.cheese, "roquefort");
 
     _("Fetching a WBO record using the record manager");
-    let rec2 = Service.recordManager.get(server.baseURI + "/record2");
+    let rec2 = await Service.recordManager.get(server.baseURI + "/record2");
     do_check_eq(rec2.id, "record2");
     do_check_eq(rec2.modified, 2454725.98284);
     do_check_eq(typeof(rec2.payload), "object");
     do_check_eq(rec2.payload.cheese, "gruyere");
     do_check_eq(Service.recordManager.response.status, 200);
 
     // Testing collection extraction.
     _("Extracting collection.");
     let rec3 = new WBORecord("tabs", "foo");   // Create through constructor.
     do_check_eq(rec3.collection, "tabs");
 
   } finally {
-    server.stop(do_test_finished);
+    await promiseStopServer(server);
   }
-}
-
-function run_test() {
-  initTestLogging("Trace");
-
-  test_toJSON();
-  test_fetch();
-}
+});
--- a/services/sync/tests/unit/test_resource.js
+++ b/services/sync/tests/unit/test_resource.js
@@ -143,21 +143,19 @@ function server_headers(metadata, respon
   for (let header of header_names) {
     headers[header] = metadata.getHeader(header);
   }
   let body = JSON.stringify(headers);
   response.setStatusLine(metadata.httpVersion, 200, "OK");
   response.bodyOutputStream.write(body, body.length);
 }
 
-function run_test() {
+add_task(async function test() {
   initTestLogging("Trace");
 
-  do_test_pending();
-
   let logger = Log.repository.getLogger("Test");
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
   let server = httpd_setup({
     "/open": server_open,
     "/protected": server_protected,
     "/404": server_404,
     "/upload": server_upload,
@@ -174,17 +172,17 @@ function run_test() {
   Svc.Prefs.set("network.numRetries", 1); // speed up test
 
   // This apparently has to come first in order for our PAC URL to be hit.
   // Don't put any other HTTP requests earlier in the file!
   _("Testing handling of proxy auth redirection.");
   PACSystemSettings.PACURI = server.baseURI + "/pac1";
   installFakePAC();
   let proxiedRes = new Resource(server.baseURI + "/open");
-  let content = proxiedRes.get();
+  let content = await proxiedRes.get();
   do_check_true(pacFetched);
   do_check_true(fetched);
   do_check_eq(content, "This path exists");
   pacFetched = fetched = false;
   uninstallFakePAC();
 
   _("Resource object members");
   let res = new Resource(server.baseURI + "/open");
@@ -193,17 +191,17 @@ function run_test() {
   do_check_eq(res.spec, server.baseURI + "/open");
   do_check_eq(typeof res.headers, "object");
   do_check_eq(typeof res.authenticator, "object");
   // Initially res.data is null since we haven't performed a GET or
   // PUT/POST request yet.
   do_check_eq(res.data, null);
 
   _("GET a non-password-protected resource");
-  content = res.get();
+  content = await res.get();
   do_check_eq(content, "This path exists");
   do_check_eq(content.status, 200);
   do_check_true(content.success);
   // res.data has been updated with the result from the request
   do_check_eq(res.data, content);
 
   // Observe logging messages.
   logger = res._log;
@@ -226,197 +224,197 @@ function run_test() {
   do_check_true(didThrow);
   do_check_eq(debugMessages.length, 1);
   do_check_eq(debugMessages[0],
               "Parse fail: Response body starts: \"\"This path exists\"\".");
   logger.debug = dbg;
 
   _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
   let res2 = new Resource(server.baseURI + "/protected");
-  content = res2.get();
+  content = await res2.get();
   do_check_eq(content, "This path exists and is protected - failed");
   do_check_eq(content.status, 401);
   do_check_false(content.success);
 
   _("GET a password protected resource");
   let res3 = new Resource(server.baseURI + "/protected");
   let identityConfig = makeIdentityConfig();
   let browseridManager = Status._authManager;
   configureFxAccountIdentity(browseridManager, identityConfig);
   let auth = browseridManager.getResourceAuthenticator();
   res3.authenticator = auth;
   do_check_eq(res3.authenticator, auth);
-  content = res3.get();
+  content = await res3.get();
   do_check_eq(content, "This path exists and is protected");
   do_check_eq(content.status, 200);
   do_check_true(content.success);
 
   _("GET a non-existent resource (test that it'll fail, but not throw)");
   let res4 = new Resource(server.baseURI + "/404");
-  content = res4.get();
+  content = await res4.get();
   do_check_eq(content, "File not found");
   do_check_eq(content.status, 404);
   do_check_false(content.success);
 
   // Check some headers of the 404 response
   do_check_eq(content.headers.connection, "close");
   do_check_eq(content.headers.server, "httpd.js");
   do_check_eq(content.headers["content-length"], 14);
 
   _("PUT to a resource (string)");
   let res5 = new Resource(server.baseURI + "/upload");
-  content = res5.put(JSON.stringify(sample_data));
+  content = await res5.put(JSON.stringify(sample_data));
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT to a resource (object)");
-  content = res5.put(sample_data);
+  content = await res5.put(sample_data);
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT without data arg (uses resource.data) (string)");
   res5.data = JSON.stringify(sample_data);
-  content = res5.put();
+  content = await res5.put();
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT without data arg (uses resource.data) (object)");
   res5.data = sample_data;
-  content = res5.put();
+  content = await res5.put();
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST to a resource (string)");
-  content = res5.post(JSON.stringify(sample_data));
+  content = await res5.post(JSON.stringify(sample_data));
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST to a resource (object)");
-  content = res5.post(sample_data);
+  content = await res5.post(sample_data);
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST without data arg (uses resource.data) (string)");
   res5.data = JSON.stringify(sample_data);
-  content = res5.post();
+  content = await res5.post();
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST without data arg (uses resource.data) (object)");
   res5.data = sample_data;
-  content = res5.post();
+  content = await res5.post();
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("DELETE a resource");
   let res6 = new Resource(server.baseURI + "/delete");
-  content = res6.delete();
+  content = await res6.delete();
   do_check_eq(content, "This resource has been deleted")
   do_check_eq(content.status, 200);
 
   _("JSON conversion of response body");
   let res7 = new Resource(server.baseURI + "/json");
-  content = res7.get();
+  content = await res7.get();
   do_check_eq(content, JSON.stringify(sample_data));
   do_check_eq(content.status, 200);
   do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
 
   _("X-Weave-Timestamp header updates AsyncResource.serverTime");
   // Before having received any response containing the
   // X-Weave-Timestamp header, AsyncResource.serverTime is null.
   do_check_eq(AsyncResource.serverTime, null);
   let res8 = new Resource(server.baseURI + "/timestamp");
-  content = res8.get();
+  content = await res8.get();
   do_check_eq(AsyncResource.serverTime, TIMESTAMP);
 
   _("GET: no special request headers");
   let res9 = new Resource(server.baseURI + "/headers");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, "{}");
 
   _("PUT: Content-Type defaults to text/plain");
-  content = res9.put("data");
+  content = await res9.put("data");
   do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 
   _("POST: Content-Type defaults to text/plain");
-  content = res9.post("data");
+  content = await res9.post("data");
   do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 
   _("setHeader(): setting simple header");
   res9.setHeader("X-What-Is-Weave", "awesome");
   do_check_eq(res9.headers["x-what-is-weave"], "awesome");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
 
   _("setHeader(): setting multiple headers, overwriting existing header");
   res9.setHeader("X-WHAT-is-Weave", "more awesomer");
   res9.setHeader("X-Another-Header", "hello world");
   do_check_eq(res9.headers["x-what-is-weave"], "more awesomer");
   do_check_eq(res9.headers["x-another-header"], "hello world");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
                                        "x-what-is-weave": "more awesomer"}));
 
   _("Setting headers object");
   res9.headers = {};
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, "{}");
 
   _("PUT/POST: override default Content-Type");
   res9.setHeader("Content-Type", "application/foobar");
   do_check_eq(res9.headers["content-type"], "application/foobar");
-  content = res9.put("data");
+  content = await res9.put("data");
   do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-  content = res9.post("data");
+  content = await res9.post("data");
   do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 
 
   _("X-Weave-Backoff header notifies observer");
   let backoffInterval;
   function onBackoff(subject, data) {
     backoffInterval = subject;
   }
   Observers.add("weave:service:backoff:interval", onBackoff);
 
   let res10 = new Resource(server.baseURI + "/backoff");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(backoffInterval, 600);
 
 
   _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
   let quotaValue;
   function onQuota(subject, data) {
     quotaValue = subject;
   }
   Observers.add("weave:service:quota:remaining", onQuota);
 
   res10 = new Resource(server.baseURI + "/quota-error");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(content.status, 400);
   do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
 
   res10 = new Resource(server.baseURI + "/quota-notice");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(content.status, 200);
   do_check_eq(quotaValue, 1048576);
 
 
   _("Error handling in _request() preserves exception information");
   let error;
   let res11 = new Resource("http://localhost:12345/does/not/exist");
   try {
-    content = res11.get();
+    content = await res11.get();
   } catch (ex) {
     error = ex;
   }
   do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
   do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
   do_check_eq(typeof error.stack, "string");
 
   _("Checking handling of errors in onProgress.");
@@ -425,24 +423,24 @@ function run_test() {
     // Provoke an XPC exception without a Javascript wrapper.
     Services.io.newURI("::::::::");
   };
   res18._onProgress = onProgress;
   let warnings = [];
   res18._log.warn = function(msg) { warnings.push(msg) };
   error = undefined;
   try {
-    content = res18.get();
+    content = await res18.get();
   } catch (ex) {
     error = ex;
   }
 
   // It throws and logs.
   do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
-  do_check_eq(error, "Error: NS_ERROR_MALFORMED_URI");
+  do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
   // Note the strings haven't been formatted yet, but that's OK for this test.
   do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
   do_check_eq(warnings.pop(),
               "Got exception calling onProgress handler during fetch of " +
               server.baseURI + "/json");
 
   // And this is what happens if JS throws an exception.
   res18 = new Resource(server.baseURI + "/json");
@@ -450,37 +448,37 @@ function run_test() {
     throw "BOO!";
   };
   res18._onProgress = onProgress;
   let oldWarn = res18._log.warn;
   warnings = [];
   res18._log.warn = function(msg) { warnings.push(msg) };
   error = undefined;
   try {
-    content = res18.get();
+    content = await res18.get();
   } catch (ex) {
     error = ex;
   }
 
   // It throws and logs.
   do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
-  do_check_eq(error, "Error: NS_ERROR_XPC_JS_THREW_STRING");
+  do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
   do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
   do_check_eq(warnings.pop(),
               "Got exception calling onProgress handler during fetch of " +
               server.baseURI + "/json");
 
   res18._log.warn = oldWarn;
 
   _("Ensure channel timeouts are thrown appropriately.");
   let res19 = new Resource(server.baseURI + "/json");
   res19.ABORT_TIMEOUT = 0;
   error = undefined;
   try {
-    content = res19.get();
+    content = await res19.get();
   } catch (ex) {
     error = ex;
   }
   do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
 
   _("Testing URI construction.");
   let args = [];
   args.push("newer=" + 1234);
@@ -491,9 +489,9 @@ function run_test() {
 
   let uri1 = Utils.makeURI("http://foo/" + query)
                   .QueryInterface(Ci.nsIURL);
   let uri2 = Utils.makeURI("http://foo/")
                   .QueryInterface(Ci.nsIURL);
   uri2.query = query;
   do_check_eq(uri1.query, uri2.query);
   server.stop(do_test_finished);
-}
+});
--- a/services/sync/tests/unit/test_resource_async.js
+++ b/services/sync/tests/unit/test_resource_async.js
@@ -158,39 +158,37 @@ function run_test() {
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
   Svc.Prefs.set("network.numRetries", 1); // speed up test
   run_next_test();
 }
 
 // This apparently has to come first in order for our PAC URL to be hit.
 // Don't put any other HTTP requests earlier in the file!
-add_test(function test_proxy_auth_redirect() {
+add_task(async function test_proxy_auth_redirect() {
   _("Ensure that a proxy auth redirect (which switches out our channel) " +
     "doesn't break AsyncResource.");
   let server = httpd_setup({
     "/open": server_open,
     "/pac2": server_pac
   });
 
   PACSystemSettings.PACURI = server.baseURI + "/pac2";
   installFakePAC();
   let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, result) {
-    do_check_true(!error);
-    do_check_true(pacFetched);
-    do_check_true(fetched);
-    do_check_eq("This path exists", result);
-    pacFetched = fetched = false;
-    uninstallFakePAC();
-    server.stop(run_next_test);
-  });
+  let result = await res.get();
+  do_check_true(pacFetched);
+  do_check_true(fetched);
+  do_check_eq("This path exists", result);
+  pacFetched = fetched = false;
+  uninstallFakePAC();
+  await promiseStopServer(server);
 });
 
-add_test(function test_new_channel() {
+add_task(async function test_new_channel() {
   _("Ensure a redirect to a new channel is handled properly.");
 
   let resourceRequested = false;
   function resourceHandler(metadata, response) {
     resourceRequested = true;
 
     let body = "Test";
     response.setHeader("Content-Type", "text/plain");
@@ -205,25 +203,23 @@ add_test(function test_new_channel() {
     response.bodyOutputStream.write(body, body.length);
   }
 
   let server = httpd_setup({"/resource": resourceHandler,
                             "/redirect": redirectHandler});
   locationURL = server.baseURI + "/resource";
 
   let request = new AsyncResource(server.baseURI + "/redirect");
-  request.get(function onRequest(error, content) {
-    do_check_null(error);
-    do_check_true(resourceRequested);
-    do_check_eq(200, content.status);
-    do_check_true("content-type" in content.headers);
-    do_check_eq("text/plain", content.headers["content-type"]);
+  let content = await request.get();
+  do_check_true(resourceRequested);
+  do_check_eq(200, content.status);
+  do_check_true("content-type" in content.headers);
+  do_check_eq("text/plain", content.headers["content-type"]);
 
-    server.stop(run_next_test);
-  });
+  await promiseStopServer(server);
 });
 
 
 var server;
 
 add_test(function setup() {
   server = httpd_setup({
     "/open": server_open,
@@ -254,438 +250,357 @@ add_test(function test_members() {
   do_check_eq(typeof res.authenticator, "object");
   // Initially res.data is null since we haven't performed a GET or
   // PUT/POST request yet.
   do_check_eq(res.data, null);
 
   run_next_test();
 });
 
-add_test(function test_get() {
+add_task(async function test_get() {
   _("GET a non-password-protected resource");
   let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists");
-    do_check_eq(content.status, 200);
-    do_check_true(content.success);
-    // res.data has been updated with the result from the request
-    do_check_eq(res.data, content);
-
-    // Observe logging messages.
-    let resLogger = res._log;
-    let dbg    = resLogger.debug;
-    let debugMessages = [];
-    resLogger.debug = function(msg) {
-      debugMessages.push(msg);
-      dbg.call(this, msg);
-    }
+  let content = await res.get();
+  do_check_eq(content, "This path exists");
+  do_check_eq(content.status, 200);
+  do_check_true(content.success);
+  // res.data has been updated with the result from the request
+  do_check_eq(res.data, content);
 
-    // Since we didn't receive proper JSON data, accessing content.obj
-    // will result in a SyntaxError from JSON.parse
-    let didThrow = false;
-    try {
-      content.obj;
-    } catch (ex) {
-      didThrow = true;
-    }
-    do_check_true(didThrow);
-    do_check_eq(debugMessages.length, 1);
-    do_check_eq(debugMessages[0],
-                "Parse fail: Response body starts: \"\"This path exists\"\".");
-    resLogger.debug = dbg;
+  // Observe logging messages.
+  let resLogger = res._log;
+  let dbg    = resLogger.debug;
+  let debugMessages = [];
+  resLogger.debug = function(msg) {
+    debugMessages.push(msg);
+    dbg.call(this, msg);
+  };
 
-    run_next_test();
-  });
+  // Since we didn't receive proper JSON data, accessing content.obj
+  // will result in a SyntaxError from JSON.parse
+  let didThrow = false;
+  try {
+    content.obj;
+  } catch (ex) {
+    didThrow = true;
+  }
+  do_check_true(didThrow);
+  do_check_eq(debugMessages.length, 1);
+  do_check_eq(debugMessages[0],
+              "Parse fail: Response body starts: \"\"This path exists\"\".");
+  resLogger.debug = dbg;
 });
 
 add_test(function test_basicauth() {
   _("Test that the BasicAuthenticator doesn't screw up header case.");
   let res1 = new AsyncResource(server.baseURI + "/foo");
   res1.setHeader("Authorization", "Basic foobar");
   do_check_eq(res1._headers["authorization"], "Basic foobar");
   do_check_eq(res1.headers["authorization"], "Basic foobar");
 
   run_next_test();
 });
 
-add_test(function test_get_protected_fail() {
+add_task(async function test_get_protected_fail() {
   _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
   let res2 = new AsyncResource(server.baseURI + "/protected");
-  res2.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists and is protected - failed");
-    do_check_eq(content.status, 401);
-    do_check_false(content.success);
-    run_next_test();
-  });
+  let content = await res2.get();
+  do_check_eq(content, "This path exists and is protected - failed");
+  do_check_eq(content.status, 401);
+  do_check_false(content.success);
 });
 
-add_test(function test_get_protected_success() {
+add_task(async function test_get_protected_success() {
   _("GET a password protected resource");
   let identityConfig = makeIdentityConfig();
   let browseridManager = new BrowserIDManager();
   configureFxAccountIdentity(browseridManager, identityConfig);
   let auth = browseridManager.getResourceAuthenticator();
   let res3 = new AsyncResource(server.baseURI + "/protected");
   res3.authenticator = auth;
   do_check_eq(res3.authenticator, auth);
-  res3.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists and is protected");
-    do_check_eq(content.status, 200);
-    do_check_true(content.success);
-    run_next_test();
-  });
+  let content = await res3.get();
+  do_check_eq(content, "This path exists and is protected");
+  do_check_eq(content.status, 200);
+  do_check_true(content.success);
 });
 
-add_test(function test_get_404() {
+add_task(async function test_get_404() {
   _("GET a non-existent resource (test that it'll fail, but not throw)");
   let res4 = new AsyncResource(server.baseURI + "/404");
-  res4.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "File not found");
-    do_check_eq(content.status, 404);
-    do_check_false(content.success);
+  let content = await res4.get();
+  do_check_eq(content, "File not found");
+  do_check_eq(content.status, 404);
+  do_check_false(content.success);
 
-    // Check some headers of the 404 response
-    do_check_eq(content.headers.connection, "close");
-    do_check_eq(content.headers.server, "httpd.js");
-    do_check_eq(content.headers["content-length"], 14);
-
-    run_next_test();
-  });
+  // Check some headers of the 404 response
+  do_check_eq(content.headers.connection, "close");
+  do_check_eq(content.headers.server, "httpd.js");
+  do_check_eq(content.headers["content-length"], 14);
 });
 
-add_test(function test_put_string() {
+add_task(async function test_put_string() {
   _("PUT to a resource (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put(JSON.stringify(sample_data));
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_object() {
+add_task(async function test_put_object() {
   _("PUT to a resource (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(sample_data, function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put(sample_data);
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_data_string() {
+add_task(async function test_put_data_string() {
   _("PUT without data arg (uses resource.data) (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = JSON.stringify(sample_data);
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put();
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_data_object() {
+add_task(async function test_put_data_object() {
   _("PUT without data arg (uses resource.data) (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = sample_data;
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put();
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_string() {
+add_task(async function test_post_string() {
   _("POST to a resource (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post(JSON.stringify(sample_data));
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_object() {
+add_task(async function test_post_object() {
   _("POST to a resource (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(sample_data, function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post(sample_data);
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_data_string() {
+add_task(async function test_post_data_string() {
   _("POST without data arg (uses resource.data) (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = JSON.stringify(sample_data);
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post();
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_data_object() {
+add_task(async function test_post_data_object() {
   _("POST without data arg (uses resource.data) (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = sample_data;
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post();
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_delete() {
+add_task(async function test_delete() {
   _("DELETE a resource");
   let res6 = new AsyncResource(server.baseURI + "/delete");
-  res6.delete(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This resource has been deleted");
-    do_check_eq(content.status, 200);
-    run_next_test();
-  });
+  let content = await res6.delete();
+  do_check_eq(content, "This resource has been deleted");
+  do_check_eq(content.status, 200);
 });
 
-add_test(function test_json_body() {
+add_task(async function test_json_body() {
   _("JSON conversion of response body");
   let res7 = new AsyncResource(server.baseURI + "/json");
-  res7.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify(sample_data));
-    do_check_eq(content.status, 200);
-    do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
-    run_next_test();
-  });
+  let content = await res7.get();
+  do_check_eq(content, JSON.stringify(sample_data));
+  do_check_eq(content.status, 200);
+  do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
 });
 
-add_test(function test_weave_timestamp() {
+add_task(async function test_weave_timestamp() {
   _("X-Weave-Timestamp header updates AsyncResource.serverTime");
   // Before having received any response containing the
   // X-Weave-Timestamp header, AsyncResource.serverTime is null.
   do_check_eq(AsyncResource.serverTime, null);
   let res8 = new AsyncResource(server.baseURI + "/timestamp");
-  res8.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(AsyncResource.serverTime, TIMESTAMP);
-    run_next_test();
-  });
+  await res8.get();
+  do_check_eq(AsyncResource.serverTime, TIMESTAMP);
 });
 
-add_test(function test_get_no_headers() {
+add_task(async function test_get_no_headers() {
   _("GET: no special request headers");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "{}");
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, "{}");
 });
 
-add_test(function test_put_default_content_type() {
+add_task(async function test_put_default_content_type() {
   _("PUT: Content-Type defaults to text/plain");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-    run_next_test();
-  });
+  let content = await res_headers.put("data");
+  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 });
 
-add_test(function test_post_default_content_type() {
+add_task(async function test_post_default_content_type() {
   _("POST: Content-Type defaults to text/plain");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-    run_next_test();
-  });
+  let content = await res_headers.post("data");
+  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 });
 
-add_test(function test_setHeader() {
+add_task(async function test_setHeader() {
   _("setHeader(): setting simple header");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("X-What-Is-Weave", "awesome");
   do_check_eq(res_headers.headers["x-what-is-weave"], "awesome");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
 });
 
-add_test(function test_setHeader_overwrite() {
+add_task(async function test_setHeader_overwrite() {
   _("setHeader(): setting multiple headers, overwriting existing header");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("X-WHAT-is-Weave", "more awesomer");
   res_headers.setHeader("X-Another-Header", "hello world");
   do_check_eq(res_headers.headers["x-what-is-weave"], "more awesomer");
   do_check_eq(res_headers.headers["x-another-header"], "hello world");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
-                                         "x-what-is-weave": "more awesomer"}));
-
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
+                                       "x-what-is-weave": "more awesomer"}));
 });
 
-add_test(function test_headers_object() {
+add_task(async function test_headers_object() {
   _("Setting headers object");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.headers = {};
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "{}");
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, "{}");
 });
 
-add_test(function test_put_override_content_type() {
+add_task(async function test_put_override_content_type() {
   _("PUT: override default Content-Type");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("Content-Type", "application/foobar");
   do_check_eq(res_headers.headers["content-type"], "application/foobar");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-    run_next_test();
-  });
+  let content = await res_headers.put("data");
+  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 });
 
-add_test(function test_post_override_content_type() {
+add_task(async function test_post_override_content_type() {
   _("POST: override default Content-Type");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("Content-Type", "application/foobar");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-    run_next_test();
-  });
+  let content = await res_headers.post("data");
+  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 });
 
-add_test(function test_weave_backoff() {
+add_task(async function test_weave_backoff() {
   _("X-Weave-Backoff header notifies observer");
   let backoffInterval;
   function onBackoff(subject, data) {
     backoffInterval = subject;
   }
   Observers.add("weave:service:backoff:interval", onBackoff);
 
   let res10 = new AsyncResource(server.baseURI + "/backoff");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(backoffInterval, 600);
-    run_next_test();
-  });
+  await res10.get();
+  do_check_eq(backoffInterval, 600);
+});
+
+add_task(async function test_quota_error() {
+  _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
+  let res10 = new AsyncResource(server.baseURI + "/quota-error");
+  let content = await res10.get();
+  do_check_eq(content.status, 400);
+  do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
 });
 
-add_test(function test_quota_error() {
-  _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
-  let res10 = new AsyncResource(server.baseURI + "/quota-error");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content.status, 400);
-    do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
-    run_next_test();
+add_task(async function test_quota_notice() {
+  let res10 = new AsyncResource(server.baseURI + "/quota-notice");
+  let content = await res10.get();
+  do_check_eq(content.status, 200);
+  do_check_eq(quotaValue, 1048576);
+});
+
+add_task(async function test_preserve_exceptions() {
+  _("Error handling in ChannelListener etc. preserves exception information");
+  let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
+  await Assert.rejects(res11.get(), error => {
+    do_check_neq(error, null);
+    do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+    do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
+    return true;
   });
 });
 
-add_test(function test_quota_notice() {
-  let res10 = new AsyncResource(server.baseURI + "/quota-notice");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content.status, 200);
-    do_check_eq(quotaValue, 1048576);
-    run_next_test();
-  });
-});
-
-add_test(function test_preserve_exceptions() {
-  _("Error handling in ChannelListener etc. preserves exception information");
-  let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
-  res11.get(function(error, content) {
-    do_check_neq(error, null);
-    do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
-    do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
-    run_next_test();
-  });
-});
-
-add_test(function test_xpc_exception_handling() {
+add_task(async function test_xpc_exception_handling() {
   _("Exception handling inside fetches.");
   let res14 = new AsyncResource(server.baseURI + "/json");
   res14._onProgress = function(rec) {
     // Provoke an XPC exception without a Javascript wrapper.
     Services.io.newURI("::::::::");
   };
   let warnings = [];
   res14._log.warn = function(msg) { warnings.push(msg); };
 
-  res14.get(function(error, content) {
+  await Assert.rejects(res14.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
     do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
   });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
 });
 
-add_test(function test_js_exception_handling() {
+add_task(async function test_js_exception_handling() {
   _("JS exception handling inside fetches.");
   let res15 = new AsyncResource(server.baseURI + "/json");
   res15._onProgress = function(rec) {
     throw "BOO!";
   };
   let warnings = [];
   res15._log.warn = function(msg) { warnings.push(msg); };
 
-  res15.get(function(error, content) {
+  await Assert.rejects(res15.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
     do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
   });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
 });
 
-add_test(function test_timeout() {
+add_task(async function test_timeout() {
   _("Ensure channel timeouts are thrown appropriately.");
   let res19 = new AsyncResource(server.baseURI + "/json");
   res19.ABORT_TIMEOUT = 0;
-  res19.get(function(error, content) {
+  await Assert.rejects(res19.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
-    run_next_test();
+    return true;
   });
 });
 
 add_test(function test_uri_construction() {
   _("Testing URI construction.");
   let args = [];
   args.push("newer=" + 1234);
   args.push("limit=" + 1234);
@@ -698,30 +613,15 @@ add_test(function test_uri_construction(
   let uri2 = Utils.makeURI("http://foo/")
                   .QueryInterface(Ci.nsIURL);
   uri2.query = query;
   do_check_eq(uri1.query, uri2.query);
 
   run_next_test();
 });
 
-add_test(function test_not_sending_cookie() {
-  let cookieSer = Cc["@mozilla.org/cookieService;1"]
-                    .getService(Ci.nsICookieService);
-  let uri = CommonUtils.makeURI(server.baseURI);
-  cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
-
-  let res = new AsyncResource(server.baseURI + "/test");
-  res.get(function(error) {
-    do_check_null(error);
-    do_check_true(this.response.success);
-    do_check_eq("COOKIE!", this.response.body);
-    server.stop(run_next_test);
-  });
-});
-
 /**
  * End of tests that rely on a single HTTP server.
  * All tests after this point must begin and end their own.
  */
 add_test(function eliminate_server() {
   server.stop(run_next_test);
 });
--- a/services/sync/tests/unit/test_resource_header.js
+++ b/services/sync/tests/unit/test_resource_header.js
@@ -41,25 +41,25 @@ function triggerRedirect() {
                          "}";
 
   let prefsService = Cc["@mozilla.org/preferences-service;1"].getService(Ci.nsIPrefService);
   let prefs = prefsService.getBranch("network.proxy.");
   prefs.setIntPref("type", 2);
   prefs.setCharPref("autoconfig_url", "data:text/plain," + PROXY_FUNCTION);
 }
 
-add_test(function test_headers_copied() {
+add_task(async function test_headers_copied() {
   triggerRedirect();
 
   _("Issuing request.");
   let resource = new Resource(TEST_URL);
   resource.setHeader("Authorization", "Basic foobar");
   resource.setHeader("X-Foo", "foofoo");
 
-  let result = resource.get(TEST_URL);
+  let result = await resource.get(TEST_URL);
   _("Result: " + result);
 
   do_check_eq(result, BODY);
   do_check_eq(auth, "Basic foobar");
   do_check_eq(foo, "foofoo");
 
-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
 });
--- a/services/sync/tests/unit/test_resource_ua.js
+++ b/services/sync/tests/unit/test_resource_ua.js
@@ -52,47 +52,41 @@ add_test(function test_fetchInfo() {
   Service.login();
   Service._fetchInfo();
   _("User-Agent: " + ua);
   do_check_eq(ua, expectedUA + ".desktop");
   ua = "";
   run_next_test();
 });
 
-add_test(function test_desktop_post() {
+add_task(async function test_desktop_post() {
   _("Testing direct Resource POST.");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.post("foo=bar", function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".desktop");
-    ua = "";
-    run_next_test();
-  });
+  await r.post("foo=bar");
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".desktop");
+  ua = "";
 });
 
-add_test(function test_desktop_get() {
+add_task(async function test_desktop_get() {
   _("Testing async.");
   Svc.Prefs.set("client.type", "desktop");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".desktop");
-    ua = "";
-    run_next_test();
-  });
+  await r.get();
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".desktop");
+  ua = "";
 });
 
-add_test(function test_mobile_get() {
+add_task(async function test_mobile_get() {
   _("Testing mobile.");
   Svc.Prefs.set("client.type", "mobile");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".mobile");
-    ua = "";
-    run_next_test();
-  });
+  await r.get();
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".mobile");
+  ua = "";
 });
 
 add_test(function tear_down() {
   server.stop(run_next_test);
 });
 
--- a/services/sync/tests/unit/test_service_detect_upgrade.js
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -108,79 +108,79 @@ add_task(async function v4_upgrade() {
     Service.sync();
     do_check_true(Service.isLoggedIn);
 
     let serverDecrypted;
     let serverKeys;
     let serverResp;
 
 
-    function retrieve_server_default() {
+    async function retrieve_server_default() {
       serverKeys = serverResp = serverDecrypted = null;
 
       serverKeys = new CryptoWrapper("crypto", "keys");
-      serverResp = serverKeys.fetch(Service.resource(Service.cryptoKeysURL)).response;
+      serverResp = (await serverKeys.fetch(Service.resource(Service.cryptoKeysURL))).response;
       do_check_true(serverResp.success);
 
       serverDecrypted = serverKeys.decrypt(Service.identity.syncKeyBundle);
       _("Retrieved WBO:       " + JSON.stringify(serverDecrypted));
       _("serverKeys:          " + JSON.stringify(serverKeys));
 
       return serverDecrypted.default;
     }
 
-    function retrieve_and_compare_default(should_succeed) {
-      let serverDefault = retrieve_server_default();
+    async function retrieve_and_compare_default(should_succeed) {
+      let serverDefault = await retrieve_server_default();
       let localDefault = Service.collectionKeys.keyForCollection().keyPairB64;
 
       _("Retrieved keyBundle: " + JSON.stringify(serverDefault));
       _("Local keyBundle:     " + JSON.stringify(localDefault));
 
       if (should_succeed)
         do_check_eq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
       else
         do_check_neq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
     }
 
     // Uses the objects set above.
-    function set_server_keys(pair) {
+    async function set_server_keys(pair) {
       serverDecrypted.default = pair;
       serverKeys.cleartext = serverDecrypted;
       serverKeys.encrypt(Service.identity.syncKeyBundle);
-      serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+      await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
     }
 
     _("Checking we have the latest keys.");
-    retrieve_and_compare_default(true);
+    await retrieve_and_compare_default(true);
 
     _("Update keys on server.");
-    set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
-                     "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);
+    await set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
+                           "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);
 
     _("Checking that we no longer have the latest keys.");
-    retrieve_and_compare_default(false);
+    await retrieve_and_compare_default(false);
 
     _("Indeed, they're what we set them to...");
     do_check_eq("KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
-                retrieve_server_default()[0]);
+                (await retrieve_server_default())[0]);
 
     _("Sync. Should download changed keys automatically.");
     let oldClientsModified = collections.clients;
     let oldTabsModified = collections.tabs;
 
     Service.login();
     Service.sync();
     _("New key should have forced upload of data.");
     _("Tabs: " + oldTabsModified + " < " + collections.tabs);
     _("Clients: " + oldClientsModified + " < " + collections.clients);
     do_check_true(collections.clients > oldClientsModified);
     do_check_true(collections.tabs > oldTabsModified);
 
     _("... and keys will now match.");
-    retrieve_and_compare_default(true);
+    await retrieve_and_compare_default(true);
 
     // Clean up.
     Service.startOver();
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
@@ -232,37 +232,37 @@ add_task(async function v5_upgrade() {
 
     Service.clusterURL = server.baseURI + "/";
 
     await configureIdentity({ "username": "johndoe" }, server);
 
     // Test an upgrade where the contents of the server would cause us to error
     // -- keys decrypted with a different sync key, for example.
     _("Testing v4 -> v5 (or similar) upgrade.");
-    function update_server_keys(syncKeyBundle, wboName, collWBO) {
+    async function update_server_keys(syncKeyBundle, wboName, collWBO) {
       generateNewKeys(Service.collectionKeys);
       let serverKeys = Service.collectionKeys.asWBO("crypto", wboName);
       serverKeys.encrypt(syncKeyBundle);
       let res = Service.resource(Service.storageURL + collWBO);
-      do_check_true(serverKeys.upload(res).success);
+      do_check_true((await serverKeys.upload(res)).success);
     }
 
     _("Bumping version.");
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION + 1};
-    m.upload(Service.resource(Service.metaURL));
+    await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(meta_global));
 
     // Fill the keys with bad data.
     let badKeys = new SyncKeyBundle("foobar", "aaaaaaaaaaaaaaaaaaaaaaaaaa");
-    update_server_keys(badKeys, "keys", "crypto/keys");  // v4
-    update_server_keys(badKeys, "bulk", "crypto/bulk");  // v5
+    await update_server_keys(badKeys, "keys", "crypto/keys");  // v4
+    await update_server_keys(badKeys, "bulk", "crypto/bulk");  // v5
 
     _("Generating new keys.");
     generateNewKeys(Service.collectionKeys);
 
     // Now sync and see what happens. It should be a version fail, not a crypto
     // fail.
 
     _("Logging in.");
--- a/services/sync/tests/unit/test_service_startOver.js
+++ b/services/sync/tests/unit/test_service_startOver.js
@@ -9,17 +9,17 @@ Cu.import("resource://testing-common/ser
 
 function BlaEngine() {
   SyncEngine.call(this, "Bla", Service);
 }
 BlaEngine.prototype = {
   __proto__: SyncEngine.prototype,
 
   removed: false,
-  removeClientData() {
+  async removeClientData() {
     this.removed = true;
   }
 
 };
 
 Service.engineManager.register(BlaEngine);
 
 
--- a/services/sync/tests/unit/test_service_sync_remoteSetup.js
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -80,17 +80,17 @@ add_task(async function run_test() {
     Service.verifyAndFetchSymmetricKeys();
     do_check_eq(Service.status.sync, CREDENTIALS_CHANGED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
     await configureIdentity({ username: "johndoe" }, server);
 
     Service.login();
     _("Checking that remoteSetup returns true when credentials have changed.");
-    Service.recordManager.get(Service.metaURL).payload.syncID = "foobar";
+    (await Service.recordManager.get(Service.metaURL)).payload.syncID = "foobar";
     do_check_true(Service._remoteSetup());
 
     let returnStatusCode = (method, code) => (oldMethod) => (req, res) => {
       if (req.method === method) {
         res.setStatusLine(req.httpVersion, code, "");
       } else {
         oldMethod(req, res);
       }
--- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -260,17 +260,17 @@ add_task(async function test_enabledRemo
   await setUp(server);
 
   // We need to be very careful how we do this, so that we don't trigger a
   // fresh start!
   try {
     _("Upload some keys to avoid a fresh start.");
     let wbo = Service.collectionKeys.generateNewKeysWBO();
     wbo.encrypt(Service.identity.syncKeyBundle);
-    do_check_eq(200, wbo.upload(Service.resource(Service.cryptoKeysURL)).status);
+    do_check_eq(200, (await wbo.upload(Service.resource(Service.cryptoKeysURL))).status);
 
     _("Engine is disabled.");
     do_check_false(engine.enabled);
 
     _("Sync.");
     Service.sync();
 
     _("Engine is enabled.");
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -94,17 +94,17 @@ add_task(async function test_syncStartup
   await SyncTestingInfrastructure(server);
 
   let engine = makeRotaryEngine();
   engine._store.items = {rekolok: "Rekonstruktionslokomotive"};
   try {
 
     // Confirm initial environment
     do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
-    let metaGlobal = Service.recordManager.get(engine.metaURL);
+    let metaGlobal = await Service.recordManager.get(engine.metaURL);
     do_check_eq(metaGlobal.payload.engines, undefined);
     do_check_true(!!collection.payload("flying"));
     do_check_true(!!collection.payload("scotsman"));
 
     engine.lastSync = Date.now() / 1000;
     engine.lastSyncLocal = Date.now();
 
     // Trying to prompt a wipe -- we no longer track CryptoMeta per engine,
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -58,17 +58,17 @@ function sync_httpd_setup() {
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe@mozilla.com"}, server);
 
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
-  let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+  let result = (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
   return result;
 }
 
 async function cleanUpAndGo(server) {
   await promiseNextTick();
   clientsEngine._store.wipe();
   Service.startOver();
   if (server) {
--- a/services/sync/tests/unit/test_warn_on_truncated_response.js
+++ b/services/sync/tests/unit/test_warn_on_truncated_response.js
@@ -33,45 +33,43 @@ function getWarningMessages(log) {
     if (message.match(regEx)) {
       warnMessages.push(message);
     }
     warn.call(log, message);
   }
   return warnMessages;
 }
 
-add_test(function test_resource_logs_content_length_mismatch() {
+add_task(async function test_resource_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let resource = new Resource(httpServer.baseURI + "/content");
 
   let warnMessages = getWarningMessages(resource._log);
-  let result = resource.get();
+  let result = await resource.get();
 
   notEqual(warnMessages.length, 0, "test that a warning was logged");
   notEqual(result.length, contentLength);
   equal(result, BODY);
 
-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
 });
 
-add_test(function test_async_resource_logs_content_length_mismatch() {
+add_task(async function test_async_resource_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let asyncResource = new AsyncResource(httpServer.baseURI + "/content");
 
   let warnMessages = getWarningMessages(asyncResource._log);
 
-  asyncResource.get(function(error, content) {
-    equal(error, null);
-    equal(content, BODY);
-    notEqual(warnMessages.length, 0, "test that warning was logged");
-    notEqual(content.length, contentLength);
-    httpServer.stop(run_next_test);
-  });
+  let content = await asyncResource.get();
+  equal(content, BODY);
+  notEqual(warnMessages.length, 0, "test that warning was logged");
+  notEqual(content.length, contentLength);
+  await promiseStopServer(httpServer);
 });
 
 add_test(function test_sync_storage_request_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let request = new SyncStorageRequest(httpServer.baseURI + "/content");
   let warnMessages = getWarningMessages(request._log);
 
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -667,17 +667,17 @@ var TPS = {
 
   ValidateCollection(engineName, ValidatorType) {
     let serverRecordDumpStr;
     let clientRecordDumpStr;
     try {
       Logger.logInfo(`About to perform validation for "${engineName}"`);
       let engine = Weave.Service.engineManager.get(engineName);
       let validator = new ValidatorType(engine);
-      let serverRecords = validator.getServerItems(engine);
+      let serverRecords = Async.promiseSpinningly(validator.getServerItems(engine));
       let clientRecords = Async.promiseSpinningly(validator.getClientItems());
       try {
         // This substantially improves the logs for addons while not making a
         // substantial difference for the other two
         clientRecordDumpStr = JSON.stringify(clientRecords.map(r => {
           let res = validator.normalizeClientItem(r);
           delete res.original; // Try and prevent cyclic references
           return res;
--- a/servo/components/style/gecko/media_queries.rs
+++ b/servo/components/style/gecko/media_queries.rs
@@ -11,19 +11,19 @@ use font_metrics::get_metrics_provider_f
 use gecko_bindings::bindings;
 use gecko_bindings::structs::{nsCSSKeyword, nsCSSProps_KTableEntry, nsCSSValue, nsCSSUnit, nsStringBuffer};
 use gecko_bindings::structs::{nsMediaExpression_Range, nsMediaFeature};
 use gecko_bindings::structs::{nsMediaFeature_ValueType, nsMediaFeature_RangeType, nsMediaFeature_RequirementFlags};
 use gecko_bindings::structs::RawGeckoPresContextOwned;
 use media_queries::MediaType;
 use parser::ParserContext;
 use properties::ComputedValues;
-use std::ascii::AsciiExt;
 use std::fmt::{self, Write};
 use std::sync::Arc;
+use str::starts_with_ignore_ascii_case;
 use string_cache::Atom;
 use style_traits::ToCss;
 use style_traits::viewport::ViewportConstraints;
 use values::{CSSFloat, specified};
 use values::computed::{self, ToComputedValue};
 
 /// The `Device` in Gecko wraps a pres context, has a default values computed,
 /// and contains all the viewport rule state.
@@ -335,21 +335,16 @@ impl MediaExpressionValue {
                 let string = str::from_utf8_unchecked(buffer);
 
                 dest.write_str(string)
             }
         }
     }
 }
 
-fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
-    string.len() > prefix.len() &&
-      string[0..prefix.len()].eq_ignore_ascii_case(prefix)
-}
-
 fn find_feature<F>(mut f: F) -> Option<&'static nsMediaFeature>
     where F: FnMut(&'static nsMediaFeature) -> bool,
 {
     // FIXME(emilio): With build-time bindgen, we would be able to use
     // structs::nsMediaFeatures_features. That would unfortunately break MSVC
     // builds, or require one bindings file per platform.
     //
     // I'm not into any of those, so meanwhile let's use a FFI function.
--- a/servo/components/style/gecko_bindings/bindings.rs
+++ b/servo/components/style/gecko_bindings/bindings.rs
@@ -1749,16 +1749,23 @@ extern "C" {
 extern "C" {
     pub fn Servo_AnimationValues_IsInterpolable(from:
                                                     RawServoAnimationValueBorrowed,
                                                 to:
                                                     RawServoAnimationValueBorrowed)
      -> bool;
 }
 extern "C" {
+    pub fn Servo_AnimationValues_ComputeDistance(from:
+                                                     RawServoAnimationValueBorrowed,
+                                                 to:
+                                                     RawServoAnimationValueBorrowed)
+     -> f64;
+}
+extern "C" {
     pub fn Servo_AnimationValue_Serialize(value:
                                               RawServoAnimationValueBorrowed,
                                           property: nsCSSPropertyID,
                                           buffer: *mut nsAString);
 }
 extern "C" {
     pub fn Servo_AnimationValue_GetOpacity(value:
                                                RawServoAnimationValueBorrowed)
--- a/servo/components/style/keyframes.rs
+++ b/servo/components/style/keyframes.rs
@@ -13,17 +13,17 @@ use properties::{Importance, PropertyDec
 use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
 use properties::LonghandIdSet;
 use properties::animated_properties::TransitionProperty;
 use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
 use shared_lock::{SharedRwLock, SharedRwLockReadGuard, Locked, ToCssWithGuard};
 use std::fmt;
 use std::sync::Arc;
 use style_traits::ToCss;
-use stylesheets::{CssRuleType, MemoryHoleReporter, Stylesheet};
+use stylesheets::{CssRuleType, MemoryHoleReporter, Stylesheet, VendorPrefix};
 
 /// A number from 0 to 1, indicating the percentage of the animation when this
 /// keyframe should run.
 #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
 #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
 pub struct KeyframePercentage(pub f32);
 
 impl ::std::cmp::Ord for KeyframePercentage {
@@ -234,16 +234,18 @@ impl KeyframesStep {
 /// It only takes animatable properties into account.
 #[derive(Debug)]
 #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
 pub struct KeyframesAnimation {
     /// The different steps of the animation.
     pub steps: Vec<KeyframesStep>,
     /// The properties that change in this animation.
     pub properties_changed: Vec<TransitionProperty>,
+    /// The vendor prefix of the @keyframes rule, if any.
+    pub vendor_prefix: Option<VendorPrefix>,
 }
 
 /// Get all the animated properties in a keyframes animation.
 fn get_animated_properties(keyframes: &[Arc<Locked<Keyframe>>], guard: &SharedRwLockReadGuard)
                            -> Vec<TransitionProperty> {
     let mut ret = vec![];
     let mut seen = LonghandIdSet::new();
     // NB: declarations are already deduplicated, so we don't have to check for
@@ -270,21 +272,24 @@ impl KeyframesAnimation {
     /// Create a keyframes animation from a given list of keyframes.
     ///
     /// This will return a keyframe animation with empty steps and
     /// properties_changed if the list of keyframes is empty, or there are no
     /// animated properties obtained from the keyframes.
     ///
     /// Otherwise, this will compute and sort the steps used for the animation,
     /// and return the animation object.
-    pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>], guard: &SharedRwLockReadGuard)
+    pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>],
+                          vendor_prefix: Option<VendorPrefix>,
+                          guard: &SharedRwLockReadGuard)
                           -> Self {
         let mut result = KeyframesAnimation {
             steps: vec![],
             properties_changed: vec![],
+            vendor_prefix: vendor_prefix,
         };
 
         if keyframes.is_empty() {
             return result;
         }
 
         result.properties_changed = get_animated_properties(keyframes, guard);
         if result.properties_changed.is_empty() {
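
A minimal sketch of how the new vendor_prefix argument flows through from_keyframes, using placeholder types instead of the real KeyframesStep and TransitionProperty; it only mirrors the empty-keyframes early return shown above and is not the Servo code itself:

#[derive(Clone, Debug, PartialEq)]
enum VendorPrefix { Moz, WebKit }

#[derive(Debug, Default)]
struct KeyframesAnimation {
    steps: Vec<u32>,                 // stand-in for Vec<KeyframesStep>
    properties_changed: Vec<String>, // stand-in for Vec<TransitionProperty>
    vendor_prefix: Option<VendorPrefix>,
}

impl KeyframesAnimation {
    fn from_keyframes(keyframes: &[u32], vendor_prefix: Option<VendorPrefix>) -> Self {
        let result = KeyframesAnimation {
            vendor_prefix,
            ..Default::default()
        };
        if keyframes.is_empty() {
            // Empty input yields empty steps and properties_changed,
            // but the prefix is still recorded.
            return result;
        }
        // The real implementation computes properties_changed and the
        // sorted steps here before returning.
        result
    }
}

fn main() {
    let anim = KeyframesAnimation::from_keyframes(&[], Some(VendorPrefix::WebKit));
    assert!(anim.steps.is_empty() && anim.properties_changed.is_empty());
    assert_eq!(anim.vendor_prefix, Some(VendorPrefix::WebKit));
}
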
--- a/servo/components/style/properties/helpers.mako.rs
+++ b/servo/components/style/properties/helpers.mako.rs
@@ -90,16 +90,29 @@
 
                 % if delegate_animate:
                     use properties::animated_properties::Interpolate;
                     impl Interpolate for T {
                         fn interpolate(&self, other: &Self, progress: f64) -> Result<Self, ()> {
                             self.0.interpolate(&other.0, progress).map(T)
                         }
                     }
+
+                    use properties::animated_properties::ComputeDistance;
+                    impl ComputeDistance for T {
+                        #[inline]
+                        fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+                            self.0.compute_distance(&other.0)
+                        }
+
+                        #[inline]
+                        fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+                            self.0.compute_squared_distance(&other.0)
+                        }
+                    }
                 % endif
             }
 
             impl ToCss for computed_value::T {
                 fn to_css<W>(&self, dest: &mut W) -> fmt::Result
                     where W: fmt::Write,
                 {
                     let mut iter = self.0.iter();
@@ -780,8 +793,46 @@
                 },
                 (&T(None), &T(None)) => {
                     Ok(T(None))
                 },
             }
         }
     }
 </%def>
+
+/// Macro for implementing the ComputeDistance trait for a tuple struct that wraps an Option<T>,
+/// e.g. struct T(pub Option<Au>).
+<%def name="impl_compute_distance_for_option_tuple(value_for_none)">
+    impl ComputeDistance for T {
+        #[inline]
+        fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+            match (self, other) {
+                (&T(Some(ref this)), &T(Some(ref other))) => {
+                    this.compute_distance(other)
+                },
+                (&T(Some(ref value)), &T(None)) |
+                (&T(None), &T(Some(ref value))) => {
+                    value.compute_distance(&${value_for_none})
+                },
+                (&T(None), &T(None)) => {
+                    Ok(0.0)
+                },
+            }
+        }
+
+        #[inline]
+        fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+            match (self, other) {
+                (&T(Some(ref this)), &T(Some(ref other))) => {
+                    this.compute_squared_distance(other)
+                },
+                (&T(Some(ref value)), &T(None)) |
+                (&T(None), &T(Some(ref value))) => {
+                    value.compute_squared_distance(&${value_for_none})
+                },
+                (&T(None), &T(None)) => {
+                    Ok(0.0)
+                },
+            }
+        }
+    }
+</%def>
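
As a rough illustration of what the Mako macro above generates for a tuple struct like T(pub Option<Au>) with value_for_none set to Au(0), here is a self-contained sketch; the Au and ComputeDistance definitions below are simplified stand-ins, not the real Servo types:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Au(i32);

trait ComputeDistance: Sized {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()>;
    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
        self.compute_distance(other).map(|d| d * d)
    }
}

impl ComputeDistance for Au {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        Ok((self.0 - other.0).abs() as f64)
    }
}

// Roughly what the macro body expands to when ${value_for_none} is Au(0).
struct T(pub Option<Au>);

impl ComputeDistance for T {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        match (self, other) {
            (&T(Some(ref this)), &T(Some(ref other))) => this.compute_distance(other),
            (&T(Some(ref value)), &T(None)) |
            (&T(None), &T(Some(ref value))) => value.compute_distance(&Au(0)),
            (&T(None), &T(None)) => Ok(0.0),
        }
    }
}

fn main() {
    // A missing value is measured against the zero value, Au(0).
    assert_eq!(T(Some(Au(4))).compute_distance(&T(None)), Ok(4.0));
    assert_eq!(T(None).compute_distance(&T(None)), Ok(0.0));
}
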
--- a/servo/components/style/properties/helpers/animated_properties.mako.rs
+++ b/servo/components/style/properties/helpers/animated_properties.mako.rs
@@ -5,17 +5,17 @@
 <%namespace name="helpers" file="/helpers.mako.rs" />
 
 use app_units::Au;
 use cssparser::{Color as CSSParserColor, Parser, RGBA};
 use euclid::{Point2D, Size2D};
 #[cfg(feature = "gecko")] use gecko_bindings::structs::nsCSSPropertyID;
 use properties::{CSSWideKeyword, PropertyDeclaration};
 use properties::longhands;
-use properties::longhands::background_size::computed_value::T as BackgroundSize;
+use properties::longhands::background_size::computed_value::T as BackgroundSizeList;
 use properties::longhands::font_weight::computed_value::T as FontWeight;
 use properties::longhands::line_height::computed_value::T as LineHeight;
 use properties::longhands::text_shadow::computed_value::T as TextShadowList;
 use properties::longhands::text_shadow::computed_value::TextShadow;
 use properties::longhands::box_shadow::computed_value::T as BoxShadowList;
 use properties::longhands::box_shadow::single_value::computed_value::T as BoxShadow;
 use properties::longhands::transform::computed_value::ComputedMatrix;
 use properties::longhands::transform::computed_value::ComputedOperation as TransformOperation;
@@ -691,24 +691,24 @@ impl Interpolate for VerticalAlign {
                 this.interpolate(other, progress).map(|value| {
                     VerticalAlign::LengthOrPercentage(LengthOrPercentage::Length(value))
                 })
             }
             _ => Err(()),
         }
     }
 }
-impl Interpolate for BackgroundSize {
+
+impl Interpolate for BackgroundSizeList {
     #[inline]
     fn interpolate(&self, other: &Self, progress: f64) -> Result<Self, ()> {
-        self.0.interpolate(&other.0, progress).map(BackgroundSize)
+        self.0.interpolate(&other.0, progress).map(BackgroundSizeList)
     }
 }
 
-
 /// https://drafts.csswg.org/css-transitions/#animtype-color
 impl Interpolate for RGBA {
     #[inline]
     fn interpolate(&self, other: &RGBA, progress: f64) -> Result<Self, ()> {
         fn clamp(val: f32) -> f32 {
             val.max(0.).min(1.)
         }
 
@@ -2027,8 +2027,619 @@ impl<T, U> Interpolate for Either<T, U>
             },
             _ => {
                 let interpolated = if progress < 0.5 { *self } else { *other };
                 Ok(interpolated)
             }
         }
     }
 }
+
+
+/// We implement ComputeDistance for AnimationValue so a Gecko API can test the transition distance per property.
+impl ComputeDistance for AnimationValue {
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (self, other) {
+            % for prop in data.longhands:
+                % if prop.animatable:
+                    % if prop.animation_type == "normal":
+                        (&AnimationValue::${prop.camel_case}(ref from),
+                         &AnimationValue::${prop.camel_case}(ref to)) => {
+                            from.compute_distance(to)
+                        },
+                    % else:
+                        (&AnimationValue::${prop.camel_case}(ref _from),
+                         &AnimationValue::${prop.camel_case}(ref _to)) => {
+                            Err(())
+                        },
+                    % endif
+                % endif
+            % endfor
+            _ => {
+                panic!("Expected compute_distance of computed values of the same \
+                        property, got: {:?}, {:?}", self, other);
+            }
+        }
+    }
+}
+
+/// A trait used to implement [compute_distance].
+/// To compute the Euclidean distance of a list, we compute the squared distance of each
+/// element; the list sums those squared distances and takes the square root of the sum as its distance.
+pub trait ComputeDistance: Sized {
+    /// Compute distance between a value and another for a given property.
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()>;
+
+    /// Compute the squared distance between one value and another for a given property.
+    /// This is used for lists, or when a property value has multiple components.
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_distance(other).map(|d| d * d)
+    }
+}
+
+impl<T: ComputeDistance> ComputeDistance for Vec<T> {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        if self.len() != other.len() {
+            return Err(());
+        }
+
+        let mut squared_dist = 0.0f64;
+        for (this, other) in self.iter().zip(other) {
+            let diff = try!(this.compute_squared_distance(other));
+            squared_dist += diff;
+        }
+        Ok(squared_dist)
+    }
+}
+
+impl ComputeDistance for Au {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_distance(&other.0)
+    }
+}
+
+impl ComputeDistance for Auto {
+    #[inline]
+    fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+        Err(())
+    }
+}
+
+impl ComputeDistance for Normal {
+    #[inline]
+    fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+        Err(())
+    }
+}
+
+impl<T> ComputeDistance for Option<T>
+    where T: ComputeDistance,
+{
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (self, other) {
+            (&Some(ref this), &Some(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (self, other) {
+            (&Some(ref this), &Some(ref other)) => {
+                this.compute_squared_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for f32 {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        Ok((*self - *other).abs() as f64)
+    }
+}
+
+impl ComputeDistance for f64 {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        Ok((*self - *other).abs())
+    }
+}
+
+impl ComputeDistance for i32 {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        Ok((*self - *other).abs() as f64)
+    }
+}
+
+impl ComputeDistance for Visibility {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        if *self == *other {
+            Ok(0.0)
+        } else {
+            Ok(1.0)
+        }
+    }
+}
+
+/// https://www.w3.org/TR/smil-animation/#animateColorElement says we should use Euclidean RGB-cube distance.
+impl ComputeDistance for RGBA {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        fn clamp(val: f32) -> f32 {
+            val.max(0.).min(1.)
+        }
+
+        let start_a = clamp(self.alpha_f32());
+        let end_a = clamp(other.alpha_f32());
+        let start = [ start_a,
+                      self.red_f32() * start_a,
+                      self.green_f32() * start_a,
+                      self.blue_f32() * start_a ];
+        let end = [ end_a,
+                    other.red_f32() * end_a,
+                    other.green_f32() * end_a,
+                    other.blue_f32() * end_a ];
+        let diff = start.iter().zip(&end)
+                               .fold(0.0f64, |n, (&a, &b)| {
+                                   let diff = (a - b) as f64;
+                                   n + diff * diff
+                               });
+        Ok(diff)
+    }
+}
+
+impl ComputeDistance for CSSParserColor {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sq| sq.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (CSSParserColor::RGBA(ref this), CSSParserColor::RGBA(ref other)) => {
+                this.compute_squared_distance(other)
+            },
+            _ => Ok(0.0),
+        }
+    }
+}
+
+impl ComputeDistance for CalcLengthOrPercentage {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sq| sq.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        let length_diff = (self.length().0 - other.length().0) as f64;
+        let percentage_diff = (self.percentage() - other.percentage()) as f64;
+        Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+    }
+}
+
+impl ComputeDistance for LengthOrPercentage {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LengthOrPercentage::Length(ref this),
+             LengthOrPercentage::Length(ref other)) => {
+                this.compute_distance(other)
+            },
+            (LengthOrPercentage::Percentage(ref this),
+             LengthOrPercentage::Percentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            (this, other) => {
+                let this: CalcLengthOrPercentage = From::from(this);
+                let other: CalcLengthOrPercentage = From::from(other);
+                this.compute_distance(&other)
+            }
+        }
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LengthOrPercentage::Length(ref this),
+             LengthOrPercentage::Length(ref other)) => {
+                let diff = (this.0 - other.0) as f64;
+                Ok(diff * diff)
+            },
+            (LengthOrPercentage::Percentage(ref this),
+             LengthOrPercentage::Percentage(ref other)) => {
+                let diff = (this - other) as f64;
+                Ok(diff * diff)
+            },
+            (this, other) => {
+                let this: CalcLengthOrPercentage = From::from(this);
+                let other: CalcLengthOrPercentage = From::from(other);
+                let length_diff = (this.length().0 - other.length().0) as f64;
+                let percentage_diff = (this.percentage() - other.percentage()) as f64;
+                Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+            }
+        }
+    }
+}
+
+impl ComputeDistance for LengthOrPercentageOrAuto {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LengthOrPercentageOrAuto::Length(ref this),
+             LengthOrPercentageOrAuto::Length(ref other)) => {
+                this.compute_distance(other)
+            },
+            (LengthOrPercentageOrAuto::Percentage(ref this),
+             LengthOrPercentageOrAuto::Percentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            (this, other) => {
+                // If either value is Auto, the corresponding Option<> is None, and the returned distance is Err(()).
+                let this: Option<CalcLengthOrPercentage> = From::from(this);
+                let other: Option<CalcLengthOrPercentage> = From::from(other);
+                this.compute_distance(&other)
+            }
+        }
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LengthOrPercentageOrAuto::Length(ref this),
+             LengthOrPercentageOrAuto::Length(ref other)) => {
+                let diff = (this.0 - other.0) as f64;
+                Ok(diff * diff)
+            },
+            (LengthOrPercentageOrAuto::Percentage(ref this),
+             LengthOrPercentageOrAuto::Percentage(ref other)) => {
+                let diff = (this - other) as f64;
+                Ok(diff * diff)
+            },
+            (this, other) => {
+                let this: Option<CalcLengthOrPercentage> = From::from(this);
+                let other: Option<CalcLengthOrPercentage> = From::from(other);
+                if this.is_none() || other.is_none() {
+                    Err(())
+                } else {
+                    let length_diff = (this.unwrap().length().0 - other.unwrap().length().0) as f64;
+                    let percentage_diff = (this.unwrap().percentage() - other.unwrap().percentage()) as f64;
+                    Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+                }
+            }
+        }
+    }
+}
+
+impl ComputeDistance for LengthOrPercentageOrNone {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LengthOrPercentageOrNone::Length(ref this),
+             LengthOrPercentageOrNone::Length(ref other)) => {
+                this.compute_distance(other)
+            },
+            (LengthOrPercentageOrNone::Percentage(ref this),
+             LengthOrPercentageOrNone::Percentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(())
+        }
+    }
+}
+
+impl ComputeDistance for LengthOrNone {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (Either::First(ref length), Either::First(ref other)) => {
+                length.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for MinLength {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (MinLength::LengthOrPercentage(ref this),
+             MinLength::LengthOrPercentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for MaxLength {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (MaxLength::LengthOrPercentage(ref this),
+             MaxLength::LengthOrPercentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for VerticalAlign {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (VerticalAlign::LengthOrPercentage(ref this),
+             VerticalAlign::LengthOrPercentage(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for BorderRadiusSize {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        Ok(try!(self.0.width.compute_squared_distance(&other.0.width)) +
+           try!(self.0.height.compute_squared_distance(&other.0.height)))
+    }
+}
+
+impl ComputeDistance for BackgroundSizeList {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_distance(&other.0)
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_squared_distance(&other.0)
+    }
+}
+
+impl ComputeDistance for LineHeight {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (*self, *other) {
+            (LineHeight::Length(ref this),
+             LineHeight::Length(ref other)) => {
+                this.compute_distance(other)
+            },
+            (LineHeight::Number(ref this),
+             LineHeight::Number(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(()),
+        }
+    }
+}
+
+impl ComputeDistance for FontWeight {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        let a = (*self as u32) as f64;
+        let b = (*other as u32) as f64;
+        a.compute_distance(&b)
+    }
+}
+
+impl ComputeDistance for Position {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        Ok(try!(self.horizontal.compute_squared_distance(&other.horizontal)) +
+           try!(self.vertical.compute_squared_distance(&other.vertical)))
+    }
+}
+
+impl ComputeDistance for HorizontalPosition {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_distance(&other.0)
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_squared_distance(&other.0)
+    }
+}
+
+impl ComputeDistance for VerticalPosition {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_distance(&other.0)
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.0.compute_squared_distance(&other.0)
+    }
+}
+
+impl ComputeDistance for ClipRect {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        let list = [ try!(self.top.compute_distance(&other.top)),
+                     try!(self.right.compute_distance(&other.right)),
+                     try!(self.bottom.compute_distance(&other.bottom)),
+                     try!(self.left.compute_distance(&other.left)) ];
+        Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+    }
+}
+
+impl ComputeDistance for TextShadow {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        let list = [ try!(self.offset_x.compute_distance(&other.offset_x)),
+                     try!(self.offset_y.compute_distance(&other.offset_y)),
+                     try!(self.blur_radius.compute_distance(&other.blur_radius)),
+                     try!(self.color.compute_distance(&other.color)) ];
+        Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+    }
+}
+
+impl ComputeDistance for TextShadowList {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        let zero = TextShadow {
+            offset_x: Au(0),
+            offset_y: Au(0),
+            blur_radius: Au(0),
+            color: CSSParserColor::RGBA(RGBA::transparent()),
+        };
+
+        let max_len = cmp::max(self.0.len(), other.0.len());
+        let mut diff_squared = 0.0f64;
+        for i in 0..max_len {
+            diff_squared += match (self.0.get(i), other.0.get(i)) {
+                (Some(shadow), Some(other)) => {
+                    try!(shadow.compute_squared_distance(other))
+                },
+                (Some(shadow), None) |
+                (None, Some(shadow)) => {
+                    try!(shadow.compute_squared_distance(&zero))
+                },
+                (None, None) => unreachable!(),
+            };
+        }
+        Ok(diff_squared)
+    }
+}
+
+impl ComputeDistance for BoxShadow {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        if self.inset != other.inset {
+            return Err(());
+        }
+        let list = [ try!(self.offset_x.compute_distance(&other.offset_x)),
+                     try!(self.offset_y.compute_distance(&other.offset_y)),
+                     try!(self.color.compute_distance(&other.color)),
+                     try!(self.spread_radius.compute_distance(&other.spread_radius)),
+                     try!(self.blur_radius.compute_distance(&other.blur_radius)) ];
+        Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+    }
+}
+
+impl ComputeDistance for BoxShadowList {
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        self.compute_squared_distance(other).map(|sd| sd.sqrt())
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        // The zero shadow's inset flag must match the shadow it is compared with, so keep it mutable.
+        let mut zero = BoxShadow {
+            offset_x: Au(0),
+            offset_y: Au(0),
+            spread_radius: Au(0),
+            blur_radius: Au(0),
+            color: CSSParserColor::RGBA(RGBA::transparent()),
+            inset: false,
+        };
+
+        let max_len = cmp::max(self.0.len(), other.0.len());
+        let mut diff_squared = 0.0f64;
+        for i in 0..max_len {
+            diff_squared += match (self.0.get(i), other.0.get(i)) {
+                (Some(shadow), Some(other)) => {
+                    try!(shadow.compute_squared_distance(other))
+                },
+                (Some(shadow), None) |
+                (None, Some(shadow)) => {
+                    zero.inset = shadow.inset;
+                    try!(shadow.compute_squared_distance(&zero))
+                }
+                (None, None) => unreachable!(),
+            };
+        }
+        Ok(diff_squared)
+    }
+}
+
+impl ComputeDistance for TransformList {
+    #[inline]
+    fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+        Err(())
+    }
+}
+
+impl<T, U> ComputeDistance for Either<T, U>
+    where T: ComputeDistance, U: ComputeDistance
+{
+    #[inline]
+    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (self, other) {
+            (&Either::First(ref this), &Either::First(ref other)) => {
+                this.compute_distance(other)
+            },
+            (&Either::Second(ref this), &Either::Second(ref other)) => {
+                this.compute_distance(other)
+            },
+            _ => Err(())
+        }
+    }
+
+    #[inline]
+    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+        match (self, other) {
+            (&Either::First(ref this), &Either::First(ref other)) => {
+                this.compute_squared_distance(other)
+            },
+            (&Either::Second(ref this), &Either::Second(ref other)) => {
+                this.compute_squared_distance(other)
+            },
+            _ => Err(())
+        }
+    }
+}
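
The ComputeDistance additions above treat a list as a point in Euclidean space: element-wise squared distances are summed and the square root of the sum is the distance. Below is a minimal standalone sketch of that rule; the trait and impl shapes mirror the patch, but the types are simplified stand-ins rather than the actual Servo code:

trait ComputeDistance: Sized {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()>;
    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
        self.compute_distance(other).map(|d| d * d)
    }
}

impl ComputeDistance for f64 {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        Ok((*self - *other).abs())
    }
}

impl<T: ComputeDistance> ComputeDistance for Vec<T> {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        // Distance of a list is the square root of the summed squared distances.
        self.compute_squared_distance(other).map(|sd| sd.sqrt())
    }

    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
        if self.len() != other.len() {
            return Err(());
        }
        let mut squared_dist = 0.0f64;
        for (this, that) in self.iter().zip(other) {
            squared_dist += this.compute_squared_distance(that)?;
        }
        Ok(squared_dist)
    }
}

fn main() {
    // A 3-4-5 triangle: the distance between [0, 0] and [3, 4] is 5.
    let from = vec![0.0f64, 0.0];
    let to = vec![3.0f64, 4.0];
    assert_eq!(from.compute_distance(&to), Ok(5.0));
}
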
--- a/servo/components/style/properties/longhand/background.mako.rs
+++ b/servo/components/style/properties/longhand/background.mako.rs
@@ -329,17 +329,17 @@
     use std::ascii::AsciiExt;
     use std::fmt;
     use style_traits::ToCss;
     use values::HasViewportPercentage;
 
     #[allow(missing_docs)]
     pub mod computed_value {
         use values::computed::LengthOrPercentageOrAuto;
-        use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
+        use properties::animated_properties::{ComputeDistance, Interpolate, RepeatableListInterpolate};
 
         #[derive(PartialEq, Clone, Debug)]
         #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
         pub struct ExplicitSize {
             pub width: LengthOrPercentageOrAuto,
             pub height: LengthOrPercentageOrAuto,
         }
 
@@ -362,16 +362,34 @@
                             width: try!(me.width.interpolate(&other.width, time)),
                             height: try!(me.height.interpolate(&other.height, time)),
                         }))
                     }
                     _ => Err(()),
                 }
             }
         }
+
+        impl ComputeDistance for T {
+            #[inline]
+            fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+                self.compute_squared_distance(other).map(|sd| sd.sqrt())
+            }
+
+            #[inline]
+            fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+                match (self, other) {
+                    (&T::Explicit(ref me), &T::Explicit(ref other)) => {
+                        Ok(try!(me.width.compute_squared_distance(&other.width)) +
+                           try!(me.height.compute_squared_distance(&other.height)))
+                    },
+                    _ => Err(())
+                }
+            }
+        }
     }
 
     impl ToCss for computed_value::T {
         fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
             match *self {
                 computed_value::T::Explicit(ref size) => size.to_css(dest),
                 computed_value::T::Cover => dest.write_str("cover"),
                 computed_value::T::Contain => dest.write_str("contain"),
--- a/servo/components/style/properties/longhand/box.mako.rs
+++ b/servo/components/style/properties/longhand/box.mako.rs
@@ -2192,36 +2192,51 @@
                    spec="https://drafts.csswg.org/css-transforms/#transform-origin-property">
     use app_units::Au;
     use std::fmt;
     use style_traits::ToCss;
     use values::HasViewportPercentage;
     use values::specified::{NoCalcLength, LengthOrPercentage, Percentage};
 
     pub mod computed_value {
-        use properties::animated_properties::Interpolate;
+        use properties::animated_properties::{ComputeDistance, Interpolate};
         use values::computed::{Length, LengthOrPercentage};
 
         #[derive(Clone, Copy, Debug, PartialEq)]
         #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
         pub struct T {
             pub horizontal: LengthOrPercentage,
             pub vertical: LengthOrPercentage,
             pub depth: Length,
         }
 
         impl Interpolate for T {
+            #[inline]
             fn interpolate(&self, other: &Self, time: f64) -> Result<Self, ()> {
                 Ok(T {
                     horizontal: try!(self.horizontal.interpolate(&other.horizontal, time)),
                     vertical: try!(self.vertical.interpolate(&other.vertical, time)),
                     depth: try!(self.depth.interpolate(&other.depth, time)),
                 })
             }
         }
+
+        impl ComputeDistance for T {
+            #[inline]
+            fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+                self.compute_squared_distance(other).map(|sd| sd.sqrt())
+            }
+
+            #[inline]
+            fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+                Ok(try!(self.horizontal.compute_squared_distance(&other.horizontal)) +
+                   try!(self.vertical.compute_squared_distance(&other.vertical)) +
+                   try!(self.depth.compute_squared_distance(&other.depth)))
+            }
+        }
     }
 
     impl HasViewportPercentage for SpecifiedValue {
         fn has_viewport_percentage(&self) -> bool {
             self.horizontal.has_viewport_percentage() ||
             self.vertical.has_viewport_percentage() ||
             self.depth.has_viewport_percentage()
         }
--- a/servo/components/style/properties/longhand/font.mako.rs
+++ b/servo/components/style/properties/longhand/font.mako.rs
@@ -813,17 +813,17 @@
             match *computed {
                 computed_value::T::None => SpecifiedValue::None,
                 computed_value::T::Number(ref v) => SpecifiedValue::Number(specified::Number::from_computed_value(v)),
             }
         }
     }
 
     pub mod computed_value {
-        use properties::animated_properties::Interpolate;
+        use properties::animated_properties::{ComputeDistance, Interpolate};
         use std::fmt;
         use style_traits::ToCss;
         use values::CSSFloat;
 
         #[derive(Copy, Clone, Debug, PartialEq)]
         #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
         pub enum T {
             None,
@@ -845,16 +845,27 @@
             fn interpolate(&self, other: &Self, time: f64) -> Result<Self, ()> {
                 match (*self, *other) {
                     (T::Number(ref number), T::Number(ref other)) =>
                         Ok(T::Number(try!(number.interpolate(other, time)))),
                     _ => Err(()),
                 }
             }
         }
+
+        impl ComputeDistance for T {
+            #[inline]
+            fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+                match (*self, *other) {
+                    (T::Number(ref number), T::Number(ref other)) =>
+                        number.compute_distance(other),
+                    _ => Err(()),
+                }
+            }
+        }
     }
 
     #[inline]
     pub fn get_initial_value() -> computed_value::T {
         computed_value::T::None
     }
 
     #[inline]
--- a/servo/components/style/properties/longhand/inherited_text.mako.rs
+++ b/servo/components/style/properties/longhand/inherited_text.mako.rs
@@ -434,23 +434,24 @@
                 SpecifiedValue::Normal => dest.write_str("normal"),
                 SpecifiedValue::Specified(ref l) => l.to_css(dest),
             }
         }
     }
 
     pub mod computed_value {
         use app_units::Au;
-        use properties::animated_properties::Interpolate;
+        use properties::animated_properties::{ComputeDistance, Interpolate};
 
         #[derive(Debug, Clone, PartialEq)]
         #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
         pub struct T(pub Option<Au>);
 
         ${helpers.impl_interpolate_for_option_tuple('Au(0)')}
+        ${helpers.impl_compute_distance_for_option_tuple('Au(0)')}
     }
 
     impl ToCss for computed_value::T {
         fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
             match self.0 {
                 None => dest.write_str("normal"),
                 Some(l) => l.to_css(dest),
             }
@@ -518,23 +519,24 @@
             match *self {
                 SpecifiedValue::Normal => dest.write_str("normal"),
                 SpecifiedValue::Specified(ref l) => l.to_css(dest),
             }
         }
     }
 
     pub mod computed_value {
-        use properties::animated_properties::Interpolate;
+        use properties::animated_properties::{ComputeDistance, Interpolate};
         use values::computed::LengthOrPercentage;
         #[derive(Debug, Clone, PartialEq)]
         #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
         pub struct T(pub Option<LengthOrPercentage>);
 
         ${helpers.impl_interpolate_for_option_tuple('LengthOrPercentage::zero()')}
+        ${helpers.impl_compute_distance_for_option_tuple('LengthOrPercentage::zero()')}
     }
 
     impl ToCss for computed_value::T {
         fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
             match self.0 {
                 None => dest.write_str("normal"),
                 Some(l) => l.to_css(dest),
             }
--- a/servo/components/style/properties/longhand/svg.mako.rs
+++ b/servo/components/style/properties/longhand/svg.mako.rs
@@ -116,48 +116,62 @@
 <%helpers:vector_longhand name="mask-position-x" products="gecko" animation_type="normal" extra_prefixes="webkit"
                           spec="https://drafts.fxtf.org/css-masking/#propdef-mask-position">
     pub use properties::longhands::background_position_x::single_value::get_initial_value;
     pub use properties::longhands::background_position_x::single_value::get_initial_position_value;
     pub use properties::longhands::background_position_x::single_value::get_initial_specified_value;
     pub use properties::longhands::background_position_x::single_value::parse;
     pub use properties::longhands::background_position_x::single_value::SpecifiedValue;
     pub use properties::longhands::background_position_x::single_value::computed_value;
-    use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
+    use properties::animated_properties::{ComputeDistance, Interpolate, RepeatableListInterpolate};
     use properties::longhands::mask_position_x::computed_value::T as MaskPositionX;
 
     impl Interpolate for MaskPositionX {
         #[inline]
         fn interpolate(&self, other: &Self, progress: f64) -> Result<Self, ()> {
             Ok(MaskPositionX(try!(self.0.interpolate(&other.0, progress))))
         }
     }
 
     impl RepeatableListInterpolate for MaskPositionX {}
+
+    impl ComputeDistance for MaskPositionX {
+        #[inline]
+        fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+            Err(())
+        }
+    }
 </%helpers:vector_longhand>
 
 <%helpers:vector_longhand name="mask-position-y" products="gecko" animation_type="normal" extra_prefixes="webkit"
                           spec="https://drafts.fxtf.org/css-masking/#propdef-mask-position">
     pub use properties::longhands::background_position_y::single_value::get_initial_value;
     pub use properties::longhands::background_position_y::single_value::get_initial_position_value;
     pub use properties::longhands::background_position_y::single_value::get_initial_specified_value;
     pub use properties::longhands::background_position_y::single_value::parse;
     pub use properties::longhands::background_position_y::single_value::SpecifiedValue;
     pub use properties::longhands::background_position_y::single_value::computed_value;
-    use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
+    use properties::animated_properties::{ComputeDistance, Interpolate, RepeatableListInterpolate};
     use properties::longhands::mask_position_y::computed_value::T as MaskPositionY;
 
     impl Interpolate for MaskPositionY {
         #[inline]
         fn interpolate(&self, other: &Self, progress: f64) -> Result<Self, ()> {
             Ok(MaskPositionY(try!(self.0.interpolate(&other.0, progress))))
         }
     }
 
     impl RepeatableListInterpolate for MaskPositionY {}
+
+    impl ComputeDistance for MaskPositionY {
+        #[inline]
+        fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+            Err(())
+        }
+    }
 </%helpers:vector_longhand>
 
 ${helpers.single_keyword("mask-clip",
                          "border-box content-box padding-box",
                          extra_gecko_values="fill-box stroke-box view-box no-clip",
                          vector=True,
                          products="gecko",
                          extra_prefixes="webkit",
--- a/servo/components/style/str.rs
+++ b/servo/components/style/str.rs
@@ -2,16 +2,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 //! String utils for attributes and similar stuff.
 
 #![deny(missing_docs)]
 
 use num_traits::ToPrimitive;
+use std::ascii::AsciiExt;
 use std::convert::AsRef;
 use std::iter::{Filter, Peekable};
 use std::str::Split;
 
 /// A static slice of characters.
 pub type StaticCharVec = &'static [char];
 
 /// A static slice of `str`s.
@@ -139,8 +140,14 @@ pub fn str_join<I, T>(strs: I, join: &st
           T: AsRef<str>,
 {
     strs.into_iter().enumerate().fold(String::new(), |mut acc, (i, s)| {
         if i > 0 { acc.push_str(join); }
         acc.push_str(s.as_ref());
         acc
     })
 }
+
+/// Returns true if the given string is strictly longer than the given prefix and starts with it, ignoring ASCII case.
+pub fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
+    string.len() > prefix.len() &&
+      string[0..prefix.len()].eq_ignore_ascii_case(prefix)
+}
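
A small standalone sketch of the helper above (a local copy for illustration, not an import from the style crate), showing its ASCII case-insensitive, strict-prefix behavior:

fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
    string.len() > prefix.len() &&
      string[0..prefix.len()].eq_ignore_ascii_case(prefix)
}

fn main() {
    assert!(starts_with_ignore_ascii_case("-WEBKIT-keyframes", "-webkit-"));
    assert!(!starts_with_ignore_ascii_case("keyframes", "-moz-"));
    // The string must be strictly longer than the prefix, so a string that is
    // exactly the prefix returns false.
    assert!(!starts_with_ignore_ascii_case("-moz-", "-moz-"));
}
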
--- a/servo/components/style/stylesheets.rs
+++ b/servo/components/style/stylesheets.rs
@@ -31,16 +31,17 @@ use selectors::parser::SelectorList;
 use servo_config::prefs::PREFS;
 #[cfg(not(feature = "gecko"))]
 use servo_url::ServoUrl;
 use shared_lock::{SharedRwLock, Locked, ToCssWithGuard, SharedRwLockReadGuard};
 use std::cell::Cell;
 use std::fmt;
 use std::sync::Arc;
 use std::sync::atomic::{AtomicBool, Ordering};
+use str::starts_with_ignore_ascii_case;
 use style_traits::ToCss;
 use stylist::FnvHashMap;
 use supports::SupportsCondition;
 use values::specified::url::SpecifiedUrl;
 use viewport::ViewportRule;
 
 
 /// Extra data that the backend may need to resolve url values.
@@ -524,16 +525,18 @@ impl ToCssWithGuard for ImportRule {
 ///
 /// [keyframes]: https://drafts.csswg.org/css-animations/#keyframes
 #[derive(Debug)]
 pub struct KeyframesRule {
     /// The name of the current animation.
     pub name: Atom,
     /// The keyframes specified for this CSS rule.
     pub keyframes: Vec<Arc<Locked<Keyframe>>>,
+    /// The vendor prefix of the @keyframes rule, if any.
+    pub vendor_prefix: Option<VendorPrefix>,
 }
 
 impl ToCssWithGuard for KeyframesRule {
     // Serialization of KeyframesRule is not specced.
     fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
     where W: fmt::Write {
         try!(dest.write_str("@keyframes "));
         try!(dest.write_str(&*self.name.to_string()));
@@ -908,28 +911,37 @@ impl<'b> TopLevelRuleParser<'b> {
 pub enum State {
     Start = 1,
     Imports = 2,
     Namespaces = 3,
     Body = 4,
     Invalid = 5,
 }
 
+#[derive(Clone, Debug)]
+#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
+/// Vendor prefix.
+pub enum VendorPrefix {
+    /// -moz prefix.
+    Moz,
+    /// -webkit prefix.
+    WebKit,
+}
 
 enum AtRulePrelude {
     /// A @font-face rule prelude.
     FontFace,
     /// A @media rule prelude, with its media queries.
     Media(Arc<Locked<MediaList>>),
     /// An @supports rule, with its conditional
     Supports(SupportsCondition),
     /// A @viewport rule prelude.
     Viewport,
-    /// A @keyframes rule, with its animation name.
-    Keyframes(Atom),
+    /// A @keyframes rule, with its animation name and vendor prefix, if any.
+    Keyframes(Atom, Option<VendorPrefix>),
     /// A @page rule prelude.
     Page,
 }
 
 
 impl<'a> AtRuleParser for TopLevelRuleParser<'a> {
     type Prelude = AtRulePrelude;
     type AtRule = CssRule;
@@ -1106,24 +1118,36 @@ impl<'a, 'b> AtRuleParser for NestedRule
             },
             "viewport" => {
                 if is_viewport_enabled() {
                     Ok(AtRuleType::WithBlock(AtRulePrelude::Viewport))
                 } else {
                     Err(())
                 }
             },
-            "keyframes" => {
+            "keyframes" | "-webkit-keyframes" | "-moz-keyframes" => {
+                let prefix = if starts_with_ignore_ascii_case(name, "-webkit-") {
+                    Some(VendorPrefix::WebKit)
+                } else if starts_with_ignore_ascii_case(name, "-moz-") {
+                    Some(VendorPrefix::Moz)
+                } else {
+                    None
+                };
+                if cfg!(feature = "servo") &&
+                   prefix.as_ref().map_or(false, |p| matches!(*p, VendorPrefix::Moz)) {
+                    // Servo should not support @-moz-keyframes.
+                    return Err(())
+                }
                 let name = match input.next() {
                     Ok(Token::Ident(ref value)) if value != "none" => Atom::from(&**value),
                     Ok(Token::QuotedString(value)) => Atom::from(&*value),
                     _ => return Err(())
                 };
 
-                Ok(AtRuleType::WithBlock(AtRulePrelude::Keyframes(Atom::from(name))))
+                Ok(AtRuleType::WithBlock(AtRulePrelude::Keyframes(Atom::from(name), prefix)))
             },
             "page" => {
                 if cfg!(feature = "gecko") {
                     Ok(AtRuleType::WithBlock(AtRulePrelude::Page))
                 } else {
                     Err(())
                 }
             },
@@ -1152,21 +1176,22 @@ impl<'a, 'b> AtRuleParser for NestedRule
                     enabled: enabled,
                 }))))
             }
             AtRulePrelude::Viewport => {
                 let context = ParserContext::new_with_rule_type(self.context, Some(CssRuleType::Viewport));
                 Ok(CssRule::Viewport(Arc::new(self.shared_lock.wrap(
                    try!(ViewportRule::parse(&context, input))))))
             }
-            AtRulePrelude::Keyframes(name) => {
+            AtRulePrelude::Keyframes(name, prefix) => {
                 let context = ParserContext::new_with_rule_type(self.context, Some(CssRuleType::Keyframes));
                 Ok(CssRule::Keyframes(Arc::new(self.shared_lock.wrap(KeyframesRule {
                     name: name,
                     keyframes: parse_keyframe_list(&context, input, self.shared_lock),
+                    vendor_prefix: prefix,
                 }))))
             }
             AtRulePrelude::Page => {
                 let context = ParserContext::new_with_rule_type(self.context, Some(CssRuleType::Page));
                 let declarations = parse_property_declaration_list(&context, input);
                 Ok(CssRule::Page(Arc::new(self.shared_lock.wrap(PageRule(
                     Arc::new(self.shared_lock.wrap(declarations))
                 )))))
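
The at-rule branch above maps the rule name to an optional vendor prefix before building the keyframes prelude. Here is a self-contained sketch of that selection logic, with a local copy of the string helper and a stand-in VendorPrefix enum; the Servo-specific rejection of @-moz-keyframes is omitted:

#[derive(Clone, Debug, PartialEq)]
enum VendorPrefix { Moz, WebKit }

fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
    string.len() > prefix.len() &&
      string[0..prefix.len()].eq_ignore_ascii_case(prefix)
}

// Pick the vendor prefix (if any) from an at-rule name such as "-webkit-keyframes".
fn keyframes_prefix(name: &str) -> Option<VendorPrefix> {
    if starts_with_ignore_ascii_case(name, "-webkit-") {
        Some(VendorPrefix::WebKit)
    } else if starts_with_ignore_ascii_case(name, "-moz-") {
        Some(VendorPrefix::Moz)
    } else {
        None
    }
}

fn main() {
    assert_eq!(keyframes_prefix("-webkit-keyframes"), Some(VendorPrefix::WebKit));
    assert_eq!(keyframes_prefix("-moz-keyframes"), Some(VendorPrefix::Moz));
    assert_eq!(keyframes_prefix("keyframes"), None);
}
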
--- a/servo/components/style/stylist.rs
+++ b/servo/components/style/stylist.rs
@@ -343,20 +343,27 @@ impl Stylist {
                 }
                 CssRule::Import(ref import) => {
                     let import = import.read_with(guard);
                     self.add_stylesheet(&import.stylesheet, guard, extra_data)
                 }
                 CssRule::Keyframes(ref keyframes_rule) => {
                     let keyframes_rule = keyframes_rule.read_with(guard);
                     debug!("Found valid keyframes rule: {:?}", *keyframes_rule);
-                    let animation = KeyframesAnimation::from_keyframes(
-                        &keyframes_rule.keyframes, guard);
-                    debug!("Found valid keyframe animation: {:?}", animation);
-                    self.animations.insert(keyframes_rule.name.clone(), animation);
+
+                    // Don't let a prefixed keyframes animation override a non-prefixed one.
+                    let needs_insertion = keyframes_rule.vendor_prefix.is_none() ||
+                        self.animations.get(&keyframes_rule.name).map_or(true, |rule|
+                            rule.vendor_prefix.is_some());
+                    if needs_insertion {
+                        let animation = KeyframesAnimation::from_keyframes(
+                            &keyframes_rule.keyframes, keyframes_rule.vendor_prefix.clone(), guard);
+                        debug!("Found valid keyframe animation: {:?}", animation);
+                        self.animations.insert(keyframes_rule.name.clone(), animation);
+                    }
                 }
                 CssRule::FontFace(ref rule) => {
                     extra_data.add_font_face(&rule, stylesheet.origin);
                 }
                 // We don't care about any other rule.
                 _ => {}
             }
         });
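
The needs_insertion check above keeps a prefixed @keyframes rule from clobbering an unprefixed animation of the same name. A toy sketch of that rule, with hypothetical stand-in types for the Stylist's animation map:

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum VendorPrefix { Moz, WebKit }

#[derive(Debug)]
struct Animation { vendor_prefix: Option<VendorPrefix> }

fn maybe_insert(animations: &mut HashMap<String, Animation>, name: &str, anim: Animation) {
    // Insert unless the new rule is prefixed and an unprefixed rule already exists.
    let needs_insertion = anim.vendor_prefix.is_none() ||
        animations.get(name).map_or(true, |existing| existing.vendor_prefix.is_some());
    if needs_insertion {
        animations.insert(name.to_owned(), anim);
    }
}

fn main() {
    let mut animations = HashMap::new();
    maybe_insert(&mut animations, "spin", Animation { vendor_prefix: None });
    // A later -webkit- prefixed rule with the same name is ignored...
    maybe_insert(&mut animations, "spin", Animation { vendor_prefix: Some(VendorPrefix::WebKit) });
    assert_eq!(animations["spin"].vendor_prefix, None);
    // ...but an unprefixed rule always replaces a prefixed one.
    maybe_insert(&mut animations, "fade", Animation { vendor_prefix: Some(VendorPrefix::Moz) });
    maybe_insert(&mut animations, "fade", Animation { vendor_prefix: None });
    assert_eq!(animations["fade"].vendor_prefix, None);
}
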
--- a/servo/ports/geckolib/glue.rs
+++ b/servo/ports/geckolib/glue.rs
@@ -69,17 +69,17 @@ use style::gecko_bindings::sugar::refptr
 use style::gecko_properties::{self, style_structs};
 use style::keyframes::KeyframesStepValue;
 use style::media_queries::{MediaList, parse_media_query_list};
 use style::parallel;
 use style::parser::{LengthParsingMode, ParserContext};
 use style::properties::{CascadeFlags, ComputedValues, Importance, ParsedDeclaration};
 use style::properties::{PropertyDeclarationBlock, PropertyId};
 use style::properties::SKIP_ROOT_AND_ITEM_BASED_DISPLAY_FIXUP;
-use style::properties::animated_properties::{AnimationValue, Interpolate, TransitionProperty};
+use style::properties::animated_properties::{AnimationValue, ComputeDistance, Interpolate, TransitionProperty};
 use style::properties::parse_one_declaration;
 use style::restyle_hints::{self, RestyleHint};
 use style::rule_tree::StyleSource;
 use style::selector_parser::PseudoElementCascadeType;
 use style::sequential;
 use style::shared_lock::{SharedRwLock, SharedRwLockReadGuard, StylesheetGuards, ToCssWithGuard, Locked};
 use style::string_cache::Atom;
 use style::stylesheets::{CssRule, CssRules, CssRuleType, CssRulesHelpers};
@@ -261,16 +261,25 @@ pub extern "C" fn Servo_AnimationValues_
                                                        to: RawServoAnimationValueBorrowed)
                                                        -> bool {
     let from_value = AnimationValue::as_arc(&from);
     let to_value = AnimationValue::as_arc(&to);
     from_value.interpolate(to_value, 0.5).is_ok()
 }
 
 #[no_mangle]
+pub extern "C" fn Servo_AnimationValues_ComputeDistance(from: RawServoAnimationValueBorrowed,
+                                                        to: RawServoAnimationValueBorrowed)
+                                                        -> f64 {
+    let from_value = AnimationValue::as_arc(&from);
+    let to_value = AnimationValue::as_arc(&to);
+    from_value.compute_distance(to_value).unwrap_or(0.0)
+}
+
+#[no_mangle]
 pub extern "C" fn Servo_AnimationValueMap_Push(value_map: RawServoAnimationValueMapBorrowed,
                                                property: nsCSSPropertyID,
                                                value: RawServoAnimationValueBorrowed)
 {
     use style::properties::animated_properties::AnimationValueMap;
 
     let value_map = RwLock::<AnimationValueMap>::as_arc(&value_map);
     let value = AnimationValue::as_arc(&value).as_ref();
--- a/servo/tests/unit/style/keyframes.rs
+++ b/servo/tests/unit/style/keyframes.rs
@@ -9,38 +9,44 @@ use style::properties::{PropertyDeclarat
 use style::properties::animated_properties::TransitionProperty;
 use style::shared_lock::SharedRwLock;
 use style::values::specified::{LengthOrPercentageOrAuto, NoCalcLength};
 
 #[test]
 fn test_empty_keyframe() {
     let shared_lock = SharedRwLock::new();
     let keyframes = vec![];
-    let animation = KeyframesAnimation::from_keyframes(&keyframes, &shared_lock.read());
+    let animation = KeyframesAnimation::from_keyframes(&keyframes,
+                                                       /* vendor_prefix = */ None,
+                                                       &shared_lock.read());
     let expected = KeyframesAnimation {
         steps: vec![],
         properties_changed: vec![],
+        vendor_prefix: None,
     };
 
     assert_eq!(format!("{:#?}", animation), format!("{:#?}", expected));
 }
 
 #[test]
 fn test_no_property_in_keyframe() {
     let shared_lock = SharedRwLock::new();
     let keyframes = vec![
         Arc::new(shared_lock.wrap(Keyframe {
             selector: KeyframeSelector::new_for_unit_testing(vec![KeyframePercentage::new(1.)]),
             block: Arc::new(shared_lock.wrap(PropertyDeclarationBlock::new()))
         })),
     ];
-    let animation = KeyframesAnimation::from_keyframes(&keyframes, &shared_lock.read());
+    let animation = KeyframesAnimation::from_keyframes(&keyframes,
+                                                       /* vendor_prefix = */ None,
+                                                       &shared_lock.read());
     let expected = KeyframesAnimation {
         steps: vec![],
         properties_changed: vec![],
+        vendor_prefix: None,
     };
 
     assert_eq!(format!("{:#?}", animation), format!("{:#?}", expected));
 }
 
 #[test]
 fn test_missing_property_in_initial_keyframe() {
     let shared_lock = SharedRwLock::new();
@@ -73,31 +79,34 @@ fn test_missing_property_in_initial_keyf
             block: declarations_on_initial_keyframe.clone(),
         })),
 
         Arc::new(shared_lock.wrap(Keyframe {
             selector: KeyframeSelector::new_for_unit_testing(vec![KeyframePercentage::new(1.)]),
             block: declarations_on_final_keyframe.clone(),
         })),
     ];
-    let animation = KeyframesAnimation::from_keyframes(&keyframes, &shared_lock.read());
+    let animation = KeyframesAnimation::from_keyframes(&keyframes,
+                                                       /* vendor_prefix = */ None,
+                                                       &shared_lock.read());
     let expected = KeyframesAnimation {
         steps: vec![
             KeyframesStep {
                 start_percentage: KeyframePercentage(0.),
                 value: KeyframesStepValue::Declarations { block: declarations_on_initial_keyframe },
                 declared_timing_function: false,
             },
             KeyframesStep {
                 start_percentage: KeyframePercentage(1.),
                 value: KeyframesStepValue::Declarations { block: declarations_on_final_keyframe },
                 declared_timing_function: false,
             },
         ],
         properties_changed: vec![TransitionProperty::Width, TransitionProperty::Height],
+        vendor_prefix: None,
     };
 
     assert_eq!(format!("{:#?}", animation), format!("{:#?}", expected));
 }
 
 #[test]
 fn test_missing_property_in_final_keyframe() {
     let shared_lock = SharedRwLock::new();
@@ -130,31 +139,34 @@ fn test_missing_property_in_final_keyfra
             block: declarations_on_initial_keyframe.clone(),
         })),
 
         Arc::new(shared_lock.wrap(Keyframe {
             selector: KeyframeSelector::new_for_unit_testing(vec![KeyframePercentage::new(1.)]),
             block: declarations_on_final_keyframe.clone(),
         })),
     ];
-    let animation = KeyframesAnimation::from_keyframes(&keyframes, &shared_lock.read());
+    let animation = KeyframesAnimation::from_keyframes(&keyframes,
+                                                       /* vendor_prefix = */ None,
+                                                       &shared_lock.read());
     let expected = KeyframesAnimation {
         steps: vec![
             KeyframesStep {
                 start_percentage: KeyframePercentage(0.),
                 value: KeyframesStepValue::Declarations { block: declarations_on_initial_keyframe },
                 declared_timing_function: false,
             },
             KeyframesStep {
                 start_percentage: KeyframePercentage(1.),
                 value: KeyframesStepValue::Declarations { block: declarations_on_final_keyframe },
                 declared_timing_function: false,
             },
         ],
         properties_changed: vec![TransitionProperty::Width, TransitionProperty::Height],
+        vendor_prefix: None,
     };
 
     assert_eq!(format!("{:#?}", animation), format!("{:#?}", expected));
 }
 
 #[test]
 fn test_missing_keyframe_in_both_of_initial_and_final_keyframe() {
     let shared_lock = SharedRwLock::new();
@@ -179,17 +191,19 @@ fn test_missing_keyframe_in_both_of_init
             selector: KeyframeSelector::new_for_unit_testing(vec![KeyframePercentage::new(0.)]),
             block: Arc::new(shared_lock.wrap(PropertyDeclarationBlock::new()))
         })),
         Arc::new(shared_lock.wrap(Keyframe {
             selector: KeyframeSelector::new_for_unit_testing(vec![KeyframePercentage::new(0.5)]),
             block: declarations.clone(),
         })),
     ];
-    let animation = KeyframesAnimation::from_keyframes(&keyframes, &shared_lock.read());
+    let animation = KeyframesAnimation::from_keyframes(&keyframes,
+                                                       /* vendor_prefix = */ None,
+                                                       &shared_lock.read());
     let expected = KeyframesAnimation {
         steps: vec![
             KeyframesStep {
                 start_percentage: KeyframePercentage(0.),
                 value: KeyframesStepValue::Declarations {
                     block: Arc::new(shared_lock.wrap(
                         // XXX: Should we use ComputedValues in this case?
                         PropertyDeclarationBlock::new()
@@ -204,12 +218,13 @@ fn test_missing_keyframe_in_both_of_init
             },
             KeyframesStep {
                 start_percentage: KeyframePercentage(1.),
                 value: KeyframesStepValue::ComputedValues,
                 declared_timing_function: false,
             }
         ],
         properties_changed: vec![TransitionProperty::Width, TransitionProperty::Height],
+        vendor_prefix: None,
     };
 
     assert_eq!(format!("{:#?}", animation), format!("{:#?}", expected));
 }
--- a/servo/tests/unit/style/stylesheets.rs
+++ b/servo/tests/unit/style/stylesheets.rs
@@ -240,17 +240,18 @@ fn test_parse_stylesheet() {
                                 LengthOrPercentageOrAuto::Percentage(Percentage(1.))),
                              Importance::Normal),
                             (PropertyDeclaration::AnimationPlayState(
                                 animation_play_state::SpecifiedValue(
                                     vec![animation_play_state::SingleSpecifiedValue::running])),
                              Importance::Normal),
                         ]))),
                     })),
-                ]
+                ],
+                vendor_prefix: None,
             })))
 
         ], &stylesheet.shared_lock),
     };
 
     assert_eq!(format!("{:#?}", stylesheet), format!("{:#?}", expected));
 }
 
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -406,17 +406,17 @@ task_description_schema = Schema({
             # type of signing task (for CoT)
             Required('taskType'): basestring,
 
             # Paths to the artifacts to sign
             Required('paths'): [basestring],
         }],
 
         # "Invalid" is a noop for try and other non-supported branches
-        Required('google-play-track'): Any('production', 'beta', 'alpha', 'invalid'),
+        Required('google-play-track'): Any('production', 'beta', 'alpha', 'rollout', 'invalid'),
         Required('dry-run', default=True): bool,
         Optional('rollout-percentage'): int,
     }),
 })
 
 GROUP_NAMES = {
     'py': 'Python unit tests',
     'tc': 'Executed by TaskCluster',
--- a/taskcluster/taskgraph/util/scriptworker.py
+++ b/taskcluster/taskgraph/util/scriptworker.py
@@ -210,35 +210,37 @@ PUSH_APK_SCOPES = {
     'release': 'project:releng:googleplay:release',
     'default': 'project:releng:googleplay:invalid',
 }
 
 # See https://github.com/mozilla-releng/pushapkscript#aurora-beta-release-vs-alpha-beta-production
 PUSH_APK_GOOGLE_PLAY_TRACT = {
     'aurora': 'beta',
     'beta': 'production',
-    'release': 'production',
+    'release': 'rollout',
     'default': 'invalid',
 }
 
 PUSH_APK_BREAKPOINT_WORKER_TYPE = {
     'aurora': 'aws-provisioner-v1/taskcluster-generic',
     'beta': 'null-provisioner/human-breakpoint',
     'release': 'null-provisioner/human-breakpoint',
     'default': 'invalid/invalid',
 }
 
 PUSH_APK_DRY_RUN_OPTION = {
     'aurora': False,
     'beta': False,
-    'release': True,
+    'release': False,
     'default': True,
 }
 
 PUSH_APK_ROLLOUT_PERCENTAGE = {
+    # XXX If you add a new supported project here, make sure its entry in
+    # PUSH_APK_GOOGLE_PLAY_TRACT is set to 'rollout' as well.
     'release': 10,
     'default': None,
 }
 
 
 # scope functions {{{1
 def get_scope_from_project(alias_to_project_map, alias_to_scope_map, config):
     """Determine the restricted scope from `config.params['project']`.
--- a/testing/marionette/components/marionette.js
+++ b/testing/marionette/components/marionette.js
@@ -15,18 +15,16 @@ const MARIONETTE_CONTRACT_ID = "@mozilla
 const MARIONETTE_CID = Components.ID("{786a1369-dca5-4adc-8486-33d23c88010a}");
 
 const PREF_ENABLED = "marionette.enabled";
 const PREF_ENABLED_FALLBACK = "marionette.defaultPrefs.enabled";
 const PREF_PORT = "marionette.port";
 const PREF_PORT_FALLBACK = "marionette.defaultPrefs.port";
 const PREF_LOG_LEVEL = "marionette.log.level";
 const PREF_LOG_LEVEL_FALLBACK = "marionette.logging";
-const PREF_FORCE_LOCAL = "marionette.forcelocal";
-const PREF_FORCE_LOCAL_FALLBACK = "marionette.force-local";
 
 const DEFAULT_PORT = 2828;
 const DEFAULT_LOG_LEVEL = "info";
 const LOG_LEVELS = new class extends Map {
   constructor () {
     super([
       ["fatal", Log.Level.Fatal],
       ["error", Log.Level.Error],
@@ -84,20 +82,16 @@ const prefs = {
     return getPref(PREF_PORT, PREF_PORT_FALLBACK);
   },
 
   get logLevel () {
     let s = getPref(PREF_LOG_LEVEL, PREF_LOG_LEVEL_FALLBACK);
     return LOG_LEVELS.get(s);
   },
 
-  get forceLocal () {
-    return getPref(PREF_FORCE_LOCAL, PREF_FORCE_LOCAL_FALLBACK);
-  },
-
   readFromEnvironment (key) {
     const env = Cc["@mozilla.org/process/environment;1"]
         .getService(Ci.nsIEnvironment);
 
     if (env.exists(key)) {
       let prefs;
       try {
         prefs = JSON.parse(env.get(key));
@@ -275,31 +269,20 @@ MarionetteComponent.prototype.suppressSa
   }, {once: true});
 };
 
 MarionetteComponent.prototype.init = function () {
   if (this.running || !this.enabled || !this.finalUIStartup) {
     return;
   }
 
-  if (!prefs.forceLocal) {
-    // See bug 800138.  Because the first socket that opens with
-    // force-local=false fails, we open a dummy socket that will fail.
-    // keepWhenOffline=true so that it still work when offline (local).
-    // This allows the following attempt by Marionette to open a socket
-    // to succeed.
-    let insaneSacrificialGoat =
-        new ServerSocket(0, Ci.nsIServerSocket.KeepWhenOffline, 4);
-    insaneSacrificialGoat.asyncListen(this);
-  }
-
   let s;
   try {
     Cu.import("chrome://marionette/content/server.js");
-    s = new server.TCPListener(prefs.port, prefs.forceLocal);
+    s = new server.TCPListener(prefs.port);
     s.start();
     this.logger.info(`Listening on port ${s.port}`);
   } finally {
     if (s) {
       this.server = s;
       this.running = true;
     }
   }
--- a/testing/marionette/prefs/marionette.js
+++ b/testing/marionette/prefs/marionette.js
@@ -10,18 +10,15 @@
 // set to true.
 
 // Whether or not Marionette is enabled.
 pref("marionette.enabled", false);
 
 // Port to start Marionette server on.
 pref("marionette.port", 2828);
 
-// Forces client connections to come from a loopback device.
-pref("marionette.forcelocal", true);
-
 // Marionette logging verbosity.  Allowed values are "fatal", "error",
 // "warn", "info", "config", "debug", and "trace".
 pref("marionette.log.level", "info");
 
 // Sets preferences recommended when using Firefox in automation with
 // Marionette.
 pref("marionette.prefs.recommended", true);
--- a/testing/marionette/server.js
+++ b/testing/marionette/server.js
@@ -19,16 +19,18 @@ Cu.import("chrome://marionette/content/d
 Cu.import("chrome://marionette/content/error.js");
 Cu.import("chrome://marionette/content/message.js");
 
 // Bug 1083711: Load transport.js as an SDK module instead of subscript
 loader.loadSubScript("resource://devtools/shared/transport/transport.js");
 
 const logger = Log.repository.getLogger("Marionette");
 
+const {KeepWhenOffline, LoopbackOnly} = Ci.nsIServerSocket;
+
 this.EXPORTED_SYMBOLS = ["server"];
 this.server = {};
 
 const PROTOCOL_VERSION = 3;
 
 const PREF_CONTENT_LISTENER = "marionette.contentListener";
 const PREF_RECOMMENDED = "marionette.prefs.recommended";
 
@@ -263,23 +265,19 @@ const RECOMMENDED_PREFS = new Map([
  * Starting the Marionette server will open a TCP socket sporting the
  * debugger transport interface on the provided |port|.  For every new
  * connection, a |server.TCPConnection| is created.
  */
 server.TCPListener = class {
   /**
    * @param {number} port
    *     Port for server to listen to.
-   * @param {boolean=} forceLocal
-   *     Listen only to connections from loopback if true (default).
-   *     When false, accept all connections.
    */
-  constructor (port, forceLocal = true) {
+  constructor (port) {
     this.port = port;
-    this.forceLocal = forceLocal;
     this.conns = new Set();
     this.nextConnID = 0;
     this.alive = false;
     this._acceptConnections = false;
     this.alteredPrefs = new Set();
   }
 
   /**
@@ -316,23 +314,19 @@ server.TCPListener = class {
         if (!Preferences.isSet(k)) {
           logger.debug(`Setting recommended pref ${k} to ${v}`);
           Preferences.set(k, v);
           this.alteredPrefs.add(k);
         }
       }
     }
 
-    let flags = Ci.nsIServerSocket.KeepWhenOffline;
-    if (this.forceLocal) {
-      flags |= Ci.nsIServerSocket.LoopbackOnly;
-    } else {
-      logger.warn("Server socket is not limited to loopback connections");
-    }
-    this.listener = new ServerSocket(this.port, flags, 1);
+    const flags = KeepWhenOffline | LoopbackOnly;
+    const backlog = 1;
+    this.listener = new ServerSocket(this.port, flags, backlog);
     this.listener.asyncListen(this);
 
     this.alive = true;
     this._acceptConnections = true;
   }
 
   stop () {
     if (!this.alive) {
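
With marionette.forcelocal gone, the server socket is always created with KeepWhenOffline | LoopbackOnly, so Marionette only ever accepts connections from the local machine. A rough Python illustration of a loopback-only listener (plain sockets, not the Marionette transport or nsIServerSocket):

    import socket

    def open_loopback_listener(port, backlog=1):
        """Listen only on the loopback interface, mirroring the LoopbackOnly flag above."""
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind(("127.0.0.1", port))  # loopback only; external interfaces are never bound
        listener.listen(backlog)
        return listener
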
--- a/testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py
+++ b/testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py
@@ -3,16 +3,17 @@ config = {
     "log_name": "bump_date",
     "version_files": [{"file": "browser/config/version_display.txt"}],
     "repo": {
         # date is used for staging mozilla-beta
         "repo": "https://hg.mozilla.org/projects/date",
         "branch": "default",
         "dest": "date",
         "vcs": "hg",
+        "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
     },
     # date is used for staging mozilla-beta
     "push_dest": "ssh://hg.mozilla.org/projects/date",
     "ignore_no_changes": True,
     "ssh_user": "ffxbld",
     "ssh_key": "~/.ssh/ffxbld_rsa",
     "ship_it_root": "https://ship-it-dev.allizom.org",
     "ship_it_username":  "stage-ffxbld",
--- a/testing/mozharness/configs/releases/dev_postrelease_firefox_release.py
+++ b/testing/mozharness/configs/releases/dev_postrelease_firefox_release.py
@@ -6,16 +6,17 @@ config = {
         {"file": "config/milestone.txt"},
     ],
     "repo": {
         # jamun is used for staging mozilla-release
         "repo": "https://hg.mozilla.org/projects/jamun",
         "branch": "default",
         "dest": "jamun",
         "vcs": "hg",
+        "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
     },
     "push_dest": "ssh://hg.mozilla.org/projects/jamun",
     "ignore_no_changes": True,
     "ssh_user": "ffxbld",
     "ssh_key": "~/.ssh/ffxbld_rsa",
     "ship_it_root": "https://ship-it-dev.allizom.org",
     "ship_it_username":  "stage-ffxbld",
 }
--- a/testing/mozharness/configs/releases/postrelease_firefox_beta.py
+++ b/testing/mozharness/configs/releases/postrelease_firefox_beta.py
@@ -1,16 +1,17 @@
 config = {
     "log_name": "bump_beta",
     "version_files": [{"file": "browser/config/version_display.txt"}],
     "repo": {
         "repo": "https://hg.mozilla.org/releases/mozilla-beta",
         "branch": "default",
         "dest": "mozilla-beta",
         "vcs": "hg",
+        "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
     },
     "vcs_share_base": "/builds/hg-shared",
     "push_dest": "ssh://hg.mozilla.org/releases/mozilla-beta",
     "ignore_no_changes": True,
     "ssh_user": "ffxbld",
     "ssh_key": "~/.ssh/ffxbld_rsa",
     "ship_it_root": "https://ship-it.mozilla.org",
     "ship_it_username":  "ffxbld",
--- a/testing/mozharness/configs/releases/postrelease_firefox_esr52.py
+++ b/testing/mozharness/configs/releases/postrelease_firefox_esr52.py
@@ -5,16 +5,17 @@ config = {
         {"file": "browser/config/version_display.txt"},
         {"file": "config/milestone.txt"},
     ],
     "repo": {
         "repo": "https://hg.mozilla.org/releases/mozilla-esr52",
         "branch": "default",
         "dest": "mozilla-esr52",
         "vcs": "hg",
+        "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
     },
     "vcs_share_base": "/builds/hg-shared",
     "push_dest": "ssh://hg.mozilla.org/releases/mozilla-esr52",
     "ignore_no_changes": True,
     "ssh_user": "ffxbld",
     "ssh_key": "~/.ssh/ffxbld_rsa",
     "ship_it_root": "https://ship-it.mozilla.org",
     "ship_it_username":  "ffxbld",
--- a/testing/mozharness/configs/releases/postrelease_firefox_release.py
+++ b/testing/mozharness/configs/releases/postrelease_firefox_release.py
@@ -5,16 +5,17 @@ config = {
         {"file": "browser/config/version_display.txt"},
         {"file": "config/milestone.txt"},
     ],
     "repo": {
         "repo": "https://hg.mozilla.org/releases/mozilla-release",
         "branch": "default",
         "dest": "mozilla-release",
         "vcs": "hg",
+        "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
     },
     "vcs_share_base": "/builds/hg-shared",
     "push_dest": "ssh://hg.mozilla.org/releases/mozilla-release",
     "ignore_no_changes": True,
     "ssh_user": "ffxbld",
     "ssh_key": "~/.ssh/ffxbld_rsa",
     "ship_it_root": "https://ship-it.mozilla.org",
     "ship_it_username":  "ffxbld",
--- a/toolkit/components/downloads/ApplicationReputation.cpp
+++ b/toolkit/components/downloads/ApplicationReputation.cpp
@@ -153,18 +153,18 @@ LookupTablesInPrefs(const nsACString& ta
     found = true;
 
     if (!shouldRecordTelemetry) {
       return found;
     }
 
     // We are checking if the table found is V2 or V4 to record telemetry
     // Both V2 and V4 begin with "goog" but V4 ends with "-proto"
-    if (StringBeginsWith(prefToken, NS_LITERAL_CSTRING("goog"))) {
-      if (StringEndsWith(prefToken, NS_LITERAL_CSTRING("-proto"))) {
+    if (StringBeginsWith(table, NS_LITERAL_CSTRING("goog"))) {
+      if (StringEndsWith(table, NS_LITERAL_CSTRING("-proto"))) {
         telemetryInfo |= TelemetryMatchInfo::eV4Match;
       } else {
         telemetryInfo |= TelemetryMatchInfo::eV2Match;
       }
     }
   }
 
   // Record telemetry for matching allow list and block list
rename from toolkit/crashreporter/jsoncpp/AUTHORS
rename to toolkit/components/jsoncpp/AUTHORS
rename from toolkit/crashreporter/jsoncpp/GIT-INFO
rename to toolkit/components/jsoncpp/GIT-INFO
rename from toolkit/crashreporter/jsoncpp/LICENSE
rename to toolkit/components/jsoncpp/LICENSE
rename from toolkit/crashreporter/jsoncpp/NEWS.txt
rename to toolkit/components/jsoncpp/NEWS.txt
rename from toolkit/crashreporter/jsoncpp/README.md
rename to toolkit/components/jsoncpp/README.md
rename from toolkit/crashreporter/jsoncpp/include/json/allocator.h
rename to toolkit/components/jsoncpp/include/json/allocator.h
rename from toolkit/crashreporter/jsoncpp/include/json/assertions.h
rename to toolkit/components/jsoncpp/include/json/assertions.h
rename from toolkit/crashreporter/jsoncpp/include/json/autolink.h
rename to toolkit/components/jsoncpp/include/json/autolink.h
rename from toolkit/crashreporter/jsoncpp/include/json/config.h
rename to toolkit/components/jsoncpp/include/json/config.h
rename from toolkit/crashreporter/jsoncpp/include/json/features.h
rename to toolkit/components/jsoncpp/include/json/features.h
rename from toolkit/crashreporter/jsoncpp/include/json/forwards.h
rename to toolkit/components/jsoncpp/include/json/forwards.h
rename from toolkit/crashreporter/jsoncpp/include/json/json.h
rename to toolkit/components/jsoncpp/include/json/json.h
rename from toolkit/crashreporter/jsoncpp/include/json/reader.h
rename to toolkit/components/jsoncpp/include/json/reader.h
rename from toolkit/crashreporter/jsoncpp/include/json/value.h
rename to toolkit/components/jsoncpp/include/json/value.h
rename from toolkit/crashreporter/jsoncpp/include/json/version.h
rename to toolkit/components/jsoncpp/include/json/version.h
rename from toolkit/crashreporter/jsoncpp/include/json/writer.h
rename to toolkit/components/jsoncpp/include/json/writer.h
rename from toolkit/crashreporter/jsoncpp/src/lib_json/json_reader.cpp
rename to toolkit/components/jsoncpp/src/lib_json/json_reader.cpp
rename from toolkit/crashreporter/jsoncpp/src/lib_json/json_tool.h
rename to toolkit/components/jsoncpp/src/lib_json/json_tool.h
rename from toolkit/crashreporter/jsoncpp/src/lib_json/json_value.cpp
rename to toolkit/components/jsoncpp/src/lib_json/json_value.cpp
rename from toolkit/crashreporter/jsoncpp/src/lib_json/json_valueiterator.inl
rename to toolkit/components/jsoncpp/src/lib_json/json_valueiterator.inl
rename from toolkit/crashreporter/jsoncpp/src/lib_json/json_writer.cpp
rename to toolkit/components/jsoncpp/src/lib_json/json_writer.cpp
new file mode 100644
--- /dev/null
+++ b/toolkit/components/jsoncpp/src/lib_json/moz.build
@@ -0,0 +1,49 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+UNIFIED_SOURCES += [
+    'json_reader.cpp',
+    'json_value.cpp',
+    'json_writer.cpp'
+]
+
+LOCAL_INCLUDES += [
+    '/toolkit/components/jsoncpp/include',
+]
+
+# This code is only used in the stand-alone minidump analyzer executable so
+# enabling exceptions should be fine.
+if CONFIG['CLANG_CL']:
+    CXXFLAGS += [
+        '-Xclang',
+        '-fcxx-exceptions',
+    ]
+elif not CONFIG['_MSC_VER']:
+    CXXFLAGS += [
+        '-fexceptions',
+    ]
+
+# Required to build with exceptions enabled
+DISABLE_STL_WRAPPING = True
+
+Library('jsoncpp')
+
+# Suppress warnings in third-party code.
+if CONFIG['_MSC_VER']:
+    CXXFLAGS += [
+        '-wd4005', # macro redefinition
+    ]
+elif CONFIG['GNU_CXX']:
+    CXXFLAGS += [
+        '-Wno-unused-local-typedefs',
+        '-Wno-shadow',
+    ]
+
+if CONFIG['CLANG_CXX'] or CONFIG['CLANG_CL']:
+    CXXFLAGS += [
+        '-Wno-implicit-fallthrough',
+        '-Wno-c++11-narrowing',
+    ]
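
The new moz.build enables C++ exceptions only for this standalone jsoncpp library and picks the flag spelling per compiler: clang-cl needs -Xclang -fcxx-exceptions, GCC/Clang use -fexceptions, and plain MSVC gets no extra flag. A plain-Python sketch of that selection (CONFIG here is a stand-in dict, not the real moz.build sandbox):

    def exception_cxxflags(config):
        """Return the exception-enabling flags the moz.build above would add."""
        if config.get('CLANG_CL'):
            return ['-Xclang', '-fcxx-exceptions']  # clang-cl spelling
        if not config.get('_MSC_VER'):
            return ['-fexceptions']                 # GCC/Clang spelling
        return []                                   # plain MSVC adds nothing here
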
--- a/toolkit/components/moz.build
+++ b/toolkit/components/moz.build
@@ -29,16 +29,17 @@ DIRS += [
     'downloads',
     'extensions',
     'exthelper',
     'filewatcher',
     'finalizationwitness',
     'formautofill',
     'find',
     'jsdownloads',
+    'jsoncpp/src/lib_json',
     'lz4',
     'mediasniffer',
     'microformats',
     'mozprotocol',
     'osfile',
     'parentalcontrols',
     'passwordmgr',
     'perf',
--- a/toolkit/components/perfmonitoring/tests/browser/browser_compartments.js
+++ b/toolkit/components/perfmonitoring/tests/browser/browser_compartments.js
@@ -160,17 +160,17 @@ function monotinicity_tester(source, tes
     for (let item of snapshot.componentsData) {
       for (let [probe, k] of [
         ["jank", "totalUserTime"],
         ["jank", "totalSystemTime"],
         ["cpow", "totalCPOWTime"]
       ]) {
         // Note that we cannot expect components data to be always smaller
         // than process data, as `getrusage` & co are not monotonic.
-        SilentAssert.leq(item[probe][k], 3 * snapshot.processData[probe][k],
+        SilentAssert.leq(item[probe][k], 6 * snapshot.processData[probe][k],
           `Sanity check (${name}): ${k} of component is not impossibly larger than that of process`);
       }
 
       let isCorrectPid = (item.processId == pid && !item.isChildProcess)
         || (item.processId != pid && item.isChildProcess);
       SilentAssert.ok(isCorrectPid, `Pid check (${name}): the item comes from the right process`);
 
       let key = item.groupId;
--- a/toolkit/components/places/tests/browser/399606-history.go-0.html
+++ b/toolkit/components/places/tests/browser/399606-history.go-0.html
@@ -1,11 +1,13 @@
 <html>
 <head>
 <title>history.go(0)</title>
 <script>
-setTimeout("history.go(0)", 1000);
+setTimeout(function() {
+  history.go(0)
+}, 1000);
 </script>
 </head>
 <body>
 Testing history.go(0)
 </body>
 </html>
--- a/toolkit/components/places/tests/browser/399606-location.reload.html
+++ b/toolkit/components/places/tests/browser/399606-location.reload.html
@@ -1,11 +1,13 @@
 <html>
 <head>
 <title>location.reload()</title>
 <script>
-setTimeout("location.reload();", 100);
+setTimeout(function() {
+  location.reload();
+}, 100);
 </script>
 </head>
 <body>
 Testing location.reload();
 </body>
 </html>
--- a/toolkit/components/places/tests/browser/399606-location.replace.html
+++ b/toolkit/components/places/tests/browser/399606-location.replace.html
@@ -1,11 +1,13 @@
 <html>
 <head>
 <title>location.replace</title>
 <script>
-setTimeout("location.replace(window.location.href)", 1000);
+setTimeout(function() {
+  location.replace(window.location.href)
+}, 1000);
 </script>
 </head>
 <body>
 Testing location.replace
 </body>
 </html>
--- a/toolkit/components/places/tests/browser/399606-window.location.href.html
+++ b/toolkit/components/places/tests/browser/399606-window.location.href.html
@@ -1,11 +1,13 @@
 <html>
 <head>
 <title>window.location.href</title>
 <script>
-setTimeout("window.location.href = window.location.href", 1000);
+setTimeout(function() {
+  window.location.href = window.location.href
+}, 1000);
 </script>
 </head>
 <body>
 Testing window.location.href
 </body>
 </html>
--- a/toolkit/components/places/tests/browser/399606-window.location.html
+++ b/toolkit/components/places/tests/browser/399606-window.location.html
@@ -1,11 +1,13 @@
 <html>
 <head>
 <title>window.location</title>
 <script>
-setTimeout("window.location = window.location", 1000);
+setTimeout(function() {
+  window.location = window.location
+}, 1000);
 </script>
 </head>
 <body>
 Testing window.location
 </body>
 </html>
--- a/toolkit/components/places/tests/favicons/test_root_icons.js
+++ b/toolkit/components/places/tests/favicons/test_root_icons.js
@@ -75,17 +75,23 @@ add_task(function* test_removePagesByTim
   Assert.equal(rows.length, 1, "There should only be 1 icon entry");
   Assert.equal(rows[0].getResultByName("root"), 1, "It should be marked as a root icon");
   rows = yield db.execute("SELECT * FROM moz_pages_w_icons");
   Assert.equal(rows.length, 0, "There should be no page entry");
   rows = yield db.execute("SELECT * FROM moz_icons_to_pages");
   Assert.equal(rows.length, 0, "There should be no relation entry");
 
   PlacesUtils.history.removePagesByTimeframe(0, PlacesUtils.toPRTime(new Date()));
+  yield PlacesTestUtils.promiseAsyncUpdates();
   rows = yield db.execute("SELECT * FROM moz_icons");
+  // Debug logging for possible intermittent failure (bug 1358368).
+  if (rows.length != 0) {
+    dump_table("moz_icons");
+    dump_table("moz_hosts");
+  }
   Assert.equal(rows.length, 0, "There should be no icon entry");
 });
 
 add_task(function* test_different_host() {
   let pageURI = NetUtil.newURI("http://places.test/page/");
   yield PlacesTestUtils.addVisits(pageURI);
   let faviconURI = NetUtil.newURI("http://mozilla.test/favicon.ico");
   PlacesUtils.favicons.replaceFaviconDataFromDataURL(
--- a/toolkit/components/places/tests/unit/test_telemetry.js
+++ b/toolkit/components/places/tests/unit/test_telemetry.js
@@ -12,17 +12,17 @@ var histograms = {
   PLACES_KEYWORDS_COUNT: val => do_check_eq(val, 1),
   PLACES_SORTED_BOOKMARKS_PERC: val => do_check_eq(val, 100),
   PLACES_TAGGED_BOOKMARKS_PERC: val => do_check_eq(val, 100),
   PLACES_DATABASE_FILESIZE_MB: val => do_check_true(val > 0),
   PLACES_DATABASE_PAGESIZE_B: val => do_check_eq(val, 32768),
   PLACES_DATABASE_SIZE_PER_PAGE_B: val => do_check_true(val > 0),
   PLACES_EXPIRATION_STEPS_TO_CLEAN2: val => do_check_true(val > 1),
   // PLACES_AUTOCOMPLETE_1ST_RESULT_TIME_MS:  val => do_check_true(val > 1),
-  PLACES_IDLE_FRECENCY_DECAY_TIME_MS: val => do_check_true(val > 0),
+  PLACES_IDLE_FRECENCY_DECAY_TIME_MS: val => do_check_true(val >= 0),
   PLACES_IDLE_MAINTENANCE_TIME_MS: val => do_check_true(val > 0),
   // One from the `setItemAnnotation` call; the other from the mobile root.
   // This can be removed along with the anno in bug 1306445.
   PLACES_ANNOS_BOOKMARKS_COUNT: val => do_check_eq(val, 2),
   PLACES_ANNOS_PAGES_COUNT: val => do_check_eq(val, 1),
   PLACES_MAINTENANCE_DAYSFROMLAST: val => do_check_true(val >= 0),
 }
 
--- a/toolkit/components/telemetry/TelemetryEnvironment.jsm
+++ b/toolkit/components/telemetry/TelemetryEnvironment.jsm
@@ -418,17 +418,17 @@ function getWindowsVersionInfo() {
       {wSuiteMask: WORD},
       {wProductType: BYTE},
       {wReserved: BYTE}
       ]);
 
   let kernel32 = ctypes.open("kernel32");
   try {
     let GetVersionEx = kernel32.declare("GetVersionExW",
-                                        ctypes.default_abi,
+                                        ctypes.winapi_abi,
                                         BOOL,
                                         OSVERSIONINFOEXW.ptr);
     let winVer = OSVERSIONINFOEXW();
     winVer.dwOSVersionInfoSize = OSVERSIONINFOEXW.size;
 
     if (0 === GetVersionEx(winVer.address())) {
       throw ("Failure in GetVersionEx (returned 0)");
     }
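
GetVersionExW is a stdcall Win32 API, so the declaration switches from the default (cdecl) ABI to the winapi ABI; on 32-bit Windows the wrong calling convention can corrupt the stack. Python's ctypes draws the same distinction, shown here as an analogue only (not the js-ctypes API used above):

    import ctypes
    import sys

    if sys.platform == "win32":
        # stdcall (the winapi ABI): what Win32 APIs such as GetVersionExW expect.
        kernel32_stdcall = ctypes.WinDLL("kernel32")
        # cdecl (the default ABI): wrong for Win32 APIs on 32-bit Windows.
        kernel32_cdecl = ctypes.CDLL("kernel32")
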
--- a/toolkit/components/telemetry/tests/unit/head.js
+++ b/toolkit/components/telemetry/tests/unit/head.js
@@ -306,16 +306,19 @@ if (runningInParent) {
   // Telemetry xpcshell tests cannot show the infobar.
   Services.prefs.setBoolPref("datareporting.policy.dataSubmissionPolicyBypassNotification", true);
   // FHR uploads should be enabled.
   Services.prefs.setBoolPref("datareporting.healthreport.uploadEnabled", true);
   // Many tests expect the shutdown ping to not be sent on shutdown and will fail
   // if receive an unexpected ping. Let's globally disable the shutdown ping sender:
   // the relevant tests will enable this pref when needed.
   Services.prefs.setBoolPref("toolkit.telemetry.shutdownPingSender.enabled", false);
+  // Ensure browser experiments are also disabled, to avoid network activity
+  // when toggling PREF_ENABLED.
+  Services.prefs.setBoolPref("experiments.enabled", false);
 
 
   fakePingSendTimer((callback, timeout) => {
     Services.tm.dispatchToMainThread(() => callback());
   },
   () => {});
 
   // This gets imported via fakeNow();
--- a/toolkit/components/url-classifier/Classifier.h
+++ b/toolkit/components/url-classifier/Classifier.h
@@ -21,19 +21,16 @@ class nsIThread;
 namespace mozilla {
 namespace safebrowsing {
 
 /**
  * Maintains the stores and LookupCaches for the url classifier.
  */
 class Classifier {
 public:
-  typedef nsClassHashtable<nsCStringHashKey, nsCString> ProviderDictType;
-
-public:
   Classifier();
   ~Classifier();
 
   nsresult Open(nsIFile& aCacheDirectory);
   void Close();
   void Reset(); // Not including any intermediary for update.
 
   /**
--- a/toolkit/components/url-classifier/nsUrlClassifierDBService.cpp
+++ b/toolkit/components/url-classifier/nsUrlClassifierDBService.cpp
@@ -13,17 +13,16 @@
 #include "nsIDirectoryService.h"
 #include "nsIKeyModule.h"
 #include "nsIObserverService.h"
 #include "nsIPermissionManager.h"
 #include "nsIPrefBranch.h"
 #include "nsIPrefService.h"
 #include "nsIProperties.h"
 #include "nsToolkitCompsCID.h"
-#include "nsIUrlClassifierUtils.h"
 #include "nsIXULRuntime.h"
 #include "nsUrlClassifierDBService.h"
 #include "nsUrlClassifierUtils.h"
 #include "nsUrlClassifierProxies.h"
 #include "nsURILoader.h"
 #include "nsString.h"
 #include "nsReadableUtils.h"
 #include "nsTArray.h"
@@ -62,18 +61,18 @@ namespace safebrowsing {
 
 nsresult
 TablesToResponse(const nsACString& tables)
 {
   if (tables.IsEmpty()) {
     return NS_OK;
   }
 
-  // We don't check mCheckMalware and friends because BuildTables never
-  // includes a table that is not enabled.
+  // We don't check mCheckMalware and friends because disabled tables are
+  // never included.
   if (FindInReadable(NS_LITERAL_CSTRING("-malware-"), tables)) {
     return NS_ERROR_MALWARE_URI;
   }
   if (FindInReadable(NS_LITERAL_CSTRING("-phish-"), tables)) {
     return NS_ERROR_PHISHING_URI;
   }
   if (FindInReadable(NS_LITERAL_CSTRING("-unwanted-"), tables)) {
     return NS_ERROR_UNWANTED_URI;
@@ -93,39 +92,19 @@ TablesToResponse(const nsACString& table
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 // MOZ_LOG=UrlClassifierDbService:5
 LazyLogModule gUrlClassifierDbServiceLog("UrlClassifierDbService");
 #define LOG(args) MOZ_LOG(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug, args)
 #define LOG_ENABLED() MOZ_LOG_TEST(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug)
 
-// Prefs for implementing nsIURIClassifier to block page loads
-#define CHECK_MALWARE_PREF      "browser.safebrowsing.malware.enabled"
-#define CHECK_MALWARE_DEFAULT   false
-
-#define CHECK_PHISHING_PREF     "browser.safebrowsing.phishing.enabled"
-#define CHECK_PHISHING_DEFAULT  false
-
-#define CHECK_BLOCKED_PREF    "browser.safebrowsing.blockedURIs.enabled"
-#define CHECK_BLOCKED_DEFAULT false
-
 #define GETHASH_NOISE_PREF      "urlclassifier.gethashnoise"
 #define GETHASH_NOISE_DEFAULT   4
 
-// Comma-separated lists
-#define MALWARE_TABLE_PREF              "urlclassifier.malwareTable"
-#define PHISH_TABLE_PREF                "urlclassifier.phishTable"
-#define TRACKING_TABLE_PREF             "urlclassifier.trackingTable"
-#define TRACKING_WHITELIST_TABLE_PREF   "urlclassifier.trackingWhitelistTable"
-#define BLOCKED_TABLE_PREF              "urlclassifier.blockedTable"
-#define DOWNLOAD_BLOCK_TABLE_PREF       "urlclassifier.downloadBlockTable"
-#define DOWNLOAD_ALLOW_TABLE_PREF       "urlclassifier.downloadAllowTable"
-#define DISALLOW_COMPLETION_TABLE_PREF  "urlclassifier.disallow_completions"
-
 #define CONFIRM_AGE_PREF        "urlclassifier.max-complete-age"
 #define CONFIRM_AGE_DEFAULT_SEC (45 * 60)
 
 // 30 minutes as the maximum negative cache duration.
 #define MAXIMUM_NEGATIVE_CACHE_DURATION_SEC (30 * 60 * 1000)
 
 // TODO: The following two prefs are to be removed after we
 //       roll out full v4 hash completion. See Bug 1331534.
@@ -138,17 +117,18 @@ class nsUrlClassifierDBServiceWorker;
 static nsUrlClassifierDBService* sUrlClassifierDBService;
 
 nsIThread* nsUrlClassifierDBService::gDbBackgroundThread = nullptr;
 
 // Once we've committed to shutting down, don't do work in the background
 // thread.
 static bool gShuttingDownThread = false;
 
-static mozilla::Atomic<int32_t> gFreshnessGuarantee(CONFIRM_AGE_DEFAULT_SEC);
+static mozilla::Atomic<uint32_t, Relaxed> gFreshnessGuarantee(CONFIRM_AGE_DEFAULT_SEC);
+static uint32_t sGethashNoise = GETHASH_NOISE_DEFAULT;
 
 NS_IMPL_ISUPPORTS(nsUrlClassifierDBServiceWorker,
                   nsIUrlClassifierDBService)
 
 nsUrlClassifierDBServiceWorker::nsUrlClassifierDBServiceWorker()
   : mInStream(false)
   , mGethashNoise(0)
   , mPendingLookupLock("nsUrlClassifierDBServerWorker.mPendingLookupLock")
@@ -1587,58 +1567,73 @@ nsUrlClassifierDBService::nsUrlClassifie
 {
 }
 
 nsUrlClassifierDBService::~nsUrlClassifierDBService()
 {
   sUrlClassifierDBService = nullptr;
 }
 
+void
+AppendTables(const nsCString& aTables, nsCString &outTables)
+{
+  if (!aTables.IsEmpty()) {
+    if (!outTables.IsEmpty()) {
+      outTables.Append(',');
+    }
+    outTables.Append(aTables);
+  }
+}
+
 nsresult
 nsUrlClassifierDBService::ReadTablesFromPrefs()
 {
+  mCheckMalware = Preferences::GetBool(CHECK_MALWARE_PREF,
+    CHECK_MALWARE_DEFAULT);
+  mCheckPhishing = Preferences::GetBool(CHECK_PHISHING_PREF,
+    CHECK_PHISHING_DEFAULT);
+  mCheckBlockedURIs = Preferences::GetBool(CHECK_BLOCKED_PREF,
+    CHECK_BLOCKED_DEFAULT);
+
   nsCString allTables;
   nsCString tables;
+
+  mBaseTables.Truncate();
+  mTrackingProtectionTables.Truncate();
+
   Preferences::GetCString(PHISH_TABLE_PREF, &allTables);
+  if (mCheckPhishing) {
+    AppendTables(allTables, mBaseTables);
+  }
 
   Preferences::GetCString(MALWARE_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
+  AppendTables(tables, allTables);
+  if (mCheckMalware) {
+    AppendTables(tables, mBaseTables);
+  }
+
+  Preferences::GetCString(BLOCKED_TABLE_PREF, &tables);
+  AppendTables(tables, allTables);
+  if (mCheckBlockedURIs) {
+    AppendTables(tables, mBaseTables);
   }
 
   Preferences::GetCString(DOWNLOAD_BLOCK_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
-  }
+  AppendTables(tables, allTables);
 
   Preferences::GetCString(DOWNLOAD_ALLOW_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
-  }
+  AppendTables(tables, allTables);
 
   Preferences::GetCString(TRACKING_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
-  }
+  AppendTables(tables, allTables);
+  AppendTables(tables, mTrackingProtectionTables);
 
   Preferences::GetCString(TRACKING_WHITELIST_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
-  }
-
-  Preferences::GetCString(BLOCKED_TABLE_PREF, &tables);
-  if (!tables.IsEmpty()) {
-    allTables.Append(',');
-    allTables.Append(tables);
-  }
+  AppendTables(tables, allTables);
+  AppendTables(tables, mTrackingProtectionTables);
 
   Classifier::SplitTables(allTables, mGethashTables);
 
   Preferences::GetCString(DISALLOW_COMPLETION_TABLE_PREF, &tables);
   Classifier::SplitTables(tables, mDisallowCompletionsTables);
 
   return NS_OK;
 }
@@ -1666,29 +1661,21 @@ nsUrlClassifierDBService::Init()
     // Note that since we never register an observer, Shutdown() will also never
     // be called in the content process.
     return NS_OK;
   default:
     // No other process type is supported!
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  // Retrieve all the preferences.
-  mCheckMalware = Preferences::GetBool(CHECK_MALWARE_PREF,
-    CHECK_MALWARE_DEFAULT);
-  mCheckPhishing = Preferences::GetBool(CHECK_PHISHING_PREF,
-    CHECK_PHISHING_DEFAULT);
-  mCheckBlockedURIs = Preferences::GetBool(CHECK_BLOCKED_PREF,
-    CHECK_BLOCKED_DEFAULT);
-  uint32_t gethashNoise = Preferences::GetUint(GETHASH_NOISE_PREF,
+  sGethashNoise = Preferences::GetUint(GETHASH_NOISE_PREF,
     GETHASH_NOISE_DEFAULT);
   gFreshnessGuarantee = Preferences::GetInt(CONFIRM_AGE_PREF,
     CONFIRM_AGE_DEFAULT_SEC);
   ReadTablesFromPrefs();
-
   nsresult rv;
 
   {
     // Force PSM loading on main thread
     nsCOMPtr<nsICryptoHash> dummy = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
     NS_ENSURE_SUCCESS(rv, rv);
   }
 
@@ -1715,17 +1702,17 @@ nsUrlClassifierDBService::Init()
   rv = NS_NewNamedThread("URL Classifier", &gDbBackgroundThread);
   if (NS_FAILED(rv))
     return rv;
 
   mWorker = new nsUrlClassifierDBServiceWorker();
   if (!mWorker)
     return NS_ERROR_OUT_OF_MEMORY;
 
-  rv = mWorker->Init(gethashNoise, cacheDir);
+  rv = mWorker->Init(sGethashNoise, cacheDir);
   if (NS_FAILED(rv)) {
     mWorker = nullptr;
     return rv;
   }
 
   // Proxy for calling the worker on the background thread
   mWorkerProxy = new UrlClassifierDBServiceWorkerProxy(mWorker);
   rv = mWorkerProxy->OpenDb();
@@ -1742,74 +1729,28 @@ nsUrlClassifierDBService::Init()
   // The application is about to quit
   observerService->AddObserver(this, "quit-application", false);
   observerService->AddObserver(this, "profile-before-change", false);
 
   // XXX: Do we *really* need to be able to change all of these at runtime?
   // Note: These observers should only be added when everything else above has
   //       succeeded. Failing to do so can cause long shutdown times in certain
   //       situations. See Bug 1247798 and Bug 1244803.
-  Preferences::AddStrongObserver(this, CHECK_MALWARE_PREF);
-  Preferences::AddStrongObserver(this, CHECK_PHISHING_PREF);
-  Preferences::AddStrongObserver(this, CHECK_BLOCKED_PREF);
-  Preferences::AddStrongObserver(this, GETHASH_NOISE_PREF);
-  Preferences::AddStrongObserver(this, CONFIRM_AGE_PREF);
-  Preferences::AddStrongObserver(this, PHISH_TABLE_PREF);
-  Preferences::AddStrongObserver(this, MALWARE_TABLE_PREF);
-  Preferences::AddStrongObserver(this, TRACKING_TABLE_PREF);
-  Preferences::AddStrongObserver(this, TRACKING_WHITELIST_TABLE_PREF);
-  Preferences::AddStrongObserver(this, BLOCKED_TABLE_PREF);
-  Preferences::AddStrongObserver(this, DOWNLOAD_BLOCK_TABLE_PREF);
-  Preferences::AddStrongObserver(this, DOWNLOAD_ALLOW_TABLE_PREF);
-  Preferences::AddStrongObserver(this, DISALLOW_COMPLETION_TABLE_PREF);
+  Preferences::AddUintVarCache(&sGethashNoise, GETHASH_NOISE_PREF,
+    GETHASH_NOISE_DEFAULT);
+  Preferences::AddAtomicUintVarCache(&gFreshnessGuarantee, CONFIRM_AGE_PREF,
+    CONFIRM_AGE_DEFAULT_SEC);
+
+  for (uint8_t i = 0; i < kObservedPrefs.Length(); i++) {
+    Preferences::AddStrongObserver(this, kObservedPrefs[i].get());
+  }
 
   return NS_OK;
 }
 
-void
-nsUrlClassifierDBService::BuildTables(bool aTrackingProtectionEnabled,
-                                      nsCString &tables)
-{
-  nsAutoCString malware;
-  // LookupURI takes a comma-separated list already.
-  Preferences::GetCString(MALWARE_TABLE_PREF, &malware);
-  if (mCheckMalware && !malware.IsEmpty()) {
-    tables.Append(malware);
-  }
-  nsAutoCString phishing;
-  Preferences::GetCString(PHISH_TABLE_PREF, &phishing);
-  if (mCheckPhishing && !phishing.IsEmpty()) {
-    tables.Append(',');
-    tables.Append(phishing);
-  }
-  if (aTrackingProtectionEnabled) {
-    nsAutoCString tracking, trackingWhitelist;
-    Preferences::GetCString(TRACKING_TABLE_PREF, &tracking);
-    if (!tracking.IsEmpty()) {
-      tables.Append(',');
-      tables.Append(tracking);
-    }
-    Preferences::GetCString(TRACKING_WHITELIST_TABLE_PREF, &trackingWhitelist);
-    if (!trackingWhitelist.IsEmpty()) {
-      tables.Append(',');
-      tables.Append(trackingWhitelist);
-    }
-  }
-  nsAutoCString blocked;
-  Preferences::GetCString(BLOCKED_TABLE_PREF, &blocked);
-  if (mCheckBlockedURIs && !blocked.IsEmpty()) {
-    tables.Append(',');
-    tables.Append(blocked);
-  }
-
-  if (StringBeginsWith(tables, NS_LITERAL_CSTRING(","))) {
-    tables.Cut(0, 1);
-  }
-}
-
 // nsChannelClassifier is the only consumer of this interface.
 NS_IMETHODIMP
 nsUrlClassifierDBService::Classify(nsIPrincipal* aPrincipal,
                                    nsIEventTarget* aEventTarget,
                                    bool aTrackingProtectionEnabled,
                                    nsIURIClassifierCallback* c,
                                    bool* result)
 {
@@ -1855,18 +1796,20 @@ nsUrlClassifierDBService::Classify(nsIPr
     return NS_OK;
   }
 
   RefPtr<nsUrlClassifierClassifyCallback> callback =
     new nsUrlClassifierClassifyCallback(c);
 
   if (!callback) return NS_ERROR_OUT_OF_MEMORY;
 
-  nsAutoCString tables;
-  BuildTables(aTrackingProtectionEnabled, tables);
+  nsCString tables = mBaseTables;
+  if (aTrackingProtectionEnabled) {
+    AppendTables(mTrackingProtectionTables, tables);
+  }
 
   nsresult rv = LookupURI(aPrincipal, tables, callback, false, result);
   if (rv == NS_ERROR_MALFORMED_URI) {
     *result = false;
     // The URI had no hostname, don't try to classify it.
     return NS_OK;
   }
   NS_ENSURE_SUCCESS(rv, rv);
@@ -2333,39 +2276,18 @@ nsUrlClassifierDBService::Observe(nsISup
                                   const char16_t *aData)
 {
   if (!strcmp(aTopic, NS_PREFBRANCH_PREFCHANGE_TOPIC_ID)) {
     nsresult rv;
     nsCOMPtr<nsIPrefBranch> prefs(do_QueryInterface(aSubject, &rv));
     NS_ENSURE_SUCCESS(rv, rv);
     Unused << prefs;
 
-    if (NS_LITERAL_STRING(CHECK_MALWARE_PREF).Equals(aData)) {
-      mCheckMalware = Preferences::GetBool(CHECK_MALWARE_PREF,
-        CHECK_MALWARE_DEFAULT);
-    } else if (NS_LITERAL_STRING(CHECK_PHISHING_PREF).Equals(aData)) {
-      mCheckPhishing = Preferences::GetBool(CHECK_PHISHING_PREF,
-        CHECK_PHISHING_DEFAULT);
-    } else if (NS_LITERAL_STRING(CHECK_BLOCKED_PREF).Equals(aData)) {
-      mCheckBlockedURIs = Preferences::GetBool(CHECK_BLOCKED_PREF,
-        CHECK_BLOCKED_DEFAULT);
-    } else if (
-      NS_LITERAL_STRING(PHISH_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(MALWARE_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(TRACKING_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(TRACKING_WHITELIST_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(BLOCKED_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(DOWNLOAD_BLOCK_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(DOWNLOAD_ALLOW_TABLE_PREF).Equals(aData) ||
-      NS_LITERAL_STRING(DISALLOW_COMPLETION_TABLE_PREF).Equals(aData)) {
-      // Just read everything again.
+    if (kObservedPrefs.Contains(NS_ConvertUTF16toUTF8(aData))) {
       ReadTablesFromPrefs();
-    } else if (NS_LITERAL_STRING(CONFIRM_AGE_PREF).Equals(aData)) {
-      gFreshnessGuarantee = Preferences::GetInt(CONFIRM_AGE_PREF,
-        CONFIRM_AGE_DEFAULT_SEC);
     }
   } else if (!strcmp(aTopic, "quit-application")) {
     // Tell the update thread to finish as soon as possible.
     gShuttingDownThread = true;
   } else if (!strcmp(aTopic, "profile-before-change")) {
     gShuttingDownThread = true;
     Shutdown();
   } else {
@@ -2387,28 +2309,19 @@ nsUrlClassifierDBService::Shutdown()
   }
 
   Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_SHUTDOWN_TIME> timer;
 
   mCompleters.Clear();
 
   nsCOMPtr<nsIPrefBranch> prefs = do_GetService(NS_PREFSERVICE_CONTRACTID);
   if (prefs) {
-    prefs->RemoveObserver(CHECK_MALWARE_PREF, this);
-    prefs->RemoveObserver(CHECK_PHISHING_PREF, this);
-    prefs->RemoveObserver(CHECK_BLOCKED_PREF, this);
-    prefs->RemoveObserver(PHISH_TABLE_PREF, this);
-    prefs->RemoveObserver(MALWARE_TABLE_PREF, this);
-    prefs->RemoveObserver(TRACKING_TABLE_PREF, this);
-    prefs->RemoveObserver(TRACKING_WHITELIST_TABLE_PREF, this);
-    prefs->RemoveObserver(BLOCKED_TABLE_PREF, this);
-    prefs->RemoveObserver(DOWNLOAD_BLOCK_TABLE_PREF, this);
-    prefs->RemoveObserver(DOWNLOAD_ALLOW_TABLE_PREF, this);
-    prefs->RemoveObserver(DISALLOW_COMPLETION_TABLE_PREF, this);
-    prefs->RemoveObserver(CONFIRM_AGE_PREF, this);
+    for (uint8_t i = 0; i < kObservedPrefs.Length(); i++) {
+      prefs->RemoveObserver(kObservedPrefs[i].get(), this);
+    }
   }
 
   // 1. Synchronize with worker thread and update thread by
   //    *synchronously* dispatching an event to worker thread
   //    for shutting down the update thread. The reason not
   //    shutting down update thread directly from main thread
   //    is to avoid racing for Classifier::mUpdateThread
   //    between main thread and the worker thread. (Both threads
--- a/toolkit/components/url-classifier/nsUrlClassifierDBService.h
+++ b/toolkit/components/url-classifier/nsUrlClassifierDBService.h
@@ -36,16 +36,36 @@
 #define DOMAIN_LENGTH 4
 
 // The hash length of a partial hash entry.
 #define PARTIAL_LENGTH 4
 
 // The hash length of a complete hash entry.
 #define COMPLETE_LENGTH 32
 
+// Prefs for implementing nsIURIClassifier to block page loads
+#define CHECK_MALWARE_PREF      "browser.safebrowsing.malware.enabled"
+#define CHECK_MALWARE_DEFAULT   false
+
+#define CHECK_PHISHING_PREF     "browser.safebrowsing.phishing.enabled"
+#define CHECK_PHISHING_DEFAULT  false
+
+#define CHECK_BLOCKED_PREF      "browser.safebrowsing.blockedURIs.enabled"
+#define CHECK_BLOCKED_DEFAULT   false
+
+// Comma-separated lists
+#define MALWARE_TABLE_PREF              "urlclassifier.malwareTable"
+#define PHISH_TABLE_PREF                "urlclassifier.phishTable"
+#define TRACKING_TABLE_PREF             "urlclassifier.trackingTable"
+#define TRACKING_WHITELIST_TABLE_PREF   "urlclassifier.trackingWhitelistTable"
+#define BLOCKED_TABLE_PREF              "urlclassifier.blockedTable"
+#define DOWNLOAD_BLOCK_TABLE_PREF       "urlclassifier.downloadBlockTable"
+#define DOWNLOAD_ALLOW_TABLE_PREF       "urlclassifier.downloadAllowTable"
+#define DISALLOW_COMPLETION_TABLE_PREF  "urlclassifier.disallow_completions"
+
 using namespace mozilla::safebrowsing;
 
 class nsUrlClassifierDBServiceWorker;
 class nsIThread;
 class nsIURI;
 class UrlClassifierDBServiceWorkerProxy;
 namespace mozilla {
 namespace safebrowsing {
@@ -85,16 +105,31 @@ public:
   nsresult CacheCompletions(mozilla::safebrowsing::CacheResultArray *results);
   nsresult CacheMisses(mozilla::safebrowsing::PrefixArray *results);
 
   static nsIThread* BackgroundThread();
 
   static bool ShutdownHasStarted();
 
 private:
+
+  const nsTArray<nsCString> kObservedPrefs = {
+    NS_LITERAL_CSTRING(CHECK_MALWARE_PREF),
+    NS_LITERAL_CSTRING(CHECK_PHISHING_PREF),
+    NS_LITERAL_CSTRING(CHECK_BLOCKED_PREF),
+    NS_LITERAL_CSTRING(MALWARE_TABLE_PREF),
+    NS_LITERAL_CSTRING(PHISH_TABLE_PREF),
+    NS_LITERAL_CSTRING(TRACKING_TABLE_PREF),
+    NS_LITERAL_CSTRING(TRACKING_WHITELIST_TABLE_PREF),
+    NS_LITERAL_CSTRING(BLOCKED_TABLE_PREF),
+    NS_LITERAL_CSTRING(DOWNLOAD_BLOCK_TABLE_PREF),
+    NS_LITERAL_CSTRING(DOWNLOAD_ALLOW_TABLE_PREF),
+    NS_LITERAL_CSTRING(DISALLOW_COMPLETION_TABLE_PREF)
+  };
+
   // No subclassing
   ~nsUrlClassifierDBService();
 
   // Disallow copy constructor
   nsUrlClassifierDBService(nsUrlClassifierDBService&);
 
   nsresult LookupURI(nsIPrincipal* aPrincipal,
                      const nsACString& tables,
@@ -103,22 +138,18 @@ private:
 
   // Close db connection and join the background thread if it exists.
   nsresult Shutdown();
 
   // Check if the key is on a known-clean host.
   nsresult CheckClean(const nsACString &lookupKey,
                       bool *clean);
 
-  // Read everything into mGethashTables and mDisallowCompletionTables
   nsresult ReadTablesFromPrefs();
 
-  // Build a comma-separated list of tables to check
-  void BuildTables(bool trackingProtectionEnabled, nsCString& tables);
-
   RefPtr<nsUrlClassifierDBServiceWorker> mWorker;
   RefPtr<UrlClassifierDBServiceWorkerProxy> mWorkerProxy;
 
   nsInterfaceHashtable<nsCStringHashKey, nsIUrlClassifierHashCompleter> mCompleters;
 
   // TRUE if the nsURIClassifier implementation should check for malware
   // uris on document loads.
   bool mCheckMalware;
@@ -138,16 +169,20 @@ private:
   bool mInUpdate;
 
   // The list of tables that can use the default hash completer object.
   nsTArray<nsCString> mGethashTables;
 
   // The list of tables that should never be hash completed.
   nsTArray<nsCString> mDisallowCompletionsTables;
 
+  // Comma-separated list of tables to use in lookups.
+  nsCString mTrackingProtectionTables;
+  nsCString mBaseTables;
+
   // Thread that we do the updates on.
   static nsIThread* gDbBackgroundThread;
 };
 
 class nsUrlClassifierDBServiceWorker final : public nsIUrlClassifierDBService
 {
 public:
   nsUrlClassifierDBServiceWorker();
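
On the header side, the observed pref names move into a single kObservedPrefs array, so registration in Init(), the membership check in Observe(), and removal in Shutdown() all loop over one list, while the numeric prefs are tracked by var caches. A short Python sketch of observing a list of preference names (the prefs object and its add_observer/remove_observer methods are hypothetical, not Gecko's Preferences):

    OBSERVED_PREFS = [
        "browser.safebrowsing.malware.enabled",
        "browser.safebrowsing.phishing.enabled",
        "browser.safebrowsing.blockedURIs.enabled",
        "urlclassifier.malwareTable",
        "urlclassifier.phishTable",
        "urlclassifier.trackingTable",
        "urlclassifier.trackingWhitelistTable",
        "urlclassifier.blockedTable",
        "urlclassifier.downloadBlockTable",
        "urlclassifier.downloadAllowTable",
        "urlclassifier.disallow_completions",
    ]

    class TablePrefObserver:
        def __init__(self, prefs):
            self.prefs = prefs
            for name in OBSERVED_PREFS:        # mirrors the kObservedPrefs loop in Init()
                prefs.add_observer(name, self)

        def observe(self, name):
            if name in OBSERVED_PREFS:         # single membership check, as in Observe()
                self.read_tables_from_prefs()

        def shutdown(self):
            for name in OBSERVED_PREFS:        # and the matching loop in Shutdown()
                self.prefs.remove_observer(name, self)

        def read_tables_from_prefs(self):
            pass  # re-read everything, like ReadTablesFromPrefs() above
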
--- a/toolkit/crashreporter/client/moz.build
+++ b/toolkit/crashreporter/client/moz.build
@@ -8,17 +8,17 @@ if CONFIG['OS_TARGET'] != 'Android':
     Program('crashreporter')
 
     UNIFIED_SOURCES += [
         'crashreporter.cpp',
         'ping.cpp',
     ]
 
     LOCAL_INCLUDES += [
-        '/toolkit/crashreporter/jsoncpp/include',
+        '/toolkit/components/jsoncpp/include',
     ]
 
     USE_LIBS += [
         'jsoncpp',
     ]
 
 if CONFIG['OS_ARCH'] == 'WINNT':
     UNIFIED_SOURCES += [
deleted file mode 100644
--- a/toolkit/crashreporter/jsoncpp/src/lib_json/moz.build
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-UNIFIED_SOURCES += [
-    'json_reader.cpp',
-    'json_value.cpp',
-    'json_writer.cpp'
-]
-
-LOCAL_INCLUDES += [
-    '/toolkit/crashreporter/jsoncpp/include',
-]
-
-# This code is only used in the stand-alone minidump analyzer executable so
-# enabling exceptions should be fine.
-if CONFIG['CLANG_CL']:
-    CXXFLAGS += [
-        '-Xclang',
-        '-fcxx-exceptions',
-    ]
-elif not CONFIG['_MSC_VER']:
-    CXXFLAGS += [
-        '-fexceptions',
-    ]
-
-# Required to build with exceptions enabled
-DISABLE_STL_WRAPPING = True
-
-Library('jsoncpp')
-
-include('/toolkit/crashreporter/crashreporter.mozbuild')
--- a/toolkit/crashreporter/minidump-analyzer/moz.build
+++ b/toolkit/crashreporter/minidump-analyzer/moz.build
@@ -15,17 +15,17 @@ if CONFIG['OS_TARGET'] != 'Android':
     ]
 
     USE_LIBS += [
         'breakpad_processor',
         'jsoncpp',
     ]
 
     LOCAL_INCLUDES += [
-        '/toolkit/crashreporter/jsoncpp/include',
+        '/toolkit/components/jsoncpp/include',
     ]
 
 
     if CONFIG['OS_TARGET'] == 'Darwin':
         DIST_SUBDIR = 'crashreporter.app/Contents/MacOS'
 
 # Don't use the STL wrappers in the crashreporter clients; they don't
 # link with -lmozalloc, and it really doesn't matter here anyway.
--- a/toolkit/crashreporter/moz.build
+++ b/toolkit/crashreporter/moz.build
@@ -38,17 +38,16 @@ elif CONFIG['OS_ARCH'] == 'Linux':
         'google-breakpad/src/common/linux',
         'google-breakpad/src/processor',
         'google-breakpad/src/tools/linux/dump_syms',
     ]
 
 
 DIRS += [
     'client',
-    'jsoncpp/src/lib_json',
     'minidump-analyzer',
 ]
 
 if CONFIG['MOZ_CRASHREPORTER_INJECTOR']:
     DIRS += ['injector']
     UNIFIED_SOURCES += [
         'InjectCrashReporter.cpp',
         'LoadLibraryRemote.cpp',
--- a/toolkit/crashreporter/update-jsoncpp.sh
+++ b/toolkit/crashreporter/update-jsoncpp.sh
@@ -13,17 +13,17 @@ if [ $# -lt 1 ]; then
 fi
 
 crashreporter_dir=$(realpath $(dirname $0))
 repo=${crashreporter_dir}/../..
 rm -rf ${crashreporter_dir}/jsoncpp
 
 jsoncpp_repo=$1
 rev=${2-HEAD}
-(cd $jsoncpp_repo; git archive --prefix=toolkit/crashreporter/jsoncpp/ $rev) | (cd $repo; tar xf -)
+(cd $jsoncpp_repo; git archive --prefix=toolkit/components/jsoncpp/ $rev) | (cd $repo; tar xf -)
 
 # remove some extraneous bits
 rm -rf \
   ${crashreporter_dir}/jsoncpp/.clang-format \
   ${crashreporter_dir}/jsoncpp/.gitattributes \
   ${crashreporter_dir}/jsoncpp/.gitignore \
   ${crashreporter_dir}/jsoncpp/.travis.yml \
   ${crashreporter_dir}/jsoncpp/CMakeLists.txt \
deleted file mode 100644
index 281fb2acf6e55b5d0778d590f300c7ecf992d959..0000000000000000000000000000000000000000
GIT binary patch
literal 0
Hc$@<O00001
--- a/toolkit/themes/linux/global/jar.mn
+++ b/toolkit/themes/linux/global/jar.mn
@@ -30,17 +30,16 @@ toolkit.jar:
    skin/classic/global/scrollbox.css
    skin/classic/global/splitter.css
    skin/classic/global/tabbox.css
    skin/classic/global/textbox.css
    skin/classic/global/toolbar.css
    skin/classic/global/toolbarbutton.css
    skin/classic/global/tree.css
    skin/classic/global/alerts/alert.css                        (alerts/alert.css)
-   skin/classic/global/dirListing/remote.png                   (dirListing/remote.png)
 
    skin/classic/global/icons/Authentication.png                (icons/Authentication.png)
    skin/classic/global/icons/autocomplete-search.svg           (icons/autocomplete-search.svg)
    skin/classic/global/icons/autoscroll.png                    (icons/autoscroll.png)
    skin/classic/global/icons/blacklist_favicon.png             (icons/blacklist_favicon.png)
    skin/classic/global/icons/blacklist_large.png               (icons/blacklist_large.png)
    skin/classic/global/icons/close.svg                         (icons/close.svg)
    skin/classic/global/icons/resizer.png                       (icons/resizer.png)
deleted file mode 100644
index d854bd9d9f92356c4e941712ed0b2a23ea98bf82..0000000000000000000000000000000000000000
GIT binary patch
literal 0
Hc$@<O00001
--- a/toolkit/themes/osx/global/jar.mn
+++ b/toolkit/themes/osx/global/jar.mn
@@ -68,17 +68,16 @@ toolkit.jar:
   skin/classic/global/arrow/panelarrow-horizontal.png                (arrow/panelarrow-horizontal.png)
   skin/classic/global/arrow/panelarrow-horizontal@2x.png             (arrow/panelarrow-horizontal@2x.png)
   skin/classic/global/arrow/panelarrow-vertical.png                  (arrow/panelarrow-vertical.png)
   skin/classic/global/arrow/panelarrow-vertical@2x.png               (arrow/panelarrow-vertical@2x.png)
   skin/classic/global/checkbox/cbox-check.gif                        (checkbox/cbox-check.gif)
   skin/classic/global/checkbox/cbox-check-dis.gif                    (checkbox/cbox-check-dis.gif)
   skin/classic/global/dirListing/dirListing.css                      (dirListing/dirListing.css)
   skin/classic/global/dirListing/folder.png                          (dirListing/folder.png)
-  skin/classic/global/dirListing/remote.png                          (dirListing/remote.png)
   skin/classic/global/dirListing/up.png                              (dirListing/up.png)
   skin/classic/global/icons/autocomplete-dropmarker.png              (icons/autocomplete-dropmarker.png)
   skin/classic/global/icons/autocomplete-search.svg                  (icons/autocomplete-search.svg)
   skin/classic/global/icons/autoscroll.png                           (icons/autoscroll.png)
   skin/classic/global/icons/blacklist_favicon.png                    (icons/blacklist_favicon.png)
   skin/classic/global/icons/blacklist_64.png                         (icons/blacklist_64.png)
   skin/classic/global/icons/chevron.png                              (icons/chevron.png)
   skin/classic/global/icons/chevron-inverted.png                     (icons/chevron-inverted.png)
@@ -133,9 +132,8 @@ toolkit.jar:
   skin/classic/global/tree/folder.png                                (tree/folder.png)
   skin/classic/global/tree/folder@2x.png                             (tree/folder@2x.png)
 
 #if MOZ_BUILD_APP == browser
 [browser/extensions/{972ce4c6-7e08-4474-a285-3208198ce6fd}] chrome.jar:
 #elif MOZ_SEPARATE_MANIFEST_FOR_THEME_OVERRIDES
 [extensions/{972ce4c6-7e08-4474-a285-3208198ce6fd}] chrome.jar:
 #endif
-% override chrome://global/skin/dirListing/local.png                 chrome://global/skin/dirListing/folder.png
--- a/toolkit/themes/shared/non-mac.jar.inc.mn
+++ b/toolkit/themes/shared/non-mac.jar.inc.mn
@@ -44,19 +44,16 @@
   skin/classic/global/arrow/arrow-up-hov.gif               (../../windows/global/arrow/arrow-up-hov.gif)
   skin/classic/global/arrow/arrow-up-sharp.gif             (../../windows/global/arrow/arrow-up-sharp.gif)
   skin/classic/global/arrow/panelarrow-horizontal.svg      (../../windows/global/arrow/panelarrow-horizontal.svg)
   skin/classic/global/arrow/panelarrow-vertical.svg        (../../windows/global/arrow/panelarrow-vertical.svg)
   skin/classic/global/arrow/panelarrow-horizontal-themed.svg (../../windows/global/arrow/panelarrow-horizontal-themed.svg)
   skin/classic/global/arrow/panelarrow-vertical-themed.svg   (../../windows/global/arrow/panelarrow-vertical-themed.svg)
 
 * skin/classic/global/dirListing/dirListing.css            (../../windows/global/dirListing/dirListing.css)
-  skin/classic/global/dirListing/folder.png                (../../windows/global/dirListing/folder.png)
-  skin/classic/global/dirListing/local.png                 (../../windows/global/dirListing/local.png)
-  skin/classic/global/dirListing/up.png                    (../../windows/global/dirListing/up.png)
   skin/classic/global/icons/Close.gif                      (../../windows/global/icons/Close.gif)
   skin/classic/global/icons/close.png                      (../../windows/global/icons/close.png)
   skin/classic/global/icons/close@2x.png                   (../../windows/global/icons/close@2x.png)
   skin/classic/global/icons/close-inverted.png             (../../windows/global/icons/close-inverted.png)
   skin/classic/global/icons/close-inverted@2x.png          (../../windows/global/icons/close-inverted@2x.png)
   skin/classic/global/icons/collapse.png                   (../../windows/global/icons/collapse.png)
   skin/classic/global/icons/Error.png                      (../../windows/global/icons/Error.png)
   skin/classic/global/icons/error-16.png                   (../../windows/global/icons/error-16.png)
deleted file mode 100644
index c5da02b7fcba44bf1ed72db879d59e9f9519c69e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
Hc$@<O00001
deleted file mode 100644
index c7d01f84168f728deae9f9d001b15759dbb353ab..0000000000000000000000000000000000000000
GIT binary patch
literal 0
Hc$@<O00001
--- a/toolkit/themes/windows/global/jar.mn
+++ b/toolkit/themes/windows/global/jar.mn
@@ -28,17 +28,18 @@ toolkit.jar:
   skin/classic/global/printPageSetup.css
   skin/classic/global/printPreview.css
   skin/classic/global/scrollbox.css
   skin/classic/global/splitter.css
   skin/classic/global/toolbar.css
   skin/classic/global/toolbarbutton.css
 * skin/classic/global/tree.css
   skin/classic/global/alerts/alert.css                     (alerts/alert.css)
-  skin/classic/global/dirListing/remote.png                (dirListing/remote.png)
+  skin/classic/global/dirListing/folder.png                (dirListing/folder.png)
+  skin/classic/global/dirListing/up.png                    (dirListing/up.png)
   skin/classic/global/icons/autocomplete-search.svg        (icons/autocomplete-search.svg)
   skin/classic/global/icons/blacklist_favicon.png          (icons/blacklist_favicon.png)
   skin/classic/global/icons/blacklist_large.png            (icons/blacklist_large.png)
   skin/classic/global/icons/close-win7.png                 (icons/close-win7.png)
   skin/classic/global/icons/close-win7@2x.png              (icons/close-win7@2x.png)
   skin/classic/global/icons/close-inverted-win7.png        (icons/close-inverted-win7.png)
   skin/classic/global/icons/close-inverted-win7@2x.png     (icons/close-inverted-win7@2x.png)
   skin/classic/global/icons/information-16.png             (icons/information-16.png)
--- a/tools/lint/eslint/eslint-plugin-mozilla/lib/configs/recommended.js
+++ b/tools/lint/eslint/eslint-plugin-mozilla/lib/configs/recommended.js
@@ -109,16 +109,19 @@ module.exports = {
     "no-dupe-keys": "error",
 
     // No duplicate cases in switch statements
     "no-duplicate-case": "error",
 
     // Disallow unnecessary calls to .bind()
     "no-extra-bind": "error",
 
+    // Disallow eval and setInterval/setTimeout with strings
+    "no-implied-eval": "error",
+
     // No labels
     "no-labels": "error",
 
     // Disallow unnecessary nested blocks
     "no-lone-blocks": "error",
 
     // If an if block ends with a return no need for an else block
     "no-else-return": "error",
--- a/tools/lint/eslint/eslint-plugin-mozilla/package.json
+++ b/tools/lint/eslint/eslint-plugin-mozilla/package.json
@@ -1,11 +1,11 @@
 {
   "name": "eslint-plugin-mozilla",
-  "version": "0.2.41",
+  "version": "0.2.42",
   "description": "A collection of rules that help enforce JavaScript coding standard in the Mozilla project.",
   "keywords": [
     "eslint",
     "eslintplugin",
     "eslint-plugin",
     "mozilla",
     "firefox"
   ],