Merge mozilla-central to mozilla-inbound.
author: Cosmin Sabou <csabou@mozilla.com>
Tue, 14 May 2019 19:09:51 +0300
changeset 535728 ca0a32144a35f52ab1fce6a348f4bd047542289b
parent 535727 944f0cf988f6384155932ae0048ac6b9d06636d8 (current diff)
parent 535687 b0645d43f221993cd1810a50c27abbf3a34f3055 (diff)
child 535729 47ce033ef46a724bd37f1ce3743358de93888066
push id: 2082
push user: ffxbld-merge
push date: Mon, 01 Jul 2019 08:34:18 +0000
treeherder: mozilla-release@2fb19d0466d2 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
milestone: 68.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge mozilla-central to mozilla-inbound.
browser/base/content/test/general/browser_contextmenu.js
browser/base/content/test/general/browser_contextmenu_childprocess.js
browser/base/content/test/general/browser_contextmenu_input.js
browser/base/content/test/general/contextmenu_common.js
browser/base/content/test/general/ctxmenu-image.png
browser/base/content/test/general/subtst_contextmenu.html
browser/base/content/test/general/subtst_contextmenu_input.html
browser/base/content/test/general/subtst_contextmenu_xul.xul
testing/web-platform/meta/service-workers/service-worker/update-bytecheck.https.html.ini
testing/web-platform/meta/service-workers/service-worker/update-missing-import-scripts.https.html.ini
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -357,17 +357,17 @@ dependencies = [
 ]
 
 [[package]]
 name = "bookmark_sync"
 version = "0.1.0"
 dependencies = [
  "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "cstr 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "dogear 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dogear 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "moz_task 0.1.0",
  "nserror 0.1.0",
  "nsstring 0.1.0",
  "storage 0.1.0",
  "storage_variant 0.1.0",
  "thin-vec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -915,17 +915,17 @@ dependencies = [
  "regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
  "strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "dogear"
-version = "0.2.4"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallbitvec 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "dtoa"
@@ -3686,17 +3686,17 @@ dependencies = [
 "checksum darling_macro 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)" = "244e8987bd4e174385240cde20a3657f607fb0797563c28255c353b5819a07b1"
 "checksum deflate 0.7.19 (registry+https://github.com/rust-lang/crates.io-index)" = "8a6abb26e16e8d419b5c78662aa9f82857c2386a073da266840e474d5055ec86"
 "checksum derive_more 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f57d78cf3bd45270dad4e70c21ec77a960b36c7a841ff9db76aaa775a8fb871"
 "checksum devd-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e7c9ac481c38baf400d3b732e4a06850dfaa491d1b6379a249d9d40d14c2434c"
 "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
 "checksum digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05f47366984d3ad862010e22c7ce81a7dbcaebbdfb37241a620f8b6596ee135c"
 "checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
 "checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
-"checksum dogear 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "30ac4a8e8f834f02deb2266b1f279aa5494e990c625d8be8f2988a7c708ba1f8"
+"checksum dogear 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "26b7583e1427e296c852f3217eaab3890e698f742b8d7349beb1f40c4e946fc9"
 "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
 "checksum dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d"
 "checksum dwrote 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c31c624339dab99c223a4b26c2e803b7c248adaca91549ce654c76f39a03f5c8"
 "checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
 "checksum ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"
 "checksum encoding_c 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "769ecb8b33323998e482b218c0d13cd64c267609023b4b7ec3ee740714c318ee"
 "checksum encoding_rs 0.8.16 (registry+https://github.com/rust-lang/crates.io-index)" = "0535f350c60aac0b87ccf28319abc749391e912192255b0c00a2c12c6917bd73"
 "checksum env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0561146661ae44c579e993456bc76d11ce1e0c7d745e57b2fa7146b6e49fa2ad"
--- a/browser/base/content/browser-addons.js
+++ b/browser/base/content/browser-addons.js
@@ -347,23 +347,20 @@ var gXPInstallObserver = {
     Services.telemetry
             .getHistogramById("SECURITY_UI")
             .add(Ci.nsISecurityUITelemetry.WARNING_CONFIRM_ADDON_INSTALL);
   },
 
   // IDs of addon install related notifications
   NOTIFICATION_IDS: [
     "addon-install-blocked",
-    "addon-install-blocked-silent",
     "addon-install-complete",
     "addon-install-confirmation",
-    "addon-install-disabled",
     "addon-install-failed",
     "addon-install-origin-blocked",
-    "addon-install-started",
     "addon-progress",
     "addon-webext-permissions",
     "xpinstall-disabled",
   ],
 
   // Remove all opened addon installation notifications
   removeAllNotifications(browser) {
     this.NOTIFICATION_IDS.forEach((id) => {
--- a/browser/base/content/test/contextMenu/browser.ini
+++ b/browser/base/content/test/contextMenu/browser.ini
@@ -1,15 +1,28 @@
 [DEFAULT]
 support-files =
-  !/browser/base/content/test/general/contextmenu_common.js
   subtst_contextmenu_webext.html
   test_contextmenu_links.html
+  subtst_contextmenu.html
+  subtst_contextmenu_input.html
+  subtst_contextmenu_xul.xul
+  ctxmenu-image.png
+  ../general/head.js
+  ../general/video.ogg
+  ../general/audio.ogg
+  contextmenu_common.js
 
 [browser_contextmenu_touch.js]
 skip-if = !(os == 'win' && os_version == '10.0')
 [browser_contextmenu_linkopen.js]
 [browser_contextmenu_iframe.js]
 support-files =
   test_contextmenu_iframe.html
 [browser_utilityOverlay.js]
 skip-if = os == "linux" || os == "mac" #Bug 1444631
 [browser_utilityOverlayPrincipal.js]
+[browser_contextmenu_childprocess.js]
+[browser_contextmenu.js]
+tags = fullscreen clipboard
+skip-if = toolkit == "gtk3" || verify || (os == "win" && processor == "aarch64") # disabled on Linux due to bug 513558, aarch64 due to 1531590
+[browser_contextmenu_input.js]
+skip-if = toolkit == "gtk3" || (os == "win" && processor == "aarch64") # disabled on Linux due to bug 513558, aarch64 due to 1533161
rename from browser/base/content/test/general/browser_contextmenu.js
rename to browser/base/content/test/contextMenu/browser_contextmenu.js
--- a/browser/base/content/test/general/browser_contextmenu.js
+++ b/browser/base/content/test/contextMenu/browser_contextmenu.js
@@ -8,22 +8,26 @@ let LOGIN_FILL_ITEMS = [
       "fill-login-no-logins", false,
       "---", null,
       "fill-login-saved-passwords", true,
     ], null,
 ];
 let hasPocket = Services.prefs.getBoolPref("extensions.pocket.enabled");
 let hasContainers = Services.prefs.getBoolPref("privacy.userContext.enabled");
 
-const example_base = "http://example.com/browser/browser/base/content/test/general/";
-const chrome_base = "chrome://mochitests/content/browser/browser/base/content/test/general/";
+const example_base = "http://example.com/browser/browser/base/content/test/contextMenu/";
+const chrome_base = "chrome://mochitests/content/browser/browser/base/content/test/contextMenu/";
+const head_base = "chrome://mochitests/content/browser/browser/base/content/test/contextMenu/";
 
 /* import-globals-from contextmenu_common.js */
 Services.scriptloader.loadSubScript(chrome_base + "contextmenu_common.js", this);
 
+/* import-globals-from ../general/head.js */
+Services.scriptloader.loadSubScript(head_base + "head.js", this);
+
 add_task(async function init() {
   // Ensure screenshots is really disabled (bug 1498738)
   const addon = await AddonManager.getAddonByID("screenshots@mozilla.org");
   await addon.disable({allowSystemAddons: true});
 });
 
 // Below are test cases for XUL element
 add_task(async function test_xul_text_link_label() {
rename from browser/base/content/test/general/browser_contextmenu_childprocess.js
rename to browser/base/content/test/contextMenu/browser_contextmenu_childprocess.js
--- a/browser/base/content/test/general/browser_contextmenu_childprocess.js
+++ b/browser/base/content/test/contextMenu/browser_contextmenu_childprocess.js
@@ -1,12 +1,12 @@
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
-const gBaseURL = "https://example.com/browser/browser/base/content/test/general/";
+const gBaseURL = "https://example.com/browser/browser/base/content/test/contextMenu/";
 
 add_task(async function() {
   let tab = await BrowserTestUtils.openNewForegroundTab(gBrowser, gBaseURL + "subtst_contextmenu.html");
 
   let contextMenu = document.getElementById("contentAreaContextMenu");
 
   // Get the point of the element with the page menu (test-pagemenu) and
   // synthesize a right mouse click there.
rename from browser/base/content/test/general/browser_contextmenu_input.js
rename to browser/base/content/test/contextMenu/browser_contextmenu_input.js
--- a/browser/base/content/test/general/browser_contextmenu_input.js
+++ b/browser/base/content/test/contextMenu/browser_contextmenu_input.js
@@ -1,19 +1,19 @@
 "use strict";
 
 let contextMenu;
 let hasPocket = Services.prefs.getBoolPref("extensions.pocket.enabled");
 
 add_task(async function test_setup() {
-  const example_base = "http://example.com/browser/browser/base/content/test/general/";
+  const example_base = "http://example.com/browser/browser/base/content/test/contextMenu/";
   const url = example_base + "subtst_contextmenu_input.html";
   await BrowserTestUtils.openNewForegroundTab(gBrowser, url);
 
-  const chrome_base = "chrome://mochitests/content/browser/browser/base/content/test/general/";
+  const chrome_base = "chrome://mochitests/content/browser/browser/base/content/test/contextMenu/";
   const contextmenu_common = chrome_base + "contextmenu_common.js";
   /* import-globals-from contextmenu_common.js */
   Services.scriptloader.loadSubScript(contextmenu_common, this);
 
   // Ensure screenshots is really disabled (bug 1498738)
   const addon = await AddonManager.getAddonByID("screenshots@mozilla.org");
   await addon.disable({allowSystemAddons: true});
 });
rename from browser/base/content/test/general/contextmenu_common.js
rename to browser/base/content/test/contextMenu/contextmenu_common.js
rename from browser/base/content/test/general/ctxmenu-image.png
rename to browser/base/content/test/contextMenu/ctxmenu-image.png
rename from browser/base/content/test/general/subtst_contextmenu.html
rename to browser/base/content/test/contextMenu/subtst_contextmenu.html
rename from browser/base/content/test/general/subtst_contextmenu_input.html
rename to browser/base/content/test/contextMenu/subtst_contextmenu_input.html
rename from browser/base/content/test/general/subtst_contextmenu_xul.xul
rename to browser/base/content/test/contextMenu/subtst_contextmenu_xul.xul
--- a/browser/base/content/test/general/browser.ini
+++ b/browser/base/content/test/general/browser.ini
@@ -20,18 +20,16 @@ support-files =
   browser_web_channel.html
   browser_web_channel_iframe.html
   bug592338.html
   bug792517-2.html
   bug792517.html
   bug792517.sjs
   bug839103.css
   clipboard_pastefile.html
-  contextmenu_common.js
-  ctxmenu-image.png
   discovery.html
   download_page.html
   download_page_1.txt
   download_page_2.txt
   dummy_page.html
   file_documentnavigation_frameset.html
   file_double_close_tab.html
   file_fullscreen-window-open.html
@@ -41,19 +39,16 @@ support-files =
   navigating_window_with_download.html
   page_style_sample.html
   pinning_headers.sjs
   ssl_error_reports.sjs
   print_postdata.sjs
   searchSuggestionEngine.sjs
   searchSuggestionEngine.xml
   searchSuggestionEngine2.xml
-  subtst_contextmenu.html
-  subtst_contextmenu_input.html
-  subtst_contextmenu_xul.xul
   test_bug462673.html
   test_bug628179.html
   test_bug839103.html
   test_process_flags_chrome.html
   title_test.svg
   unknownContentType_file.pif
   unknownContentType_file.pif^headers^
   video.ogg
@@ -242,23 +237,16 @@ tags = clipboard
 [browser_clipboard_pastefile.js]
 skip-if = true # Disabled due to the clipboard not supporting real file types yet (bug 1288773)
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_contentAreaClick.js]
 skip-if = e10s # Clicks in content don't go through contentAreaClick with e10s.
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_contentAltClick.js]
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
-[browser_contextmenu.js]
-tags = fullscreen clipboard
-skip-if = toolkit == "gtk3" || verify || (os == "win" && processor == "aarch64") # disabled on Linux due to bug 513558, aarch64 due to 1531590
-# DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
-[browser_contextmenu_input.js]
-skip-if = toolkit == "gtk3" || (os == "win" && processor == "aarch64") # disabled on Linux due to bug 513558, aarch64 due to 1533161
-# DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_ctrlTab.js]
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_datachoices_notification.js]
 skip-if = !datareporting || (verify && !debug && (os == 'win'))
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_decoderDoctor.js]
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_search_discovery.js]
@@ -447,18 +435,16 @@ uses-unsafe-cpows = true
 tags = psm
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_windowactivation.js]
 skip-if = verify
 support-files =
   file_window_activation.html
   file_window_activation2.html
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
-[browser_contextmenu_childprocess.js]
-# DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_bug963945.js]
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_domFullscreen_fullscreenMode.js]
 tags = fullscreen
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
 [browser_newTabDrop.js]
 # DO NOT ADD MORE TESTS HERE. USE A TOPICAL DIRECTORY INSTEAD.
--- a/devtools/client/aboutdebugging-new/documentation/TESTS_REAL_DEVICES.md
+++ b/devtools/client/aboutdebugging-new/documentation/TESTS_REAL_DEVICES.md
@@ -8,17 +8,17 @@ The tests that use a real device are loc
 
 ## Setup environment
 ### Real device side
 1. Enable USB debugging on your device
 2. Launch Firefox
 3. Enable USB debugging on your Firefox
 4. Connect to your PC via USB
 
-You can refer to https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_Firefox_for_Android_with_WebIDE#Setting_up_the_Android_device.
+You can refer to https://developer.mozilla.org/en-US/docs/Tools/Remote_Debugging/Debugging_over_USB
 
 ### PC side
 Setup the real device information to evaluate the validity in tests.
 
 1. Copy a sample file which is located at `devtools/client/aboutdebugging-new/test/browser/real/usb-runtimes-sample.json` and rename it for example to `devtools/client/aboutdebugging-new/test/browser/real/local-usb-runtimes.json`.
 2. Edit the file.
 
    This is a JSON file like below, write your real device information in here. This example indicates that there should be one USB device and should be displayed `Pixel 2` as device name and `Firefox Nightly` as short name on the sidebar of about:debugging. Regarding the other information, please see `Detail of config file` section of this document.
--- a/devtools/client/aboutdebugging-new/src/components/CompatibilityWarning.js
+++ b/devtools/client/aboutdebugging-new/src/components/CompatibilityWarning.js
@@ -10,17 +10,17 @@ const dom = require("devtools/client/sha
 const FluentReact = require("devtools/client/shared/vendor/fluent-react");
 const Localized = createFactory(FluentReact.Localized);
 
 const Message = createFactory(require("./shared/Message"));
 
 const { MESSAGE_LEVEL } = require("../constants");
 const { COMPATIBILITY_STATUS } = require("devtools/client/shared/remote-debugging/version-checker");
 
-const TROUBLESHOOTING_URL = "https://developer.mozilla.org/docs/Tools/WebIDE/Troubleshooting";
+const TROUBLESHOOTING_URL = "https://developer.mozilla.org/docs/Tools/about:debugging#Troubleshooting";
 
 const Types = require("../types/index");
 
 class CompatibilityWarning extends PureComponent {
   static get propTypes() {
     return {
       compatibilityReport: Types.compatibilityReport.isRequired,
     };
--- a/devtools/client/aboutdebugging-new/src/components/ServiceWorkersWarning.js
+++ b/devtools/client/aboutdebugging-new/src/components/ServiceWorkersWarning.js
@@ -8,17 +8,17 @@ const { createFactory, PureComponent } =
 const dom = require("devtools/client/shared/vendor/react-dom-factories");
 
 const FluentReact = require("devtools/client/shared/vendor/fluent-react");
 const Localized = createFactory(FluentReact.Localized);
 
 const Message = createFactory(require("./shared/Message"));
 
 const { MESSAGE_LEVEL } = require("../constants");
-const DOC_URL = "https://developer.mozilla.org/en-US/docs/Tools/about%3Adebugging#Service_workers_not_compatible";
+const DOC_URL = "https://developer.mozilla.org/docs/Tools/about:debugging#Service_workers_not_compatible";
 
 class ServiceWorkersWarning extends PureComponent {
   render() {
     return Message(
       {
         level: MESSAGE_LEVEL.WARNING,
         isCloseable: true,
       },
--- a/devtools/client/aboutdebugging-new/src/components/connect/ConnectPage.js
+++ b/devtools/client/aboutdebugging-new/src/components/connect/ConnectPage.js
@@ -141,25 +141,25 @@ class ConnectPage extends PureComponent 
         ),
       },
       isAddonInstalled
         ? ConnectSteps(
           {
             steps: [
               {
                 localizationId: "about-debugging-setup-usb-step-enable-dev-menu",
-                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_Firefox_for_Android_with_WebIDE#Setting_up_the_Android_device",
+                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_over_USB",
               },
               {
                 localizationId: "about-debugging-setup-usb-step-enable-debug",
-                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_Firefox_for_Android_with_WebIDE#Setting_up_the_Android_device",
+                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_over_USB",
               },
               {
                 localizationId: "about-debugging-setup-usb-step-enable-debug-firefox",
-                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_Firefox_for_Android_with_WebIDE#Setting_up_the_Android_device",
+                url: "https://developer.mozilla.org/docs/Tools/Remote_Debugging/Debugging_over_USB",
               },
               { localizationId: "about-debugging-setup-usb-step-plug-device" },
             ],
           }
         )
         : Localized(
           {
             id: "about-debugging-setup-usb-disabled",
--- a/devtools/client/aboutdebugging/aboutdebugging.css
+++ b/devtools/client/aboutdebugging/aboutdebugging.css
@@ -26,16 +26,18 @@ button {
 }
 
 .category {
   align-items: center;
   /* Override a `background-color` set on all buttons by common.inc.css */
   background-color: transparent;
   display: flex;
   flex-direction: row;
+  /* Override button min-width set by common.inc.css for compact width case */
+  min-width: initial;
 }
 
 .category.selected {
   /* Override a `color: inherit !important` forced on all buttons by common.inc.css */
   color: var(--in-content-category-text-selected) !important;
 }
 
 .category-name {
--- a/devtools/client/themes/webconsole.css
+++ b/devtools/client/themes/webconsole.css
@@ -1,49 +1,69 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 /* Webconsole specific theme variables */
 :root {
-  /* Output rows should be 20px tall for a single line of text;
-   * 20 = 3 (top padding) + 14 (line-height) + 3 (bottom padding)
-   */
-  --console-output-font-size: 11px;
-  --console-output-line-height: calc(14 / 11);
+  --console-output-line-height: 14px;
   --console-output-vertical-padding: 3px;
   /* Width of the left gutter where icons appear */
   --console-inline-start-gutter: 32px;
   /* Icons perfectly centered in the left gutter "feel" closer to the window
    * edge than to message text. This value pushes them slightly to the right. */
   --console-icon-horizontal-offset: 1px;
 }
 
 .theme-dark {
-  --console-output-icon-error-color: var(--red-40);
-  --console-output-icon-warning-color: var(--yellow-60);
-  --console-output-indent-border-color: var(--theme-highlight-blue);
+  --console-input-background: var(--theme-tab-toolbar-background);
+  --console-message-background: var(--theme-body-background);
+  --console-message-border: var(--theme-splitter-color);
+  --console-message-color: var(--theme-text-color-strong);
+  --console-error-background: hsl(345, 23%, 24%);
+  --console-error-border: hsl(345, 30%, 35%);
+  --console-error-color: var(--red-20);
+  --console-error-icon-color: var(--red-40);
+  --console-warning-background: hsl(42, 37%, 19%);
+  --console-warning-border: hsl(60, 30%, 26%);
+  --console-warning-color: hsl(43, 94%, 81%);
+  --console-warning-icon-color: var(--yellow-60);
+  --console-navigation-color: var(--theme-highlight-blue);
+  --console-navigation-border: var(--blue-60);
+  --console-indent-border-color: var(--theme-highlight-blue);
+  --console-repeat-bubble-background: var(--blue-60);
+
+  /* TODO in bug 1549195: colors used in shared components (e.g. Reps) should
+     be renamed and/or moved to variables.css so they work everywhere */
   --error-color: var(--red-20);
-  --error-background-color: hsl(345, 23%, 24%);
-  --warning-color: hsl(43, 94%, 81%);
-  --warning-background-color: hsl(42, 37%, 19%);
   --console-output-color: white;
-  --repeat-bubble-background-color: var(--blue-60);
 }
 
 .theme-light {
-  --console-output-icon-error-color: var(--red-60);
-  --console-output-icon-warning-color: var(--yellow-65);
-  --console-output-indent-border-color: var(--theme-highlight-blue);
+  --console-input-background: var(--theme-body-background);
+  --console-message-background: var(--theme-body-background);
+  --console-message-border: #f2f2f4; /* between Grey 10 and Grey 20 */
+  --console-message-color: var(--theme-text-color-strong);
+  --console-error-background: hsl(344, 73%, 97%);
+  --console-error-border: rgba(215, 0, 34, 0.12); /* Red 60 + opacity */
+  --console-error-color: var(--red-70);
+  --console-error-icon-color: var(--red-60);
+  --console-warning-background: hsl(54, 100%, 92%);
+  --console-warning-border: rgba(215, 182, 0, 0.28); /* Yellow 60 + opacity */
+  --console-warning-color: var(--yellow-80);
+  --console-warning-icon-color: var(--yellow-65);
+  --console-navigation-color: var(--theme-highlight-blue);
+  --console-navigation-border: var(--blue-30);
+  --console-indent-border-color: var(--theme-highlight-blue);
+  --console-repeat-bubble-background: var(--theme-highlight-blue);
+
+  /* TODO in bug 1549195: colors used in shared components (e.g. Reps) should
+     be renamed and/or moved to variables.css so they work everywhere */
   --error-color: var(--red-70);
-  --error-background-color: hsl(344, 73%, 97%);
-  --warning-color: var(--yellow-80);
-  --warning-background-color: hsl(54, 100%, 92%);
   --console-output-color: var(--grey-90);
-  --repeat-bubble-background-color: var(--theme-highlight-blue);
 }
 
 /* General output styles */
 
 a {
   -moz-user-focus: normal;
   cursor: pointer;
   text-decoration: underline;
@@ -52,97 +72,149 @@ a {
 /* Workaround for Bug 575675 - FindChildWithRules aRelevantLinkVisited
  * assertion when loading HTML page with links in XUL iframe */
 *:visited { }
 
 * {
   box-sizing: border-box;
 }
 
+/*
+ * Stack messages on the z axis so that we can make their borders overlap
+ * and show color borders on top:
+ *
+ *   -----------------  <-- Red
+ *   Error message
+ *   -----------------  <-- Red
+ *   Normal message
+ *   -----------------  <-- Grey
+ *
+ * and:
+ *
+ *   -----------------  <-- Grey
+ *   Normal message
+ *   -----------------  <-- Red
+ *   Error message
+ *   -----------------  <-- Red
+ *
+ * The exact stacking order is:
+ * 
+ *   - z-index: 3 = Navigation and Paused markers
+ *   - z-index: 2 = Errors and warnings
+ *   - z-index: 1 = Other (console.log, console.info, requests, etc.)
+ */
 .message {
+  position: relative;
+  z-index: 1;
   display: flex;
   width: 100%;
+  /* Make the top border cover the previous message's bottom border */
+  margin-top: -1px;
+  border-top: 1px solid var(--console-message-border);
+  border-bottom: 1px solid var(--console-message-border);
   /* Avoid vertical padding, so that we can draw full-height items (e.g. indent guides).
-   * Use vertical margins on children instead. */
-  padding-inline-start: 1px;
+   * Use vertical margins of --console-output-vertical-padding on children instead. */
+  padding-block: 0;
+  /* Layout of the 32px-wide left gutter:
+   * | 4px message padding | 24px icon container | 4px icon margin |
+   * Note: on hover we show a 3px pseudo-border on top of the left padding. */
+  padding-inline-start: 4px;
   padding-inline-end: 8px;
-  border-inline-start: solid 3px transparent;
-  font-size: var(--console-output-font-size);
+  font-size: var(--theme-code-font-size);
   line-height: var(--console-output-line-height);
-  position: relative;
+  color: var(--console-message-color);
+  background-color: var(--console-message-background);
 }
 
 @media (min-width: 1000px) {
   .message {
     padding-inline-end: 12px;
   }
 }
 
+/* We already paint a top border on jsterm-input-container (and we need to keep
+ * it when scrolling console content), so remove the last item's border. */
+.message:last-child {
+  border-bottom-width: 0;
+}
+
+/*
+ * Show a blue border on the left side of rows on hover. Avoid using border
+ * properties because the top/bottom borders would not span the full width.
+ */
+.message:hover::before {
+  content: "";
+  position: absolute;
+  inset-inline: 0;
+  top: 0;
+  bottom: 0;
+  background: var(--theme-highlight-blue);
+  width: 3px;
+}
+
 /*
  * By default, prevent any element in message to overflow.
  * This makes console reflows faster (See Bug 1487457).
  */
 .message * {
   overflow: hidden;
 }
 
 /* Reset the overflow for the network info panel (when a network message is expanded) */
 .message .network-info * {
   overflow: unset;
 }
 
-.message:hover {
-  border-inline-start-color: var(--theme-highlight-blue);
-}
-
 .message.error {
-  color: var(--error-color);
-  background-color: var(--error-background-color);
-}
-
-.message.navigationMarker {
-  border-top: 1px solid var(--theme-emphasized-splitter-color);
-  color: var(--object-color);
-}
-
-/* Removes the top border for message in console (See Bug 1512621). */
-.webconsole-output:first-child.message.navigationMarker {
-  border-top: none;
+  z-index: 2;
+  color: var(--console-error-color);
+  border-color: var(--console-error-border);
+  background-color: var(--console-error-background);
 }
 
 .message.warn {
-  color: var(--warning-color);
-  background-color: var(--warning-background-color);
+  z-index: 2;
+  color: var(--console-warning-color);
+  border-color: var(--console-warning-border);
+  background-color: var(--console-warning-background);
+}
+
+.message.navigationMarker {
+  z-index: 3;
+  color: var(--console-navigation-color);
+  border-color: var(--console-navigation-border);
 }
 
-.message.paused::before {
-  background: var(--purple-50);
-  opacity: 0.6;
-  width: 100vw;
-  height: 1px;
-  bottom: 0px;
-  left: -3px;
-  display: block;
-  content: "";
-  position: absolute;
+.message.paused {
+  z-index: 3;
 }
 
-.message.paused.paused-before::before {
-  top: 0px;
-  bottom: inherit;
+.message.paused.paused-before {
+  border-top-color: var(--purple-50);
+}
+
+.message.paused:not(.paused-before) {
+  border-bottom-color: var(--purple-50);
+  /* always show the border, even for the last child */
+  border-bottom-width: 1px;
 }
 
 .message.paused ~ .message:not(.command):not(.result) .message-body-wrapper,
 .message.paused.paused-before .message-body-wrapper {
   opacity: 0.5;
 }
 
 .message.startGroup,
 .message.startGroupCollapsed {
-  --console-output-indent-border-color: transparent;
+  --console-indent-border-color: transparent;
+}
+
+/* Hide border between a command and its result */
+.message.command + .result.log {
+  border-top-width: 0;
 }
 
 .message > .prefix,
 .message > .timestamp {
   flex: none;
   color: var(--theme-comment);
   margin: var(--console-output-vertical-padding) 4px;
 }
@@ -151,22 +223,25 @@ a {
   .message > .timestamp {
     display: none;
   }
 }
 
 .message > .indent {
   flex: none;
   display: inline-block;
+  /* Display indent borders above the message's top and bottom border.
+   * This avoids interrupted indent lines (looking like dashes). */
+  margin-block: -1px;
   margin-inline-start: 12px;
-  border-inline-end: solid 1px var(--console-output-indent-border-color);
+  border-inline-end: solid 1px var(--console-indent-border-color);
 }
 
 .message > .indent.warning-indent {
-  border-inline-end-color: var(--warning-color);
+  border-inline-end-color: var(--console-warning-color);
 }
 
 .message > .indent[data-indent="0"] {
   display: none;
 }
 
 /* Center first level indent within the left gutter */
 .message:not(.startGroup):not(.startGroupCollapsed) > .indent[data-indent="1"] {
@@ -208,27 +283,27 @@ a {
 }
 
 .message.info > .icon {
   color: var(--theme-icon-color);
   background-image: url(chrome://devtools/skin/images/webconsole/info.svg);
 }
 
 .message.error > .icon {
-  color: var(--console-output-icon-error-color);
+  color: var(--console-error-icon-color);
   background-image: url(chrome://devtools/skin/images/webconsole/error.svg);
 }
 
 .message.warn > .icon {
-  color: var(--console-output-icon-warning-color);
+  color: var(--console-warning-icon-color);
   background-image: url(chrome://devtools/skin/images/alert.svg);
 }
 
 .message.navigationMarker > .icon {
-  color: var(--object-color);
+  color: var(--console-navigation-color);
   background-image: url(chrome://devtools/skin/images/webconsole/navigation.svg);
 }
 
 .message:hover > .icon.rewindable {
   background-image: url(chrome://devtools/skin/images/next-circle.svg);
   cursor: pointer;
   transform: rotate(180deg);
 }
@@ -273,27 +348,27 @@ a {
   font: message-box;
   font-size: 0.8em;
   font-weight: normal;
 }
 
 .message-repeats {
   display: inline-block;
   color: white;
-  background-color: var(--repeat-bubble-background-color);
+  background-color: var(--console-repeat-bubble-background);
 }
 
 .message-repeats[value="1"] {
   display: none;
 }
 
 .warning-group-badge {
   display: inline-block;
-  color: var(--warning-background-color);
-  background-color: var(--warning-color);
+  color: var(--console-warning-background);
+  background-color: var(--console-warning-color);
 }
 
 .message-location {
   max-width: 40vw;
   flex-shrink: 0;
   color: var(--frame-link-source);
   margin-left: 1ch;
   /* Makes the file name truncated (and ellipsis shown) on the left side */
@@ -395,17 +470,17 @@ a {
 
 html .jsterm-input-node-html,
 html #webconsole-notificationbox {
   flex: 0;
   width: 100vw;
 }
 
 .jsterm-input-container {
-  background-color: var(--theme-tab-toolbar-background);
+  background-color: var(--console-input-background);
   border-top: 1px solid var(--theme-splitter-color);
   position: relative;
 }
 
 .jsterm-input-node {
   box-sizing: border-box;
   min-height: 100%;
   color: var(--theme-text-color-strong);
@@ -418,23 +493,16 @@ html #webconsole-notificationbox {
   -moz-context-properties: fill;
   fill: var(--theme-icon-dimmed-color);
 }
 
 .jsterm-complete-node {
   color: var(--theme-comment);
 }
 
-.theme-light .jsterm-input-container {
-  /* For light theme use a white background for the input - it looks better
-     than off-white */
-  background-color: #fff;
-  border-top-color: #e0e0e0;
-}
-
 textarea.jsterm-input-node,
 textarea.jsterm-complete-node {
   width: 100%;
   margin: 0;
   border: none;
   background-color: transparent;
   resize: none;
   font-size: inherit;
@@ -455,17 +523,17 @@ textarea.jsterm-complete-node {
 textarea.jsterm-input-node:focus {
   fill: var(--theme-icon-checked-color);
   box-shadow: none;
   outline: none;
 }
 
 /* CodeMirror-powered JsTerm */
 .jsterm-cm .jsterm-input-container > .CodeMirror {
-  font-size: var(--console-output-font-size);
+  font-size: var(--theme-code-font-size);
   line-height: var(--console-output-line-height);
   /* aim for a 32px left space (a descendent has 4px padding) */
   padding-inline-start: calc(var(--console-inline-start-gutter) - 4px);
   /* input icon */
   background-image: url(chrome://devtools/skin/images/webconsole/input.svg);
   background-position-x: calc(10px + var(--console-icon-horizontal-offset));
   background-position-y: 5px;
   background-repeat: no-repeat;
@@ -675,17 +743,17 @@ a.learn-more-link.webconsole-learn-more-
 .theme-dark .message.warn .objectLeftBrace,
 .theme-dark .message.warn .objectRightBrace,
 .theme-dark .message.warn .arrayLeftBracket,
 .theme-dark .message.warn .arrayRightBracket {
   color: var(--theme-body-color);
 }
 .theme-dark .message.error .tree.object-inspector,
 .theme-dark .message.warn .tree.object-inspector {
-  --console-output-indent-border-color: var(--theme-body-color);
+  --console-indent-border-color: var(--theme-body-color);
 }
 
 .webconsole-app .message-flex-body > .message-body {
   overflow: hidden;
 }
 
 .webconsole-app .message-body > * {
   flex-shrink: 0;
@@ -892,17 +960,17 @@ body {
 
 /* Object Inspector */
 .webconsole-app .object-inspector.tree {
   display: inline-block;
   max-width: 100%;
 }
 
 .webconsole-app .object-inspector.tree .tree-indent {
-  border-inline-start-color: var(--console-output-indent-border-color);
+  border-inline-start-color: var(--console-indent-border-color);
 }
 
 .webconsole-app .object-inspector.tree .tree-node:hover:not(.focused) {
   background-color: var(--object-inspector-hover-background);
 }
 
 /*
  * Make console.group, exception and XHR message's arrow look the same as the arrow
--- a/dom/base/nsDocumentEncoder.cpp
+++ b/dom/base/nsDocumentEncoder.cpp
@@ -333,18 +333,25 @@ class FixupNodeDeterminer {
   nsINode& mOriginalNode;
 };
 
 nsresult nsDocumentEncoder::SerializeNodeStart(nsINode& aOriginalNode,
                                                int32_t aStartOffset,
                                                int32_t aEndOffset,
                                                nsAString& aStr,
                                                nsINode* aFixupNode) {
-  if (mNeedsPreformatScanning && aOriginalNode.IsElement()) {
-    mSerializer->ScanElementForPreformat(aOriginalNode.AsElement());
+  if (mNeedsPreformatScanning) {
+    if (aOriginalNode.IsElement()) {
+      mSerializer->ScanElementForPreformat(aOriginalNode.AsElement());
+    } else if (aOriginalNode.IsText()) {
+      const nsCOMPtr<nsINode> parent = aOriginalNode.GetParent();
+      if (parent && parent->IsElement()) {
+        mSerializer->ScanElementForPreformat(parent->AsElement());
+      }
+    }
   }
 
   if (!IsVisibleNode(&aOriginalNode)) {
     return NS_OK;
   }
 
   FixupNodeDeterminer fixupNodeDeterminer{mNodeFixup, aFixupNode,
                                           aOriginalNode};
@@ -388,18 +395,25 @@ nsresult nsDocumentEncoder::SerializeNod
       break;
     }
   }
 
   return NS_OK;
 }
 
 nsresult nsDocumentEncoder::SerializeNodeEnd(nsINode& aNode, nsAString& aStr) {
-  if (mNeedsPreformatScanning && aNode.IsElement()) {
-    mSerializer->ForgetElementForPreformat(aNode.AsElement());
+  if (mNeedsPreformatScanning) {
+    if (aNode.IsElement()) {
+      mSerializer->ForgetElementForPreformat(aNode.AsElement());
+    } else if (aNode.IsText()) {
+      const nsCOMPtr<nsINode> parent = aNode.GetParent();
+      if (parent && parent->IsElement()) {
+        mSerializer->ForgetElementForPreformat(parent->AsElement());
+      }
+    }
   }
 
   if (!IsVisibleNode(&aNode)) {
     return NS_OK;
   }
 
   if (aNode.IsElement()) {
     mSerializer->AppendElementEnd(aNode.AsElement(), aStr);
--- a/dom/base/test/test_bug116083.html
+++ b/dom/base/test/test_bug116083.html
@@ -11,16 +11,20 @@ https://bugzilla.mozilla.org/show_bug.cg
 </head>
 <body>
 <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=116083">Mozilla Bug 116083</a>
 <div id="content">
 <div style="white-space: pre">foo  bar</div>
 <div style="white-space: pre-wrap">foo  bar</div>
 <div style="white-space: pre-line">foo  bar</div>
 <div style="white-space: -moz-pre-space">foo  bar</div>
+<div style="white-space: pre" data-collapse-selection-to-child-and-extend>foo  bar</div>
+<div style="white-space: pre-wrap" data-collapse-selection-to-child-and-extend>foo  bar</div>
+<div style="white-space: pre-line" data-collapse-selection-to-child-and-extend>foo  bar</div>
+<div style="white-space: -moz-pre-space" data-collapse-selection-to-child-and-extend>foo  bar</div>
 <div data-result="bar  baz"><span style="white-space: pre">bar  </span>baz</div>
 <div data-result="bar  baz"><span style="white-space: pre-wrap">bar  </span>baz</div>
 <div data-result="bar  baz"><span style="white-space: pre-line">bar  </span>baz</div>
 <div data-result="bar  baz"><span style="white-space: -moz-pre-space">bar  </span>baz</div>
 <div data-result="foo  &#10;  bar&#10;&#10;!&#10;&#10;&#10;baz" style="white-space: pre"><div>foo  </div><div>  bar</div><div><br></div><div>!</div><div><br><br></div><div>baz</div></div>
 <div data-result="foo &#10; bar&#10;&#10;!&#10;&#10;&#10;baz" style="white-space: pre" contenteditable><div>foo </div><div> bar</div><div><br></div><div>!</div><div><br><br></div><div>baz</div></div>
 <div data-result="foo  &#10;  bar&#10;&#10;!&#10;&#10;&#10;baz" style="white-space: pre-wrap"><div>foo  </div><div>  bar</div><div><br></div><div>!</div><div><br><br></div><div>baz</div></div>
 <div data-result="foo &#10; bar&#10;&#10;!&#10;&#10;&#10;baz" style="white-space: pre-wrap" contenteditable><div>foo </div><div> bar</div><div><br></div><div>!</div><div><br><br></div><div>baz</div></div>
@@ -65,26 +69,45 @@ function hasExpectedFlavors() {
   }
 
   if (navigator.appVersion.includes("Win")) {
     ok(cb.hasDataMatchingFlavors(["application/x-moz-nativehtml"], 1, cb.kGlobalClipboard),
        "The clipboard has application/x-moz-nativehtml");
   }
 }
 
+function collapseSelectionToChildAndExtend(divElement) {
+  is(divElement.childNodes.length, 1, "Expected exactly one child node.");
+  var textChildNode = divElement.childNodes[0];
+  getSelection().collapse(textChildNode);
+  getSelection().extend(textChildNode, divElement.textContent.length);
+}
+
+function selectDependingOnAttributes(divElement) {
+  if (divElement.hasAttribute("data-collapse-selection-to-child-and-extend")) {
+    // Selecting text as follows comes closest to user behaviour.
+    collapseSelectionToChildAndExtend(divElement);
+  } else {
+    getSelection().selectAllChildren(divElement);
+  }
+}
+
 function nextTest() {
   var div = document.querySelector("#content>div");
   if (!div) {
     SimpleTest.finish();
     return;
   }
-  getSelection().selectAllChildren(div);
+
+  selectDependingOnAttributes(div);
+
   var expected = div.hasAttribute("data-result") ?
                  div.getAttribute("data-result") :
                  div.textContent;
+
   SimpleTest.waitForClipboard(expected, function() {
     synthesizeKey("C", {accelKey: true});
   }, function() {
     ok(true, div.getAttribute("style") + " passed");
     hasExpectedFlavors();
     div.remove();
     nextTest();
   }, function() {
--- a/dom/base/test/test_user_select.html
+++ b/dom/base/test/test_user_select.html
@@ -288,27 +288,27 @@ function test()
   checkText("aaaa bbbb", e);
   checkRanges([[0,0,-1,1],[6,0,6,5]], e);
   doneTest(e);
 
   clear();
   e = document.getElementById('testG');
   synthesizeMouse(e, 1, 1, {});
   synthesizeMouse(e, 400, 180, { shiftKey: true });
-  checkText("aaaa bbbb", e); // XXX this doesn't seem right - bug 1247799
+  checkText("aaaa\n\n\n\nbbbb", e);
   checkRanges([[0,0,-1,1],[2,0,-1,3],[4,0,-1,5],[6,0,6,5]], e);
   doneTest(e);
 
   clear();
   e = document.getElementById('testH');
   synthesizeMouse(e, 1, 1, {});
   synthesizeMouse(e, 30, 90, { shiftKey: true });
   synthesizeMouse(e, 50, 90, { shiftKey: true });
   synthesizeMouse(e, 70, 90, { shiftKey: true });
-  checkText("aaaa bbb", e);
+  checkText("aaaa\n\nbbb", e);
   checkRanges([[0,0,-1,1],[-1,2,3,4]], e);
 
   doneTest(e);
   // ======================================================
   // ==================== Script tests ====================
   // ======================================================
 
   clear();
--- a/dom/interfaces/base/nsIContentPrefService2.idl
+++ b/dom/interfaces/base/nsIContentPrefService2.idl
@@ -186,24 +186,21 @@ interface nsIContentPrefService2 : nsISu
    * The preferences are returned in an array through the out-parameter.  If a
    * preference for a particular subdomain is known not to exist, then an object
    * corresponding to that preference will be present in the array, and, as with
    * getCachedByDomainAndName, its value attribute will be undefined.
    *
    * @param domain   The preferences' domain.
    * @param name     The preferences' name.
    * @param context  The private-browsing context, if any.
-   * @param len      The length of the returned array.
-   * @param prefs    The array of preferences.
+   * @return         The array of preferences.
    */
-  void getCachedBySubdomainAndName(in AString domain,
-                                   in AString name,
-                                   in nsILoadContext context,
-                                   [optional] out unsigned long len,
-                                   [retval,array,size_is(len)] out nsIContentPref prefs);
+  Array<nsIContentPref> getCachedBySubdomainAndName(in AString domain,
+                                                    in AString name,
+                                                    in nsILoadContext context);
 
   /**
    * Synchronously retrieves from the in-memory cache the preference with no
    * domain and the given name.
    *
    * As with getCachedByDomainAndName, if the preference is cached then it is
    * returned; if the preference is known not to exist, then the value attribute
    * of the returned object will be undefined; if the preference is neither
--- a/dom/plugins/base/nsIPluginHost.idl
+++ b/dom/plugins/base/nsIPluginHost.idl
@@ -25,18 +25,17 @@ interface nsIClearSiteDataCallback : nsI
 interface nsIPluginHost : nsISupports
 {
   /**
    * Causes the plugins directory to be searched again for new plugin
    * libraries.
    */
   void reloadPlugins();
 
-  void getPluginTags([optional] out unsigned long aPluginCount,
-    [retval, array, size_is(aPluginCount)] out nsIPluginTag aResults);
+  Array<nsIPluginTag> getPluginTags();
 
   /*
    * Flags for use with clearSiteData.
    *
    * FLAG_CLEAR_ALL: clear all data associated with a site.
    * FLAG_CLEAR_CACHE: clear cached data that can be retrieved again without
    *                   loss of functionality. To be used out of concern for
    *                   space and not necessarily privacy.
--- a/dom/plugins/base/nsIPluginTag.idl
+++ b/dom/plugins/base/nsIPluginTag.idl
@@ -53,24 +53,19 @@ interface nsIPluginTag : nsISupports
   readonly attribute boolean clicktoplay;
   [infallible]
   readonly attribute boolean loaded;
   // See the STATE_* values above.
            attribute unsigned long enabledState;
 
   readonly attribute PRTime lastModifiedTime;
 
-  void getMimeTypes([optional] out unsigned long aCount,
-                    [retval, array, size_is(aCount)] out wstring aResults);
-  void getMimeDescriptions([optional] out unsigned long aCount,
-                           [retval, array, size_is(aCount)]
-                           out wstring aResults);
-  void getExtensions([optional] out unsigned long aCount,
-                     [retval, array, size_is(aCount)]
-                     out wstring aResults);
+  Array<AUTF8String> getMimeTypes();
+  Array<AUTF8String> getMimeDescriptions();
+  Array<AUTF8String> getExtensions();
 };
 
 /**
  * An interface representing a "fake" plugin: one implemented in JavaScript, not
  * as a NPAPI plug-in.  See nsIPluginHost.registerFakePlugin and the
  * documentation for the FakePluginTagInit dictionary.
  */
 [builtinclass, scriptable, uuid(6d22c968-226d-4156-b230-da6ad6bbf6e8)]
--- a/dom/plugins/base/nsPluginHost.cpp
+++ b/dom/plugins/base/nsPluginHost.cpp
@@ -1085,43 +1085,25 @@ void nsPluginHost::GetPlugins(
       aPluginArray.AppendElement(plugin);
     }
     plugin = plugin->mNext;
   }
 }
 
 // FIXME-jsplugins Check users for order of fake v non-fake
 NS_IMETHODIMP
-nsPluginHost::GetPluginTags(uint32_t* aPluginCount, nsIPluginTag*** aResults) {
+nsPluginHost::GetPluginTags(nsTArray<RefPtr<nsIPluginTag>>& aResults) {
   LoadPlugins();
 
-  uint32_t count = 0;
-  uint32_t fakeCount = mFakePlugins.Length();
-  RefPtr<nsPluginTag> plugin = mPlugins;
-  while (plugin != nullptr) {
-    count++;
-    plugin = plugin->mNext;
+  for (nsPluginTag* plugin = mPlugins; plugin; plugin = plugin->mNext) {
+    aResults.AppendElement(plugin);
   }
 
-  *aResults = static_cast<nsIPluginTag**>(
-      moz_xmalloc((fakeCount + count) * sizeof(**aResults)));
-
-  *aPluginCount = count + fakeCount;
-
-  plugin = mPlugins;
-  for (uint32_t i = 0; i < count; i++) {
-    (*aResults)[i] = plugin;
-    NS_ADDREF((*aResults)[i]);
-    plugin = plugin->mNext;
-  }
-
-  for (uint32_t i = 0; i < fakeCount; i++) {
-    (*aResults)[i + count] =
-        static_cast<nsIInternalPluginTag*>(mFakePlugins[i]);
-    NS_ADDREF((*aResults)[i + count]);
+  for (nsIInternalPluginTag* plugin : mFakePlugins) {
+    aResults.AppendElement(plugin);
   }
 
   return NS_OK;
 }
 
 nsPluginTag* nsPluginHost::FindPreferredPlugin(
     const InfallibleTArray<nsPluginTag*>& matches) {
   // We prefer the plugin with the highest version number.
--- a/dom/plugins/base/nsPluginTags.cpp
+++ b/dom/plugins/base/nsPluginTags.cpp
@@ -117,35 +117,16 @@ static nsCString MakePrefNameForPlugin(c
   pref.AssignLiteral("plugin.");
   pref.Append(subname);
   pref.Append('.');
   pref.Append(pluginName);
 
   return pref;
 }
 
-static nsresult CStringArrayToXPCArray(nsTArray<nsCString>& aArray,
-                                       uint32_t* aCount, char16_t*** aResults) {
-  uint32_t count = aArray.Length();
-  if (!count) {
-    *aResults = nullptr;
-    *aCount = 0;
-    return NS_OK;
-  }
-
-  *aResults = static_cast<char16_t**>(moz_xmalloc(count * sizeof(**aResults)));
-  *aCount = count;
-
-  for (uint32_t i = 0; i < count; i++) {
-    (*aResults)[i] = ToNewUnicode(NS_ConvertUTF8toUTF16(aArray[i]));
-  }
-
-  return NS_OK;
-}
-
 static nsCString GetStatePrefNameForPlugin(nsIInternalPluginTag* aTag) {
   return MakePrefNameForPlugin("state", aTag);
 }
 
 static nsresult IsEnabledStateLockedForPlugin(nsIInternalPluginTag* aTag,
                                               bool* aIsEnabledStateLocked) {
   *aIsEnabledStateLocked = false;
   nsCOMPtr<nsIPrefBranch> prefs(do_GetService(NS_PREFSERVICE_CONTRACTID));
@@ -586,28 +567,31 @@ void nsPluginTag::SetPluginState(PluginS
   static_assert((uint32_t)nsPluginTag::ePluginState_Enabled ==
                     nsIPluginTag::STATE_ENABLED,
                 "nsPluginTag::ePluginState_Enabled must match "
                 "nsIPluginTag::STATE_ENABLED");
   SetEnabledState((uint32_t)state);
 }
 
 NS_IMETHODIMP
-nsPluginTag::GetMimeTypes(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mMimeTypes, aCount, aResults);
+nsPluginTag::GetMimeTypes(nsTArray<nsCString>& aResults) {
+  aResults = mMimeTypes;
+  return NS_OK;
 }
 
 NS_IMETHODIMP
-nsPluginTag::GetMimeDescriptions(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mMimeDescriptions, aCount, aResults);
+nsPluginTag::GetMimeDescriptions(nsTArray<nsCString>& aResults) {
+  aResults = mMimeDescriptions;
+  return NS_OK;
 }
 
 NS_IMETHODIMP
-nsPluginTag::GetExtensions(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mExtensions, aCount, aResults);
+nsPluginTag::GetExtensions(nsTArray<nsCString>& aResults) {
+  aResults = mExtensions;
+  return NS_OK;
 }
 
 bool nsPluginTag::HasSameNameAndMimes(const nsPluginTag* aPluginTag) const {
   NS_ENSURE_TRUE(aPluginTag, false);
 
   if ((!mName.Equals(aPluginTag->mName)) ||
       (mMimeTypes.Length() != aPluginTag->mMimeTypes.Length())) {
     return false;
@@ -859,28 +843,31 @@ NS_IMETHODIMP
 nsFakePluginTag::SetEnabledState(uint32_t aEnabledState) {
   // There are static asserts above enforcing that this enum matches
   mState = (nsPluginTag::PluginState)aEnabledState;
   // FIXME-jsplugins update
   return NS_OK;
 }
 
 NS_IMETHODIMP
-nsFakePluginTag::GetMimeTypes(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mMimeTypes, aCount, aResults);
+nsFakePluginTag::GetMimeTypes(nsTArray<nsCString>& aResults) {
+  aResults = mMimeTypes;
+  return NS_OK;
 }
 
 NS_IMETHODIMP
-nsFakePluginTag::GetMimeDescriptions(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mMimeDescriptions, aCount, aResults);
+nsFakePluginTag::GetMimeDescriptions(nsTArray<nsCString>& aResults) {
+  aResults = mMimeDescriptions;
+  return NS_OK;
 }
 
 NS_IMETHODIMP
-nsFakePluginTag::GetExtensions(uint32_t* aCount, char16_t*** aResults) {
-  return CStringArrayToXPCArray(mExtensions, aCount, aResults);
+nsFakePluginTag::GetExtensions(nsTArray<nsCString>& aResults) {
+  aResults = mExtensions;
+  return NS_OK;
 }
 
 NS_IMETHODIMP
 nsFakePluginTag::GetActive(bool* aResult) {
   // Fake plugins can't be blocklisted, so this is just !Disabled
   *aResult = IsEnabled();
   return NS_OK;
 }
--- a/dom/plugins/test/mochitest/test_bug986930.html
+++ b/dom/plugins/test/mochitest/test_bug986930.html
@@ -6,15 +6,15 @@
   <script src="/tests/SimpleTest/SimpleTest.js"></script>
   <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
   <script type="application/javascript" src="plugin-utils.js"></script>
 </head>
 <body>
   <script class="testbody" type="application/javascript">
     var testPlugin = getTestPlugin("Test Plug-in");
 
-    var mimeDescriptions = testPlugin.getMimeDescriptions({});
+    var mimeDescriptions = testPlugin.getMimeDescriptions();
 
     is(mimeDescriptions[0], "Test \u2122 mimetype",
        "Plugin should handle non-ascii mime description");
   </script>
 </body>
 </html>
--- a/gfx/webrender_bindings/RenderThread.cpp
+++ b/gfx/webrender_bindings/RenderThread.cpp
@@ -339,23 +339,22 @@ static void NotifyDidRender(layers::Comp
 static void NotifyDidStartRender(layers::CompositorBridgeParent* aBridge) {
   // Starting a render will change increment mRenderingCount, and potentially
   // change whether we can allow the bridge to intiate another frame.
   if (aBridge->GetWrBridge()) {
     aBridge->GetWrBridge()->CompositeIfNeeded();
   }
 }
 
-void RenderThread::UpdateAndRender(wr::WindowId aWindowId,
-                                   const VsyncId& aStartId,
-                                   const TimeStamp& aStartTime, bool aRender,
-                                   const Maybe<gfx::IntSize>& aReadbackSize,
-                                   const Maybe<wr::ImageFormat>& aReadbackFormat,
-                                   const Maybe<Range<uint8_t>>& aReadbackBuffer,
-                                   bool aHadSlowFrame) {
+void RenderThread::UpdateAndRender(
+    wr::WindowId aWindowId, const VsyncId& aStartId,
+    const TimeStamp& aStartTime, bool aRender,
+    const Maybe<gfx::IntSize>& aReadbackSize,
+    const Maybe<wr::ImageFormat>& aReadbackFormat,
+    const Maybe<Range<uint8_t>>& aReadbackBuffer, bool aHadSlowFrame) {
   AUTO_PROFILER_TRACING("Paint", "Composite", GRAPHICS);
   MOZ_ASSERT(IsInRenderThread());
   MOZ_ASSERT(aRender || aReadbackBuffer.isNothing());
 
   auto it = mRenderers.find(aWindowId);
   MOZ_ASSERT(it != mRenderers.end());
   if (it == mRenderers.end()) {
     return;
@@ -367,18 +366,18 @@ void RenderThread::UpdateAndRender(wr::W
 
   layers::CompositorThreadHolder::Loop()->PostTask(
       NewRunnableFunction("NotifyDidStartRenderRunnable", &NotifyDidStartRender,
                           renderer->GetCompositorBridge()));
 
   bool rendered = false;
   RendererStats stats = {0};
   if (aRender) {
-    rendered = renderer->UpdateAndRender(aReadbackSize, aReadbackFormat,
-                                         aReadbackBuffer, aHadSlowFrame, &stats);
+    rendered = renderer->UpdateAndRender(
+        aReadbackSize, aReadbackFormat, aReadbackBuffer, aHadSlowFrame, &stats);
   } else {
     renderer->Update();
   }
   // Check graphics reset status even when rendering is skipped.
   renderer->CheckGraphicsResetStatus();
 
   TimeStamp end = TimeStamp::Now();
   auto info = renderer->FlushPipelineInfo();
@@ -484,40 +483,16 @@ void RenderThread::IncPendingFrameCount(
     return;
   }
   it->second->mPendingCount++;
   it->second->mStartTimes.push(aStartTime);
   it->second->mStartIds.push(aStartId);
   it->second->mDocFrameCounts.push(aDocFrameCount);
 }
 
-void RenderThread::DecPendingFrameCount(wr::WindowId aWindowId) {
-  auto windows = mWindowInfos.Lock();
-  auto it = windows->find(AsUint64(aWindowId));
-  if (it == windows->end()) {
-    MOZ_ASSERT(false);
-    return;
-  }
-  WindowInfo* info = it->second;
-  MOZ_ASSERT(info->mPendingCount > 0);
-  if (info->mPendingCount <= 0) {
-    return;
-  }
-  info->mPendingCount--;
-  // This function gets called for "nop frames" where nothing was rendered or
-  // composited. But we count this time because the non-WR codepath equivalent
-  // in CompositorBridgeParent::ComposeToTarget also counts such frames. And
-  // anyway this should be relatively infrequent so it shouldn't skew the
-  // numbers much.
-  mozilla::Telemetry::AccumulateTimeDelta(mozilla::Telemetry::COMPOSITE_TIME,
-                                          info->mStartTimes.front());
-  info->mStartTimes.pop();
-  info->mStartIds.pop();
-}
-
 mozilla::Pair<bool, bool> RenderThread::IncRenderingFrameCount(
     wr::WindowId aWindowId, bool aRender) {
   auto windows = mWindowInfos.Lock();
   auto it = windows->find(AsUint64(aWindowId));
   if (it == windows->end()) {
     MOZ_ASSERT(false);
     return MakePair(false, false);
   }
--- a/gfx/webrender_bindings/RenderThread.h
+++ b/gfx/webrender_bindings/RenderThread.h
@@ -213,18 +213,16 @@ class RenderThread final {
   void SetDestroyed(wr::WindowId aWindowId);
   /// Can be called from any thread.
   bool TooManyPendingFrames(wr::WindowId aWindowId);
   /// Can be called from any thread.
   void IncPendingFrameCount(wr::WindowId aWindowId, const VsyncId& aStartId,
                             const TimeStamp& aStartTime,
                             uint8_t aDocFrameCount);
   /// Can be called from any thread.
-  void DecPendingFrameCount(wr::WindowId aWindowId);
-  /// Can be called from any thread.
   mozilla::Pair<bool, bool> IncRenderingFrameCount(wr::WindowId aWindowId,
                                                    bool aRender);
   /// Can be called from any thread.
   void FrameRenderingComplete(wr::WindowId aWindowId);
 
   void NotifySlowFrame(wr::WindowId aWindowId);
 
   /// Can be called from any thread.
--- a/gfx/wr/webrender/res/brush.glsl
+++ b/gfx/wr/webrender/res/brush.glsl
@@ -25,17 +25,18 @@ void brush_vs(
 #define BRUSH_FLAG_SEGMENT_REPEAT_Y             8
 #define BRUSH_FLAG_TEXEL_RECT                  16
 
 #define INVALID_SEGMENT_INDEX                   0xffff
 
 void main(void) {
     // Load the brush instance from vertex attributes.
     int prim_header_address = aData.x;
-    int clip_address = aData.y;
+    int render_task_index = aData.y >> 16;
+    int clip_address = aData.y & 0xffff;
     int segment_index = aData.z & 0xffff;
     int edge_flags = (aData.z >> 16) & 0xff;
     int brush_flags = (aData.z >> 24) & 0xff;
     int segment_user_data = aData.w;
     PrimitiveHeader ph = fetch_prim_header(prim_header_address);
 
     // Fetch the segment of this brush primitive we are drawing.
     vec4 segment_data;
@@ -52,17 +53,17 @@ void main(void) {
         segment_rect = RectWithSize(segment_info[0].xy, segment_info[0].zw);
         segment_rect.p0 += ph.local_rect.p0;
         segment_data = segment_info[1];
     }
 
     VertexInfo vi;
 
     // Fetch the dynamic picture that we are drawing on.
-    PictureTask pic_task = fetch_picture_task(ph.render_task_index);
+    PictureTask pic_task = fetch_picture_task(render_task_index);
     ClipArea clip_area = fetch_clip_area(clip_address);
 
     Transform transform = fetch_transform(ph.transform_id);
 
     // Write the normal vertex information out.
     if (transform.is_axis_aligned) {
         vi = write_vertex(
             segment_rect,
--- a/gfx/wr/webrender/res/prim_shared.glsl
+++ b/gfx/wr/webrender/res/prim_shared.glsl
@@ -51,17 +51,16 @@ in ivec4 aData;
 #define VECS_PER_PRIM_HEADER_I 2U
 
 struct PrimitiveHeader {
     RectWithSize local_rect;
     RectWithSize local_clip_rect;
     vec4 snap_offsets;
     float z;
     int specific_prim_address;
-    int render_task_index;
     int transform_id;
     ivec4 user_data;
 };
 
 PrimitiveHeader fetch_prim_header(int index) {
     PrimitiveHeader ph;
 
     ivec2 uv_f = get_fetch_uv(index, VECS_PER_PRIM_HEADER_F);
@@ -70,19 +69,18 @@ PrimitiveHeader fetch_prim_header(int in
     ph.snap_offsets = TEXEL_FETCH(sPrimitiveHeadersF, uv_f, 0, ivec2(2, 0));
     ph.local_rect = RectWithSize(local_rect.xy, local_rect.zw);
     ph.local_clip_rect = RectWithSize(local_clip_rect.xy, local_clip_rect.zw);
 
     ivec2 uv_i = get_fetch_uv(index, VECS_PER_PRIM_HEADER_I);
     ivec4 data0 = TEXEL_FETCH(sPrimitiveHeadersI, uv_i, 0, ivec2(0, 0));
     ivec4 data1 = TEXEL_FETCH(sPrimitiveHeadersI, uv_i, 0, ivec2(1, 0));
     ph.z = float(data0.x);
-    ph.render_task_index = data0.y;
-    ph.specific_prim_address = data0.z;
-    ph.transform_id = data0.w;
+    ph.specific_prim_address = data0.y;
+    ph.transform_id = data0.z;
     ph.user_data = data1;
 
     return ph;
 }
 
 struct VertexInfo {
     vec2 local_pos;
     vec2 snap_offset;
--- a/gfx/wr/webrender/res/ps_split_composite.glsl
+++ b/gfx/wr/webrender/res/ps_split_composite.glsl
@@ -37,33 +37,35 @@ vec2 bilerp(vec2 a, vec2 b, vec2 c, vec2
     vec2 y = mix(c, d, t);
     return mix(x, y, s);
 }
 
 struct SplitCompositeInstance {
     int prim_header_index;
     int polygons_address;
     float z;
+    int render_task_index;
 };
 
 SplitCompositeInstance fetch_composite_instance() {
     SplitCompositeInstance ci;
 
     ci.prim_header_index = aData.x;
     ci.polygons_address = aData.y;
     ci.z = float(aData.z);
+    ci.render_task_index = aData.w;
 
     return ci;
 }
 
 void main(void) {
     SplitCompositeInstance ci = fetch_composite_instance();
     SplitGeometry geometry = fetch_split_geometry(ci.polygons_address);
     PrimitiveHeader ph = fetch_prim_header(ci.prim_header_index);
-    PictureTask dest_task = fetch_picture_task(ph.render_task_index);
+    PictureTask dest_task = fetch_picture_task(ci.render_task_index);
     Transform transform = fetch_transform(ph.transform_id);
     ImageResource res = fetch_image_resource(ph.user_data.x);
     ClipArea clip_area = fetch_clip_area(ph.user_data.w);
 
     vec2 dest_origin = dest_task.common_data.task_rect.p0 -
                        dest_task.content_origin;
 
     vec2 local_pos = bilerp(geometry.local[0], geometry.local[1],
--- a/gfx/wr/webrender/res/ps_text_run.glsl
+++ b/gfx/wr/webrender/res/ps_text_run.glsl
@@ -157,25 +157,26 @@ VertexInfo write_text_vertex(RectWithSiz
     );
 
     return vi;
 }
 
 void main(void) {
     int prim_header_address = aData.x;
     int glyph_index = aData.y & 0xffff;
-    int raster_space = aData.y >> 16;
+    int render_task_index = aData.y >> 16;
     int resource_address = aData.z;
-    int subpx_dir = aData.w >> 16;
-    int color_mode = aData.w & 0xffff;
+    int raster_space = aData.w >> 16;
+    int subpx_dir = (aData.w >> 8) & 0xff;
+    int color_mode = aData.w & 0xff;
 
     PrimitiveHeader ph = fetch_prim_header(prim_header_address);
     Transform transform = fetch_transform(ph.transform_id);
     ClipArea clip_area = fetch_clip_area(ph.user_data.w);
-    PictureTask task = fetch_picture_task(ph.render_task_index);
+    PictureTask task = fetch_picture_task(render_task_index);
 
     TextRun text = fetch_text_run(ph.specific_prim_address);
     vec2 text_offset = vec2(ph.user_data.xy) / 256.0;
 
     if (color_mode == COLOR_MODE_FROM_PASS) {
         color_mode = uMode;
     }
 
--- a/gfx/wr/webrender/src/batch.rs
+++ b/gfx/wr/webrender/src/batch.rs
@@ -461,36 +461,39 @@ struct SegmentInstanceData {
     user_data: i32,
 }
 
 /// Encapsulates the logic of building batches for items that are blended.
 pub struct AlphaBatchBuilder {
     pub batch_lists: Vec<BatchList>,
     screen_size: DeviceIntSize,
     break_advanced_blend_batches: bool,
+    render_task_id: RenderTaskId,
 }
 
 impl AlphaBatchBuilder {
     pub fn new(
         screen_size: DeviceIntSize,
         break_advanced_blend_batches: bool,
+        render_task_id: RenderTaskId,
     ) -> Self {
         let batch_lists = vec![
             BatchList::new(
                 screen_size,
                 Vec::new(),
                 Vec::new(),
                 break_advanced_blend_batches,
             ),
         ];
 
         AlphaBatchBuilder {
             batch_lists,
             screen_size,
             break_advanced_blend_batches,
+            render_task_id,
         }
     }
 
     fn push_new_batch_list(
         &mut self,
         regions: Vec<DeviceIntRect>,
         tile_blits: Vec<TileBlit>,
     ) {
@@ -558,38 +561,33 @@ impl BatchBuilder {
         }
     }
 
     /// Add a picture to a given batch builder.
     pub fn add_pic_to_batch(
         &mut self,
         pic: &PicturePrimitive,
         batcher: &mut AlphaBatchBuilder,
-        task_id: RenderTaskId,
         ctx: &RenderTargetContext,
         gpu_cache: &mut GpuCache,
         render_tasks: &RenderTaskTree,
         deferred_resolves: &mut Vec<DeferredResolve>,
         prim_headers: &mut PrimitiveHeaders,
         transforms: &mut TransformPalette,
         root_spatial_node_index: SpatialNodeIndex,
         z_generator: &mut ZBufferIdGenerator,
     ) {
-        let task_address = render_tasks.get_task_address(task_id);
-
         // Add each run in this picture to the batch.
         for prim_instance in &pic.prim_list.prim_instances {
             self.add_prim_to_batch(
                 prim_instance,
                 batcher,
                 ctx,
                 gpu_cache,
                 render_tasks,
-                task_id,
-                task_address,
                 deferred_resolves,
                 prim_headers,
                 transforms,
                 root_spatial_node_index,
                 z_generator,
             );
         }
     }
@@ -600,18 +598,16 @@ impl BatchBuilder {
     // in that picture are being drawn into the same target.
     fn add_prim_to_batch(
         &mut self,
         prim_instance: &PrimitiveInstance,
         batcher: &mut AlphaBatchBuilder,
         ctx: &RenderTargetContext,
         gpu_cache: &mut GpuCache,
         render_tasks: &RenderTaskTree,
-        task_id: RenderTaskId,
-        task_address: RenderTaskAddress,
         deferred_resolves: &mut Vec<DeferredResolve>,
         prim_headers: &mut PrimitiveHeaders,
         transforms: &mut TransformPalette,
         root_spatial_node_index: SpatialNodeIndex,
         z_generator: &mut ZBufferIdGenerator,
     ) {
         if prim_instance.visibility_info == PrimitiveVisibilityIndex::INVALID {
             return;
@@ -640,16 +636,17 @@ impl BatchBuilder {
 
         let prim_common_data = &ctx.data_stores.as_common_data(&prim_instance);
         let prim_rect = LayoutRect::new(
             prim_instance.prim_origin,
             prim_common_data.prim_size,
         );
 
         let snap_offsets = prim_info.snap_offsets;
+        let render_task_address = render_tasks.get_task_address(batcher.render_task_id);
 
         if is_chased {
             println!("\tbatch {:?} with bound {:?}", prim_rect, bounding_rect);
         }
 
         match prim_instance.kind {
             PrimitiveInstanceKind::Clear { data_handle } => {
                 let prim_data = &ctx.data_stores.prim[data_handle];
@@ -658,17 +655,16 @@ impl BatchBuilder {
                 // TODO(gw): We can abstract some of the common code below into
                 //           helper methods, as we port more primitives to make
                 //           use of interning.
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let prim_header_index = prim_headers.push(
                     &prim_header,
                     z_id,
                     [get_shader_opacity(1.0), 0, 0, 0],
@@ -684,16 +680,17 @@ impl BatchBuilder {
                     prim_info.clip_task_index,
                     render_tasks,
                 ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                 let instance = PrimitiveInstanceData::from(BrushInstance {
                     segment_index: INVALID_SEGMENT_INDEX,
                     edge_flags: EdgeAaSegmentMask::all(),
                     clip_task_address,
+                    render_task_address,
                     brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
                     prim_header_index,
                     user_data: 0,
                 });
 
                 batcher.current_batch_list().push_single_instance(
                     batch_key,
                     bounding_rect,
@@ -733,17 +730,16 @@ impl BatchBuilder {
                 } else {
                     BlendMode::None
                 };
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let batch_params = BrushBatchParameters::instanced(
                     BrushBatchKind::Image(ImageBufferKind::Texture2DArray),
                     [
                         ShaderColorMode::Image as i32 | ((AlphaType::PremultipliedAlpha as i32) << 16),
@@ -768,16 +764,17 @@ impl BatchBuilder {
                     &batch_params,
                     specified_blend_mode,
                     non_segmented_blend_mode,
                     prim_header_index,
                     bounding_rect,
                     transform_kind,
                     render_tasks,
                     z_id,
+                    render_task_address,
                     prim_info.clip_task_index,
                     ctx,
                 );
             }
             PrimitiveInstanceKind::TextRun { data_handle, run_index, .. } => {
                 let run = &ctx.prim_store.text_runs[run_index];
                 let subpx_dir = run.used_font.get_subpx_dir();
 
@@ -786,17 +783,16 @@ impl BatchBuilder {
                 let prim_data = &ctx.data_stores.text_run[data_handle];
                 let alpha_batch_list = &mut batcher.batch_lists.last_mut().unwrap().alpha_batch_list;
                 let prim_cache_address = gpu_cache.get_address(&prim_data.gpu_cache_handle);
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let clip_task_address = ctx.get_prim_clip_task_address(
                     prim_info.clip_task_index,
                     render_tasks,
                 ).unwrap_or(OPAQUE_TASK_ADDRESS);
@@ -891,20 +887,20 @@ impl BatchBuilder {
                         );
 
                         let rasterization_space = match run.raster_space {
                             RasterSpace::Screen => RasterizationSpace::Screen,
                             RasterSpace::Local(..) => RasterizationSpace::Local,
                         };
                         for glyph in glyphs {
                             batch.push(base_instance.build(
-                                glyph.index_in_text_run |
-                                (rasterization_space as i32) << 16,
+                                glyph.index_in_text_run | ((render_task_address.0 as i32) << 16),
                                 glyph.uv_rect_address.as_int(),
-                                (subpx_dir as u32 as i32) << 16 |
+                                (rasterization_space as i32) << 16 |
+                                (subpx_dir as u32 as i32) << 8 |
                                 (color_mode as u32 as i32),
                             ));
                         }
                     },
                 );
             }
             PrimitiveInstanceKind::LineDecoration { data_handle, ref cache_handle, .. } => {
                 // The GPU cache data is stored in the template and reused across
@@ -954,17 +950,16 @@ impl BatchBuilder {
                 } else {
                     BlendMode::None
                 };
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let prim_header_index = prim_headers.push(
                     &prim_header,
                     z_id,
                     prim_user_data,
@@ -980,16 +975,17 @@ impl BatchBuilder {
                     prim_info.clip_task_index,
                     render_tasks,
                 ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                 let instance = PrimitiveInstanceData::from(BrushInstance {
                     segment_index: INVALID_SEGMENT_INDEX,
                     edge_flags: EdgeAaSegmentMask::all(),
                     clip_task_address,
+                    render_task_address,
                     brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
                     prim_header_index,
                     user_data: segment_user_data,
                 });
 
                 batcher.current_batch_list().push_single_instance(
                     batch_key,
                     bounding_rect,
@@ -1001,17 +997,16 @@ impl BatchBuilder {
                 let picture = &ctx.prim_store.pictures[pic_index.0];
                 let non_segmented_blend_mode = BlendMode::PremultipliedAlpha;
                 let prim_cache_address = gpu_cache.get_address(&ctx.globals.default_image_handle);
 
                 let prim_header = PrimitiveHeader {
                     local_rect: picture.snapped_local_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 match picture.context_3d {
                     // Convert all children of the 3D hierarchy root into batches.
                     Picture3DContext::In { root_data: Some(ref list), .. } => {
                         for child in list {
@@ -1040,18 +1035,17 @@ impl BatchBuilder {
                                 prim_info.clip_task_index,
                                 render_tasks,
                             ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                             let prim_header = PrimitiveHeader {
                                 local_rect: pic.snapped_local_rect,
                                 local_clip_rect: prim_info.combined_local_clip_rect,
                                 snap_offsets,
-                                task_address,
-                                specific_prim_address: GpuCacheAddress::invalid(),
+                                specific_prim_address: GpuCacheAddress::INVALID,
                                 transform_id: transforms
                                     .get_id(
                                         child.spatial_node_index,
                                         root_spatial_node_index,
                                         ctx.clip_scroll_tree,
                                     ),
                             };
 
@@ -1078,16 +1072,17 @@ impl BatchBuilder {
                                 BatchKind::SplitComposite,
                                 BlendMode::PremultipliedAlpha,
                                 BatchTextures::no_texture(),
                             );
 
                             let instance = SplitCompositeInstance::new(
                                 prim_header_index,
                                 child.gpu_address,
+                                render_task_address,
                                 z_id,
                             );
 
                             batcher.current_batch_list().push_single_instance(
                                 key,
                                 &prim_info.clip_chain.pic_clip_rect,
                                 z_id,
                                 PrimitiveInstanceData::from(instance),
@@ -1127,17 +1122,16 @@ impl BatchBuilder {
 
                                 // If the tile cache is disabled, just recurse into the
                                 // picture like a normal pass-through picture, adding
                                 // any child primitives into the parent surface batches.
                                 if !tile_cache.is_enabled {
                                     self.add_pic_to_batch(
                                         picture,
                                         batcher,
-                                        task_id,
                                         ctx,
                                         gpu_cache,
                                         render_tasks,
                                         deferred_resolves,
                                         prim_headers,
                                         transforms,
                                         root_spatial_node_index,
                                         z_generator,
@@ -1175,17 +1169,16 @@ impl BatchBuilder {
                                             prim_rect,
                                             snap_offsets,
                                         );
 
                                         let prim_header = PrimitiveHeader {
                                             local_rect: tile_rect,
                                             local_clip_rect,
                                             snap_offsets,
-                                            task_address,
                                             specific_prim_address: prim_cache_address,
                                             transform_id,
                                         };
 
                                         let prim_header_index = prim_headers.push(&prim_header, z_id, [
                                             ShaderColorMode::Image as i32 | ((AlphaType::PremultipliedAlpha as i32) << 16),
                                             RasterizationSpace::Local as i32,
                                             get_shader_opacity(1.0),
@@ -1204,16 +1197,17 @@ impl BatchBuilder {
 
                                         let uv_rect_address = gpu_cache
                                             .get_address(&cache_item.uv_rect_handle)
                                             .as_int();
 
                                         let instance = BrushInstance {
                                             prim_header_index,
                                             clip_task_address,
+                                            render_task_address,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
                                             user_data: uv_rect_address,
                                         };
 
                                         // Instead of retrieving the batch once and adding each tile instance,
                                         // use this API to get an appropriate batch for each tile, since
@@ -1228,17 +1222,18 @@ impl BatchBuilder {
                                         batch.push(PrimitiveInstanceData::from(instance));
                                     }
 
                                     // If there is a dirty rect for the tile cache, recurse into the
                                     // main picture primitive list, and draw them first.
                                     if !tile_cache.dirty_region.is_empty() {
                                         let mut tile_blits = Vec::new();
 
-                                        let (target_rect, _) = render_tasks[task_id].get_target_rect();
+                                        let (target_rect, _) = render_tasks[batcher.render_task_id]
+                                            .get_target_rect();
 
                                         for blit in &tile_cache.pending_blits {
                                             tile_blits.push(TileBlit {
                                                 dest_offset: blit.dest_offset,
                                                 size: blit.size,
                                                 target: blit.target.clone(),
                                                 src_offset: DeviceIntPoint::new(
                                                     blit.src_offset.x + target_rect.origin.x,
@@ -1261,17 +1256,16 @@ impl BatchBuilder {
                                         batcher.push_new_batch_list(
                                             batch_regions,
                                             tile_blits,
                                         );
 
                                         self.add_pic_to_batch(
                                             picture,
                                             batcher,
-                                            task_id,
                                             ctx,
                                             gpu_cache,
                                             render_tasks,
                                             deferred_resolves,
                                             prim_headers,
                                             transforms,
                                             root_spatial_node_index,
                                             z_generator,
@@ -1307,16 +1301,17 @@ impl BatchBuilder {
                                             0,
                                         ]);
 
                                         let instance = BrushInstance {
                                             prim_header_index,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
+                                            render_task_address,
                                             clip_task_address,
                                             user_data: uv_rect_address.as_int(),
                                         };
 
                                         batcher.current_batch_list().push_single_instance(
                                             key,
                                             bounding_rect,
                                             z_id,
@@ -1378,16 +1373,17 @@ impl BatchBuilder {
                                                 RasterizationSpace::Screen as i32,
                                                 get_shader_opacity(1.0),
                                                 0,
                                             ]);
 
                                             let shadow_instance = BrushInstance {
                                                 prim_header_index: shadow_prim_header_index,
                                                 clip_task_address,
+                                                render_task_address,
                                                 segment_index: INVALID_SEGMENT_INDEX,
                                                 edge_flags: EdgeAaSegmentMask::empty(),
                                                 brush_flags,
                                                 user_data: shadow_uv_rect_address,
                                             };
 
                                             batcher.current_batch_list().push_single_instance(
                                                 shadow_key,
@@ -1403,16 +1399,17 @@ impl BatchBuilder {
                                             RasterizationSpace::Screen as i32,
                                             get_shader_opacity(1.0),
                                             0,
                                         ]);
 
                                         let content_instance = BrushInstance {
                                             prim_header_index: content_prim_header_index,
                                             clip_task_address,
+                                            render_task_address,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
                                             user_data: content_uv_rect_address,
                                         };
 
                                         batcher.current_batch_list().push_single_instance(
                                             content_key,
@@ -1485,25 +1482,27 @@ impl BatchBuilder {
                                             RasterizationSpace::Screen as i32,
                                             get_shader_opacity(1.0),
                                             0,
                                         ]);
 
                                         let shadow_instance = BrushInstance {
                                             prim_header_index: shadow_prim_header_index,
                                             clip_task_address,
+                                            render_task_address,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
                                             user_data: shadow_uv_rect_address,
                                         };
 
                                         let content_instance = BrushInstance {
                                             prim_header_index: content_prim_header_index,
                                             clip_task_address,
+                                            render_task_address,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
                                             user_data: content_uv_rect_address,
                                         };
 
                                         batcher.current_batch_list().push_single_instance(
                                             shadow_key,
@@ -1582,16 +1581,17 @@ impl BatchBuilder {
                                             filter_mode,
                                             user_data,
                                             0,
                                         ]);
 
                                         let instance = BrushInstance {
                                             prim_header_index,
                                             clip_task_address,
+                                            render_task_address,
                                             segment_index: INVALID_SEGMENT_INDEX,
                                             edge_flags: EdgeAaSegmentMask::empty(),
                                             brush_flags,
                                             user_data: 0,
                                         };
 
                                         batcher.current_batch_list().push_single_instance(
                                             key,
@@ -1631,16 +1631,17 @@ impl BatchBuilder {
                                     filter_mode,
                                     user_data,
                                     0,
                                 ]);
 
                                 let instance = BrushInstance {
                                     prim_header_index,
                                     clip_task_address,
+                                    render_task_address,
                                     segment_index: INVALID_SEGMENT_INDEX,
                                     edge_flags: EdgeAaSegmentMask::empty(),
                                     brush_flags,
                                     user_data: 0,
                                 };
 
                                 batcher.current_batch_list().push_single_instance(
                                     key,
@@ -1666,16 +1667,17 @@ impl BatchBuilder {
                                     RasterizationSpace::Local as i32,
                                     get_shader_opacity(1.0),
                                     0,
                                 ]);
 
                                 let instance = BrushInstance {
                                     prim_header_index,
                                     clip_task_address,
+                                    render_task_address,
                                     segment_index: INVALID_SEGMENT_INDEX,
                                     edge_flags: EdgeAaSegmentMask::empty(),
                                     brush_flags,
                                     user_data: uv_rect_address.as_int(),
                                 };
 
                                 batcher.current_batch_list().push_single_instance(
                                     key,
@@ -1686,17 +1688,17 @@ impl BatchBuilder {
                             }
                             PictureCompositeMode::MixBlend(mode) => {
                                 let cache_task_id = surface.expect("bug: surface must be allocated by now");
                                 let backdrop_id = picture.secondary_render_task_id.expect("no backdrop!?");
 
                                 let key = BatchKey::new(
                                     BatchKind::Brush(
                                         BrushBatchKind::MixBlend {
-                                            task_id,
+                                            task_id: batcher.render_task_id,
                                             source_id: cache_task_id,
                                             backdrop_id,
                                         },
                                     ),
                                     BlendMode::PremultipliedAlpha,
                                     BatchTextures::no_texture(),
                                 );
                                 let backdrop_task_address = render_tasks.get_task_address(backdrop_id);
@@ -1706,16 +1708,17 @@ impl BatchBuilder {
                                     backdrop_task_address.0 as i32,
                                     source_task_address.0 as i32,
                                     0,
                                 ]);
 
                                 let instance = BrushInstance {
                                     prim_header_index,
                                     clip_task_address,
+                                    render_task_address,
                                     segment_index: INVALID_SEGMENT_INDEX,
                                     edge_flags: EdgeAaSegmentMask::empty(),
                                     brush_flags,
                                     user_data: 0,
                                 };
 
                                 batcher.current_batch_list().push_single_instance(
                                     key,
@@ -1752,17 +1755,16 @@ impl BatchBuilder {
                                 } else {
                                     (prim_cache_address, None)
                                 };
 
                                 let prim_header = PrimitiveHeader {
                                     local_rect: picture.snapped_local_rect,
                                     local_clip_rect: prim_info.combined_local_clip_rect,
                                     snap_offsets,
-                                    task_address,
                                     specific_prim_address: prim_cache_address,
                                     transform_id,
                                 };
 
                                 let prim_header_index = prim_headers.push(
                                     &prim_header,
                                     z_id,
                                     batch_params.prim_user_data,
@@ -1784,29 +1786,29 @@ impl BatchBuilder {
                                     &batch_params,
                                     specified_blend_mode,
                                     non_segmented_blend_mode,
                                     prim_header_index,
                                     bounding_rect,
                                     transform_kind,
                                     render_tasks,
                                     z_id,
+                                    render_task_address,
                                     prim_info.clip_task_index,
                                     ctx,
                                 );
                             }
                         }
                     }
                     None => {
                         // If this picture is being drawn into an existing target (i.e. with
                         // no composition operation), recurse and add to the current batch list.
                         self.add_pic_to_batch(
                             picture,
                             batcher,
-                            task_id,
                             ctx,
                             gpu_cache,
                             render_tasks,
                             deferred_resolves,
                             prim_headers,
                             transforms,
                             root_spatial_node_index,
                             z_generator,
@@ -1840,17 +1842,16 @@ impl BatchBuilder {
                 } else {
                     BlendMode::None
                 };
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets: snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let batch_params = BrushBatchParameters::shared(
                     BrushBatchKind::Image(get_buffer_kind(cache_item.texture_id)),
                     textures,
                     [
@@ -1875,16 +1876,17 @@ impl BatchBuilder {
                     &batch_params,
                     specified_blend_mode,
                     non_segmented_blend_mode,
                     prim_header_index,
                     bounding_rect,
                     transform_kind,
                     render_tasks,
                     z_id,
+                    render_task_address,
                     prim_info.clip_task_index,
                     ctx,
                 );
             }
             PrimitiveInstanceKind::Rectangle { data_handle, segment_instance_index, opacity_binding_index, .. } => {
                 let prim_data = &ctx.data_stores.prim[data_handle];
                 let specified_blend_mode = BlendMode::PremultipliedAlpha;
                 let opacity_binding = ctx.prim_store.get_opacity_binding(opacity_binding_index);
@@ -1915,17 +1917,16 @@ impl BatchBuilder {
                     let segments = Some(&ctx.scratch.segments[segment_instance.segments_range]);
                     (gpu_cache.get_address(&segment_instance.gpu_cache_handle), segments)
                 };
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets: snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let prim_header_index = prim_headers.push(
                     &prim_header,
                     z_id,
                     batch_params.prim_user_data,
@@ -1938,16 +1939,17 @@ impl BatchBuilder {
                     &batch_params,
                     specified_blend_mode,
                     non_segmented_blend_mode,
                     prim_header_index,
                     bounding_rect,
                     transform_kind,
                     render_tasks,
                     z_id,
+                    render_task_address,
                     prim_info.clip_task_index,
                     ctx,
                 );
             }
             PrimitiveInstanceKind::YuvImage { data_handle, segment_instance_index, .. } => {
                 let yuv_image_data = &ctx.data_stores.yuv_image[data_handle].kind;
                 let mut textures = BatchTextures::no_texture();
                 let mut uv_rect_addresses = [0; 3];
@@ -2024,17 +2026,16 @@ impl BatchBuilder {
                     let segments = Some(&ctx.scratch.segments[segment_instance.segments_range]);
                     (gpu_cache.get_address(&segment_instance.gpu_cache_handle), segments)
                 };
 
                 let prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets: snap_offsets,
-                    task_address,
                     specific_prim_address: prim_cache_address,
                     transform_id,
                 };
 
                 let prim_header_index = prim_headers.push(
                     &prim_header,
                     z_id,
                     batch_params.prim_user_data,
@@ -2047,16 +2048,17 @@ impl BatchBuilder {
                     &batch_params,
                     specified_blend_mode,
                     non_segmented_blend_mode,
                     prim_header_index,
                     bounding_rect,
                     transform_kind,
                     render_tasks,
                     z_id,
+                    render_task_address,
                     prim_info.clip_task_index,
                     ctx,
                 );
             }
             PrimitiveInstanceKind::Image { data_handle, image_instance_index, .. } => {
                 let image_data = &ctx.data_stores.image[data_handle].kind;
                 let common_data = &ctx.data_stores.image[data_handle].common;
                 let image_instance = &ctx.prim_store.images[image_instance_index];
@@ -2130,17 +2132,16 @@ impl BatchBuilder {
                         let segments = Some(&ctx.scratch.segments[segment_instance.segments_range]);
                         (gpu_cache.get_address(&segment_instance.gpu_cache_handle), segments)
                     };
 
                     let prim_header = PrimitiveHeader {
                         local_rect: prim_rect,
                         local_clip_rect: prim_info.combined_local_clip_rect,
                         snap_offsets: snap_offsets,
-                        task_address,
                         specific_prim_address: prim_cache_address,
                         transform_id,
                     };
 
                     let prim_header_index = prim_headers.push(
                         &prim_header,
                         z_id,
                         batch_params.prim_user_data,
@@ -2153,16 +2154,17 @@ impl BatchBuilder {
                         &batch_params,
                         specified_blend_mode,
                         non_segmented_blend_mode,
                         prim_header_index,
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
+                        render_task_address,
                         prim_info.clip_task_index,
                         ctx,
                     );
                 } else {
                     const VECS_PER_SPECIFIC_BRUSH: usize = 3;
                     let max_tiles_per_header = (MAX_VERTEX_TEXTURE_WIDTH - VECS_PER_SPECIFIC_BRUSH) / VECS_PER_SEGMENT;
 
                     let clip_task_address = ctx.get_prim_clip_task_address(
@@ -2184,32 +2186,32 @@ impl BatchBuilder {
                             gpu_blocks.push(GpuBlockData::EMPTY);
                         }
 
                         let gpu_handle = gpu_cache.push_per_frame_blocks(&gpu_blocks);
                         let prim_header = PrimitiveHeader {
                             local_rect: prim_rect,
                             local_clip_rect: image_instance.tight_local_clip_rect,
                             snap_offsets,
-                            task_address,
                             specific_prim_address: gpu_cache.get_address(&gpu_handle),
                             transform_id,
                         };
                         let prim_header_index = prim_headers.push(&prim_header, z_id, prim_user_data);
 
                         for (i, tile) in chunk.iter().enumerate() {
                             if let Some((batch_kind, textures, uv_rect_address)) = get_image_tile_params(
                                 ctx.resource_cache,
                                 gpu_cache,
                                 deferred_resolves,
                                 request.with_tile(tile.tile_offset),
                             ) {
                                 let base_instance = BrushInstance {
                                     prim_header_index,
                                     clip_task_address,
+                                    render_task_address,
                                     segment_index: i as i32,
                                     edge_flags: tile.edge_flags,
                                     brush_flags: BrushFlags::SEGMENT_RELATIVE | BrushFlags::PERSPECTIVE_INTERPOLATION,
                                     user_data: uv_rect_address.as_int(),
                                 };
                                 let batch_key = BatchKey {
                                     blend_mode: specified_blend_mode,
                                     kind: BatchKind::Brush(batch_kind),
@@ -2230,18 +2232,17 @@ impl BatchBuilder {
                 let gradient = &ctx.prim_store.linear_gradients[gradient_index];
                 let prim_data = &ctx.data_stores.linear_grad[data_handle];
                 let specified_blend_mode = BlendMode::PremultipliedAlpha;
 
                 let mut prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
-                    specific_prim_address: GpuCacheAddress::invalid(),
+                    specific_prim_address: GpuCacheAddress::INVALID,
                     transform_id,
                 };
 
                 let non_segmented_blend_mode = if !prim_data.opacity.is_opaque ||
                     prim_info.clip_task_index != ClipTaskIndex::INVALID ||
                     transform_kind == TransformedRectKind::Complex
                 {
                     specified_blend_mode
@@ -2286,16 +2287,17 @@ impl BatchBuilder {
                         prim_info.clip_task_index,
                         render_tasks,
                     ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                     let instance = PrimitiveInstanceData::from(BrushInstance {
                         segment_index: INVALID_SEGMENT_INDEX,
                         edge_flags: EdgeAaSegmentMask::all(),
                         clip_task_address,
+                        render_task_address,
                         brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
                         prim_header_index,
                         user_data: segment_user_data,
                     });
 
                     batcher.current_batch_list().push_single_instance(
                         batch_key,
                         bounding_rect,
@@ -2336,16 +2338,17 @@ impl BatchBuilder {
                         &batch_params,
                         specified_blend_mode,
                         non_segmented_blend_mode,
                         prim_header_index,
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
+                        render_task_address,
                         prim_info.clip_task_index,
                         ctx,
                     );
                 } else {
                     let visible_tiles = &ctx.scratch.gradient_tiles[gradient.visible_tiles_range];
 
                     let clip_task_address = ctx.get_prim_clip_task_address(
                         prim_info.clip_task_index,
@@ -2353,16 +2356,17 @@ impl BatchBuilder {
                     ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                     add_gradient_tiles(
                         visible_tiles,
                         &prim_data.stops_handle,
                         BrushBatchKind::LinearGradient,
                         specified_blend_mode,
                         bounding_rect,
+                        render_task_address,
                         clip_task_address,
                         gpu_cache,
                         batcher.current_batch_list(),
                         &prim_header,
                         prim_headers,
                         z_id,
                     );
                 }
@@ -2370,18 +2374,17 @@ impl BatchBuilder {
             PrimitiveInstanceKind::RadialGradient { data_handle, ref visible_tiles_range, .. } => {
                 let prim_data = &ctx.data_stores.radial_grad[data_handle];
                 let specified_blend_mode = BlendMode::PremultipliedAlpha;
 
                 let mut prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     snap_offsets,
-                    task_address,
-                    specific_prim_address: GpuCacheAddress::invalid(),
+                    specific_prim_address: GpuCacheAddress::INVALID,
                     transform_id,
                 };
 
                 if visible_tiles_range.is_empty() {
                     let non_segmented_blend_mode = if !prim_data.opacity.is_opaque ||
                         prim_info.clip_task_index != ClipTaskIndex::INVALID ||
                         transform_kind == TransformedRectKind::Complex
                     {
@@ -2423,16 +2426,17 @@ impl BatchBuilder {
                         &batch_params,
                         specified_blend_mode,
                         non_segmented_blend_mode,
                         prim_header_index,
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
+                        render_task_address,
                         prim_info.clip_task_index,
                         ctx,
                     );
                 } else {
                     let visible_tiles = &ctx.scratch.gradient_tiles[*visible_tiles_range];
 
                     let clip_task_address = ctx.get_prim_clip_task_address(
                         prim_info.clip_task_index,
@@ -2440,16 +2444,17 @@ impl BatchBuilder {
                     ).unwrap_or(OPAQUE_TASK_ADDRESS);
 
                     add_gradient_tiles(
                         visible_tiles,
                         &prim_data.stops_handle,
                         BrushBatchKind::RadialGradient,
                         specified_blend_mode,
                         bounding_rect,
+                        render_task_address,
                         clip_task_address,
                         gpu_cache,
                         batcher.current_batch_list(),
                         &prim_header,
                         prim_headers,
                         z_id,
                     );
                 }
@@ -2467,16 +2472,17 @@ impl BatchBuilder {
         batch_kind: BrushBatchKind,
         prim_header_index: PrimitiveHeaderIndex,
         alpha_blend_mode: BlendMode,
         bounding_rect: &PictureRect,
         transform_kind: TransformedRectKind,
         render_tasks: &RenderTaskTree,
         z_id: ZBufferId,
         prim_opacity: PrimitiveOpacity,
+        render_task_address: RenderTaskAddress,
         clip_task_index: ClipTaskIndex,
         ctx: &RenderTargetContext,
     ) {
         debug_assert!(clip_task_index != ClipTaskIndex::INVALID);
 
         // Get GPU address of clip task for this segment, or None if
         // the entire segment is clipped out.
         let clip_task_address = match ctx.get_clip_task_address(
@@ -2493,16 +2499,17 @@ impl BatchBuilder {
         let needs_blending = !prim_opacity.is_opaque ||
                              clip_task_address != OPAQUE_TASK_ADDRESS ||
                              (!is_inner && transform_kind == TransformedRectKind::Complex);
 
         let instance = PrimitiveInstanceData::from(BrushInstance {
             segment_index,
             edge_flags: segment.edge_flags,
             clip_task_address,
+            render_task_address,
             brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION | segment.brush_flags,
             prim_header_index,
             user_data: segment_data.user_data,
         });
 
         let batch_key = BatchKey {
             blend_mode: if needs_blending { alpha_blend_mode } else { BlendMode::None },
             kind: BatchKind::Brush(batch_kind),
@@ -2526,16 +2533,17 @@ impl BatchBuilder {
         params: &BrushBatchParameters,
         alpha_blend_mode: BlendMode,
         non_segmented_blend_mode: BlendMode,
         prim_header_index: PrimitiveHeaderIndex,
         bounding_rect: &PictureRect,
         transform_kind: TransformedRectKind,
         render_tasks: &RenderTaskTree,
         z_id: ZBufferId,
+        render_task_address: RenderTaskAddress,
         clip_task_index: ClipTaskIndex,
         ctx: &RenderTargetContext,
     ) {
         match (brush_segments, &params.segment_data) {
             (Some(ref brush_segments), SegmentDataKind::Instanced(ref segment_data)) => {
                 // In this case, we have both a list of segments, and a list of
                 // per-segment instance data. Zip them together to build batches.
                 debug_assert_eq!(brush_segments.len(), segment_data.len());
@@ -2552,16 +2560,17 @@ impl BatchBuilder {
                         params.batch_kind,
                         prim_header_index,
                         alpha_blend_mode,
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
                         prim_opacity,
+                        render_task_address,
                         clip_task_index,
                         ctx,
                     );
                 }
             }
             (Some(ref brush_segments), SegmentDataKind::Shared(ref segment_data)) => {
                 // A list of segments, but the per-segment data is common
                 // between all segments.
@@ -2577,16 +2586,17 @@ impl BatchBuilder {
                         params.batch_kind,
                         prim_header_index,
                         alpha_blend_mode,
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
                         prim_opacity,
+                        render_task_address,
                         clip_task_index,
                         ctx,
                     );
                 }
             }
             (None, SegmentDataKind::Shared(ref segment_data)) => {
                 // No segments, and thus no per-segment instance data.
                 // Note: the blend mode already takes opacity into account
@@ -2598,16 +2608,17 @@ impl BatchBuilder {
                 let clip_task_address = ctx.get_prim_clip_task_address(
                     clip_task_index,
                     render_tasks,
                 ).unwrap_or(OPAQUE_TASK_ADDRESS);
                 let instance = PrimitiveInstanceData::from(BrushInstance {
                     segment_index: INVALID_SEGMENT_INDEX,
                     edge_flags: EdgeAaSegmentMask::all(),
                     clip_task_address,
+                    render_task_address,
                     brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
                     prim_header_index,
                     user_data: segment_data.user_data,
                 });
                 batcher.current_batch_list().push_single_instance(
                     batch_key,
                     bounding_rect,
                     z_id,
@@ -2624,16 +2635,17 @@ impl BatchBuilder {
 }
 
 fn add_gradient_tiles(
     visible_tiles: &[VisibleGradientTile],
     stops_handle: &GpuCacheHandle,
     kind: BrushBatchKind,
     blend_mode: BlendMode,
     bounding_rect: &PictureRect,
+    render_task_address: RenderTaskAddress,
     clip_task_address: RenderTaskAddress,
     gpu_cache: &GpuCache,
     batch_list: &mut BatchList,
     base_prim_header: &PrimitiveHeader,
     prim_headers: &mut PrimitiveHeaders,
     z_id: ZBufferId,
 ) {
     let batch = batch_list.set_params_and_get_batch(
@@ -2664,16 +2676,17 @@ fn add_gradient_tiles(
             ..*base_prim_header
         };
         let prim_header_index = prim_headers.push(&prim_header, z_id, user_data);
 
         batch.push(PrimitiveInstanceData::from(
             BrushInstance {
                 prim_header_index,
                 clip_task_address,
+                render_task_address,
                 segment_index: INVALID_SEGMENT_INDEX,
                 edge_flags: EdgeAaSegmentMask::all(),
                 brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
                 user_data: 0,
             }
         ));
     }
 }
@@ -2914,17 +2927,17 @@ impl ClipBatcher {
         task_origin: DevicePoint,
         screen_origin: DevicePoint,
         device_pixel_scale: f32,
     ) {
         let instance = ClipMaskInstance {
             clip_transform_id: TransformPaletteId::IDENTITY,
             prim_transform_id: TransformPaletteId::IDENTITY,
             clip_data_address,
-            resource_address: GpuCacheAddress::invalid(),
+            resource_address: GpuCacheAddress::INVALID,
             local_pos,
             tile_rect: LayoutRect::zero(),
             sub_rect,
             snap_offsets: SnapOffsets::empty(),
             task_origin,
             screen_origin,
             device_pixel_scale,
         };
@@ -3072,18 +3085,18 @@ impl ClipBatcher {
                 root_spatial_node_index,
                 ROOT_SPATIAL_NODE_INDEX,
                 clip_scroll_tree,
             );
 
             let instance = ClipMaskInstance {
                 clip_transform_id,
                 prim_transform_id,
-                clip_data_address: GpuCacheAddress::invalid(),
-                resource_address: GpuCacheAddress::invalid(),
+                clip_data_address: GpuCacheAddress::INVALID,
+                resource_address: GpuCacheAddress::INVALID,
                 local_pos: clip_instance.local_pos,
                 tile_rect: LayoutRect::zero(),
                 sub_rect: DeviceRect::new(
                     DevicePoint::zero(),
                     actual_rect.size.to_f32(),
                 ),
                 snap_offsets,
                 task_origin,
--- a/gfx/wr/webrender/src/frame_builder.rs
+++ b/gfx/wr/webrender/src/frame_builder.rs
@@ -608,18 +608,18 @@ impl FrameBuilder {
                     &mut deferred_resolves,
                     &self.clip_store,
                     &mut transform_palette,
                     &mut prim_headers,
                     &mut z_generator,
                 );
 
                 match pass.kind {
-                    RenderPassKind::MainFramebuffer(ref color) => {
-                        has_texture_cache_tasks |= color.must_be_drawn();
+                    RenderPassKind::MainFramebuffer { ref main_target, .. } => {
+                        has_texture_cache_tasks |= main_target.must_be_drawn();
                     }
                     RenderPassKind::OffScreen { ref texture_cache, ref color, .. } => {
                         has_texture_cache_tasks |= !texture_cache.is_empty();
                         has_texture_cache_tasks |= color.must_be_drawn();
                     }
                 }
             }
         }
--- a/gfx/wr/webrender/src/gpu_cache.rs
+++ b/gfx/wr/webrender/src/gpu_cache.rs
@@ -172,22 +172,20 @@ pub struct GpuCacheAddress {
 impl GpuCacheAddress {
     fn new(u: usize, v: usize) -> Self {
         GpuCacheAddress {
             u: u as u16,
             v: v as u16,
         }
     }
 
-    pub fn invalid() -> Self {
-        GpuCacheAddress {
-            u: u16::MAX,
-            v: u16::MAX,
-        }
-    }
+    pub const INVALID: GpuCacheAddress = GpuCacheAddress {
+        u: u16::MAX,
+        v: u16::MAX,
+    };
 }
 
 impl Add<usize> for GpuCacheAddress {
     type Output = GpuCacheAddress;
 
     fn add(self, other: usize) -> GpuCacheAddress {
         GpuCacheAddress {
             u: self.u + other as u16,
--- a/gfx/wr/webrender/src/gpu_types.rs
+++ b/gfx/wr/webrender/src/gpu_types.rs
@@ -235,34 +235,33 @@ impl PrimitiveHeaders {
         self.headers_float.push(PrimitiveHeaderF {
             local_rect: prim_header.local_rect,
             local_clip_rect: prim_header.local_clip_rect,
             snap_offsets: prim_header.snap_offsets,
         });
 
         self.headers_int.push(PrimitiveHeaderI {
             z,
-            task_address: prim_header.task_address,
+            unused: 0,
             specific_prim_address: prim_header.specific_prim_address.as_int(),
             transform_id: prim_header.transform_id,
             user_data,
         });
 
         PrimitiveHeaderIndex(id as i32)
     }
 }
 
 // This is a convenience type used to make it easier to pass
 // the common parts around during batching.
 #[derive(Debug)]
 pub struct PrimitiveHeader {
     pub local_rect: LayoutRect,
     pub local_clip_rect: LayoutRect,
     pub snap_offsets: SnapOffsets,
-    pub task_address: RenderTaskAddress,
     pub specific_prim_address: GpuCacheAddress,
     pub transform_id: TransformPaletteId,
 }
 
 // f32 parts of a primitive header
 #[derive(Debug)]
 #[repr(C)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
@@ -276,19 +275,19 @@ pub struct PrimitiveHeaderF {
 // i32 parts of a primitive header
 // TODO(gw): Compress parts of these down to u16
 #[derive(Debug)]
 #[repr(C)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct PrimitiveHeaderI {
     pub z: ZBufferId,
-    pub task_address: RenderTaskAddress,
     pub specific_prim_address: i32,
     pub transform_id: TransformPaletteId,
+    pub unused: i32,                    // To ensure required 16 byte alignment of vertex textures
     pub user_data: [i32; 4],
 }
 
 pub struct GlyphInstance {
     pub prim_header_index: PrimitiveHeaderIndex,
 }
 
 impl GlyphInstance {
@@ -314,40 +313,43 @@ impl GlyphInstance {
         }
     }
 }
 
 pub struct SplitCompositeInstance {
     pub prim_header_index: PrimitiveHeaderIndex,
     pub polygons_address: GpuCacheAddress,
     pub z: ZBufferId,
+    pub render_task_address: RenderTaskAddress,
 }
 
 impl SplitCompositeInstance {
     pub fn new(
         prim_header_index: PrimitiveHeaderIndex,
         polygons_address: GpuCacheAddress,
+        render_task_address: RenderTaskAddress,
         z: ZBufferId,
     ) -> Self {
         SplitCompositeInstance {
             prim_header_index,
             polygons_address,
             z,
+            render_task_address,
         }
     }
 }
 
 impl From<SplitCompositeInstance> for PrimitiveInstanceData {
     fn from(instance: SplitCompositeInstance) -> Self {
         PrimitiveInstanceData {
             data: [
                 instance.prim_header_index.0,
                 instance.polygons_address.as_int(),
                 instance.z.0,
-                0,
+                instance.render_task_address.0 as i32,
             ],
         }
     }
 }
 
 bitflags! {
     /// Flags that define how the common brush shader
     /// code should process this instance.
@@ -370,28 +372,30 @@ bitflags! {
 }
 
 // TODO(gw): Some of these fields can be moved to the primitive
 //           header since they are constant, and some can be
 //           compressed to a smaller size.
 #[repr(C)]
 pub struct BrushInstance {
     pub prim_header_index: PrimitiveHeaderIndex,
+    pub render_task_address: RenderTaskAddress,
     pub clip_task_address: RenderTaskAddress,
     pub segment_index: i32,
     pub edge_flags: EdgeAaSegmentMask,
     pub brush_flags: BrushFlags,
     pub user_data: i32,
 }
 
 impl From<BrushInstance> for PrimitiveInstanceData {
     fn from(instance: BrushInstance) -> Self {
         PrimitiveInstanceData {
             data: [
                 instance.prim_header_index.0,
+                ((instance.render_task_address.0 as i32) << 16) |
                 instance.clip_task_address.0 as i32,
                 instance.segment_index |
                 ((instance.edge_flags.bits() as i32) << 16) |
                 ((instance.brush_flags.bits() as i32) << 24),
                 instance.user_data,
             ]
         }
     }
--- a/gfx/wr/webrender/src/prim_store/mod.rs
+++ b/gfx/wr/webrender/src/prim_store/mod.rs
@@ -1328,16 +1328,42 @@ pub enum PrimitiveInstanceKind {
 #[derive(Debug, Copy, Clone, PartialEq)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 pub struct PrimitiveVisibilityIndex(pub u32);
 
 impl PrimitiveVisibilityIndex {
     pub const INVALID: PrimitiveVisibilityIndex = PrimitiveVisibilityIndex(u32::MAX);
 }
 
+/// A bit mask describing which dirty regions a primitive is visible in.
+/// A value of 0 means not visible in any region, while a mask of 0xffff
+/// would be considered visible in all regions.
+pub struct PrimitiveVisibilityMask {
+    bits: u16,
+}
+
+impl PrimitiveVisibilityMask {
+    /// Construct a default mask, where no regions are considered visible
+    pub fn empty() -> Self {
+        PrimitiveVisibilityMask {
+            bits: 0,
+        }
+    }
+
+    /// Mark a given region index as visible
+    pub fn set_visible(&mut self, region_index: usize) {
+        self.bits |= 1 << region_index;
+    }
+
+    /// Returns true if there are no visible regions
+    pub fn is_empty(&self) -> bool {
+        self.bits == 0
+    }
+}
+
 /// Information stored for a visible primitive about the visible
 /// rect and associated clip information.
 pub struct PrimitiveVisibility {
     /// The clip chain instance that was built for this primitive.
     pub clip_chain: ClipChainInstance,
 
     /// The current world rect, clipped to screen / dirty rect boundaries.
     // TODO(gw): This is only used by a small number of primitives.
@@ -1347,16 +1373,19 @@ pub struct PrimitiveVisibility {
 
     /// An index into the clip task instances array in the primitive
     /// store. If this is ClipTaskIndex::INVALID, then the primitive
     /// has no clip mask. Otherwise, it may store the offset of the
     /// global clip mask task for this primitive, or the first of
     /// a list of clip task ids (one per segment).
     pub clip_task_index: ClipTaskIndex,
 
+    /// A mask defining which of the dirty regions this primitive is visible in.
+    pub visibility_mask: PrimitiveVisibilityMask,
+
     /// The current combined local clip for this primitive, from
     /// the primitive local clip above and the current clip chain.
     pub combined_local_clip_rect: LayoutRect,
 
     /// The snap offsets in device space for this primitive. They are
     /// generated based on the visible rect, which is the local rect
     /// clipped by the combined local clip for most primitives, or
     /// just the local rect for pictures.
@@ -1897,16 +1926,17 @@ impl PrimitiveStore {
                 frame_state.scratch.prim_info.push(
                     PrimitiveVisibility {
                         clipped_world_rect: WorldRect::max_rect(),
                         clip_chain: ClipChainInstance::empty(),
                         clip_task_index: ClipTaskIndex::INVALID,
                         combined_local_clip_rect: LayoutRect::zero(),
                         snap_offsets: SnapOffsets::empty(),
                         shadow_snap_offsets: SnapOffsets::empty(),
+                        visibility_mask: PrimitiveVisibilityMask::empty(),
                     }
                 );
 
                 prim_instance.visibility_info = vis_index;
             } else {
                 if prim_local_rect.size.width <= 0.0 || prim_local_rect.size.height <= 0.0 {
                     if prim_instance.is_chased() {
                         println!("\tculled for zero local rectangle");
@@ -2105,16 +2135,17 @@ impl PrimitiveStore {
                 frame_state.scratch.prim_info.push(
                     PrimitiveVisibility {
                         clipped_world_rect,
                         clip_chain,
                         clip_task_index: ClipTaskIndex::INVALID,
                         combined_local_clip_rect,
                         snap_offsets,
                         shadow_snap_offsets,
+                        visibility_mask: PrimitiveVisibilityMask::empty(),
                     }
                 );
 
                 prim_instance.visibility_info = vis_index;
 
                 self.request_resources_for_prim(
                     prim_instance,
                     surface,
@@ -2653,24 +2684,23 @@ impl PrimitiveStore {
                         // render task size calculations. In future, we may consider creating multiple
                         // render task trees, one per dirty region.
                         visibility_info.clipped_world_rect = rect;
 
                         // If there is more than one dirty region, it's possible that this primitive
                         // is inside the overal dirty rect, but doesn't intersect any of the individual
                         // dirty rects. If that's the case, then we can skip drawing this primitive too.
                         if dirty_region.dirty_rects.len() > 1 {
-                            let in_dirty_rects = dirty_region
-                                .dirty_rects
-                                .iter()
-                                .any(|dirty_rect| {
-                                    visibility_info.clipped_world_rect.intersects(&dirty_rect.world_rect)
-                                });
-
-                            if !in_dirty_rects {
+                            for (region_index, region) in dirty_region.dirty_rects.iter().enumerate() {
+                                if visibility_info.clipped_world_rect.intersects(&region.world_rect) {
+                                    visibility_info.visibility_mask.set_visible(region_index);
+                                }
+                            }
+
+                            if visibility_info.visibility_mask.is_empty() {
                                 prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
                                 continue;
                             }
                         }
                     }
                     None => {
                         // Outside the overall dirty rect, so can be skipped.
                         prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
--- a/gfx/wr/webrender/src/render_task.rs
+++ b/gfx/wr/webrender/src/render_task.rs
@@ -68,17 +68,17 @@ impl RenderTaskId {
         frame_id: FrameId::INVALID,
     };
 }
 
 #[derive(Debug, Copy, Clone, PartialEq)]
 #[repr(C)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
-pub struct RenderTaskAddress(pub u32);
+pub struct RenderTaskAddress(pub u16);
 
 #[derive(Debug)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct RenderTaskTree {
     pub tasks: Vec<RenderTask>,
     pub task_data: Vec<RenderTaskData>,
     /// Tasks that don't have dependencies, and that may be shared between
@@ -386,17 +386,17 @@ impl RenderTaskTree {
                 task_redirects[task_index] = Some(blit_id);
             }
         }
     }
 
     pub fn get_task_address(&self, id: RenderTaskId) -> RenderTaskAddress {
         #[cfg(debug_assertions)]
         debug_assert_eq!(self.frame_id, id.frame_id);
-        RenderTaskAddress(id.index)
+        RenderTaskAddress(id.index as u16)
     }
 
     pub fn write_task_data(&mut self) {
         for task in &self.tasks {
             self.task_data.push(task.write_task_data());
         }
     }
 
--- a/gfx/wr/webrender/src/renderer.rs
+++ b/gfx/wr/webrender/src/renderer.rs
@@ -384,22 +384,22 @@ pub(crate) mod desc {
                 count: 2,
                 kind: VertexAttributeKind::F32,
             },
         ],
         instance_attributes: &[
             VertexAttribute {
                 name: "aBlurRenderTaskAddress",
                 count: 1,
-                kind: VertexAttributeKind::I32,
+                kind: VertexAttributeKind::U16,
             },
             VertexAttribute {
                 name: "aBlurSourceTaskAddress",
                 count: 1,
-                kind: VertexAttributeKind::I32,
+                kind: VertexAttributeKind::U16,
             },
             VertexAttribute {
                 name: "aBlurDirection",
                 count: 1,
                 kind: VertexAttributeKind::I32,
             },
         ],
     };
@@ -561,22 +561,22 @@ pub(crate) mod desc {
                 count: 2,
                 kind: VertexAttributeKind::F32,
             },
         ],
         instance_attributes: &[
             VertexAttribute {
                 name: "aScaleRenderTaskAddress",
                 count: 1,
-                kind: VertexAttributeKind::I32,
+                kind: VertexAttributeKind::U16,
             },
             VertexAttribute {
                 name: "aScaleSourceTaskAddress",
                 count: 1,
-                kind: VertexAttributeKind::I32,
+                kind: VertexAttributeKind::U16,
             },
         ],
     };
 
     pub const CLIP: VertexDescriptor = VertexDescriptor {
         vertex_attributes: &[
             VertexAttribute {
                 name: "aPosition",
@@ -2834,18 +2834,18 @@ impl Renderer {
     #[cfg(feature = "debugger")]
     fn get_passes_for_debugger(&self) -> String {
         let mut debug_passes = debug_server::PassList::new();
 
         for &(_, ref render_doc) in &self.active_documents {
             for pass in &render_doc.frame.passes {
                 let mut debug_targets = Vec::new();
                 match pass.kind {
-                    RenderPassKind::MainFramebuffer(ref target) => {
-                        debug_targets.push(Self::debug_color_target(target));
+                    RenderPassKind::MainFramebuffer { ref main_target, .. } => {
+                        debug_targets.push(Self::debug_color_target(main_target));
                     }
                     RenderPassKind::OffScreen { ref alpha, ref color, ref texture_cache } => {
                         debug_targets.extend(alpha.targets.iter().map(Self::debug_alpha_target));
                         debug_targets.extend(color.targets.iter().map(Self::debug_color_target));
                         debug_targets.extend(texture_cache.iter().map(|(_, target)| Self::debug_texture_cache_target(target)))
                     }
                 }
 
@@ -4733,17 +4733,17 @@ impl Renderer {
             );
             self.texture_resolver.bind(
                 &TextureSource::PrevPassColor,
                 TextureSampler::PrevPassColor,
                 &mut self.device,
             );
 
             match pass.kind {
-                RenderPassKind::MainFramebuffer(ref target) => {
+                RenderPassKind::MainFramebuffer { ref main_target, .. } => {
                     if let Some(device_size) = device_size {
                         stats.color_target_count += 1;
 
                         let offset = frame.content_origin.to_f32();
                         let size = frame.device_rect.size.to_f32();
                         let projection = Transform3D::ortho(
                             offset.x,
                             offset.x + size.width,
@@ -4766,17 +4766,17 @@ impl Renderer {
                             self.device.enable_depth_write();
                             self.device.clear_target(self.clear_color.map(|color| color.to_array()),
                                                      Some(1.0),
                                                      None);
                         }
 
                         self.draw_color_target(
                             draw_target,
-                            target,
+                            main_target,
                             frame.content_origin,
                             None,
                             None,
                             &frame.render_tasks,
                             &projection,
                             frame_id,
                             stats,
                         );
--- a/gfx/wr/webrender/src/tiling.rs
+++ b/gfx/wr/webrender/src/tiling.rs
@@ -434,22 +434,22 @@ impl RenderTarget for ColorRenderTarget 
                         None
                     } else {
                         Some(target_rect)
                     };
 
                     let mut alpha_batch_builder = AlphaBatchBuilder::new(
                         self.screen_size,
                         ctx.break_advanced_blend_batches,
+                        *task_id,
                     );
 
                     self.batch_builder.add_pic_to_batch(
                         pic,
                         &mut alpha_batch_builder,
-                        *task_id,
                         ctx,
                         gpu_cache,
                         render_tasks,
                         deferred_resolves,
                         prim_headers,
                         transforms,
                         pic_task.root_spatial_node_index,
                         z_generator,
@@ -904,17 +904,19 @@ impl TextureCacheRenderTarget {
 }
 
 /// Contains the set of `RenderTarget`s specific to the kind of pass.
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum RenderPassKind {
     /// The final pass to the main frame buffer, where we have a single color
     /// target for display to the user.
-    MainFramebuffer(ColorRenderTarget),
+    MainFramebuffer {
+        main_target: ColorRenderTarget,
+    },
     /// An intermediate pass, where we may have multiple targets.
     OffScreen {
         alpha: RenderTargetList<AlphaRenderTarget>,
         color: RenderTargetList<ColorRenderTarget>,
         texture_cache: FastHashMap<(CacheTextureId, usize), TextureCacheRenderTarget>,
     },
 }
 
@@ -936,19 +938,21 @@ pub struct RenderPass {
 
 impl RenderPass {
     /// Creates a pass for the main framebuffer. There is only one of these, and
     /// it is always the last pass.
     pub fn new_main_framebuffer(
         screen_size: DeviceIntSize,
         gpu_supports_fast_clears: bool,
     ) -> Self {
-        let target = ColorRenderTarget::new(screen_size, gpu_supports_fast_clears);
+        let main_target = ColorRenderTarget::new(screen_size, gpu_supports_fast_clears);
         RenderPass {
-            kind: RenderPassKind::MainFramebuffer(target),
+            kind: RenderPassKind::MainFramebuffer {
+                main_target,
+            },
             tasks: vec![],
         }
     }
 
     /// Creates an intermediate off-screen pass.
     pub fn new_off_screen(
         screen_size: DeviceIntSize,
         gpu_supports_fast_clears: bool,
@@ -1011,30 +1015,30 @@ impl RenderPass {
         clip_store: &ClipStore,
         transforms: &mut TransformPalette,
         prim_headers: &mut PrimitiveHeaders,
         z_generator: &mut ZBufferIdGenerator,
     ) {
         profile_scope!("RenderPass::build");
 
         match self.kind {
-            RenderPassKind::MainFramebuffer(ref mut target) => {
+            RenderPassKind::MainFramebuffer { ref mut main_target, .. } => {
                 for &task_id in &self.tasks {
                     assert_eq!(render_tasks[task_id].target_kind(), RenderTargetKind::Color);
-                    target.add_task(
+                    main_target.add_task(
                         task_id,
                         ctx,
                         gpu_cache,
                         render_tasks,
                         clip_store,
                         transforms,
                         deferred_resolves,
                     );
                 }
-                target.build(
+                main_target.build(
                     ctx,
                     gpu_cache,
                     render_tasks,
                     deferred_resolves,
                     prim_headers,
                     transforms,
                     z_generator,
                 );
--- a/gfx/wr/wrench/src/main.rs
+++ b/gfx/wr/wrench/src/main.rs
@@ -653,16 +653,21 @@ fn render<'a>(
                             wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
                             do_render = true;
                         }
                         VirtualKeyCode::S => {
                             debug_flags.toggle(DebugFlags::COMPACT_PROFILER);
                             wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
                             do_render = true;
                         }
+                        VirtualKeyCode::D => {
+                            debug_flags.toggle(DebugFlags::PICTURE_CACHING_DBG);
+                            wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
+                            do_render = true;
+                        }
                         VirtualKeyCode::Q => {
                             debug_flags.toggle(DebugFlags::GPU_TIME_QUERIES | DebugFlags::GPU_SAMPLE_QUERIES);
                             wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
                             do_render = true;
                         }
                         VirtualKeyCode::V => {
                             debug_flags.toggle(DebugFlags::SHOW_OVERDRAW);
                             wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
--- a/mobile/android/components/ContentPermissionPrompt.js
+++ b/mobile/android/components/ContentPermissionPrompt.js
@@ -6,16 +6,19 @@ const {Services} = ChromeUtils.import("r
 const {XPCOMUtils} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
 
 ChromeUtils.defineModuleGetter(this, "RuntimePermissions",
                                "resource://gre/modules/RuntimePermissions.jsm");
 
 ChromeUtils.defineModuleGetter(this, "DoorHanger",
                                "resource://gre/modules/Prompt.jsm");
 
+ChromeUtils.defineModuleGetter(this, "PrivateBrowsingUtils",
+                               "resource://gre/modules/PrivateBrowsingUtils.jsm");
+
 const kEntities = {
   "contacts": "contacts",
   "desktop-notification": "desktopNotification2",
   "geolocation": "geolocation",
 };
 
 function ContentPermissionPrompt() {}
 
@@ -49,16 +52,18 @@ ContentPermissionPrompt.prototype = {
     if (request.window) {
       let requestingWindow = request.window.top;
       return this.getChromeWindow(requestingWindow).wrappedJSObject;
     }
     return request.element.ownerGlobal;
   },
 
   prompt: function(request) {
+    let isApp = request.principal.appId !== Ci.nsIScriptSecurityManager.NO_APP_ID && request.principal.appId !== Ci.nsIScriptSecurityManager.UNKNOWN_APP_ID;
+
     // Only allow exactly one permission rquest here.
     let types = request.types.QueryInterface(Ci.nsIArray);
     if (types.length != 1) {
       request.cancel();
       return;
     }
 
     let perm = types.queryElementAt(0, Ci.nsIContentPermissionType);
@@ -73,16 +78,19 @@ ContentPermissionPrompt.prototype = {
           RuntimePermissions.ACCESS_FINE_LOCATION).then((granted) => {
             (granted ? request.allow : request.cancel)();
           });
         return;
       }
       request.allow();
     };
 
+    // We don't want to remember permissions in private mode
+    let isPrivate = PrivateBrowsingUtils.isWindowPrivate(request.window.ownerGlobal);
+
     // Returns true if the request was handled
     if (this.handleExistingPermission(request, perm.type, callback)) {
        return;
     }
 
     if (perm.type === "desktop-notification" &&
         Services.prefs.getBoolPref("dom.webnotifications.requireuserinteraction", false) &&
         !request.isHandlingUserInput) {
@@ -101,19 +109,27 @@ ContentPermissionPrompt.prototype = {
           Services.perms.addFromPrincipal(request.principal, perm.type, Ci.nsIPermissionManager.DENY_ACTION);
 
         callback(/* allow */ false);
       },
     },
     {
       label: browserBundle.GetStringFromName(entityName + ".allow"),
       callback: function(aChecked) {
+        let isPermanent = (aChecked || entityName == "desktopNotification2");
         // If the user checked "Don't ask again" or this is a desktopNotification, make a permanent exception
-        if (aChecked || entityName == "desktopNotification2") {
+        // Also, we don't want to permanently store this exception if the user is in private mode
+        if (!isPrivate && isPermanent) {
           Services.perms.addFromPrincipal(request.principal, perm.type, Ci.nsIPermissionManager.ALLOW_ACTION);
+        // If we are in private mode, then it doesn't matter whether the notification is a desktop
+        // notification, and the "Don't ask again" checkbox state shouldn't matter either, because
+        // the checkbox shouldn't be shown in the first place.
+        } else if (isApp || (isPrivate && isPermanent)) {
+          // Otherwise allow the permission for the current session if the request comes from an app
+          // or if the request was made in private mode
+          Services.perms.addFromPrincipal(request.principal, perm.type, Ci.nsIPermissionManager.ALLOW_ACTION, Ci.nsIPermissionManager.EXPIRE_SESSION);
         }
 
         callback(/* allow */ true);
       },
       positive: true,
     }];
 
     let chromeWin = this.getChromeForRequest(request);
@@ -124,18 +140,22 @@ ContentPermissionPrompt.prototype = {
     let options;
     if (entityName == "desktopNotification2") {
       options = {
         link: {
           label: browserBundle.GetStringFromName("doorhanger.learnMore"),
           url: "https://www.mozilla.org/firefox/push/",
         },
       };
+    // it doesn't make sense to display the checkbox since we won't be remembering
+    // this specific permission if the user is in Private mode
+    } else if (!isPrivate) {
+      options = { checkbox: browserBundle.GetStringFromName(entityName + ".dontAskAgain") };
     } else {
-      options = { checkbox: browserBundle.GetStringFromName(entityName + ".dontAskAgain") };
+      options = { };
     }
 
     options.defaultCallback = () => {
       callback(/* allow */ false);
     };
 
     DoorHanger.show(request.window || request.element.ownerGlobal,
                     message, entityName + request.principal.URI.host,
--- a/mobile/android/geckoview/src/androidTest/java/org/mozilla/geckoview/test/ContentDelegateTest.kt
+++ b/mobile/android/geckoview/src/androidTest/java/org/mozilla/geckoview/test/ContentDelegateTest.kt
@@ -59,17 +59,17 @@ class ContentDelegateTest : BaseSessionT
                            equalTo(forEachCall("Title1", "Title2")))
             }
         })
     }
 
     @Test fun download() {
         sessionRule.session.loadTestPath(DOWNLOAD_HTML_PATH)
         // disable test on pgo for frequently failing Bug 1543355
-            assumeThat(sessionRule.env.isDebugBuild, equalTo(true))
+        assumeThat(sessionRule.env.isDebugBuild, equalTo(true))
 
         sessionRule.waitUntilCalled(object : Callbacks.NavigationDelegate, Callbacks.ContentDelegate {
 
             @AssertCalled(count = 2)
             override fun onLoadRequest(session: GeckoSession,
                                        request: LoadRequest):
                                        GeckoResult<AllowOrDeny>? {
                 return null
--- a/netwerk/dns/TRR.cpp
+++ b/netwerk/dns/TRR.cpp
@@ -235,17 +235,18 @@ nsresult TRR::SendHTTPRequest() {
   rv = NS_NewChannel(
       getter_AddRefs(mChannel), dnsURI, nsContentUtils::GetSystemPrincipal(),
       nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
       nsIContentPolicy::TYPE_OTHER,
       nullptr,  // nsICookieSettings
       nullptr,  // PerformanceStorage
       nullptr,  // aLoadGroup
       this,
-      nsIRequest::LOAD_ANONYMOUS | (mPB ? nsIRequest::INHIBIT_CACHING : 0),
+      nsIRequest::LOAD_ANONYMOUS | (mPB ? nsIRequest::INHIBIT_CACHING : 0) |
+          nsIChannel::LOAD_BYPASS_URL_CLASSIFIER,
       ios);
   if (NS_FAILED(rv)) {
     LOG(("TRR:SendHTTPRequest: NewChannel failed!\n"));
     return rv;
   }
 
   nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(mChannel);
   if (!httpChannel) {
--- a/netwerk/protocol/http/HttpChannelChild.cpp
+++ b/netwerk/protocol/http/HttpChannelChild.cpp
@@ -662,31 +662,39 @@ class SyntheticDiversionListener final :
 
   NS_DECL_ISUPPORTS
 };
 
 NS_IMPL_ISUPPORTS(SyntheticDiversionListener, nsIStreamListener);
 
 void HttpChannelChild::DoOnStartRequest(nsIRequest* aRequest,
                                         nsISupports* aContext) {
+  nsresult rv;
+
   LOG(("HttpChannelChild::DoOnStartRequest [this=%p]\n", this));
 
   // In theory mListener should not be null, but in practice sometimes it is.
   MOZ_ASSERT(mListener);
   if (!mListener) {
     Cancel(NS_ERROR_FAILURE);
     return;
   }
 
   if (mSynthesizedResponsePump && mLoadFlags & LOAD_CALL_CONTENT_SNIFFERS) {
     mSynthesizedResponsePump->PeekStream(CallTypeSniffers,
                                          static_cast<nsIChannel*>(this));
   }
 
-  nsresult rv = mListener->OnStartRequest(aRequest);
+  if (mListener) {
+    nsCOMPtr<nsIStreamListener> listener(mListener);
+    rv = listener->OnStartRequest(aRequest);
+  } else {
+    rv = NS_ERROR_UNEXPECTED;
+  }
+
   mOnStartRequestCalled = true;
   if (NS_FAILED(rv)) {
     Cancel(rv);
     return;
   }
 
   if (mDivertingToParent) {
     mListener = nullptr;
@@ -975,19 +983,22 @@ void HttpChannelChild::DoOnProgress(nsIR
 void HttpChannelChild::DoOnDataAvailable(nsIRequest* aRequest,
                                          nsISupports* aContext,
                                          nsIInputStream* aStream,
                                          uint64_t offset, uint32_t count) {
   AUTO_PROFILER_LABEL("HttpChannelChild::DoOnDataAvailable", NETWORK);
   LOG(("HttpChannelChild::DoOnDataAvailable [this=%p]\n", this));
   if (mCanceled) return;
 
-  nsresult rv = mListener->OnDataAvailable(aRequest, aStream, offset, count);
-  if (NS_FAILED(rv)) {
-    CancelOnMainThread(rv);
+  if (mListener) {
+    nsCOMPtr<nsIStreamListener> listener(mListener);
+    nsresult rv = listener->OnDataAvailable(aRequest, aStream, offset, count);
+    if (NS_FAILED(rv)) {
+      CancelOnMainThread(rv);
+    }
   }
 }
 
 class StopRequestEvent : public NeckoTargetChannelEvent<HttpChannelChild> {
  public:
   StopRequestEvent(HttpChannelChild* child, const nsresult& channelStatus,
                    const ResourceTimingStruct& timing,
                    const nsHttpHeaderArray& aResponseTrailers)
@@ -1243,17 +1254,18 @@ void HttpChannelChild::DoOnStopRequest(n
   };
   checkForBlockedContent();
 
   MOZ_ASSERT(!mOnStopRequestCalled, "We should not call OnStopRequest twice");
 
   // In theory mListener should not be null, but in practice sometimes it is.
   MOZ_ASSERT(mListener);
   if (mListener) {
-    mListener->OnStopRequest(aRequest, mStatus);
+    nsCOMPtr<nsIStreamListener> listener(mListener);
+    listener->OnStopRequest(aRequest, mStatus);
   }
   mOnStopRequestCalled = true;
 
   // notify "http-on-stop-connect" observers
   gHttpHandler->OnStopRequest(this);
 
   ReleaseListeners();
 
@@ -1533,16 +1545,89 @@ mozilla::ipc::IPCResult HttpChannelChild
                         this, &HttpChannelChild::FinishInterceptedRedirect),
       NS_DISPATCH_NORMAL);
 
   return IPC_OK();
 }
 
 void HttpChannelChild::DeleteSelf() { Send__delete__(this); }
 
+class ContinueDoNotifyListenerEvent
+    : public NeckoTargetChannelEvent<HttpChannelChild> {
+ public:
+  explicit ContinueDoNotifyListenerEvent(HttpChannelChild* child)
+      : NeckoTargetChannelEvent<HttpChannelChild>(child) {}
+  void Run() override { mChild->ContinueDoNotifyListener(); }
+};
+
+void HttpChannelChild::DoNotifyListener() {
+  LOG(("HttpChannelChild::DoNotifyListener this=%p", this));
+  MOZ_ASSERT(NS_IsMainThread());
+
+  // In case nsHttpChannel::OnStartRequest wasn't called (e.g. due to flag
+  // LOAD_ONLY_IF_MODIFIED) we want to set mAfterOnStartRequestBegun to true
+  // before notifying listener.
+  if (!mAfterOnStartRequestBegun) {
+    mAfterOnStartRequestBegun = true;
+  }
+
+  if (mListener && !mOnStartRequestCalled) {
+    nsCOMPtr<nsIStreamListener> listener = mListener;
+    listener->OnStartRequest(this);
+  }
+  mOnStartRequestCalled = true;
+
+  mEventQ->RunOrEnqueue(new ContinueDoNotifyListenerEvent(this));
+}
+
+void HttpChannelChild::ContinueDoNotifyListener() {
+  LOG(("HttpChannelChild::ContinueDoNotifyListener this=%p", this));
+  MOZ_ASSERT(NS_IsMainThread());
+
+  // Make sure mIsPending is set to false. At this moment we are done from
+  // the point of view of our consumer and we have to report our self
+  // as not-pending.
+  mIsPending = false;
+
+  if (mListener && !mOnStopRequestCalled) {
+    nsCOMPtr<nsIStreamListener> listener = mListener;
+    listener->OnStopRequest(this, mStatus);
+
+    mOnStopRequestCalled = true;
+  }
+
+  // notify "http-on-stop-request" observers
+  gHttpHandler->OnStopRequest(this);
+
+  // This channel has finished its job, potentially release any tail-blocked
+  // requests with this.
+  RemoveAsNonTailRequest();
+
+  // We have to make sure to drop the references to listeners and callbacks
+  // no longer needed.
+  ReleaseListeners();
+
+  DoNotifyListenerCleanup();
+
+  // If this is a navigation, then we must let the docshell flush the reports
+  // to the console later.  The LoadDocument() is pointing at the detached
+  // document that started the navigation.  We want to show the reports on the
+  // new document.  Otherwise the console is wiped and the user never sees
+  // the information.
+  if (!IsNavigation()) {
+    if (mLoadGroup) {
+      FlushConsoleReports(mLoadGroup);
+    } else if (mLoadInfo) {
+      RefPtr<dom::Document> doc;
+      mLoadInfo->GetLoadingDocument(getter_AddRefs(doc));
+      FlushConsoleReports(doc);
+    }
+  }
+}
+
 void HttpChannelChild::FinishInterceptedRedirect() {
   nsresult rv;
   MOZ_ASSERT(!mInterceptedRedirectContext, "the context should be null!");
   rv = AsyncOpen(mInterceptedRedirectListener);
 
   mInterceptedRedirectListener = nullptr;
   mInterceptedRedirectContext = nullptr;
 
--- a/netwerk/protocol/http/HttpChannelChild.h
+++ b/netwerk/protocol/http/HttpChannelChild.h
@@ -497,16 +497,18 @@ class HttpChannelChild final : public PH
                       const uint32_t& newLoadFlags,
                       const uint32_t& redirectFlags,
                       const ParentLoadInfoForwarderArgs& loadInfoForwarder,
                       const nsHttpResponseHead& responseHead,
                       const nsACString& securityInfoSerialization,
                       const uint64_t& channelId);
   bool Redirect3Complete(OverrideRunnable* aRunnable);
   void DeleteSelf();
+  void DoNotifyListener();
+  void ContinueDoNotifyListener();
 
   // Create a a new channel to be used in a redirection, based on the provided
   // response headers.
   MOZ_MUST_USE nsresult SetupRedirect(nsIURI* uri,
                                       const nsHttpResponseHead* responseHead,
                                       const uint32_t& redirectFlags,
                                       nsIChannel** outChannel);
 
@@ -536,16 +538,17 @@ class HttpChannelChild final : public PH
   friend class HttpFlushedForDiversionEvent;
   friend class CancelEvent;
   friend class HttpAsyncAborter<HttpChannelChild>;
   friend class InterceptStreamListener;
   friend class InterceptedChannelContent;
   friend class SyntheticDiversionListener;
   friend class HttpBackgroundChannelChild;
   friend class NeckoTargetChannelEvent<HttpChannelChild>;
+  friend class ContinueDoNotifyListenerEvent;
 };
 
 NS_DEFINE_STATIC_IID_ACCESSOR(HttpChannelChild, HTTP_CHANNEL_CHILD_IID)
 
 // A stream listener interposed between the nsInputStreamPump used for
 // intercepted channels and this channel's original listener. This is only used
 // to ensure the original listener sees the channel as the request object, and
 // to synthesize OnStatus and OnProgress notifications.
--- a/netwerk/protocol/http/nsHttpChannel.cpp
+++ b/netwerk/protocol/http/nsHttpChannel.cpp
@@ -7944,17 +7944,18 @@ nsresult nsHttpChannel::ContinueOnStopRe
     MOZ_ASSERT(NS_FAILED(aStatus), "should have a failure code here");
     // NOTE: since we have a failure status, we can ignore the return
     // value from onStartRequest.
     LOG(("  calling mListener->OnStartRequest [this=%p, listener=%p]\n", this,
          mListener.get()));
     if (mListener) {
       MOZ_ASSERT(!mOnStartRequestCalled,
                  "We should not call OnStartRequest twice.");
-      mListener->OnStartRequest(this);
+      nsCOMPtr<nsIStreamListener> listener(mListener);
+      listener->OnStartRequest(this);
       mOnStartRequestCalled = true;
     } else {
       NS_WARNING("OnStartRequest skipped because of null listener");
     }
   }
 
   // if this transaction has been replaced, then bail.
   if (mTransactionReplaced) {
--- a/netwerk/protocol/http/nsHttpConnectionMgr.cpp
+++ b/netwerk/protocol/http/nsHttpConnectionMgr.cpp
@@ -443,18 +443,22 @@ nsresult nsHttpConnectionMgr::PruneNoTra
 
 nsresult nsHttpConnectionMgr::VerifyTraffic() {
   LOG(("nsHttpConnectionMgr::VerifyTraffic\n"));
   return PostEvent(&nsHttpConnectionMgr::OnMsgVerifyTraffic);
 }
 
 nsresult nsHttpConnectionMgr::DoShiftReloadConnectionCleanup(
     nsHttpConnectionInfo* aCI) {
+  RefPtr<nsHttpConnectionInfo> ci;
+  if (aCI) {
+    ci = aCI->Clone();
+  }
   return PostEvent(&nsHttpConnectionMgr::OnMsgDoShiftReloadConnectionCleanup, 0,
-                   aCI);
+                   ci);
 }
 
 class SpeculativeConnectArgs : public ARefBase {
  public:
   SpeculativeConnectArgs()
       : mParallelSpeculativeConnectLimit(0),
         mIgnoreIdle(false),
         mIsFromPredictor(false),
@@ -585,18 +589,22 @@ nsresult nsHttpConnectionMgr::CompleteUp
 }
 
 nsresult nsHttpConnectionMgr::UpdateParam(nsParamName name, uint16_t value) {
   uint32_t param = (uint32_t(name) << 16) | uint32_t(value);
   return PostEvent(&nsHttpConnectionMgr::OnMsgUpdateParam,
                    static_cast<int32_t>(param), nullptr);
 }
 
-nsresult nsHttpConnectionMgr::ProcessPendingQ(nsHttpConnectionInfo* ci) {
-  LOG(("nsHttpConnectionMgr::ProcessPendingQ [ci=%s]\n", ci->HashKey().get()));
+nsresult nsHttpConnectionMgr::ProcessPendingQ(nsHttpConnectionInfo* aCI) {
+  LOG(("nsHttpConnectionMgr::ProcessPendingQ [ci=%s]\n", aCI->HashKey().get()));
+  RefPtr<nsHttpConnectionInfo> ci;
+  if (aCI) {
+    ci = aCI->Clone();
+  }
   return PostEvent(&nsHttpConnectionMgr::OnMsgProcessPendingQ, 0, ci);
 }
 
 nsresult nsHttpConnectionMgr::ProcessPendingQ() {
   LOG(("nsHttpConnectionMgr::ProcessPendingQ [All CI]\n"));
   return PostEvent(&nsHttpConnectionMgr::OnMsgProcessPendingQ, 0, nullptr);
 }
 
--- a/netwerk/protocol/http/nsHttpHandler.h
+++ b/netwerk/protocol/http/nsHttpHandler.h
@@ -310,17 +310,18 @@ class nsHttpHandler final : public nsIHt
   MOZ_MUST_USE nsresult GetSocketThreadTarget(nsIEventTarget** target) {
     return mConnMgr->GetSocketThreadTarget(target);
   }
 
   MOZ_MUST_USE nsresult SpeculativeConnect(nsHttpConnectionInfo* ci,
                                            nsIInterfaceRequestor* callbacks,
                                            uint32_t caps = 0) {
     TickleWifi(callbacks);
-    return mConnMgr->SpeculativeConnect(ci, callbacks, caps);
+    RefPtr<nsHttpConnectionInfo> clone = ci->Clone();
+    return mConnMgr->SpeculativeConnect(clone, callbacks, caps);
   }
 
   // Alternate Services Maps are main thread only
   void UpdateAltServiceMapping(AltSvcMapping* map, nsProxyInfo* proxyInfo,
                                nsIInterfaceRequestor* callbacks, uint32_t caps,
                                const OriginAttributes& originAttributes) {
     mConnMgr->UpdateAltServiceMapping(map, proxyInfo, callbacks, caps,
                                       originAttributes);
--- a/netwerk/url-classifier/AsyncUrlChannelClassifier.cpp
+++ b/netwerk/url-classifier/AsyncUrlChannelClassifier.cpp
@@ -428,25 +428,33 @@ bool FeatureData::MaybeCompleteClassific
     UC_LOG(
         ("FeatureData::MaybeCompleteClassification[%p] - uri found in skiplist",
          this));
     return true;
   }
 
   nsTArray<nsCString> list;
   nsTArray<nsCString> hashes;
-  list.AppendElement(mHostInPrefTables[nsIUrlClassifierFeature::blacklist]);
+  if (!mHostInPrefTables[nsIUrlClassifierFeature::blacklist].IsEmpty()) {
+    list.AppendElement(mHostInPrefTables[nsIUrlClassifierFeature::blacklist]);
+
+    // Telemetry expects every tracking channel to have a hash, so create one
+    // for the test entry.
+    Completion complete;
+    complete.FromPlaintext(
+        mHostInPrefTables[nsIUrlClassifierFeature::blacklist]);
+    hashes.AppendElement(complete.ToString());
+  }
 
   for (TableData* tableData : mBlacklistTables) {
     if (tableData->MatchState() == TableData::eMatch) {
       list.AppendElement(tableData->Table());
 
       for (const auto& r : tableData->Result()) {
-        nsCString* hash = hashes.AppendElement();
-        r->hash.complete.ToString(*hash);
+        hashes.AppendElement(r->hash.complete.ToString());
       }
     }
   }
 
   UC_LOG(("FeatureData::MaybeCompleteClassification[%p] - process channel %p",
           this, aChannel));
 
   bool shouldContinue = false;
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -3048,25 +3048,33 @@ class StaticAnalysis(MachCommandBase):
         Parse clang-tidy output into structured issues
         '''
 
         # Limit clang output parsing to 'Enabled checks:'
         end = re.search(r'^Enabled checks:\n', clang_output, re.MULTILINE)
         if end is not None:
             clang_output = clang_output[:end.start()-1]
 
+        platform, _ = self.platform
+        # Starting with clang 8, diagnostic messages may contain multiple `LF CR`
+        # sequences in order to be compatible with the msvc compiler format, so
+        # we are not interested in matching the end of the line.
+        regex_string = r'(.+):(\d+):(\d+): (warning|error): ([^\[\]\n]+)(?: \[([\.\w-]+)\])'
+
+        # For non 'win' based platforms we also need the 'end of the line' regex
+        if platform not in ('win64', 'win32'):
+            regex_string += '?$'
+
+        regex_header = re.compile(regex_string, re.MULTILINE)
+
         # Sort headers by positions
-        regex_header = re.compile(
-            r'(.+):(\d+):(\d+): (warning|error): ([^\[\]\n]+)(?: \[([\.\w-]+)\])?$', re.MULTILINE)
-
         headers = sorted(
             regex_header.finditer(clang_output),
             key=lambda h: h.start()
         )
-
         issues = []
         for _, header in enumerate(headers):
             header_group = header.groups()
             element = [header_group[3], header_group[4], header_group[5]]
             issues.append(element)
         return issues
 
     def _get_checks(self):
--- a/remote/domains/content/Page.jsm
+++ b/remote/domains/content/Page.jsm
@@ -72,20 +72,25 @@ class Page extends ContentProcessDomain 
     };
   }
 
   async reload() {
     this.docShell.reload(Ci.nsIWebNavigation.LOAD_FLAGS_NONE);
   }
 
   getFrameTree() {
+    const frameId = this.content.windowUtils.outerWindowID;
     return {
       frameTree: {
         frame: {
-          // id, parentId
+          id: frameId,
+          url: this.content.location.href,
+          loaderId: null,
+          securityOrigin: null,
+          mimeType: null,
         },
         childFrames: [],
       },
     };
   }
 
   setLifecycleEventsEnabled() {}
   addScriptToEvaluateOnNewDocument() {}
--- a/remote/test/browser/browser_page_frameNavigated.js
+++ b/remote/test/browser/browser_page_frameNavigated.js
@@ -30,16 +30,22 @@ add_task(async function() {
   ok(true, "CDP client has been instantiated");
 
   const {Page} = client;
 
   // turn on navigation related events, such as DOMContentLoaded et al.
   await Page.enable();
   ok(true, "Page domain has been enabled");
 
+  const { frameTree } = await Page.getFrameTree();
+  ok(!!frameTree.frame, "getFrameTree exposes one frame");
+  is(frameTree.childFrames.length, 0, "getFrameTree reports no child frame");
+  ok(frameTree.frame.id, "getFrameTree's frame has an id");
+  is(frameTree.frame.url, TEST_URI, "getFrameTree's frame has the right url");
+
   // Save the given `promise` resolution into the `promises` global Set
   function recordPromise(name, promise) {
     promise.then(event => {
       ok(true, `Received Page.${name}`);
       resolutions.set(name, event);
     });
     promises.add(promise);
   }
@@ -54,16 +60,18 @@ add_task(async function() {
 
   info("Test Page.navigate");
   recordPromises();
 
   const url = "data:text/html;charset=utf-8,test-page";
   const { frameId } = await Page.navigate({ url });
   ok(true, "A new page has been loaded");
   ok(frameId, "Page.navigate returned a frameId");
+  is(frameId, frameTree.frame.id, "The Page.navigate's frameId is the same than " +
+    "getFrameTree's one");
 
   await assertNavigationEvents({ url, frameId });
 
   info("Test Page.reload");
   recordPromises();
 
   await Page.reload();
   ok(true, "The page has been reloaded");
--- a/security/manager/ssl/tests/unit/test_intermediate_preloads.js
+++ b/security/manager/ssl/tests/unit/test_intermediate_preloads.js
@@ -87,16 +87,17 @@ function setupKintoPreloadServer(certGen
   attachmentCB: null,
   hashFunc: null,
   lengthFunc: null,
 }) {
   const dummyServerURL = `http://localhost:${server.identity.primaryPort}/v1`;
   Services.prefs.setCharPref("services.settings.server", dummyServerURL);
 
   const configPath = "/v1/";
+  const metadataPath = "/v1/buckets/security-state/collections/intermediates";
   const recordsPath = "/v1/buckets/security-state/collections/intermediates/records";
   const attachmentsPath = "/attachments/";
 
   if (options.hashFunc == null) {
     options.hashFunc = getHash;
   }
   if (options.lengthFunc == null) {
     options.lengthFunc = arr => arr.length;
@@ -106,32 +107,34 @@ function setupKintoPreloadServer(certGen
     for (let headerLine of headers) {
       let headerElements = headerLine.split(":");
       response.setHeader(headerElements[0], headerElements[1].trimLeft());
     }
     response.setHeader("Date", (new Date()).toUTCString());
   }
 
   // Basic server information, all static
-  server.registerPathHandler(configPath, (request, response) => {
+  const handler = (request, response) => {
     try {
       const respData = getResponseData(request, server.identity.primaryPort);
       if (!respData) {
         do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
         return;
       }
 
       response.setStatusLine(null, respData.status.status,
                              respData.status.statusText);
       setHeader(response, respData.responseHeaders);
       response.write(respData.responseBody);
     } catch (e) {
       info(e);
     }
-  });
+  };
+  server.registerPathHandler(configPath, handler);
+  server.registerPathHandler(metadataPath, handler);
 
   // Lists of certs
   server.registerPathHandler(recordsPath, (request, response) => {
     response.setStatusLine(null, 200, "OK");
     setHeader(response, [
         "Access-Control-Allow-Origin: *",
         "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
         "Content-Type: application/json; charset=UTF-8",
@@ -500,14 +503,30 @@ function getResponseData(req, port) {
         "hello": "kinto",
         "capabilities": {
           "attachments": {
             "base_url": `http://localhost:${port}/attachments/`,
           },
         },
       }),
     },
+    "GET:/v1/buckets/security-state/collections/intermediates?": {
+      "responseHeaders": [
+        "Access-Control-Allow-Origin: *",
+        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+        "Content-Type: application/json; charset=UTF-8",
+        "Server: waitress",
+        "Etag: \"1234\"",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": JSON.stringify({
+        "data": {
+          "id": "intermediates",
+          "last_modified": 1234,
+        },
+      }),
+    },
   };
   let result = cannedResponses[`${req.method}:${req.path}?${req.queryString}`] ||
                cannedResponses[`${req.method}:${req.path}`] ||
                cannedResponses[req.method];
   return result;
 }
--- a/services/common/kinto-offline-client.js
+++ b/services/common/kinto-offline-client.js
@@ -28,17 +28,17 @@
 //
 // See https://bugzilla.mozilla.org/show_bug.cgi?id=1394556#c3 for
 // more details.
 const global = this;
 
 var EXPORTED_SYMBOLS = ["Kinto"];
 
 /*
- * Version 12.3.0 - f7a9e81
+ * Version 12.4.0 - 896d337
  */
 
 (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Kinto = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
 /*
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -64,17 +64,19 @@ var _base = _interopRequireDefault(requi
 
 var _IDB = _interopRequireDefault(require("../src/adapters/IDB"));
 
 var _utils = require("../src/utils");
 
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 
 ChromeUtils.import("resource://gre/modules/Timer.jsm", global);
-const {XPCOMUtils} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
+const {
+  XPCOMUtils
+} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
 XPCOMUtils.defineLazyGlobalGetters(global, ["fetch", "indexedDB"]);
 ChromeUtils.defineModuleGetter(global, "EventEmitter", "resource://gre/modules/EventEmitter.jsm"); // Use standalone kinto-http module landed in FFx.
 
 ChromeUtils.defineModuleGetter(global, "KintoHttpClient", "resource://services-common/kinto-http-client.js");
 XPCOMUtils.defineLazyGetter(global, "generateUUID", () => {
   const {
     generateUUID
   } = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
@@ -497,16 +499,25 @@ function createListRequest(cid, store, f
 
 
   const indexField = filterFields.find(field => {
     return INDEXED_FIELDS.includes(field);
   });
 
   if (!indexField) {
     // Iterate on all records for this collection (ie. cid)
+    const isSubQuery = Object.keys(filters).some(key => key.includes(".")); // (ie. filters: {"article.title": "hello"})
+
+    if (isSubQuery) {
+      const newFilter = (0, _utils.transformSubObjectFilters)(filters);
+      const request = store.index("cid").openCursor(IDBKeyRange.only(cid));
+      request.onsuccess = cursorHandlers.all(newFilter, done);
+      return request;
+    }
+
     const request = store.index("cid").openCursor(IDBKeyRange.only(cid));
     request.onsuccess = cursorHandlers.all(filters, done);
     return request;
   } // If `indexField` was used already, don't filter again.
 
 
   const remainingFilters = (0, _utils.omitKeys)(filters, [indexField]); // value specified in the filter (eg. `filters: { _status: ["created", "updated"] }`)
 
@@ -583,34 +594,44 @@ class IDB extends _base.default {
     } // In previous versions, we used to have a database with name `${bid}/${cid}`.
     // Check if it exists, and migrate data once new schema is in place.
     // Note: the built-in migrations from IndexedDB can only be used if the
     // database name does not change.
 
 
     const dataToMigrate = this._options.migrateOldData ? await migrationRequired(this.cid) : null;
     this._db = await open(this.dbName, {
-      version: 1,
+      version: 2,
       onupgradeneeded: event => {
-        const db = event.target.result; // Records store
-
-        const recordsStore = db.createObjectStore("records", {
-          keyPath: ["_cid", "id"]
-        }); // An index to obtain all the records in a collection.
-
-        recordsStore.createIndex("cid", "_cid"); // Here we create indices for every known field in records by collection.
-        // Local record status ("synced", "created", "updated", "deleted")
-
-        recordsStore.createIndex("_status", ["_cid", "_status"]); // Last modified field
-
-        recordsStore.createIndex("last_modified", ["_cid", "last_modified"]); // Timestamps store
-
-        db.createObjectStore("timestamps", {
-          keyPath: "cid"
-        });
+        const db = event.target.result;
+
+        if (event.oldVersion < 1) {
+          // Records store
+          const recordsStore = db.createObjectStore("records", {
+            keyPath: ["_cid", "id"]
+          }); // An index to obtain all the records in a collection.
+
+          recordsStore.createIndex("cid", "_cid"); // Here we create indices for every known field in records by collection.
+          // Local record status ("synced", "created", "updated", "deleted")
+
+          recordsStore.createIndex("_status", ["_cid", "_status"]); // Last modified field
+
+          recordsStore.createIndex("last_modified", ["_cid", "last_modified"]); // Timestamps store
+
+          db.createObjectStore("timestamps", {
+            keyPath: "cid"
+          });
+        }
+
+        if (event.oldVersion < 2) {
+          // Collections store
+          db.createObjectStore("collections", {
+            keyPath: "cid"
+          });
+        }
       }
     });
 
     if (dataToMigrate) {
       const {
         records,
         timestamp
       } = dataToMigrate;
@@ -932,16 +953,42 @@ class IDB extends _base.default {
       }
 
       return records;
     } catch (e) {
       this._handleError("importBulk", e);
     }
   }
 
+  async saveMetadata(metadata) {
+    try {
+      await this.prepare("collections", store => store.put({
+        cid: this.cid,
+        metadata
+      }), {
+        mode: "readwrite"
+      });
+      return metadata;
+    } catch (e) {
+      this._handleError("saveMetadata", e);
+    }
+  }
+
+  async getMetadata() {
+    try {
+      let entry = null;
+      await this.prepare("collections", store => {
+        store.get(this.cid).onsuccess = e => entry = e.target.result;
+      });
+      return entry ? entry.metadata : null;
+    } catch (e) {
+      this._handleError("getMetadata", e);
+    }
+  }
+
 }
 /**
  * IDB transaction proxy.
  *
  * @param  {IDB} adapter        The call IDB adapter
  * @param  {IDBStore} store     The IndexedDB database store.
  * @param  {Array}    preloaded The list of records to make available to
  *                              get() (default: []).
@@ -1150,27 +1197,36 @@ class BaseAdapter {
    * @return {Promise}
    */
 
 
   loadDump(records) {
     throw new Error("Not Implemented.");
   }
 
+  saveMetadata(metadata) {
+    throw new Error("Not Implemented.");
+  }
+
+  getMetadata() {
+    throw new Error("Not Implemented.");
+  }
+
 }
 
 exports.default = BaseAdapter;
 
 },{}],6:[function(require,module,exports){
 "use strict";
 
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.recordsEqual = recordsEqual;
+exports.createKeyValueStoreIdSchema = createKeyValueStoreIdSchema;
 exports.CollectionTransaction = exports.default = exports.ServerWasFlushedError = exports.SyncResultObject = void 0;
 
 var _base = _interopRequireDefault(require("./adapters/base"));
 
 var _IDB = _interopRequireDefault(require("./adapters/IDB"));
 
 var _utils = require("./utils");
 
@@ -1312,16 +1368,40 @@ function createUUIDSchema() {
     },
 
     validate(id) {
       return typeof id == "string" && _utils.RE_RECORD_ID.test(id);
     }
 
   };
 }
+/**
+ * IDSchema for when using kinto.js as a key-value store.
+ * Using this IDSchema requires you to set a property as the id.
+ * This will be the property used to retrieve this record.
+ *
+ * @example
+ * const exampleCollection = db.collection("example", { idSchema: createKeyValueStoreIdSchema() })
+ * await exampleCollection.create({ title: "How to tie a tie", favoriteColor: "blue", id: "user123" }, { useRecordId: true })
+ * await exampleCollection.getAny("user123")
+ */
+
+
+function createKeyValueStoreIdSchema() {
+  return {
+    generate() {
+      throw new Error("createKeyValueStoreIdSchema() does not generate an id");
+    },
+
+    validate() {
+      return true;
+    }
+
+  };
+}
 
 function markStatus(record, status) {
   return { ...record,
     _status: status
   };
 }
 
 function markDeleted(record) {
@@ -1670,16 +1750,17 @@ class Collection {
    * never synced.
    *
    * @return {Promise}
    */
 
 
   async clear() {
     await this.db.clear();
+    await this.db.saveMetadata(null);
     await this.db.saveLastModified(null);
     return {
       data: [],
       permissions: {}
     };
   }
   /**
    * Encodes a record.
@@ -2551,17 +2632,19 @@ class Collection {
       const seconds = Math.ceil(this.api.backoff / 1000);
       return Promise.reject(new Error(`Server is asking clients to back off; retry in ${seconds}s or use the ignoreBackoff option.`));
     }
 
     const client = this.api.bucket(options.bucket).collection(options.collection);
     const result = new SyncResultObject();
 
     try {
-      // Fetch last changes from the server.
+      // Fetch collection metadata.
+      await this.pullMetadata(client, options); // Fetch last changes from the server.
+
       await this.pullChanges(client, result, options);
       const {
         lastModified
       } = result; // Fetch local changes
 
       const toSync = await this.gatherLocalChanges(); // Publish local changes and pull local resolutions
 
       await this.pushChanges(client, toSync, result, options); // Publish local resolution of push conflicts to server (on CLIENT_WINS)
@@ -2675,16 +2758,33 @@ class Collection {
       localRecord._status === "synced" && // And was synced from server
       localRecord.last_modified !== undefined && // And is older than imported one.
       record.last_modified > localRecord.last_modified;
       return shouldKeep;
     });
     return await this.db.importBulk(newRecords.map(markSynced));
   }
 
+  async pullMetadata(client, options = {}) {
+    const {
+      expectedTimestamp
+    } = options;
+    const query = expectedTimestamp ? {
+      query: {
+        _expected: expectedTimestamp
+      }
+    } : undefined;
+    const metadata = await client.getData(query);
+    return this.db.saveMetadata(metadata);
+  }
+
+  async metadata() {
+    return this.db.getMetadata();
+  }
+
 }
 /**
  * A Collection-oriented wrapper for an adapter's transaction.
  *
  * This defines the high-level functions available on a collection.
  * The collection itself offers functions of the same name. These will
  * perform just one operation in its own transaction.
  */
@@ -3053,16 +3153,17 @@ Object.defineProperty(exports, "__esModu
 });
 exports.sortObjects = sortObjects;
 exports.filterObject = filterObject;
 exports.filterObjects = filterObjects;
 exports.waterfall = waterfall;
 exports.deepEqual = deepEqual;
 exports.omitKeys = omitKeys;
 exports.arrayEqual = arrayEqual;
+exports.transformSubObjectFilters = transformSubObjectFilters;
 exports.RE_RECORD_ID = void 0;
 const RE_RECORD_ID = /^[a-zA-Z0-9][a-zA-Z0-9_-]*$/;
 /**
  * Checks if a value is undefined.
  * @param  {Any}  value
  * @return {Boolean}
  */
 
@@ -3110,16 +3211,21 @@ function sortObjects(order, list) {
 
 
 function filterObject(filters, entry) {
   return Object.keys(filters).every(filter => {
     const value = filters[filter];
 
     if (Array.isArray(value)) {
       return value.some(candidate => candidate === entry[filter]);
+    } else if (typeof value === "object") {
+      return filterObject(value, entry[filter]);
+    } else if (!entry.hasOwnProperty(filter)) {
+      console.error(`The property ${filter} does not exist`);
+      return false;
     }
 
     return entry[filter] === value;
   });
 }
 /**
  * Filters records in a list matching all given filters.
  *
@@ -3217,11 +3323,36 @@ function arrayEqual(a, b) {
     if (a[i] !== b[i]) {
       return false;
     }
   }
 
   return true;
 }
 
+function makeNestedObjectFromArr(arr, val, nestedFiltersObj) {
+  const last = arr.length - 1;
+  return arr.reduce((acc, cv, i) => {
+    if (i === last) {
+      return acc[cv] = val;
+    } else if (acc.hasOwnProperty(cv)) {
+      return acc[cv];
+    } else {
+      return acc[cv] = {};
+    }
+  }, nestedFiltersObj);
+}
+
+function transformSubObjectFilters(filtersObj) {
+  const transformedFilters = {};
+
+  for (const key in filtersObj) {
+    const keysArr = key.split(".");
+    const val = filtersObj[key];
+    makeNestedObjectFromArr(keysArr, val, transformedFilters);
+  }
+
+  return transformedFilters;
+}
+
 },{}]},{},[1])(1)
 });
 
--- a/services/common/kinto-storage-adapter.js
+++ b/services/common/kinto-storage-adapter.js
@@ -105,17 +105,18 @@ const statements = {
       collection_name TEXT,
       record_id TEXT,
       record TEXT
     );`,
 
   "createCollectionMetadata": `
     CREATE TABLE collection_metadata (
       collection_name TEXT PRIMARY KEY,
-      last_modified INTEGER
+      last_modified INTEGER,
+      metadata TEXT
     ) WITHOUT ROWID;`,
 
   "createCollectionDataRecordIdIndex": `
     CREATE UNIQUE INDEX unique_collection_record
       ON collection_data(collection_name, record_id);`,
 
   "clearData": `
     DELETE FROM collection_data
@@ -130,24 +131,35 @@ const statements = {
       VALUES (:collection_name, :record_id, :record);`,
 
   "deleteData": `
     DELETE FROM collection_data
       WHERE collection_name = :collection_name
       AND record_id = :record_id;`,
 
   "saveLastModified": `
-    REPLACE INTO collection_metadata (collection_name, last_modified)
-      VALUES (:collection_name, :last_modified);`,
+    INSERT INTO collection_metadata(collection_name, last_modified)
+      VALUES(:collection_name, :last_modified)
+        ON CONFLICT(collection_name) DO UPDATE SET last_modified = :last_modified`,
 
   "getLastModified": `
     SELECT last_modified
       FROM collection_metadata
         WHERE collection_name = :collection_name;`,
 
+  "saveMetadata": `
+    INSERT INTO collection_metadata(collection_name, metadata)
+      VALUES(:collection_name, :metadata)
+        ON CONFLICT(collection_name) DO UPDATE SET metadata = :metadata`,
+
+  "getMetadata": `
+    SELECT metadata
+      FROM collection_metadata
+        WHERE collection_name = :collection_name;`,
+
   "getRecord": `
     SELECT record
       FROM collection_data
         WHERE collection_name = :collection_name
         AND record_id = :record_id;`,
 
   "listRecords": `
     SELECT record
@@ -169,25 +181,29 @@ const statements = {
   "scanAllRecords": `SELECT * FROM collection_data;`,
 
   "clearCollectionMetadata": `DELETE FROM collection_metadata;`,
 
   "calculateStorage": `
     SELECT collection_name, SUM(LENGTH(record)) as size, COUNT(record) as num_records
       FROM collection_data
         GROUP BY collection_name;`,
+
+  "addMetadataColumn": `
+    ALTER TABLE collection_metadata
+      ADD COLUMN metadata TEXT;`,
 };
 
 const createStatements = [
   "createCollectionData",
   "createCollectionMetadata",
   "createCollectionDataRecordIdIndex",
 ];
 
-const currentSchemaVersion = 1;
+const currentSchemaVersion = 2;
 
 /**
  * Firefox adapter.
  *
  * Uses Sqlite as a backing store.
  *
  * Options:
  *  - sqliteHandle: a handle to the Sqlite database this adapter will
@@ -211,19 +227,21 @@ class FirefoxAdapter extends Kinto.adapt
   static async _init(connection) {
     await connection.executeTransaction(async function doSetup() {
       const schema = await connection.getSchemaVersion();
 
       if (schema == 0) {
         for (let statementName of createStatements) {
           await connection.execute(statements[statementName]);
         }
-
         await connection.setSchemaVersion(currentSchemaVersion);
-      } else if (schema != 1) {
+      } else if (schema == 1) {
+        await connection.execute(statements.addMetadataColumn);
+        await connection.setSchemaVersion(currentSchemaVersion);
+      } else if (schema != 2) {
         throw new Error("Unknown database schema: " + schema);
       }
     });
     return connection;
   }
 
   _executeStatement(statement, params) {
     return this._connection.executeCached(statement, params);
@@ -396,16 +414,36 @@ class FirefoxAdapter extends Kinto.adapt
       .then(result => {
         if (result.length == 0) {
           return 0;
         }
         return result[0].getResultByName("last_modified");
       });
   }
 
+  async saveMetadata(metadata) {
+    const params = {
+      collection_name: this.collection,
+      metadata: JSON.stringify(metadata),
+    };
+    await this._executeStatement(statements.saveMetadata, params);
+    return metadata;
+  }
+
+  async getMetadata() {
+    const params = {
+      collection_name: this.collection,
+    };
+    const result = await this._executeStatement(statements.getMetadata, params);
+    if (result.length == 0) {
+      return null;
+    }
+    return JSON.parse(result[0].getResultByName("metadata"));
+  }
+
   calculateStorage() {
     return this._executeStatement(statements.calculateStorage, {})
       .then(result => {
         return Array.from(result, row => ({
           collectionName: row.getResultByName("collection_name"),
           size: row.getResultByName("size"),
           numRecords: row.getResultByName("num_records"),
         }));
--- a/services/common/tests/unit/test_blocklist_onecrl.js
+++ b/services/common/tests/unit/test_blocklist_onecrl.js
@@ -8,19 +8,16 @@ const BinaryInputStream = CC("@mozilla.o
 
 let server;
 
 // Some simple tests to demonstrate that the logic inside maybeSync works
 // correctly and that simple kinto operations are working as expected. There
 // are more tests for core Kinto.js (and its storage adapter) in the
 // xpcshell tests under /services/common
 add_task(async function test_something() {
-  const configPath = "/v1/";
-  const recordsPath = "/v1/buckets/security-state/collections/onecrl/records";
-
   const dummyServerURL = `http://localhost:${server.identity.primaryPort}/v1`;
   Services.prefs.setCharPref("services.settings.server", dummyServerURL);
 
   const {OneCRLBlocklistClient} = BlocklistClients.initialize({verifySignature: false});
 
   // register a handler
   function handleResponse(request, response) {
     try {
@@ -38,18 +35,19 @@ add_task(async function test_something()
       }
       response.setHeader("Date", (new Date()).toUTCString());
 
       response.write(sample.responseBody);
     } catch (e) {
       info(e);
     }
   }
-  server.registerPathHandler(configPath, handleResponse);
-  server.registerPathHandler(recordsPath, handleResponse);
+  server.registerPathHandler("/v1/", handleResponse);
+  server.registerPathHandler("/v1/buckets/security-state/collections/onecrl", handleResponse);
+  server.registerPathHandler("/v1/buckets/security-state/collections/onecrl/records", handleResponse);
 
   // Test an empty db populates from JSON dump.
   await OneCRLBlocklistClient.maybeSync(42);
 
   // Open the collection, verify it's been populated:
   const list = await OneCRLBlocklistClient.get();
   // We know there will be initial values from the JSON dump.
   // (at least as many as in the dump shipped when this test was written).
@@ -143,16 +141,32 @@ function getSampleResponse(req, port) {
         },
         "url": `http://localhost:${port}/v1/`,
         "documentation": "https://kinto.readthedocs.org/",
         "version": "1.5.1",
         "commit": "cbc6f58",
         "hello": "kinto",
       }),
     },
+    "GET:/v1/buckets/security-state/collections/onecrl": {
+      "sampleHeaders": [
+        "Access-Control-Allow-Origin: *",
+        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+        "Content-Type: application/json; charset=UTF-8",
+        "Server: waitress",
+        "Etag: \"1234\"",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": JSON.stringify({
+        "data": {
+          "id": "onecrl",
+          "last_modified": 1234,
+        },
+      }),
+    },
     "GET:/v1/buckets/security-state/collections/onecrl/records?_expected=2000&_sort=-last_modified&_since=1000": {
       "sampleHeaders": [
         "Access-Control-Allow-Origin: *",
         "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
         "Content-Type: application/json; charset=UTF-8",
         "Server: waitress",
         "Etag: \"3000\"",
       ],
@@ -208,10 +222,11 @@ function getSampleResponse(req, port) {
         "subject": "MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5",
         "pubKeyHash": "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8=",
         "id": "dabafde9-df4a-ddba-2548-748da04cc02g",
         "last_modified": 5000,
       }]}),
     },
   };
   return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+         responses[`${req.method}:${req.path}`] ||
          responses[req.method];
 }
--- a/services/common/tests/unit/test_blocklist_pinning.js
+++ b/services/common/tests/unit/test_blocklist_pinning.js
@@ -26,19 +26,16 @@ let server;
 
 // Some simple tests to demonstrate that the core preload sync operations work
 // correctly and that simple kinto operations are working as expected.
 add_task(async function test_something() {
   const {
     PinningBlocklistClient: PinningPreloadClient,
   } = BlocklistClients.initialize({ verifySignature: false });
 
-  const configPath = "/v1/";
-  const recordsPath = "/v1/buckets/pinning/collections/pins/records";
-
   Services.prefs.setCharPref("services.settings.server",
                              `http://localhost:${server.identity.primaryPort}/v1`);
 
   // register a handler
   function handleResponse(request, response) {
     try {
       const sample = getSampleResponse(request, server.identity.primaryPort);
       if (!sample) {
@@ -54,18 +51,19 @@ add_task(async function test_something()
       }
       response.setHeader("Date", (new Date()).toUTCString());
 
       response.write(sample.responseBody);
     } catch (e) {
       info(e);
     }
   }
-  server.registerPathHandler(configPath, handleResponse);
-  server.registerPathHandler(recordsPath, handleResponse);
+  server.registerPathHandler("/v1/", handleResponse);
+  server.registerPathHandler("/v1/buckets/pinning/collections/pins", handleResponse);
+  server.registerPathHandler("/v1/buckets/pinning/collections/pins/records", handleResponse);
 
   let sss = Cc["@mozilla.org/ssservice;1"]
               .getService(Ci.nsISiteSecurityService);
 
   // ensure our pins are all missing before we start
   ok(!sss.isSecureURI(sss.HEADER_HPKP,
                       Services.io.newURI("https://one.example.com"), 0));
   ok(!sss.isSecureURI(sss.HEADER_HPKP,
@@ -179,16 +177,32 @@ function getSampleResponse(req, port) {
         },
         "url": `http://localhost:${port}/v1/`,
         "documentation": "https://kinto.readthedocs.org/",
         "version": "1.5.1",
         "commit": "cbc6f58",
         "hello": "kinto",
       }),
     },
+    "GET:/v1/buckets/pinning/collections/pins": {
+      "sampleHeaders": [
+        "Access-Control-Allow-Origin: *",
+        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+        "Content-Type: application/json; charset=UTF-8",
+        "Server: waitress",
+        "Etag: \"1234\"",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": JSON.stringify({
+        "data": {
+          "id": "pins",
+          "last_modified": 1234,
+        },
+      }),
+    },
     "GET:/v1/buckets/pinning/collections/pins/records?_expected=2000&_sort=-last_modified": {
       "sampleHeaders": [
         "Access-Control-Allow-Origin: *",
         "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
         "Content-Type: application/json; charset=UTF-8",
         "Server: waitress",
         "Etag: \"3000\"",
       ],
@@ -293,10 +307,11 @@ function getSampleResponse(req, port) {
         "expires": new Date().getTime() + 1000000,
         "versions": [Services.appinfo.version, "some version that won't match"],
         "id": "dabafde9-df4a-ddba-2548-748da04cc032",
         "last_modified": 5000,
       }]}),
     },
   };
   return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+         responses[`${req.method}:${req.path}`] ||
          responses[req.method];
 }
--- a/services/common/tests/unit/test_kinto.js
+++ b/services/common/tests/unit/test_kinto.js
@@ -280,16 +280,17 @@ add_task(async function test_loadDump_sh
 
 add_task(clear_collection);
 
 // Now do some sanity checks against a server - we're not looking to test
 // core kinto.js functionality here (there is excellent test coverage in
 // kinto.js), more making sure things are basically working as expected.
 add_task(async function test_kinto_sync() {
   const configPath = "/v1/";
+  const metadataPath = "/v1/buckets/default/collections/test_collection";
   const recordsPath = "/v1/buckets/default/collections/test_collection/records";
   // register a handler
   function handleResponse(request, response) {
     try {
       const sampled = getSampleResponse(request, server.identity.primaryPort);
       if (!sampled) {
         do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
       }
@@ -304,16 +305,17 @@ add_task(async function test_kinto_sync(
       response.setHeader("Date", (new Date()).toUTCString());
 
       response.write(sampled.responseBody);
     } catch (e) {
       dump(`${e}\n`);
     }
   }
   server.registerPathHandler(configPath, handleResponse);
+  server.registerPathHandler(metadataPath, handleResponse);
   server.registerPathHandler(recordsPath, handleResponse);
 
   // create an empty collection, sync to populate
   let sqliteHandle;
   try {
     let result;
     sqliteHandle = await do_get_kinto_sqliteHandle();
     const collection = do_get_kinto_collection(sqliteHandle);
@@ -387,16 +389,32 @@ function getSampleResponse(req, port) {
         },
         "url": `http://localhost:${port}/v1/`,
         "documentation": "https://kinto.readthedocs.org/",
         "version": "1.5.1",
         "commit": "cbc6f58",
         "hello": "kinto",
       }),
     },
+    "GET:/v1/buckets/default/collections/test_collection": {
+      "sampleHeaders": [
+        "Access-Control-Allow-Origin: *",
+        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+        "Content-Type: application/json; charset=UTF-8",
+        "Server: waitress",
+        "Etag: \"1234\"",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": JSON.stringify({
+        "data": {
+          "id": "test_collection",
+          "last_modified": 1234,
+        },
+      }),
+    },
     "GET:/v1/buckets/default/collections/test_collection/records?_sort=-last_modified": {
       "sampleHeaders": [
         "Access-Control-Allow-Origin: *",
         "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
         "Content-Type: application/json; charset=UTF-8",
         "Server: waitress",
         "Etag: \"1445606341071\"",
       ],
@@ -448,10 +466,11 @@ function getSampleResponse(req, port) {
           "done": true,
           "id": "some-manually-chosen-id",
           "title": "New record with custom ID",
         }],
       }),
     },
   };
   return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+         responses[`${req.method}:${req.path}`] ||
          responses[req.method];
 }
--- a/services/common/tests/unit/test_storage_adapter.js
+++ b/services/common/tests/unit/test_storage_adapter.js
@@ -1,11 +1,12 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
+const {Sqlite} = ChromeUtils.import("resource://gre/modules/Sqlite.jsm");
 const {FirefoxAdapter} = ChromeUtils.import("resource://services-common/kinto-storage-adapter.js");
 
 // set up what we need to make storage adapters
 const kintoFilename = "kinto.sqlite";
 
 function do_get_kinto_connection() {
   return FirefoxAdapter.openConnection({path: kintoFilename});
 }
@@ -216,16 +217,29 @@ function test_collection_operations() {
     await adapter.loadDump([
       {id: 1, foo: "bar", last_modified: 1457896541},
       {id: 2, foo: "baz", last_modified: 1458796542},
     ]);
     let lastModified = await adapter.getLastModified();
     Assert.equal(lastModified, 1458796543);
     await sqliteHandle.close();
   });
+
+  add_task(async function test_save_metadata_preserves_lastModified() {
+    let sqliteHandle = await do_get_kinto_connection();
+
+    let adapter = do_get_kinto_adapter(sqliteHandle);
+    await adapter.saveLastModified(42);
+
+    await adapter.saveMetadata({id: "col"});
+
+    let lastModified = await adapter.getLastModified();
+    Assert.equal(lastModified, 42);
+    await sqliteHandle.close();
+  });
 }
 
 // test kinto db setup and operations in various scenarios
 // test from scratch - no current existing database
 add_test(function test_db_creation() {
   add_test(function test_create_from_scratch() {
     // ensure the file does not exist in the profile
     let kintoDB = do_get_kinto_db();
@@ -252,8 +266,40 @@ add_test(function test_creation_from_emp
     run_next_test();
   });
 
   test_collection_operations();
 
   cleanup_kinto();
   run_next_test();
 });
+
+// test schema version upgrade at v2
+add_test(function test_migration_from_v1_to_v2() {
+  add_test(function test_migrate_from_v1_to_v2() {
+    // place an empty kinto db file in the profile
+    let profile = do_get_profile();
+
+    let v1DB = do_get_file("test_storage_adapter/v1.sqlite");
+    v1DB.copyTo(profile, kintoFilename);
+
+    run_next_test();
+  });
+
+  add_test(async function schema_is_update_from_1_to_2() {
+    // The `v1.sqlite` has schema version 1.
+    let sqliteHandle = await Sqlite.openConnection({ path: kintoFilename });
+    Assert.equal(await sqliteHandle.getSchemaVersion(), 1);
+    await sqliteHandle.close();
+
+    // The `.openConnection()` migrates it to version 2.
+    sqliteHandle = await FirefoxAdapter.openConnection({ path: kintoFilename });
+    Assert.equal(await sqliteHandle.getSchemaVersion(), 2);
+    await sqliteHandle.close();
+
+    run_next_test();
+  });
+
+  test_collection_operations();
+
+  cleanup_kinto();
+  run_next_test();
+});
new file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..8482b8b31d045e1d98a3906254ac02708862956d
GIT binary patch
literal 131072
zc%1FhUrSSA90%}cDhP&G5#Bx&g`!2^O$33ahZ>p7HBpHe%N=3iHnbxIf!l&!N*BF_
zUP^bplQvx`LlZ0+(dTnG=kPq|_j}Iw{CT;yQp?h~)7-CDvN#{wA(smaF@|t!BZM#(
zPK*wWbN&4{x@+9K|8TO`e0Vdo-X4DKo&W#<000000000000000000000000000000
z00000{!yJ9qvKPLW^<iFtx-+iA2e!v2Wfe`xx1TgXSHUdyq|72_p7HSs+FuVwp2_O
zOG#W`&9AK|F~7Q;Y{v6?C*qO9Y8+T)`qa2lsi$-CxS?FD&UEgMj8Dza=RW4L%A0P{
zNo76FD!uMArbhd*N{f$Il6XczobHDixN|Heo2B@on18-l+=|bVt+{^PZslE8t~aZ-
zomyJ$K4&R;k`#}w&BTp->1koT6pMw8{Bq}dZhUIC`yO8p<{ik|8;+cp^TLcjbNzZz
zXUrZ=9cP^Eed2Zq*{dP{W&i*H0000000000000000000000000000000Pw%L_IcO|
z004kfCVGc<2(1sTrQ>bLfdBvi00000000000000000000000000000000000xB{*3
zVc`$^Z(m&WB|Ny~`kyY@_xDWn4(;wkd>e8Q000000000000000000000000000000
R000000002EDt9g}`2mowv?Krk
--- a/services/common/tests/unit/test_uptake_telemetry.js
+++ b/services/common/tests/unit/test_uptake_telemetry.js
@@ -44,17 +44,19 @@ add_task(async function test_unknown_sta
   const expectedIncrements = {};
   checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
 });
 
 add_task(async function test_age_is_converted_to_string_and_reported() {
   const status = UptakeTelemetry.STATUS.SUCCESS;
   const age = 42;
 
-  await UptakeTelemetry.report(COMPONENT, status, { source: "s", age });
+  await withFakeChannel("nightly", async () => { // no sampling.
+    await UptakeTelemetry.report(COMPONENT, status, { source: "s", age });
+  });
 
   TelemetryTestUtils.assertEvents(
     [["uptake.remotecontent.result", "uptake", COMPONENT, status, { source: "s", age: `${age}` }]]);
 });
 
 add_task(async function test_each_status_can_be_caught_in_snapshot() {
   const source = "some-source";
   const startHistogram = getUptakeTelemetrySnapshot(source);
--- a/services/settings/RemoteSettingsClient.jsm
+++ b/services/settings/RemoteSettingsClient.jsm
@@ -64,53 +64,16 @@ class ClientEnvironment extends ClientEn
 
   static get toolkitVersion() {
     Services.appinfo.QueryInterface(Ci.nsIPlatformInfo);
     return Services.appinfo.platformVersion;
   }
 }
 
 /**
- * Retrieve the Autograph signature information from the collection metadata.
- *
- * @param {String} bucket Bucket name.
- * @param {String} collection Collection name.
- * @param {int} expectedTimestamp Timestamp to be used for cache busting.
- * @returns {Promise<{String, String}>}
- */
-async function fetchCollectionSignature(bucket, collection, expectedTimestamp) {
-  const client = new KintoHttpClient(gServerURL);
-  const { signature: signaturePayload } = await client.bucket(bucket)
-    .collection(collection)
-    .getData({ query: { _expected: expectedTimestamp } });
-  if (!signaturePayload) {
-    throw new RemoteSettingsClient.MissingSignatureError(`${bucket}/${collection}`);
-  }
-  const { x5u, signature } = signaturePayload;
-  const certChainResponse = await fetch(x5u);
-  const certChain = await certChainResponse.text();
-
-  return { signature, certChain };
-}
-
-/**
- * Retrieve the current list of remote records.
- *
- * @param {String} bucket Bucket name.
- * @param {String} collection Collection name.
- * @param {int} expectedTimestamp Timestamp to be used for cache busting.
- */
-async function fetchRemoteRecords(bucket, collection, expectedTimestamp) {
-  const client = new KintoHttpClient(gServerURL);
-  return client.bucket(bucket)
-    .collection(collection)
-    .listRecords({ sort: "id", filters: { _expected: expectedTimestamp } });
-}
-
-/**
  * Minimalist event emitter.
  *
  * Note: we don't use `toolkit/modules/EventEmitter` because **we want** to throw
  * an error when a listener fails to execute.
  */
 class EventEmitter {
   constructor(events) {
     this._listeners = new Map();
@@ -183,16 +146,17 @@ class RemoteSettingsClient extends Event
   constructor(collectionName, { bucketNamePref, signerName, filterFunc, localFields = [], lastCheckTimePref }) {
     super(["sync"]); // emitted events
 
     this.collectionName = collectionName;
     this.signerName = signerName;
     this.filterFunc = filterFunc;
     this.localFields = localFields;
     this._lastCheckTimePref = lastCheckTimePref;
+    this._verifier = null;
 
     // This attribute allows signature verification to be disabled, when running tests
     // or when pulling data from a dev server.
     this.verifySignature = true;
 
     // The bucket preference value can be changed (eg. `main` to `main-preview`) in order
     // to preview the changes to be approved in a real client.
     this.bucketNamePref = bucketNamePref;
@@ -224,50 +188,66 @@ class RemoteSettingsClient extends Event
       bucket: this.bucketName,
     };
     return this._kinto.collection(this.collectionName, options);
   }
 
   /**
    * Lists settings.
    *
-   * @param  {Object} options             The options object.
-   * @param  {Object} options.filters     Filter the results (default: `{}`).
-   * @param  {Object} options.order       The order to apply (eg. `"-last_modified"`).
-   * @param  {Object} options.syncIfEmpty Synchronize from server if local data is empty (default: `true`).
+   * @param  {Object} options                  The options object.
+   * @param  {Object} options.filters          Filter the results (default: `{}`).
+   * @param  {String} options.order            The order to apply (eg. `"-last_modified"`).
+   * @param  {boolean} options.syncIfEmpty     Synchronize from server if local data is empty (default: `true`).
+   * @param  {boolean} options.verifySignature Verify the signature of the local data (default: `false`).
    * @return {Promise}
    */
   async get(options = {}) {
     const {
       filters = {},
       order = "", // not sorted by default.
       syncIfEmpty = true,
     } = options;
+    let { verifySignature = false } = options;
 
     if (syncIfEmpty && !(await Utils.hasLocalData(this))) {
       try {
         // .get() was called before we had the chance to synchronize the local database.
         // We'll try to avoid returning an empty list.
         if (await Utils.hasLocalDump(this.bucketName, this.collectionName)) {
           // Since there is a JSON dump, load it as default data.
           await RemoteSettingsWorker.importJSONDump(this.bucketName, this.collectionName);
         } else {
           // There is no JSON dump, force a synchronization from the server.
           await this.sync({ loadDump: false });
         }
+        // Either from trusted dump, or already done during sync.
+        verifySignature = false;
       } catch (e) {
         // Report but return an empty list since there will be no data anyway.
         Cu.reportError(e);
         return [];
       }
     }
 
     // Read from the local DB.
-    const kintoCol = await this.openCollection();
-    const { data } = await kintoCol.list({ filters, order });
+    const kintoCollection = await this.openCollection();
+    const { data } = await kintoCollection.list({ filters, order });
+
+    // Verify signature of local data.
+    if (verifySignature) {
+      const localRecords = data.map(r => kintoCollection.cleanLocalFields(r));
+      const timestamp = await kintoCollection.db.getLastModified();
+      const metadata = await kintoCollection.metadata();
+      await this._validateCollectionSignature([],
+                                              timestamp,
+                                              metadata,
+                                              { localRecords });
+    }
+
     // Filter the records based on `this.filterFunc` results.
     return this._filterEntries(data);
   }
 
   /**
    * Synchronize the local database with the remote server.
    *
    * @param {Object} options See #maybeSync() options.
@@ -331,20 +311,25 @@ class RemoteSettingsClient extends Event
         reportStatus = UptakeTelemetry.STATUS.UP_TO_DATE;
         return;
       }
 
       // If signature verification is enabled, then add a synchronization hook
       // for incoming changes that validates the signature.
       if (this.verifySignature) {
         kintoCollection.hooks["incoming-changes"] = [async (payload, collection) => {
-          await this._validateCollectionSignature(payload.changes,
-                                                  payload.lastModified,
-                                                  collection,
-                                                  { expectedTimestamp });
+          const { changes: remoteRecords, lastModified: timestamp } = payload;
+          const { data } = await kintoCollection.list({ order: "" }); // no need to sort.
+          const metadata = await collection.metadata();
+          // Local fields are stripped to compute the collection signature (server does not have them).
+          const localRecords = data.map(r => kintoCollection.cleanLocalFields(r));
+          await this._validateCollectionSignature(remoteRecords,
+                                                  timestamp,
+                                                  metadata,
+                                                  { localRecords });
           // In case the signature is valid, apply the changes locally.
           return payload;
         }];
       }
 
       let syncResult;
       try {
         // Fetch changes from server, and make sure we overwrite local data.
@@ -428,85 +413,93 @@ class RemoteSettingsClient extends Event
   }
 
   /**
    * Fetch the signature info from the collection metadata and verifies that the
    * local set of records has the same.
    *
    * @param {Array<Object>} remoteRecords   The list of changes to apply to the local database.
    * @param {int} timestamp                 The timestamp associated with the list of remote records.
-   * @param {Collection} kintoCollection    Kinto.js Collection instance.
+   * @param {Object} metadata               The collection metadata, that contains the signature payload.
    * @param {Object} options
-   * @param {int} options.expectedTimestamp Cache busting of collection metadata
-   * @param {Boolean} options.ignoreLocal   When the signature verification is retried, since we refetch
-   *                                        the whole collection, we don't take into account the local
-   *                                        data (default: `false`)
+   * @param {Array<Object>} options.localRecords List of additional local records to take into account (default: `[]`).
    * @returns {Promise}
    */
-  async _validateCollectionSignature(remoteRecords, timestamp, kintoCollection, options = {}) {
-    const { expectedTimestamp, ignoreLocal = false } = options;
-    // this is a content-signature field from an autograph response.
-    const { name: collection, bucket } = kintoCollection;
-    const { signature, certChain } = await fetchCollectionSignature(bucket, collection, expectedTimestamp);
+  async _validateCollectionSignature(remoteRecords, timestamp, metadata, options = {}) {
+    const { localRecords = [] } = options;
 
-    let localRecords = [];
-    if (!ignoreLocal) {
-      const { data } = await kintoCollection.list({ order: "" }); // no need to sort.
-      // Local fields are stripped to compute the collection signature (server does not have them).
-      localRecords = data.map(r => kintoCollection.cleanLocalFields(r));
+    if (!metadata || !metadata.signature) {
+      throw new RemoteSettingsClient.MissingSignatureError(this.identifier);
     }
 
+    if (!this._verifier) {
+        this._verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
+          .createInstance(Ci.nsIContentSignatureVerifier);
+    }
+
+    // This is a content-signature field from an autograph response.
+    const { signature: { x5u, signature } } = metadata;
+    const certChain = await (await fetch(x5u)).text();
+    // Merge remote records with local ones and serialize as canonical JSON.
     const serialized = await RemoteSettingsWorker.canonicalStringify(localRecords,
                                                                      remoteRecords,
                                                                      timestamp);
-    const verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
-      .createInstance(Ci.nsIContentSignatureVerifier);
-    if (!await verifier.asyncVerifyContentSignature(serialized,
-                                                    "p384ecdsa=" + signature,
-                                                    certChain,
-                                                    this.signerName)) {
-      throw new RemoteSettingsClient.InvalidSignatureError(`${bucket}/${collection}`);
+    if (!await this._verifier.asyncVerifyContentSignature(serialized,
+                                                          "p384ecdsa=" + signature,
+                                                          certChain,
+                                                          this.signerName)) {
+      throw new RemoteSettingsClient.InvalidSignatureError(this.identifier);
     }
   }
 
   /**
    * Fetch the whole list of records from the server, verify the signature again
    * and then compute a synchronization result as if the diff-based sync happened.
    * And eventually, wipe out the local data.
    *
    * @param {Collection} kintoCollection    Kinto.js Collection instance.
    * @param {int}        expectedTimestamp  Cache busting of collection metadata
    *
    * @returns {Promise<Object>} the computed sync result.
    */
   async _retrySyncFromScratch(kintoCollection, expectedTimestamp) {
-    const payload = await fetchRemoteRecords(kintoCollection.bucket, kintoCollection.name, expectedTimestamp);
-    await this._validateCollectionSignature(payload.data,
-      payload.last_modified,
-      kintoCollection,
-      { expectedTimestamp, ignoreLocal: true });
+    // Fetch collection metadata.
+    const api = new KintoHttpClient(gServerURL);
+    const client = await api.bucket(this.bucketName).collection(this.collectionName);
+    const metadata = await client.getData({ query: { _expected: expectedTimestamp }});
+    // Fetch whole list of records.
+    const {
+      data: remoteRecords,
+      last_modified: timestamp,
+    } = await client.listRecords({ sort: "id", filters: { _expected: expectedTimestamp } });
+    // Verify signature of remote content, before importing it locally.
+    await this._validateCollectionSignature(remoteRecords,
+                                            timestamp,
+                                            metadata);
 
-    // The signature is good (we haven't thrown).
-    // Now we will Inspect what we had locally.
+    // The signature of this remote content is good (we haven't thrown).
+    // Now we will store it locally. In order to replicate what `.sync()` returns
+    // we will inspect what we had locally.
     const { data: oldData } = await kintoCollection.list({ order: "" }); // no need to sort.
 
     // We build a sync result as if a diff-based sync was performed.
     const syncResult = { created: [], updated: [], deleted: [] };
 
     // If the remote last_modified is newer than the local last_modified,
     // replace the local data
     const localLastModified = await kintoCollection.db.getLastModified();
-    if (payload.last_modified >= localLastModified) {
-      const { data: newData } = payload;
+    if (timestamp >= localLastModified) {
       await kintoCollection.clear();
-      await kintoCollection.loadDump(newData);
+      await kintoCollection.loadDump(remoteRecords);
+      await kintoCollection.db.saveLastModified(timestamp);
+      await kintoCollection.db.saveMetadata(metadata);
 
       // Compare local and remote to populate the sync result
       const oldById = new Map(oldData.map(e => [e.id, e]));
-      for (const r of newData) {
+      for (const r of remoteRecords) {
         const old = oldById.get(r.id);
         if (old) {
           if (old.last_modified != r.last_modified) {
             syncResult.updated.push({ old, new: r });
           }
           oldById.delete(r.id);
         } else {
           syncResult.created.push(r);
--- a/services/settings/RemoteSettingsWorker.js
+++ b/services/settings/RemoteSettingsWorker.js
@@ -10,17 +10,17 @@
  * A worker dedicated to Remote Settings.
  */
 
 importScripts("resource://gre/modules/workers/require.js",
               "resource://gre/modules/CanonicalJSON.jsm",
               "resource://gre/modules/third_party/jsesc/jsesc.js");
 
 const IDB_NAME = "remote-settings";
-const IDB_VERSION = 1;
+const IDB_VERSION = 2;
 const IDB_RECORDS_STORE = "records";
 const IDB_TIMESTAMPS_STORE = "timestamps";
 
 const Agent = {
   /**
    * Return the canonical JSON serialization of the changes
    * applied to the local records.
    * It has to match what is done on the server (See Kinto/kinto-signer).
--- a/services/settings/test/unit/test_remote_settings.js
+++ b/services/settings/test/unit/test_remote_settings.js
@@ -73,20 +73,23 @@ function run_test() {
       response.write(body);
       response.finish();
     } catch (e) {
       info(e);
     }
   }
   const configPath = "/v1/";
   const changesPath = "/v1/buckets/monitor/collections/changes/records";
+  const metadataPath = "/v1/buckets/main/collections/password-fields";
   const recordsPath  = "/v1/buckets/main/collections/password-fields/records";
   server.registerPathHandler(configPath, handleResponse);
   server.registerPathHandler(changesPath, handleResponse);
+  server.registerPathHandler(metadataPath, handleResponse);
   server.registerPathHandler(recordsPath, handleResponse);
+  server.registerPathHandler("/fake-x5u", handleResponse);
 
   run_next_test();
 
   registerCleanupFunction(function() {
     server.stop(() => { });
   });
 }
 
@@ -178,16 +181,52 @@ add_task(async function test_get_ignores
   let data = await RemoteSettings("some-unknown-key").get();
   equal(data.length, 0);
   // The sync endpoints are not mocked, this fails internally.
   data = await RemoteSettings("no-mocked-responses").get();
   equal(data.length, 0);
 });
 add_task(clear_state);
 
+add_task(async function test_get_can_verify_signature() {
+  // No signature in metadata.
+  let error;
+  try {
+    await client.get({ verifySignature: true, syncIfEmpty: false });
+  } catch (e) {
+    error = e;
+  }
+  equal(error.message, "Missing signature (main/password-fields)");
+
+  // Populate the local DB (record and metadata)
+  await client.maybeSync(2000);
+
+  // It validates signature that was stored in local DB.
+  let calledSignature;
+  client._verifier = {
+    async asyncVerifyContentSignature(serialized, signature) {
+      calledSignature = signature;
+      return JSON.parse(serialized).data.length == 1;
+    },
+  };
+  await client.get({ verifySignature: true });
+  ok(calledSignature.endsWith("abcdef"));
+
+  // It throws when signature does not verify.
+  const col = await client.openCollection();
+  await col.delete("9d500963-d80e-3a91-6e74-66f3811b99cc");
+  try {
+    await client.get({ verifySignature: true });
+  } catch (e) {
+    error = e;
+  }
+  equal(error.message, "Invalid content signature (main/password-fields)");
+});
+add_task(clear_state);
+
 add_task(async function test_sync_event_provides_information_about_records() {
   let eventData;
   client.on("sync", ({ data }) => eventData = data);
 
   await client.maybeSync(2000);
   equal(eventData.current.length, 1);
 
   await client.maybeSync(3001);
@@ -515,16 +554,48 @@ function getSampleResponse(req, port) {
         }, {
           "id": "58697bd1-315f-4185-9bee-3371befc2585",
           "bucket": "main-preview",
           "collection": "crash-rate",
           "last_modified": 1000,
         }],
       },
     },
+    "GET:/v1/buckets/main/collections/password-fields": {
+      "sampleHeaders": [
+        "Access-Control-Allow-Origin: *",
+        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+        "Content-Type: application/json; charset=UTF-8",
+        "Server: waitress",
+        "Etag: \"1234\"",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": JSON.stringify({
+        "data": {
+          "id": "password-fields",
+          "last_modified": 1234,
+          "signature": {
+            "signature": "abcdef",
+            "x5u": `http://localhost:${port}/fake-x5u`,
+          },
+        },
+      }),
+    },
+    "GET:/fake-x5u": {
+      "sampleHeaders": [
+        "Content-Type: /octet-stream",
+      ],
+      "status": { status: 200, statusText: "OK" },
+      "responseBody": `-----BEGIN CERTIFICATE-----
+MIIGYTCCBEmgAwIBAgIBATANBgkqhkiG9w0BAQwFADB9MQswCQYDVQQGEwJVU
+ZARKjbu1TuYQHf0fs+GwID8zeLc2zJL7UzcHFwwQ6Nda9OJN4uPAuC/BKaIpxCLL
+26b24/tRam4SJjqpiq20lynhUrmTtt6hbG3E1Hpy3bmkt2DYnuMFwEx2gfXNcnbT
+wNuvFqc=
+-----END CERTIFICATE-----`,
+    },
     "GET:/v1/buckets/main/collections/password-fields/records?_expected=2000&_sort=-last_modified": {
       "sampleHeaders": [
         "Access-Control-Allow-Origin: *",
         "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
         "Content-Type: application/json; charset=UTF-8",
         "Server: waitress",
         "Etag: \"3000\"",
       ],
--- a/taskcluster/ci/static-analysis-autotest/kind.yml
+++ b/taskcluster/ci/static-analysis-autotest/kind.yml
@@ -61,17 +61,16 @@ jobs:
             - linux64-rust
             - linux64-sccache
             - linux64-cbindgen
             - linux64-nasm
             - linux64-node
 
     win64-st-autotest/debug:
         description: "Win64 Debug Static Analysis Autotest"
-        run-on-projects: []  # Don't run by default until bug 1503453 has been fixed.
         index:
             job-name: win64-st-autotest-debug
         treeherder:
             platform: windows2012-64/debug
             tier: 2
         worker-type: b-win2012
         worker:
             env:
--- a/taskcluster/ci/test/raptor.yml
+++ b/taskcluster/ci/test/raptor.yml
@@ -605,134 +605,134 @@ raptor-tp6m-1-fennec:
     description: "Raptor tp6m-1 on Fennec"
     try-name: raptor-tp6m-1-fennec
     treeherder-symbol: Rap(tp6m-1)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-1-fennec
+            - --test=raptor-tp6m-1
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-2-fennec:
     description: "Raptor tp6m-2 on Fennec"
     try-name: raptor-tp6m-2-fennec
     treeherder-symbol: Rap(tp6m-2)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-2-fennec
+            - --test=raptor-tp6m-2
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-3-fennec:
     description: "Raptor tp6m-3 on Fennec"
     try-name: raptor-tp6m-3-fennec
     treeherder-symbol: Rap(tp6m-3)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-3-fennec
+            - --test=raptor-tp6m-3
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-4-fennec:
     description: "Raptor tp6m-4 on Fennec"
     try-name: raptor-tp6m-4-fennec
     treeherder-symbol: Rap(tp6m-4)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-4-fennec
+            - --test=raptor-tp6m-4
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-5-fennec:
     description: "Raptor tp6m-5 on Fennec"
     try-name: raptor-tp6m-5-fennec
     treeherder-symbol: Rap(tp6m-5)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-5-fennec
+            - --test=raptor-tp6m-5
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-6-fennec:
     description: "Raptor tp6m-6 on Fennec"
     try-name: raptor-tp6m-6-fennec
     treeherder-symbol: Rap(tp6m-6)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-6-fennec
+            - --test=raptor-tp6m-6
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-7-fennec:
     description: "Raptor tp6m-7 on Fennec"
     try-name: raptor-tp6m-7-fennec
     treeherder-symbol: Rap(tp6m-7)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-7-fennec
+            - --test=raptor-tp6m-7
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-8-fennec:
     description: "Raptor tp6m-8 on Fennec"
     try-name: raptor-tp6m-8-fennec
     treeherder-symbol: Rap(tp6m-8)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-8-fennec
+            - --test=raptor-tp6m-8
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-9-fennec:
     description: "Raptor tp6m-9 on Fennec"
     try-name: raptor-tp6m-9-fennec
     treeherder-symbol: Rap(tp6m-9)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-9-fennec
+            - --test=raptor-tp6m-9
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-10-fennec:
     description: "Raptor tp6m-10 on Fennec"
     try-name: raptor-tp6m-10-fennec
     treeherder-symbol: Rap(tp6m-10)
     target: target.apk
     run-on-projects: ['try']
     tier: 2
     mozharness:
         extra-options:
-            - --test=raptor-tp6m-10-fennec
+            - --test=raptor-tp6m-10
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
 raptor-tp6m-1-fenix-cold:
     description: "Raptor tp6m-1 cold page-load on Fenix"
     try-name: raptor-tp6m-1-fenix-cold
     treeherder-symbol: Rap-fenix(tp6m-c-1)
     run-on-projects: ['try']
--- a/taskcluster/scripts/misc/build-sccache.sh
+++ b/taskcluster/scripts/misc/build-sccache.sh
@@ -61,28 +61,20 @@ EOF
     make -j `nproc --all`
     # `make install` installs a *ton* of docs that we don't care about.
     # Just the software, please.
     make install_sw
     popd
 
     # We don't need to set OPENSSL_STATIC here, because we only have static
     # libraries in the directory we are passing.
-    if [ -n "${SCCACHE_GCS_KEY_PATH}" ]; then
-        env "OPENSSL_DIR=$OPENSSL_BUILD_DIRECTORY" cargo build --features "all dist-server gcs" --verbose --release
-    else
-        env "OPENSSL_DIR=$OPENSSL_BUILD_DIRECTORY" cargo build --features "all dist-server" --verbose --release
-    fi
+    env "OPENSSL_DIR=$OPENSSL_BUILD_DIRECTORY" cargo build --features "all dist-server" --verbose --release
     ;;
 MINGW*)
-    if [ -n "${SCCACHE_GCS_KEY_PATH}" ]; then
-        cargo build --verbose --release --features=gcs
-    else
-        cargo build --verbose --release
-    fi
+    cargo build --verbose --release --features="dist-client s3 gcs"
     ;;
 esac
 
 mkdir sccache2
 cp target/release/sccache* sccache2/
 tar -acf sccache2.tar.$COMPRESS_EXT sccache2
 mkdir -p $UPLOAD_DIR
 cp sccache2.tar.$COMPRESS_EXT $UPLOAD_DIR
--- a/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
+++ b/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
@@ -177,31 +177,31 @@ class CodeCoverageMixin(SingleTestMixin)
         self.find_modified_tests()
 
         # TODO: Add tests that haven't been run for a while (a week? N pushes?)
 
         # Add baseline code coverage collection tests
         baseline_tests_by_ext = {
             '.html': {
                 'test': 'testing/mochitest/baselinecoverage/plain/test_baselinecoverage.html',
-                'suite': 'plain'
+                'suite': 'mochitest-plain'
             },
             '.js': {
                 'test': 'testing/mochitest/baselinecoverage/browser_chrome/browser_baselinecoverage.js',  # NOQA: E501
-                'suite': 'browser-chrome'
+                'suite': 'mochitest-browser-chrome'
             },
             '.xul': {
                 'test': 'testing/mochitest/baselinecoverage/chrome/test_baselinecoverage.xul',
-                'suite': 'chrome'
+                'suite': 'mochitest-chrome'
             }
         }
 
         baseline_tests_by_suite = {
-            'browser-chrome': 'testing/mochitest/baselinecoverage/browser_chrome/'
-                              'browser_baselinecoverage_browser-chrome.js'
+            'mochitest-browser-chrome': 'testing/mochitest/baselinecoverage/browser_chrome/'
+                                        'browser_baselinecoverage_browser-chrome.js'
         }
 
         wpt_baseline_test = 'tests/web-platform/mozilla/tests/baselinecoverage/wpt_baselinecoverage.html'  # NOQA: E501
         if self.config.get('per_test_category') == "web-platform":
             if 'testharness' not in self.suites:
                 self.suites['testharness'] = []
             if wpt_baseline_test not in self.suites['testharness']:
                 self.suites["testharness"].append(wpt_baseline_test)
--- a/testing/mozharness/mozharness/mozilla/testing/raptor.py
+++ b/testing/mozharness/mozharness/mozilla/testing/raptor.py
@@ -158,16 +158,22 @@ class Raptor(TestingMixin, MercurialScri
             "help": "Use Raptor to measure memory usage.",
         }],
         [["--debug-mode"], {
             "dest": "debug_mode",
             "action": "store_true",
             "default": False,
             "help": "Run Raptor in debug mode (open browser console, limited page-cycles, etc.)",
         }],
+        [["--disable-e10s"], {
+            "dest": "e10s",
+            "action": "store_false",
+            "default": True,
+            "help": "Run without multiple processes (e10s).",
+        }],
 
     ] + testing_config_options + copy.deepcopy(code_coverage_config_options)
 
     def __init__(self, **kwargs):
         kwargs.setdefault('config_options', self.config_options)
         kwargs.setdefault('all_actions', ['clobber',
                                           'download-and-extract',
                                           'populate-webroot',
--- a/testing/raptor/raptor/cmdline.py
+++ b/testing/raptor/raptor/cmdline.py
@@ -124,16 +124,18 @@ def create_parser(mach_interface=False):
             help='How long to wait (ms) after browser start-up before starting the tests')
     add_arg('--browser-cycles', dest="browser_cycles", type=int,
             help="The number of times a cold load test is repeated (for cold load tests only, "
             "where the browser is shutdown and restarted between test iterations)")
     add_arg('--print-tests', action=_PrintTests,
             help="Print all available Raptor tests")
     add_arg('--debug-mode', dest="debug_mode", action="store_true",
             help="Run Raptor in debug mode (open browser console, limited page-cycles, etc.)")
+    add_arg('--disable-e10s', dest="e10s", action="store_false", default=True,
+            help="Run without multiple processes (e10s).")
     if not mach_interface:
         add_arg('--run-local', dest="run_local", default=False, action="store_true",
                 help="Flag that indicates if raptor is running locally or in production")
         add_arg('--obj-path', dest="obj_path", default=None,
                 help="Browser build obj_path (received when running in production)")
 
     add_logging_group(parser)
     return parser
--- a/testing/raptor/raptor/raptor.py
+++ b/testing/raptor/raptor/raptor.py
@@ -82,17 +82,17 @@ class SignalHandlerException(Exception):
 
 class Raptor(object):
     """Container class for Raptor"""
 
     def __init__(self, app, binary, run_local=False, obj_path=None,
                  gecko_profile=False, gecko_profile_interval=None, gecko_profile_entries=None,
                  symbols_path=None, host=None, power_test=False, memory_test=False,
                  is_release_build=False, debug_mode=False, post_startup_delay=None,
-                 interrupt_handler=None, **kwargs):
+                 interrupt_handler=None, e10s=True, **kwargs):
 
         # Override the magic --host HOST_IP with the value of the environment variable.
         if host == 'HOST_IP':
             host = os.environ['HOST_IP']
 
         self.config = {
             'app': app,
             'binary': binary,
@@ -104,16 +104,17 @@ class Raptor(object):
             'gecko_profile_interval': gecko_profile_interval,
             'gecko_profile_entries': gecko_profile_entries,
             'symbols_path': symbols_path,
             'host': host,
             'power_test': power_test,
             'memory_test': memory_test,
             'is_release_build': is_release_build,
             'enable_control_server_wait': memory_test,
+            'e10s': e10s,
         }
 
         self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
         self.log = get_default_logger(component='raptor-main')
         self.control_server = None
         self.playback = None
         self.benchmark = None
         self.benchmark_port = 0
@@ -901,17 +902,17 @@ class RaptorAndroid(Raptor):
 
                 # Additional command line arguments that the app will read and use (e.g.
                 # with a custom profile)
                 extras = {}
                 if extra_args:
                     extras['args'] = " ".join(extra_args)
 
                 # add e10s=True
-                extras['use_multiprocess'] = True
+                extras['use_multiprocess'] = self.config['e10s']
 
                 self.device.launch_application(self.config['binary'],
                                                self.config['activity'],
                                                self.config['intent'],
                                                extras=extras,
                                                url='about:blank',
                                                fail_if_running=False)
 
--- a/testing/web-platform/meta/service-workers/service-worker/update-after-navigation-redirect.https.html.ini
+++ b/testing/web-platform/meta/service-workers/service-worker/update-after-navigation-redirect.https.html.ini
@@ -1,5 +1,7 @@
 [update-after-navigation-redirect.https.html]
+  disabled:
+    if debug and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
   expected: TIMEOUT
   [service workers are updated on redirects during navigation]
     expected: TIMEOUT
 
--- a/testing/web-platform/meta/service-workers/service-worker/update-after-oneday.https.html.ini
+++ b/testing/web-platform/meta/service-workers/service-worker/update-after-oneday.https.html.ini
@@ -1,5 +1,2 @@
 [update-after-oneday.https.html]
-  disabled:
-    if debug and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
-    if ccov and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
   prefs: [dom.serviceWorkers.testUpdateOverOneDay:true]
deleted file mode 100644
--- a/testing/web-platform/meta/service-workers/service-worker/update-bytecheck.https.html.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[update-bytecheck.https.html]
-  disabled:
-    if debug and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
-    if webrender and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
deleted file mode 100644
--- a/testing/web-platform/meta/service-workers/service-worker/update-missing-import-scripts.https.html.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[update-missing-import-scripts.https.html]
-  disabled:
-    if debug and (os == "win"): https://bugzilla.mozilla.org/show_bug.cgi?id=1525580
--- a/third_party/rust/dogear/.cargo-checksum.json
+++ b/third_party/rust/dogear/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"CODE_OF_CONDUCT.md":"e85149c44f478f164f7d5f55f6e66c9b5ae236d4a11107d5e2a93fe71dd874b9","Cargo.toml":"ef36c6d2e8475c91f1a28a4ae1de871f8311f1b0044e6ba20b7c21c0af11f0a1","LICENSE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","README.md":"303ea5ec53d4e86f2c321056e8158e31aa061353a99e52de3d76859d40919efc","src/driver.rs":"541d0d5a3f87ebafb4294bebc8a08b259b174b2c0607fa7edef570b0d7b52b7f","src/error.rs":"b78609cf0f0a87b2e6d01bcaf565f1ce8723f33f22f36e1847639557bcd49a2e","src/guid.rs":"6185985ca3e416c1bb9b1691b83789f718fd532fc011efd4a28c82f1edd23650","src/lib.rs":"0bdb83959fc75d9ec99108e0c4c0ced4b9a80c08e950ad2ac59d095e74b39f0f","src/merge.rs":"176353b45ce1079e20d705ca82a154a375eaf927e5a6075d1469d490ff8662d3","src/store.rs":"612d90ea0614aa7cc943c4ac0faaee35c155f57b553195ac28518ae7c0b8ebb1","src/tests.rs":"8a12b2d571ca4c59d645879b555c321c7a6fb6445956d41fcb37747ac06b54df","src/tree.rs":"194ccd6642d64347cf79dea3237e6d124aa4a75cad654360d65945617e749afc"},"package":"30ac4a8e8f834f02deb2266b1f279aa5494e990c625d8be8f2988a7c708ba1f8"}
\ No newline at end of file
+{"files":{"CODE_OF_CONDUCT.md":"e85149c44f478f164f7d5f55f6e66c9b5ae236d4a11107d5e2a93fe71dd874b9","Cargo.toml":"1530236b0a46fdd3afe7225a1e83009fd3e7817ae22566161ec698b724efb562","LICENSE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","README.md":"303ea5ec53d4e86f2c321056e8158e31aa061353a99e52de3d76859d40919efc","book.toml":"ff2de447613bd392bb16e4c72b6a39d83f76decbe35ae79df25a9e84e7f8bec4","src/driver.rs":"e07c4c1c0646d12a11fabb288a6fad569c806611e46f36e1f7e952c5829d1308","src/error.rs":"d4ef0cba5c7fc54959ed62da166f10435548d705e0a817eed449fb001fe4e21d","src/guid.rs":"790700aa07b1d1616d76476c48c9bfda6014350b4b028d4b7c05ac1b8f1c8870","src/lib.rs":"ee32682a94a2eb363b2b9708a2dd0c8eb0eb6afc1e5e6a58fba5ab7681bbde7c","src/merge.rs":"63b2f30fea4034d4ca37f835bf18e6344781cd0d5db0f0b56c49395ba3650ed7","src/store.rs":"fdac19148c662aff1ea2ece90dbc384b59acf3adbf469157844d04812e7c048a","src/tests.rs":"f17cd786b501867174b8fa2524c509d4301064c219ee001129c889f0b84ee6e9","src/tree.rs":"7d3a1fbe7f92673f0c1f8297c86dd03ce1abe1b0f64e1554b635911e8edef82b"},"package":"26b7583e1427e296c852f3217eaab3890e698f742b8d7349beb1f40c4e946fc9"}
\ No newline at end of file
--- a/third_party/rust/dogear/Cargo.toml
+++ b/third_party/rust/dogear/Cargo.toml
@@ -8,20 +8,21 @@
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 edition = "2018"
 name = "dogear"
-version = "0.2.4"
+version = "0.2.5"
 authors = ["Lina Cambridge <lina@mozilla.com>"]
-exclude = ["/.travis/**", ".travis.yml"]
+exclude = ["/.travis/**", ".travis.yml", "/docs/**"]
 description = "A library for merging bookmark trees."
+readme = "README.md"
 license = "Apache-2.0"
 repository = "https://github.com/mozilla/dogear"
 [dependencies.log]
 version = "0.4"
 
 [dependencies.smallbitvec]
 version = "2.3.0"
 [dev-dependencies.env_logger]
new file mode 100644
--- /dev/null
+++ b/third_party/rust/dogear/book.toml
@@ -0,0 +1,9 @@
+[book]
+title = "The Dogeared Book"
+author = "Lina Cambridge"
+description = "A guided introduction to bookmark merging with Dogear."
+src = "docs"
+
+[build]
+build-dir = "book"
+create-missing = false
--- a/third_party/rust/dogear/src/driver.rs
+++ b/third_party/rust/dogear/src/driver.rs
@@ -14,16 +14,50 @@
 
 use std::fmt::Arguments;
 
 use log::{Level, LevelFilter, Log};
 
 use crate::error::{ErrorKind, Result};
 use crate::guid::Guid;
 
+/// An abort signal is used to abort merging. Implementations of `AbortSignal`
+/// can store an aborted flag, usually as an atomic integer or Boolean, set
+/// the flag on abort, and have `AbortSignal::aborted` return the flag's value.
+///
+/// Since merging is synchronous, it's not possible to interrupt a merge from
+/// the same thread that started it. In practice, this means a signal will
+/// implement `Send` and `Sync`, too, so that another thread can set the
+/// aborted flag.
+///
+/// The name comes from the `AbortSignal` DOM API.
+pub trait AbortSignal {
+    /// Indicates if the caller signaled to abort.
+    fn aborted(&self) -> bool;
+
+    /// Returns an error if the caller signaled to abort. This helper makes it
+    /// easier to use the signal with the `?` operator.
+    fn err_if_aborted(&self) -> Result<()> {
+        if self.aborted() {
+            Err(ErrorKind::Abort.into())
+        } else {
+            Ok(())
+        }
+    }
+}
+
+/// A default signal that can't be aborted.
+pub struct DefaultAbortSignal;
+
+impl AbortSignal for DefaultAbortSignal {
+    fn aborted(&self) -> bool {
+        false
+    }
+}
+
 /// A merge driver provides methods to customize merging behavior.
 pub trait Driver {
     /// Generates a new GUID for the given invalid GUID. This is used to fix up
     /// items with GUIDs that Places can't store (bug 1380606, bug 1313026).
     ///
     /// The default implementation returns an error, forbidding invalid GUIDs.
     ///
     /// Implementations of `Driver` can either use the `rand` and `base64`
--- a/third_party/rust/dogear/src/error.rs
+++ b/third_party/rust/dogear/src/error.rs
@@ -83,16 +83,17 @@ impl fmt::Display for Error {
             ErrorKind::UnmergedRemoteItems => {
                 write!(f, "Merged tree doesn't mention all items from remote tree")
             }
             ErrorKind::InvalidGuid(invalid_guid) => {
                 write!(f, "Merged tree contains invalid GUID {}", invalid_guid)
             }
             ErrorKind::InvalidByte(b) => write!(f, "Invalid byte {} in UTF-16 encoding", b),
             ErrorKind::MalformedString(err) => err.fmt(f),
+            ErrorKind::Abort => write!(f, "Operation aborted"),
         }
     }
 }
 
 #[derive(Debug)]
 pub enum ErrorKind {
     MismatchedItemKind(Kind, Kind),
     DuplicateItem(Guid),
@@ -101,9 +102,10 @@ pub enum ErrorKind {
     MissingItem(Guid),
     Cycle(Guid),
     MergeConflict,
     UnmergedLocalItems,
     UnmergedRemoteItems,
     InvalidGuid(Guid),
     InvalidByte(u16),
     MalformedString(Box<dyn error::Error + Send + Sync + 'static>),
+    Abort,
 }
--- a/third_party/rust/dogear/src/guid.rs
+++ b/third_party/rust/dogear/src/guid.rs
@@ -32,17 +32,17 @@ pub trait IsValidGuid {
 
 /// The internal representation of a GUID. Valid GUIDs are 12 bytes, and contain
 /// only Base64url characters; we can store them on the stack without a heap
 /// allocation. However, both local and remote items might have invalid GUIDs,
 /// in which case we fall back to a heap-allocated string.
 #[derive(Clone)]
 enum Repr {
     Valid([u8; 12]),
-    Invalid(String),
+    Invalid(Box<str>),
 }
 
 /// The Places root GUID, used to root all items in a bookmark tree.
 pub const ROOT_GUID: Guid = Guid(Repr::Valid(*b"root________"));
 
 /// The bookmarks toolbar GUID.
 pub const TOOLBAR_GUID: Guid = Guid(Repr::Valid(*b"toolbar_____"));
 
@@ -90,29 +90,29 @@ impl Guid {
                 if byte > u16::from(u8::max_value()) {
                     return Err(ErrorKind::InvalidByte(byte).into());
                 }
                 bytes[index] = byte as u8;
             }
             Repr::Valid(bytes)
         } else {
             match String::from_utf16(b) {
-                Ok(s) => Repr::Invalid(s),
+                Ok(s) => Repr::Invalid(s.into()),
                 Err(err) => return Err(err.into()),
             }
         };
         Ok(Guid(repr))
     }
 
     /// Returns the GUID as a byte slice.
     #[inline]
     pub fn as_bytes(&self) -> &[u8] {
         match self.0 {
             Repr::Valid(ref bytes) => bytes,
-            Repr::Invalid(ref s) => s.as_ref(),
+            Repr::Invalid(ref s) => s.as_bytes(),
         }
     }
 
     /// Returns the GUID as a string slice.
     #[inline]
     pub fn as_str(&self) -> &str {
         // We actually could use from_utf8_unchecked here, and depending on how
         // often we end up doing this, it's arguable that we should. We know
@@ -209,17 +209,17 @@ impl PartialOrd for Guid {
         Some(self.cmp(other))
     }
 }
 
 // Allow direct comparison with str
 impl PartialEq<str> for Guid {
     #[inline]
     fn eq(&self, other: &str) -> bool {
-        self.as_str() == other
+        self.as_bytes() == other.as_bytes()
     }
 }
 
 impl<'a> PartialEq<&'a str> for Guid {
     #[inline]
     fn eq(&self, other: &&'a str) -> bool {
         self == *other
     }
--- a/third_party/rust/dogear/src/lib.rs
+++ b/third_party/rust/dogear/src/lib.rs
@@ -22,17 +22,17 @@ mod guid;
 mod merge;
 #[macro_use]
 mod store;
 mod tree;
 
 #[cfg(test)]
 mod tests;
 
-pub use crate::driver::{DefaultDriver, Driver};
+pub use crate::driver::{AbortSignal, DefaultAbortSignal, DefaultDriver, Driver};
 pub use crate::error::{Error, ErrorKind, Result};
 pub use crate::guid::{Guid, MENU_GUID, MOBILE_GUID, ROOT_GUID, TOOLBAR_GUID, UNFILED_GUID};
 pub use crate::merge::{Deletion, Merger, StructureCounts};
 pub use crate::store::{MergeTimings, Stats, Store};
 pub use crate::tree::{
-    Content, IntoTree, Item, Kind, MergeState, MergedDescendant, MergedNode, MergedRoot, Tree,
-    UploadReason, Validity,
+    Content, Item, Kind, MergeState, MergedDescendant, MergedNode, MergedRoot, Tree, UploadReason,
+    Validity,
 };
--- a/third_party/rust/dogear/src/merge.rs
+++ b/third_party/rust/dogear/src/merge.rs
@@ -8,21 +8,21 @@
 //
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
 use std::{
-    collections::{HashMap, HashSet, VecDeque},
+    collections::{hash_map::Entry, HashMap, HashSet, VecDeque},
     mem,
 };
 
-use crate::driver::{DefaultDriver, Driver};
+use crate::driver::{AbortSignal, DefaultAbortSignal, DefaultDriver, Driver};
 use crate::error::{ErrorKind, Result};
 use crate::guid::{Guid, IsValidGuid};
 use crate::tree::{Content, MergeState, MergedNode, MergedRoot, Node, Tree, Validity};
 
 /// Structure change types, used to indicate if a node on one side is moved
 /// or deleted on the other.
 #[derive(Eq, PartialEq)]
 enum StructureChange {
@@ -91,35 +91,37 @@ enum ConflictResolution {
 /// The `needs_merge` flag notes *that* a bookmark changed, but not *how*. This
 /// means we might detect conflicts, and revert changes on one side, for cases
 /// that iOS can merge cleanly.
 ///
 /// Fortunately, most of our users don't organize their bookmarks into deeply
 /// nested hierarchies, or make conflicting changes on multiple devices
 /// simultaneously. A simpler two-way tree merge strikes a good balance between
 /// correctness and complexity.
-pub struct Merger<'t, D = DefaultDriver> {
+pub struct Merger<'t, D = DefaultDriver, A = DefaultAbortSignal> {
     driver: &'t D,
+    signal: &'t A,
     local_tree: &'t Tree,
     new_local_contents: Option<&'t HashMap<Guid, Content>>,
     remote_tree: &'t Tree,
     new_remote_contents: Option<&'t HashMap<Guid, Content>>,
     matching_dupes_by_local_parent_guid: HashMap<Guid, MatchingDupes<'t>>,
     merged_guids: HashSet<Guid>,
     delete_locally: HashSet<Guid>,
     delete_remotely: HashSet<Guid>,
     structure_counts: StructureCounts,
 }
 
 #[cfg(test)]
-impl<'t> Merger<'t, DefaultDriver> {
+impl<'t> Merger<'t, DefaultDriver, DefaultAbortSignal> {
     /// Creates a merger with the default merge driver.
     pub fn new(local_tree: &'t Tree, remote_tree: &'t Tree) -> Merger<'t> {
         Merger {
             driver: &DefaultDriver,
+            signal: &DefaultAbortSignal,
             local_tree,
             new_local_contents: None,
             remote_tree,
             new_remote_contents: None,
             matching_dupes_by_local_parent_guid: HashMap::new(),
             merged_guids: HashSet::new(),
             delete_locally: HashSet::new(),
             delete_remotely: HashSet::new(),
@@ -131,35 +133,38 @@ impl<'t> Merger<'t, DefaultDriver> {
     pub fn with_contents(
         local_tree: &'t Tree,
         new_local_contents: &'t HashMap<Guid, Content>,
         remote_tree: &'t Tree,
         new_remote_contents: &'t HashMap<Guid, Content>,
     ) -> Merger<'t> {
         Merger::with_driver(
             &DefaultDriver,
+            &DefaultAbortSignal,
             local_tree,
             new_local_contents,
             remote_tree,
             new_remote_contents,
         )
     }
 }
 
-impl<'t, D: Driver> Merger<'t, D> {
+impl<'t, D: Driver, A: AbortSignal> Merger<'t, D, A> {
     /// Creates a merger with the given merge driver and contents.
     pub fn with_driver(
         driver: &'t D,
+        signal: &'t A,
         local_tree: &'t Tree,
         new_local_contents: &'t HashMap<Guid, Content>,
         remote_tree: &'t Tree,
         new_remote_contents: &'t HashMap<Guid, Content>,
-    ) -> Merger<'t, D> {
+    ) -> Merger<'t, D, A> {
         Merger {
             driver,
+            signal,
             local_tree,
             new_local_contents: Some(new_local_contents),
             remote_tree,
             new_remote_contents: Some(new_remote_contents),
             matching_dupes_by_local_parent_guid: HashMap::new(),
             merged_guids: HashSet::new(),
             delete_locally: HashSet::new(),
             delete_remotely: HashSet::new(),
@@ -175,22 +180,24 @@ impl<'t, D: Driver> Merger<'t, D> {
             self.two_way_merge(local_root_node, remote_root_node)?
         };
 
         // Any remaining deletions on one side should be deleted on the other side.
         // This happens when the remote tree has tombstones for items that don't
         // exist locally, or the local tree has tombstones for items that
         // aren't on the server.
         for guid in self.local_tree.deletions() {
+            self.signal.err_if_aborted()?;
             if !self.mentions(guid) {
                 self.delete_remotely.insert(guid.clone());
                 self.structure_counts.merged_deletions += 1;
             }
         }
         for guid in self.remote_tree.deletions() {
+            self.signal.err_if_aborted()?;
             if !self.mentions(guid) {
                 self.delete_locally.insert(guid.clone());
                 self.structure_counts.merged_deletions += 1;
             }
         }
 
         Ok(MergedRoot::with_size(
             merged_root_node,
@@ -265,16 +272,17 @@ impl<'t, D: Driver> Merger<'t, D> {
 
         let merged_guid = if local_node.guid.is_valid_guid() {
             local_node.guid.clone()
         } else {
             warn!(
                 self.driver,
                 "Generating new GUID for local node {}", local_node
             );
+            self.signal.err_if_aborted()?;
             let new_guid = self.driver.generate_new_guid(&local_node.guid)?;
             if new_guid != local_node.guid {
                 if self.merged_guids.contains(&new_guid) {
                     return Err(ErrorKind::DuplicateItem(new_guid).into());
                 }
                 self.merged_guids.insert(new_guid.clone());
             }
             new_guid
@@ -282,16 +290,17 @@ impl<'t, D: Driver> Merger<'t, D> {
 
         let mut merged_node = MergedNode::new(merged_guid, MergeState::LocalOnly(local_node));
         if local_node.is_folder() {
             // The local folder doesn't exist remotely, but its children might, so
             // we still need to recursively walk and merge them. This method will
             // change the merge state from local to new if any children were moved
             // or deleted.
             for local_child_node in local_node.children() {
+                self.signal.err_if_aborted()?;
                 self.merge_local_child_into_merged_node(
                     &mut merged_node,
                     local_node,
                     None,
                     local_child_node,
                 )?;
             }
         }
@@ -306,16 +315,17 @@ impl<'t, D: Driver> Merger<'t, D> {
 
         let merged_guid = if remote_node.guid.is_valid_guid() {
             remote_node.guid.clone()
         } else {
             warn!(
                 self.driver,
                 "Generating new GUID for remote node {}", remote_node
             );
+            self.signal.err_if_aborted()?;
             let new_guid = self.driver.generate_new_guid(&remote_node.guid)?;
             if new_guid != remote_node.guid {
                 if self.merged_guids.contains(&new_guid) {
                     return Err(ErrorKind::DuplicateItem(new_guid).into());
                 }
                 self.merged_guids.insert(new_guid.clone());
                 // Upload tombstones for changed remote GUIDs.
                 self.delete_remotely.insert(remote_node.guid.clone());
@@ -324,16 +334,17 @@ impl<'t, D: Driver> Merger<'t, D> {
             new_guid
         };
         let mut merged_node = MergedNode::new(merged_guid, MergeState::RemoteOnly(remote_node));
         if remote_node.is_folder() {
             // As above, a remote folder's children might still exist locally, so we
             // need to merge them and update the merge state from remote to new if
             // any children were moved or deleted.
             for remote_child_node in remote_node.children() {
+                self.signal.err_if_aborted()?;
                 self.merge_remote_child_into_merged_node(
                     &mut merged_node,
                     None,
                     remote_node,
                     remote_child_node,
                 )?;
             }
         }
@@ -376,16 +387,17 @@ impl<'t, D: Driver> Merger<'t, D> {
 
         let merged_guid = if remote_node.guid.is_valid_guid() {
             remote_node.guid.clone()
         } else {
             warn!(
                 self.driver,
                 "Generating new valid GUID for node {}", remote_node
             );
+            self.signal.err_if_aborted()?;
             let new_guid = self.driver.generate_new_guid(&remote_node.guid)?;
             if new_guid != remote_node.guid {
                 if self.merged_guids.contains(&new_guid) {
                     return Err(ErrorKind::DuplicateItem(new_guid).into());
                 }
                 self.merged_guids.insert(new_guid.clone());
                 // Upload tombstones for changed remote GUIDs.
                 self.delete_remotely.insert(remote_node.guid.clone());
@@ -412,43 +424,47 @@ impl<'t, D: Driver> Merger<'t, D> {
                     remote_node,
                 },
             },
         );
 
         match children {
             ConflictResolution::Local => {
                 for local_child_node in local_node.children() {
+                    self.signal.err_if_aborted()?;
                     self.merge_local_child_into_merged_node(
                         &mut merged_node,
                         local_node,
                         Some(remote_node),
                         local_child_node,
                     )?;
                 }
                 for remote_child_node in remote_node.children() {
+                    self.signal.err_if_aborted()?;
                     self.merge_remote_child_into_merged_node(
                         &mut merged_node,
                         Some(local_node),
                         remote_node,
                         remote_child_node,
                     )?;
                 }
             }
 
             ConflictResolution::Remote | ConflictResolution::Unchanged => {
                 for remote_child_node in remote_node.children() {
+                    self.signal.err_if_aborted()?;
                     self.merge_remote_child_into_merged_node(
                         &mut merged_node,
                         Some(local_node),
                         remote_node,
                         remote_child_node,
                     )?;
                 }
                 for local_child_node in local_node.children() {
+                    self.signal.err_if_aborted()?;
                     self.merge_local_child_into_merged_node(
                         &mut merged_node,
                         local_node,
                         Some(remote_node),
                         local_child_node,
                     )?;
                 }
             }
@@ -633,17 +649,17 @@ impl<'t, D: Driver> Merger<'t, D> {
         );
 
         let mut merged_child_node = if let Some(local_child_node_by_content) = self
             .find_local_node_matching_remote_node(
                 merged_node,
                 local_parent_node,
                 remote_parent_node,
                 remote_child_node,
-            ) {
+            )? {
             self.two_way_merge(local_child_node_by_content, remote_child_node)
         } else {
             self.merge_remote_only_node(remote_child_node)
         }?;
         if merged_node.remote_guid_changed() {
             merged_child_node.merge_state = merged_child_node.merge_state.with_new_structure();
         }
         if merged_child_node.remote_guid_changed() {
@@ -839,17 +855,17 @@ impl<'t, D: Driver> Merger<'t, D> {
         );
 
         let merged_child_node = if let Some(remote_child_node_by_content) = self
             .find_remote_node_matching_local_node(
                 merged_node,
                 local_parent_node,
                 remote_parent_node,
                 local_child_node,
-            ) {
+            )? {
             // The local child has a remote content match, so take the remote GUID
             // and merge.
             let mut merged_child_node =
                 self.two_way_merge(local_child_node, remote_child_node_by_content)?;
             if merged_node.remote_guid_changed() {
                 merged_child_node.merge_state = merged_child_node.merge_state.with_new_structure();
             }
             if merged_child_node.remote_guid_changed() {
@@ -1192,16 +1208,17 @@ impl<'t, D: Driver> Merger<'t, D> {
     /// This is the inverse of `delete_local_node`.
     fn delete_remote_node(
         &mut self,
         merged_node: &mut MergedNode<'t>,
         remote_node: Node<'t>,
     ) -> Result<StructureChange> {
         self.delete_remotely.insert(remote_node.guid.clone());
         for remote_child_node in remote_node.children() {
+            self.signal.err_if_aborted()?;
             if self.merged_guids.contains(&remote_child_node.guid) {
                 trace!(
                     self.driver,
                     "Remote child {} can't be an orphan; already merged",
                     remote_child_node
                 );
                 continue;
             }
@@ -1249,16 +1266,17 @@ impl<'t, D: Driver> Merger<'t, D> {
     /// This is the inverse of `delete_remote_node`.
     fn delete_local_node(
         &mut self,
         merged_node: &mut MergedNode<'t>,
         local_node: Node<'t>,
     ) -> Result<StructureChange> {
         self.delete_locally.insert(local_node.guid.clone());
         for local_child_node in local_node.children() {
+            self.signal.err_if_aborted()?;
             if self.merged_guids.contains(&local_child_node.guid) {
                 trace!(
                     self.driver,
                     "Local child {} can't be an orphan; already merged",
                     local_child_node
                 );
                 continue;
             }
@@ -1319,20 +1337,21 @@ impl<'t, D: Driver> Merger<'t, D> {
     /// children. We cache matches in
     /// `matching_dupes_by_local_parent_guid`, so deduping all
     /// remaining children of the same folder, on both sides, only needs two
     /// O(1) map lookups per child.
     fn find_all_matching_dupes_in_folders(
         &self,
         local_parent_node: Node<'t>,
         remote_parent_node: Node<'t>,
-    ) -> MatchingDupes<'t> {
+    ) -> Result<MatchingDupes<'t>> {
         let mut dupe_key_to_local_nodes: HashMap<&Content, VecDeque<_>> = HashMap::new();
 
         for local_child_node in local_parent_node.children() {
+            self.signal.err_if_aborted()?;
             if local_child_node.is_user_content_root() {
                 continue;
             }
             if let Some(local_child_content) = self
                 .new_local_contents
                 .and_then(|contents| contents.get(&local_child_node.guid))
             {
                 if let Some(remote_child_node) =
@@ -1369,16 +1388,17 @@ impl<'t, D: Driver> Merger<'t, D> {
                 );
             }
         }
 
         let mut local_to_remote = HashMap::new();
         let mut remote_to_local = HashMap::new();
 
         for remote_child_node in remote_parent_node.children() {
+            self.signal.err_if_aborted()?;
             if remote_to_local.contains_key(&remote_child_node.guid) {
                 trace!(
                     self.driver,
                     "Not deduping remote child {}; already deduped",
                     remote_child_node
                 );
                 continue;
             }
@@ -1421,121 +1441,129 @@ impl<'t, D: Driver> Merger<'t, D> {
                 trace!(
                     self.driver,
                     "Not deduping remote child {}; already merged",
                     remote_child_node
                 );
             }
         }
 
-        (local_to_remote, remote_to_local)
+        Ok((local_to_remote, remote_to_local))
     }
 
     /// Finds a remote node with a different GUID that matches the content of a
     /// local node.
     ///
     /// This is the inverse of `find_local_node_matching_remote_node`.
     fn find_remote_node_matching_local_node(
         &mut self,
         merged_node: &MergedNode<'t>,
         local_parent_node: Node<'t>,
         remote_parent_node: Option<Node<'t>>,
         local_child_node: Node<'t>,
-    ) -> Option<Node<'t>> {
+    ) -> Result<Option<Node<'t>>> {
         if let Some(remote_parent_node) = remote_parent_node {
             let mut matching_dupes_by_local_parent_guid = mem::replace(
                 &mut self.matching_dupes_by_local_parent_guid,
                 HashMap::new(),
             );
             let new_remote_node = {
-                let (local_to_remote, _) = matching_dupes_by_local_parent_guid
+                let (local_to_remote, _) = match matching_dupes_by_local_parent_guid
                     .entry(local_parent_node.guid.clone())
-                    .or_insert_with(|| {
+                {
+                    Entry::Occupied(entry) => entry.into_mut(),
+                    Entry::Vacant(entry) => {
                         trace!(
                             self.driver,
                             "First local child {} doesn't exist remotely; \
                              finding all matching dupes in local {} and remote {}",
                             local_child_node,
                             local_parent_node,
                             remote_parent_node
                         );
-                        self.find_all_matching_dupes_in_folders(
+                        let matching_dupes = self.find_all_matching_dupes_in_folders(
                             local_parent_node,
                             remote_parent_node,
-                        )
-                    });
+                        )?;
+                        entry.insert(matching_dupes)
+                    }
+                };
                 let new_remote_node = local_to_remote.get(&local_child_node.guid);
                 new_remote_node.map(|node| {
                     self.structure_counts.dupes += 1;
                     *node
                 })
             };
             mem::replace(
                 &mut self.matching_dupes_by_local_parent_guid,
                 matching_dupes_by_local_parent_guid,
             );
-            new_remote_node
+            Ok(new_remote_node)
         } else {
             trace!(
                 self.driver,
                 "Merged node {} doesn't exist remotely; no potential dupes for local child {}",
                 merged_node,
                 local_child_node
             );
-            None
+            Ok(None)
         }
     }
 
     /// Finds a local node with a different GUID that matches the content of a
     /// remote node.
     ///
     /// This is the inverse of `find_remote_node_matching_local_node`.
     fn find_local_node_matching_remote_node(
         &mut self,
         merged_node: &MergedNode<'t>,
         local_parent_node: Option<Node<'t>>,
         remote_parent_node: Node<'t>,
         remote_child_node: Node<'t>,
-    ) -> Option<Node<'t>> {
+    ) -> Result<Option<Node<'t>>> {
         if let Some(local_parent_node) = local_parent_node {
             let mut matching_dupes_by_local_parent_guid = mem::replace(
                 &mut self.matching_dupes_by_local_parent_guid,
                 HashMap::new(),
             );
             let new_local_node = {
-                let (_, remote_to_local) = matching_dupes_by_local_parent_guid
+                let (_, remote_to_local) = match matching_dupes_by_local_parent_guid
                     .entry(local_parent_node.guid.clone())
-                    .or_insert_with(|| {
+                {
+                    Entry::Occupied(entry) => entry.into_mut(),
+                    Entry::Vacant(entry) => {
                         trace!(
                             self.driver,
                             "First remote child {} doesn't exist locally; \
                              finding all matching dupes in local {} and remote {}",
                             remote_child_node,
                             local_parent_node,
                             remote_parent_node
                         );
-                        self.find_all_matching_dupes_in_folders(
+                        let matching_dupes = self.find_all_matching_dupes_in_folders(
                             local_parent_node,
                             remote_parent_node,
-                        )
-                    });
+                        )?;
+                        entry.insert(matching_dupes)
+                    }
+                };
                 let new_local_node = remote_to_local.get(&remote_child_node.guid);
                 new_local_node.map(|node| {
                     self.structure_counts.dupes += 1;
                     *node
                 })
             };
             mem::replace(
                 &mut self.matching_dupes_by_local_parent_guid,
                 matching_dupes_by_local_parent_guid,
             );
-            new_local_node
+            Ok(new_local_node)
         } else {
             trace!(
                 self.driver,
                 "Merged node {} doesn't exist locally; no potential dupes for remote child {}",
                 merged_node,
                 remote_child_node
             );
-            None
+            Ok(None)
         }
     }
 }
--- a/third_party/rust/dogear/src/store.rs
+++ b/third_party/rust/dogear/src/store.rs
@@ -9,17 +9,17 @@
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
 use std::{collections::HashMap, time::Duration};
 
-use crate::driver::{DefaultDriver, Driver};
+use crate::driver::{AbortSignal, DefaultAbortSignal, DefaultDriver, Driver};
 use crate::error::{Error, ErrorKind};
 use crate::guid::Guid;
 use crate::merge::{Deletion, Merger, StructureCounts};
 use crate::tree::{Content, MergedRoot, Tree};
 
 /// Records timings and counters for telemetry.
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct Stats {
@@ -76,42 +76,52 @@ pub trait Store<E: From<Error>> {
     fn apply<'t>(
         &mut self,
         root: MergedRoot<'t>,
         deletions: impl Iterator<Item = Deletion<'t>>,
     ) -> Result<(), E>;
 
     /// Builds and applies a merged tree using the default merge driver.
     fn merge(&mut self) -> Result<Stats, E> {
-        self.merge_with_driver(&DefaultDriver)
+        self.merge_with_driver(&DefaultDriver, &DefaultAbortSignal)
     }
 
     /// Builds a complete merged tree from the local and remote trees, resolves
     /// conflicts, dedupes local items, and applies the merged tree using the
     /// given driver.
-    fn merge_with_driver<D: Driver>(&mut self, driver: &D) -> Result<Stats, E> {
+    fn merge_with_driver<D: Driver, A: AbortSignal>(
+        &mut self,
+        driver: &D,
+        signal: &A,
+    ) -> Result<Stats, E> {
         let mut merge_timings = MergeTimings::default();
+
+        signal.err_if_aborted()?;
         let local_tree = time!(merge_timings, fetch_local_tree, { self.fetch_local_tree() })?;
         debug!(driver, "Built local tree from mirror\n{}", local_tree);
 
+        signal.err_if_aborted()?;
         let new_local_contents = time!(merge_timings, fetch_new_local_contents, {
             self.fetch_new_local_contents()
         })?;
 
+        signal.err_if_aborted()?;
         let remote_tree = time!(merge_timings, fetch_remote_tree, {
             self.fetch_remote_tree()
         })?;
         debug!(driver, "Built remote tree from mirror\n{}", remote_tree);
 
+        signal.err_if_aborted()?;
         let new_remote_contents = time!(merge_timings, fetch_new_remote_contents, {
             self.fetch_new_remote_contents()
         })?;
 
         let mut merger = Merger::with_driver(
             driver,
+            signal,
             &local_tree,
             &new_local_contents,
             &remote_tree,
             &new_remote_contents,
         );
         let merged_root = time!(merge_timings, merge, merger.merge())?;
         debug!(
             driver,
@@ -127,19 +137,23 @@ pub trait Store<E: From<Error>> {
                 .map(|d| d.guid.as_str())
                 .collect::<Vec<_>>()
                 .join(", ")
         );
 
         // The merged tree should know about all items mentioned in the local
         // and remote trees. Otherwise, it's incomplete, and we can't apply it.
         // This indicates a bug in the merger.
+
+        signal.err_if_aborted()?;
         if !merger.subsumes(&local_tree) {
             Err(E::from(ErrorKind::UnmergedLocalItems.into()))?;
         }
+
+        signal.err_if_aborted()?;
         if !merger.subsumes(&remote_tree) {
             Err(E::from(ErrorKind::UnmergedRemoteItems.into()))?;
         }
 
         time!(
             merge_timings,
             apply,
             self.apply(merged_root, merger.deletions())
--- a/third_party/rust/dogear/src/tests.rs
+++ b/third_party/rust/dogear/src/tests.rs
@@ -7,72 +7,86 @@
 //     http://www.apache.org/licenses/LICENSE-2.0
 //
 // Unless required by applicable law or agreed to in writing, software
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-use std::{cell::Cell, collections::HashMap, sync::Once};
+use std::{
+    cell::Cell,
+    collections::HashMap,
+    convert::{TryFrom, TryInto},
+    sync::Once,
+};
 
 use env_logger;
 
-use crate::driver::Driver;
-use crate::error::{ErrorKind, Result};
+use crate::driver::{DefaultAbortSignal, Driver};
+use crate::error::{Error, ErrorKind, Result};
 use crate::guid::{Guid, ROOT_GUID, UNFILED_GUID};
 use crate::merge::{Merger, StructureCounts};
 use crate::tree::{
-    Builder, Content, DivergedParent, DivergedParentGuid, IntoTree, Item, Kind, Problem, Problems,
-    Tree, Validity,
+    Builder, Content, DivergedParent, DivergedParentGuid, Item, Kind, Problem, Problems, Tree,
+    Validity,
 };
 
 #[derive(Debug)]
 struct Node {
     item: Item,
     children: Vec<Node>,
 }
 
 impl Node {
     fn new(item: Item) -> Node {
         Node {
             item,
             children: Vec::new(),
         }
     }
+    /// For convenience.
+    fn into_tree(self) -> Result<Tree> {
+        self.try_into()
+    }
+}
 
-    fn into_builder(self) -> Result<Builder> {
+impl TryFrom<Node> for Builder {
+    type Error = Error;
+
+    fn try_from(node: Node) -> Result<Builder> {
         fn inflate(b: &mut Builder, parent_guid: &Guid, node: Node) -> Result<()> {
             let guid = node.item.guid.clone();
             b.item(node.item)
                 .map(|_| ())
                 .or_else(|err| match err.kind() {
                     ErrorKind::DuplicateItem(_) => Ok(()),
                     _ => Err(err),
                 })?;
             b.parent_for(&guid).by_structure(&parent_guid)?;
             for child in node.children {
                 inflate(b, &guid, child)?;
             }
             Ok(())
         }
 
-        let guid = self.item.guid.clone();
-        let mut builder = Tree::with_root(self.item);
+        let guid = node.item.guid.clone();
+        let mut builder = Tree::with_root(node.item);
         builder.reparent_orphans_to(&UNFILED_GUID);
-        for child in self.children {
+        for child in node.children {
             inflate(&mut builder, &guid, child)?;
         }
         Ok(builder)
     }
 }
 
-impl IntoTree for Node {
-    fn into_tree(self) -> Result<Tree> {
-        self.into_builder()?.into_tree()
+impl TryFrom<Node> for Tree {
+    type Error = Error;
+    fn try_from(node: Node) -> Result<Tree> {
+        Builder::try_from(node)?.try_into()
     }
 }
 
 macro_rules! nodes {
     ($children:tt) => { nodes!(ROOT_GUID, Folder[needs_merge = true], $children) };
     ($guid:expr, $kind:ident) => { nodes!(Guid::from($guid), $kind[]) };
     ($guid:expr, $kind:ident [ $( $name:ident = $value:expr ),* ]) => {{
         #[allow(unused_mut)]
@@ -2274,16 +2288,17 @@ fn invalid_guids() {
     })
     .into_tree()
     .unwrap();
     let new_remote_contents: HashMap<Guid, Content> = HashMap::new();
 
     let driver = GenerateNewGuid::default();
     let mut merger = Merger::with_driver(
         &driver,
+        &DefaultAbortSignal,
         &local_tree,
         &new_local_contents,
         &remote_tree,
         &new_remote_contents,
     );
     let merged_root = merger.merge().unwrap();
     assert!(merger.subsumes(&local_tree));
     assert!(merger.subsumes(&remote_tree));
@@ -2396,27 +2411,27 @@ fn reparent_orphans() {
         }),
         ("unfiled_____", Folder, {
             ("bookmarkCCCC", Bookmark)
         })
     })
     .into_tree()
     .unwrap();
 
-    let mut remote_tree_builder = nodes!({
+    let mut remote_tree_builder: Builder = nodes!({
         ("toolbar_____", Folder[needs_merge = true], {
             ("bookmarkBBBB", Bookmark),
             ("bookmarkAAAA", Bookmark)
         }),
         ("unfiled_____", Folder[needs_merge = true], {
             ("bookmarkDDDD", Bookmark[needs_merge = true]),
             ("bookmarkCCCC", Bookmark)
         })
     })
-    .into_builder()
+    .try_into()
     .unwrap();
     remote_tree_builder
         .item(Item {
             guid: "bookmarkEEEE".into(),
             kind: Kind::Bookmark,
             age: 0,
             needs_merge: true,
             validity: Validity::Valid,
@@ -2608,17 +2623,17 @@ fn moved_user_content_roots() {
 }
 
 #[test]
 fn cycle() {
     before_each();
 
     // Try to create a cycle: move A into B, and B into the menu, but keep
     // B's parent by children as A.
-    let mut b = nodes!({ ("menu________", Folder) }).into_builder().unwrap();
+    let mut b: Builder = nodes!({ ("menu________", Folder) }).try_into().unwrap();
 
     b.item(Item::new("folderAAAAAA".into(), Kind::Folder))
         .and_then(|p| p.by_parent_guid("folderBBBBBB".into()))
         .expect("Should insert A");
 
     b.item(Item::new("folderBBBBBB".into(), Kind::Folder))
         .and_then(|p| p.by_parent_guid("menu________".into()))
         .and_then(|b| {
--- a/third_party/rust/dogear/src/tree.rs
+++ b/third_party/rust/dogear/src/tree.rs
@@ -11,35 +11,30 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
 use std::{
     borrow::Cow,
     cmp::Ordering,
     collections::{HashMap, HashSet},
+    convert::{TryFrom, TryInto},
     fmt, mem,
     ops::Deref,
     ptr,
 };
 
 use smallbitvec::SmallBitVec;
 
-use crate::error::{ErrorKind, Result};
+use crate::error::{Error, ErrorKind, Result};
 use crate::guid::Guid;
 
 /// The type for entry indices in the tree.
 type Index = usize;
 
-/// Anything that can be turned into a tree.
-pub trait IntoTree {
-    /// Performs the conversion.
-    fn into_tree(self) -> Result<Tree>;
-}
-
 /// A complete, rooted bookmark tree with tombstones.
 ///
 /// The tree stores bookmark items in a vector, and uses indices in the vector
 /// to identify parents and children. This makes traversal and lookup very
 /// efficient. Retrieving a node's parent takes one indexing operation,
 /// retrieving children takes one indexing operation per child, and retrieving
 /// a node by random GUID takes one hash map lookup and one indexing operation.
 #[derive(Debug)]
@@ -113,23 +108,16 @@ impl Tree {
 
     /// Returns the structure divergences found when building the tree.
     #[inline]
     pub fn problems(&self) -> &Problems {
         &self.problems
     }
 }
 
-impl IntoTree for Tree {
-    #[inline]
-    fn into_tree(self) -> Result<Tree> {
-        Ok(self)
-    }
-}
-
 impl fmt::Display for Tree {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let root = self.root();
         f.write_str(&root.to_ascii_string())?;
         if !self.deleted_guids.is_empty() {
             f.write_str("\nDeleted: [")?;
             for (i, guid) in self.deleted_guids.iter().enumerate() {
                 if i != 0 {
@@ -219,18 +207,18 @@ impl PartialEq for Tree {
 /// - Items whose `parentid`s don't mention the item in their `children`.
 /// - Items with `parentid`s that point to nonexistent or deleted folders.
 /// - Folders with nonexistent `children`.
 /// - Non-syncable items, like custom roots.
 /// - Any combination of these.
 ///
 /// # Resolving divergences
 ///
-/// Building a tree using `Builder::into_tree` resolves divergences using
-/// these rules:
+/// Building a tree using `std::convert::TryInto<Tree>::try_into` resolves
+/// divergences using these rules:
 ///
 /// 1. User content roots should always be children of the Places root. If
 ///    they appear in other parents, we move them.
 /// 2. Items that appear in multiple `children`, and items with mismatched
 ///    `parentid`s, use the chronologically newer parent, based on the parent's
 ///    last modified time. We always prefer parents by `children` over
 ///    `parentid,` because `children` also gives us the item's position.
 /// 3. Items that aren't mentioned in any parent's `children`, but have a
@@ -282,55 +270,64 @@ impl Builder {
     pub fn parent_for(&mut self, child_guid: &Guid) -> ParentBuilder<'_> {
         assert_eq!(self.entries.len(), self.entry_index_by_guid.len());
         let entry_child = match self.entry_index_by_guid.get(child_guid) {
             Some(&child_index) => BuilderEntryChild::Exists(child_index),
             None => BuilderEntryChild::Missing(child_guid.clone()),
         };
         ParentBuilder(self, entry_child)
     }
+
+    /// Equivalent to using our implementation of `TryInto<Tree>::try_into`, but
+    /// provided both for convenience when updating from previous versions of
+    /// `dogear`, and for cases where a type hint would otherwise be needed to
+    /// clarify the target type of the conversion.
+    pub fn into_tree(self) -> Result<Tree> {
+        self.try_into()
+    }
 }
 
-impl IntoTree for Builder {
+impl TryFrom<Builder> for Tree {
+    type Error = Error;
     /// Builds a tree from all stored items and parent-child associations,
     /// resolving inconsistencies like orphans, multiple parents, and
     /// parent-child disagreements.
-    fn into_tree(self) -> Result<Tree> {
+    fn try_from(builder: Builder) -> Result<Tree> {
         let mut problems = Problems::default();
 
         // First, resolve parents for all entries, and build a lookup table for
         // items without a position.
-        let mut parents = Vec::with_capacity(self.entries.len());
+        let mut parents = Vec::with_capacity(builder.entries.len());
         let mut reparented_child_indices_by_parent: HashMap<Index, Vec<Index>> = HashMap::new();
-        for (entry_index, entry) in self.entries.iter().enumerate() {
-            let r = ResolveParent::new(&self, entry, &mut problems);
+        for (entry_index, entry) in builder.entries.iter().enumerate() {
+            let r = ResolveParent::new(&builder, entry, &mut problems);
             let resolved_parent = r.resolve();
             if let ResolvedParent::ByParentGuid(parent_index) = &resolved_parent {
                 // Reparented items are special: since they aren't mentioned in
                 // that parent's `children`, we don't know their positions. Note
                 // them for when we resolve children. We also clone the GUID,
                 // since we use it for sorting, but can't access it by
-                // reference once we call `self.entries.into_iter()` below.
+                // reference once we call `builder.entries.into_iter()` below.
                 let reparented_child_indices = reparented_child_indices_by_parent
                     .entry(*parent_index)
                     .or_default();
                 reparented_child_indices.push(entry_index);
             }
             parents.push(resolved_parent);
         }
 
         // If any parents form cycles, abort. We haven't seen cyclic trees in
         // the wild, and breaking cycles would add complexity.
         if let Some(index) = detect_cycles(&parents) {
-            return Err(ErrorKind::Cycle(self.entries[index].item.guid.clone()).into());
+            return Err(ErrorKind::Cycle(builder.entries[index].item.guid.clone()).into());
         }
 
         // Then, resolve children, and build a slab of entries for the tree.
-        let mut entries = Vec::with_capacity(self.entries.len());
-        for (entry_index, entry) in self.entries.into_iter().enumerate() {
+        let mut entries = Vec::with_capacity(builder.entries.len());
+        for (entry_index, entry) in builder.entries.into_iter().enumerate() {
             // Each entry is consistent, until proven otherwise!
             let mut divergence = Divergence::Consistent;
 
             let parent_index = match &parents[entry_index] {
                 ResolvedParent::Root => {
                     // The Places root doesn't have a parent, and should always
                     // be the first entry.
                     assert_eq!(entry_index, 0);
@@ -412,17 +409,17 @@ impl IntoTree for Builder {
                 parent_index,
                 child_indices,
                 divergence,
             });
         }
 
         // Now we have a consistent tree.
         Ok(Tree {
-            entry_index_by_guid: self.entry_index_by_guid,
+            entry_index_by_guid: builder.entry_index_by_guid,
             entries,
             deleted_guids: HashSet::new(),
             problems,
         })
     }
 }
 
 /// Describes where an item's parent comes from.
@@ -467,17 +464,17 @@ impl<'b> ParentBuilder<'b> {
             }
         }
         Ok(self.0)
     }
 
     /// Records a `parent_guid` from a valid tree structure. This is for
     /// callers who already know their structure is consistent, like
     /// `Store::fetch_local_tree()` on Desktop, and
-    /// `{MergedNode, Node}::into_tree()` in the tests.
+    /// `std::convert::TryInto<Tree>` in the tests.
     ///
     /// Both the item and `parent_guid` must exist, and the `parent_guid` must
     /// refer to a folder.
     ///
     /// `by_structure(parent_guid)` is logically the same as:
     ///
     /// ```no_run
     /// # use dogear::{Item, Kind, Result, ROOT_GUID, Tree};
@@ -1472,40 +1469,47 @@ impl<'t> MergedRoot<'t> {
         results
     }
 
     /// Returns an ASCII art representation of the root and its descendants,
     /// similar to `Node::to_ascii_string`.
     pub fn to_ascii_string(&self) -> String {
         self.node.to_ascii_fragment("")
     }
+
+    /// Lets us avoid needing to specify the target type in tests.
+    #[cfg(test)]
+    pub(crate) fn into_tree(self) -> Result<Tree> {
+        self.try_into()
+    }
 }
 
 #[cfg(test)]
-impl<'t> IntoTree for MergedRoot<'t> {
-    fn into_tree(self) -> Result<Tree> {
+impl<'t> TryFrom<MergedRoot<'t>> for Tree {
+    type Error = Error;
+    fn try_from(merged_root: MergedRoot<'t>) -> Result<Tree> {
         fn to_item(merged_node: &MergedNode<'_>) -> Item {
             let node = merged_node.merge_state.node();
             let mut item = Item::new(merged_node.guid.clone(), node.kind);
             item.age = node.age;
             item.needs_merge = merged_node.merge_state.upload_reason() != UploadReason::None;
             item
         }
 
-        let mut b = Tree::with_root(to_item(&self.node));
+        let mut b = Tree::with_root(to_item(&merged_root.node));
         for MergedDescendant {
             merged_parent_node,
             merged_node,
             ..
-        } in self.descendants()
+        } in merged_root.descendants()
         {
             b.item(to_item(merged_node))?
                 .by_structure(&merged_parent_node.guid)?;
         }
-        b.into_tree()
+        b.try_into()
     }
 }
 
 /// A merged bookmark node that indicates which side to prefer, and holds merged
 /// child nodes.
 #[derive(Debug)]
 pub struct MergedNode<'t> {
     pub guid: Guid,
@@ -1746,17 +1750,17 @@ impl<'t> MergeState<'t> {
                     remote_node,
                 }
             }
         }
     }
 
     /// Returns the node from the preferred side. Unlike `local_node()` and
     /// `remote_node()`, this doesn't indicate which side, so it's only used
-    /// for logging and `into_tree()`.
+    /// for logging and `try_from()`.
     fn node(&self) -> &Node<'t> {
         match self {
             MergeState::LocalOnly(local_node) | MergeState::Local { local_node, .. } => local_node,
 
             MergeState::RemoteOnly(remote_node)
             | MergeState::Remote { remote_node, .. }
             | MergeState::RemoteOnlyWithNewStructure(remote_node)
             | MergeState::RemoteWithNewStructure { remote_node, .. } => remote_node,
--- a/toolkit/components/contentprefs/ContentPrefService2.jsm
+++ b/toolkit/components/contentprefs/ContentPrefService2.jsm
@@ -291,23 +291,19 @@ ContentPrefService2.prototype = {
                                                                    context) {
     checkGroupArg(group);
     let prefs = this._getCached(group, name, false, context);
     return prefs[0] || null;
   },
 
   getCachedBySubdomainAndName: function CPS2_getCachedBySubdomainAndName(group,
                                                                          name,
-                                                                         context,
-                                                                         len) {
+                                                                         context) {
     checkGroupArg(group);
-    let prefs = this._getCached(group, name, true, context);
-    if (len)
-      len.value = prefs.length;
-    return prefs;
+    return this._getCached(group, name, true, context);
   },
 
   getCachedGlobal: function CPS2_getCachedGlobal(name, context) {
     let prefs = this._getCached(null, name, false, context);
     return prefs[0] || null;
   },
 
   _getCached: function CPS2__getCached(group, name, includeSubdomains,
--- a/toolkit/components/contentprefs/tests/unit_cps2/head.js
+++ b/toolkit/components/contentprefs/tests/unit_cps2/head.js
@@ -161,23 +161,20 @@ function getCachedOK(args, expectedIsCac
     value: expectedVal,
   };
   getCachedOKEx("getCachedByDomainAndName", args, expectedPref, strict);
 }
 
 function getCachedSubdomainsOK(args, expectedGroupValPairs) {
   if (args.length == 2)
     args.push(undefined);
-  let len = {};
-  args.push(len);
   let actualPrefs = cps.getCachedBySubdomainAndName.apply(cps, args);
   actualPrefs = actualPrefs.sort(function(a, b) {
     return a.domain.localeCompare(b.domain);
   });
-  equal(actualPrefs.length, len.value);
   let expectedPrefs = expectedGroupValPairs.map(function([group, val]) {
     return { domain: group, name: args[1], value: val };
   });
   arraysOfArraysOK([actualPrefs], [expectedPrefs], prefOK);
 }
 
 function getCachedGlobalOK(args, expectedIsCached, expectedVal) {
   if (args.length == 1)
--- a/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
+++ b/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
@@ -29,16 +29,20 @@ function handleCannedResponse(cannedResp
     let headerElements = headerLine.split(":");
     response.setHeader(headerElements[0], headerElements[1].trimLeft());
   }
   response.setHeader("Date", (new Date()).toUTCString());
 
   response.write(cannedResponse.responseBody);
 }
 
+function collectionPath(collectionId) {
+  return `/buckets/default/collections/${collectionId}`;
+}
+
 function collectionRecordsPath(collectionId) {
   return `/buckets/default/collections/${collectionId}/records`;
 }
 
 class KintoServer {
   constructor() {
     // Set up an HTTP Server
     this.httpServer = new HttpServer();
@@ -251,20 +255,33 @@ class KintoServer {
    * @param {string} collectionId   the collection whose route we
    *    should set up.
    */
   installCollection(collectionId) {
     if (this.collections.has(collectionId)) {
       return;
     }
     this.collections.add(collectionId);
+    const remoteCollectionPath = "/v1" + collectionPath(encodeURIComponent(collectionId));
+    this.httpServer.registerPathHandler(remoteCollectionPath, this.handleGetCollection.bind(this, collectionId));
     const remoteRecordsPath = "/v1" + collectionRecordsPath(encodeURIComponent(collectionId));
     this.httpServer.registerPathHandler(remoteRecordsPath, this.handleGetRecords.bind(this, collectionId));
   }
 
+  handleGetCollection(collectionId, request, response) {
+    response.setStatusLine(null, 200, "OK");
+    response.setHeader("Content-Type", "application/json; charset=UTF-8");
+    response.setHeader("Date", (new Date()).toUTCString());
+    response.write(JSON.stringify({
+      data: {
+        id: collectionId,
+      },
+    }));
+  }
+
   handleGetRecords(collectionId, request, response) {
     if (this.checkAuth(request, response)) {
       return;
     }
 
     if (request.method != "GET") {
       do_throw(`only GET is supported on ${request.path}`);
     }
--- a/toolkit/components/places/bookmark_sync/Cargo.toml
+++ b/toolkit/components/places/bookmark_sync/Cargo.toml
@@ -1,17 +1,17 @@
 [package]
 name = "bookmark_sync"
 version = "0.1.0"
 authors = ["Lina Cambridge <lina@yakshaving.ninja>"]
 edition = "2018"
 
 [dependencies]
 atomic_refcell = "0.1"
-dogear = "0.2.4"
+dogear = "0.2.5"
 libc = "0.2"
 log = "0.4"
 cstr = "0.1"
 moz_task = { path = "../../../../xpcom/rust/moz_task" }
 nserror = { path = "../../../../xpcom/rust/nserror" }
 nsstring = { path = "../../../../xpcom/rust/nsstring" }
 storage = { path = "../../../../storage/rust" }
 storage_variant = { path = "../../../../storage/variant" }
--- a/toolkit/components/places/bookmark_sync/src/driver.rs
+++ b/toolkit/components/places/bookmark_sync/src/driver.rs
@@ -1,15 +1,18 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use std::fmt::Write;
+use std::{
+    fmt::Write,
+    sync::atomic::{AtomicBool, Ordering},
+};
 
-use dogear::Guid;
+use dogear::{AbortSignal, Guid};
 use log::{Level, LevelFilter, Log, Metadata, Record};
 use moz_task::{Task, TaskRunnable, ThreadPtrHandle};
 use nserror::nsresult;
 use nsstring::{nsACString, nsCString, nsString};
 use xpcom::interfaces::mozISyncedBookmarksMirrorLogger;
 
 extern "C" {
     fn NS_GeneratePlacesGUID(guid: *mut nsACString) -> nsresult;
@@ -20,16 +23,43 @@ fn generate_guid() -> Result<nsCString, 
     let rv = unsafe { NS_GeneratePlacesGUID(&mut *guid) };
     if rv.succeeded() {
         Ok(guid)
     } else {
         Err(rv)
     }
 }
 
+/// An abort controller is used to abort merges running on the storage thread
+/// from the main thread. Its design is based on the DOM API of the same name.
+pub struct AbortController {
+    aborted: AtomicBool,
+}
+
+impl AbortController {
+    /// Signals the store to stop merging as soon as it can.
+    pub fn abort(&self) {
+        self.aborted.store(true, Ordering::Release)
+    }
+}
+
+impl Default for AbortController {
+    fn default() -> AbortController {
+        AbortController {
+            aborted: AtomicBool::new(false),
+        }
+    }
+}
+
+impl AbortSignal for AbortController {
+    fn aborted(&self) -> bool {
+        self.aborted.load(Ordering::Acquire)
+    }
+}
+
 /// The merger driver, created and used on the storage thread.
 pub struct Driver {
     log: Logger,
 }
 
 impl Driver {
     #[inline]
     pub fn new(log: Logger) -> Driver {
--- a/toolkit/components/places/bookmark_sync/src/error.rs
+++ b/toolkit/components/places/bookmark_sync/src/error.rs
@@ -1,15 +1,18 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use std::{error, fmt, result, string::FromUtf16Error};
 
-use nserror::{nsresult, NS_ERROR_INVALID_ARG, NS_ERROR_STORAGE_BUSY, NS_ERROR_UNEXPECTED};
+use nserror::{
+    nsresult, NS_ERROR_ABORT, NS_ERROR_FAILURE, NS_ERROR_INVALID_ARG, NS_ERROR_STORAGE_BUSY,
+    NS_ERROR_UNEXPECTED,
+};
 
 pub type Result<T> = result::Result<T, Error>;
 
 #[derive(Debug)]
 pub enum Error {
     Dogear(dogear::Error),
     Storage(storage::Error),
     InvalidLocalRoots,
@@ -53,19 +56,21 @@ impl From<FromUtf16Error> for Error {
     fn from(error: FromUtf16Error) -> Error {
         Error::MalformedString(error.into())
     }
 }
 
 impl From<Error> for nsresult {
     fn from(error: Error) -> nsresult {
         match error {
-            Error::Dogear(_) | Error::InvalidLocalRoots | Error::InvalidRemoteRoots => {
-                NS_ERROR_UNEXPECTED
-            }
+            Error::Dogear(err) => match err.kind() {
+                dogear::ErrorKind::Abort => NS_ERROR_ABORT,
+                _ => NS_ERROR_FAILURE,
+            },
+            Error::InvalidLocalRoots | Error::InvalidRemoteRoots => NS_ERROR_UNEXPECTED,
             Error::Storage(err) => err.into(),
             Error::Nsresult(result) => result.clone(),
             Error::UnknownItemKind(_)
             | Error::MalformedString(_)
             | Error::UnknownItemValidity(_) => NS_ERROR_INVALID_ARG,
             Error::MergeConflict => NS_ERROR_STORAGE_BUSY,
         }
     }
--- a/toolkit/components/places/bookmark_sync/src/merger.rs
+++ b/toolkit/components/places/bookmark_sync/src/merger.rs
@@ -1,13 +1,13 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use std::{cell::RefCell, fmt::Write, mem, time::Duration};
+use std::{cell::RefCell, fmt::Write, mem, sync::Arc, time::Duration};
 
 use atomic_refcell::AtomicRefCell;
 use dogear::{MergeTimings, Stats, Store, StructureCounts};
 use log::LevelFilter;
 use moz_task::{Task, TaskRunnable, ThreadPtrHandle, ThreadPtrHolder};
 use nserror::{nsresult, NS_ERROR_FAILURE, NS_ERROR_UNEXPECTED, NS_OK};
 use nsstring::nsString;
 use storage::Conn;
@@ -15,31 +15,33 @@ use storage_variant::HashPropertyBag;
 use thin_vec::ThinVec;
 use xpcom::{
     interfaces::{
         mozIStorageConnection, mozISyncedBookmarksMirrorCallback, mozISyncedBookmarksMirrorLogger,
     },
     RefPtr,
 };
 
-use crate::driver::{Driver, Logger};
+use crate::driver::{AbortController, Driver, Logger};
 use crate::error;
 use crate::store;
 
 #[derive(xpcom)]
 #[xpimplements(mozISyncedBookmarksMerger)]
 #[refcnt = "nonatomic"]
 pub struct InitSyncedBookmarksMerger {
+    controller: Arc<AbortController>,
     db: RefCell<Option<Conn>>,
     logger: RefCell<Option<RefPtr<mozISyncedBookmarksMirrorLogger>>>,
 }
 
 impl SyncedBookmarksMerger {
     pub fn new() -> RefPtr<SyncedBookmarksMerger> {
         SyncedBookmarksMerger::allocate(InitSyncedBookmarksMerger {
+            controller: Arc::new(AbortController::default()),
             db: RefCell::default(),
             logger: RefCell::default(),
         })
     }
 
     xpcom_method!(get_db => GetDb() -> *const mozIStorageConnection);
     fn get_db(&self) -> Result<RefPtr<mozIStorageConnection>, nsresult> {
         self.db
@@ -89,16 +91,17 @@ impl SyncedBookmarksMerger {
         let db = match *self.db.borrow() {
             Some(ref db) => db.clone(),
             None => return Err(NS_ERROR_FAILURE),
         };
         let logger = &*self.logger.borrow();
         let async_thread = db.thread()?;
         let task = MergeTask::new(
             &db,
+            Arc::clone(&self.controller),
             logger.as_ref().cloned(),
             local_time_seconds,
             remote_time_seconds,
             weak_uploads
                 .map(|w| w.as_slice().to_vec())
                 .unwrap_or_default(),
             callback,
         )?;
@@ -106,36 +109,39 @@ impl SyncedBookmarksMerger {
             "bookmark_sync::SyncedBookmarksMerger::merge",
             Box::new(task),
         )?;
         runnable.dispatch(&async_thread)
     }
 
     xpcom_method!(finalize => Finalize());
     fn finalize(&self) -> Result<(), nsresult> {
+        self.controller.abort();
         mem::drop(self.db.borrow_mut().take());
         mem::drop(self.logger.borrow_mut().take());
         Ok(())
     }
 }
 
 struct MergeTask {
     db: Conn,
+    controller: Arc<AbortController>,
     max_log_level: LevelFilter,
     logger: Option<ThreadPtrHandle<mozISyncedBookmarksMirrorLogger>>,
     local_time_millis: i64,
     remote_time_millis: i64,
     weak_uploads: Vec<nsString>,
     callback: ThreadPtrHandle<mozISyncedBookmarksMirrorCallback>,
     result: AtomicRefCell<Option<error::Result<Stats>>>,
 }
 
 impl MergeTask {
     fn new(
         db: &Conn,
+        controller: Arc<AbortController>,
         logger: Option<RefPtr<mozISyncedBookmarksMirrorLogger>>,
         local_time_seconds: i64,
         remote_time_seconds: i64,
         weak_uploads: Vec<nsString>,
         callback: RefPtr<mozISyncedBookmarksMirrorCallback>,
     ) -> Result<MergeTask, nsresult> {
         let max_log_level = logger
             .as_ref()
@@ -156,39 +162,41 @@ impl MergeTask {
             Some(logger) => Some(ThreadPtrHolder::new(
                 cstr!("mozISyncedBookmarksMirrorLogger"),
                 logger,
             )?),
             None => None,
         };
         Ok(MergeTask {
             db: db.clone(),
+            controller,
             max_log_level,
             logger,
             local_time_millis: local_time_seconds * 1000,
             remote_time_millis: remote_time_seconds * 1000,
             weak_uploads,
             callback: ThreadPtrHolder::new(cstr!("mozISyncedBookmarksMirrorCallback"), callback)?,
             result: AtomicRefCell::default(),
         })
     }
 }
 
 impl Task for MergeTask {
     fn run(&self) {
         let mut db = self.db.clone();
         let mut store = store::Store::new(
             &mut db,
+            &self.controller,
             self.local_time_millis,
             self.remote_time_millis,
             &self.weak_uploads,
         );
         let log = Logger::new(self.max_log_level, self.logger.clone());
         let driver = Driver::new(log);
-        *self.result.borrow_mut() = Some(store.merge_with_driver(&driver));
+        *self.result.borrow_mut() = Some(store.merge_with_driver(&driver, &*self.controller));
     }
 
     fn done(&self) -> Result<(), nsresult> {
         let callback = self.callback.get().unwrap();
         match self.result.borrow_mut().take() {
             Some(Ok(stats)) => {
                 let mut telem = HashPropertyBag::new();
                 telem.set("fetchLocalTreeTime", stats.time(|t| t.fetch_local_tree));
--- a/toolkit/components/places/bookmark_sync/src/store.rs
+++ b/toolkit/components/places/bookmark_sync/src/store.rs
@@ -1,22 +1,23 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use std::{collections::HashMap, fmt};
+use std::{collections::HashMap, convert::TryFrom, fmt};
 
 use dogear::{
-    Content, Deletion, Guid, IntoTree, Item, Kind, MergedDescendant, MergedRoot, Tree,
+    AbortSignal, Content, Deletion, Guid, Item, Kind, MergedDescendant, MergedRoot, Tree,
     UploadReason, Validity,
 };
 use nsstring::{nsCString, nsString};
 use storage::{Conn, Step};
 use xpcom::interfaces::{mozISyncedBookmarksMerger, nsINavBookmarksService};
 
+use crate::driver::AbortController;
 use crate::error::{Error, Result};
 
 pub const LMANNO_FEEDURI: &'static str = "livemark/feedURI";
 
 pub const LOCAL_ITEMS_SQL_FRAGMENT: &str = "
   localItems(id, guid, parentId, parentGuid, position, type, title, parentTitle,
              placeId, dateAdded, lastModified, syncChangeCounter, level) AS (
   SELECT b.id, b.guid, 0, NULL, b.position, b.type, b.title, NULL,
@@ -35,37 +36,40 @@ extern "C" {
 }
 
 fn total_sync_changes() -> i64 {
     unsafe { NS_NavBookmarksTotalSyncChanges() }
 }
 
 pub struct Store<'s> {
     db: &'s mut Conn,
+    controller: &'s AbortController,
 
     /// The total Sync change count before merging. We store this before
     /// accessing Places, and compare the current and stored counts after
     /// opening our transaction. If they match, we can safely apply the
     /// tree. Otherwise, we bail and try merging again on the next sync.
     total_sync_changes: i64,
 
     local_time_millis: i64,
     remote_time_millis: i64,
     weak_uploads: &'s [nsString],
 }
 
 impl<'s> Store<'s> {
     pub fn new(
         db: &'s mut Conn,
+        controller: &'s AbortController,
         local_time_millis: i64,
         remote_time_millis: i64,
         weak_uploads: &'s [nsString],
     ) -> Store<'s> {
         Store {
             db,
+            controller,
             total_sync_changes: total_sync_changes(),
             local_time_millis,
             remote_time_millis,
             weak_uploads,
         }
     }
 
     /// Creates a local tree item from a row in the `localItems` CTE.
@@ -147,27 +151,29 @@ impl<'s> dogear::Store<Error> for Store<
         items_statement.bind_by_name("separatorKind", mozISyncedBookmarksMerger::KIND_SEPARATOR)?;
         let mut builder = match items_statement.step()? {
             // The first row is always the root.
             Some(step) => Tree::with_root(self.local_row_to_item(&step)?),
             None => return Err(Error::InvalidLocalRoots.into()),
         };
         while let Some(step) = items_statement.step()? {
             // All subsequent rows are descendants.
+            self.controller.err_if_aborted()?;
             let raw_parent_guid: nsString = step.get_by_name("parentGuid")?;
             let parent_guid = Guid::from_utf16(&*raw_parent_guid)?;
             builder
                 .item(self.local_row_to_item(&step)?)?
                 .by_structure(&parent_guid)?;
         }
 
-        let mut tree = builder.into_tree()?;
+        let mut tree = Tree::try_from(builder)?;
 
         let mut deletions_statement = self.db.prepare("SELECT guid FROM moz_bookmarks_deleted")?;
         while let Some(step) = deletions_statement.step()? {
+            self.controller.err_if_aborted()?;
             let raw_guid: nsString = step.get_by_name("guid")?;
             let guid = Guid::from_utf16(&*raw_guid)?;
             tree.note_deleted(guid);
         }
 
         Ok(tree)
     }
 
@@ -186,16 +192,17 @@ impl<'s> dogear::Store<Error> for Store<
                LEFT JOIN items v ON v.guid = b.guid
                WHERE v.guid IS NULL AND
                      p.guid <> :rootGuid AND
                      b.syncStatus <> :syncStatus"#,
         )?;
         statement.bind_by_name("rootGuid", nsCString::from(&*dogear::ROOT_GUID))?;
         statement.bind_by_name("syncStatus", nsINavBookmarksService::SYNC_STATUS_NORMAL)?;
         while let Some(step) = statement.step()? {
+            self.controller.err_if_aborted()?;
             let typ: i64 = step.get_by_name("type")?;
             let content = match typ {
                 nsINavBookmarksService::TYPE_BOOKMARK => {
                     let raw_title: nsString = step.get_by_name("title")?;
                     let title = String::from_utf16(&*raw_title)?;
                     let raw_url_href: nsString = step.get_by_name("url")?;
                     let url_href = String::from_utf16(&*raw_url_href)?;
                     Content::Bookmark { title, url_href }
@@ -241,47 +248,50 @@ impl<'s> dogear::Store<Error> for Store<
             "SELECT guid, parentGuid, serverModified, kind, needsMerge, validity
              FROM items
              WHERE NOT isDeleted AND
                    guid <> :rootGuid
              ORDER BY guid",
         )?;
         items_statement.bind_by_name("rootGuid", nsCString::from(&*dogear::ROOT_GUID))?;
         while let Some(step) = items_statement.step()? {
+            self.controller.err_if_aborted()?;
             let p = builder.item(self.remote_row_to_item(&step)?)?;
             let raw_parent_guid: Option<nsString> = step.get_by_name("parentGuid")?;
             if let Some(raw_parent_guid) = raw_parent_guid {
                 p.by_parent_guid(Guid::from_utf16(&*raw_parent_guid)?)?;
             }
         }
 
         let mut structure_statement = self.db.prepare(
             "SELECT guid, parentGuid FROM structure
              WHERE guid <> :rootGuid
              ORDER BY parentGuid, position",
         )?;
         structure_statement.bind_by_name("rootGuid", nsCString::from(&*dogear::ROOT_GUID))?;
         while let Some(step) = structure_statement.step()? {
+            self.controller.err_if_aborted()?;
             let raw_guid: nsString = step.get_by_name("guid")?;
             let guid = Guid::from_utf16(&*raw_guid)?;
 
             let raw_parent_guid: nsString = step.get_by_name("parentGuid")?;
             let parent_guid = Guid::from_utf16(&*raw_parent_guid)?;
 
             builder.parent_for(&guid).by_children(&parent_guid)?;
         }
 
-        let mut tree = builder.into_tree()?;
+        let mut tree = Tree::try_from(builder)?;
 
         let mut deletions_statement = self.db.prepare(
             "SELECT guid FROM items
              WHERE isDeleted AND
                    needsMerge",
         )?;
         while let Some(step) = deletions_statement.step()? {
+            self.controller.err_if_aborted()?;
             let raw_guid: nsString = step.get_by_name("guid")?;
             let guid = Guid::from_utf16(&*raw_guid)?;
             tree.note_deleted(guid);
         }
 
         Ok(tree)
     }
 
@@ -300,16 +310,17 @@ impl<'s> dogear::Store<Error> for Store<
                WHERE NOT v.isDeleted AND
                      v.needsMerge AND
                      b.guid IS NULL AND
                      IFNULL(s.parentGuid, :unfiledGuid) <> :rootGuid"#,
         )?;
         statement.bind_by_name("unfiledGuid", nsCString::from(&*dogear::UNFILED_GUID))?;
         statement.bind_by_name("rootGuid", nsCString::from(&*dogear::ROOT_GUID))?;
         while let Some(step) = statement.step()? {
+            self.controller.err_if_aborted()?;
             let kind: i64 = step.get_by_name("kind")?;
             let content = match kind {
                 mozISyncedBookmarksMerger::KIND_BOOKMARK
                 | mozISyncedBookmarksMerger::KIND_QUERY => {
                     let raw_title: nsString = step.get_by_name("title")?;
                     let title = String::from_utf16(&*raw_title)?;
 
                     let raw_url_href: nsString = step.get_by_name("url")?;
@@ -339,28 +350,36 @@ impl<'s> dogear::Store<Error> for Store<
         Ok(contents)
     }
 
     fn apply<'t>(
         &mut self,
         root: MergedRoot<'t>,
         deletions: impl Iterator<Item = Deletion<'t>>,
     ) -> Result<()> {
+        self.controller.err_if_aborted()?;
         let descendants = root.descendants();
+
+        self.controller.err_if_aborted()?;
         let deletions = deletions.collect::<Vec<_>>();
 
         // Apply the merged tree and stage outgoing items. This transaction
         // blocks writes from the main connection until it's committed, so we
         // try to do as little work as possible within it.
         let tx = self.db.transaction()?;
         if self.total_sync_changes != total_sync_changes() {
             return Err(Error::MergeConflict);
         }
+
+        self.controller.err_if_aborted()?;
         update_local_items_in_places(&tx, descendants, deletions)?;
+
+        self.controller.err_if_aborted()?;
         stage_items_to_upload(&tx, &self.weak_uploads)?;
+
         cleanup(&tx)?;
         tx.commit()?;
 
         Ok(())
     }
 }
 
 /// Builds a temporary table with the merge states of all nodes in the merged
new file mode 100644
--- /dev/null
+++ b/toolkit/components/places/tests/sync/test_bookmark_abort_merging.js
@@ -0,0 +1,32 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+add_task(async function test_abort_merging() {
+  let buf = await openMirror("abort_merging");
+
+  let promise = new Promise((resolve, reject) => {
+    let callback = {
+      handleResult() {
+        reject(new Error("Shouldn't have merged after aborting"));
+      },
+      handleError(code, message) {
+        equal(code, Cr.NS_ERROR_ABORT, "Should abort merge with result code");
+        resolve();
+      },
+    };
+    // `merge` dispatches a runnable that starts the merge on the storage
+    // thread on the next turn of the event loop. In the same turn, before that
+    // runnable runs, we call `finalize`, which sets the abort controller's
+    // aborted flag.
+    buf.merger.merge(0, 0, [], callback);
+    buf.merger.finalize();
+  });
+
+  await promise;
+
+  // Even though the merger is already finalized on the Rust side, the DB
+  // connection is still open on the JS side. Finalizing `buf` closes it.
+  await buf.finalize();
+  await PlacesUtils.bookmarks.eraseEverything();
+  await PlacesSyncUtils.bookmarks.reset();
+});
--- a/toolkit/components/places/tests/sync/xpcshell.ini
+++ b/toolkit/components/places/tests/sync/xpcshell.ini
@@ -1,16 +1,17 @@
 [DEFAULT]
 head = head_sync.js
 support-files =
   sync_utils_bookmarks.html
   sync_utils_bookmarks.json
   mirror_corrupt.sqlite
   mirror_v1.sqlite
 
+[test_bookmark_abort_merging.js]
 [test_bookmark_chunking.js]
 [test_bookmark_corruption.js]
 [test_bookmark_deduping.js]
 [test_bookmark_deletion.js]
 [test_bookmark_explicit_weakupload.js]
 [test_bookmark_haschanges.js]
 [test_bookmark_kinds.js]
 [test_bookmark_merge_conflicts.js]
--- a/toolkit/components/telemetry/app/TelemetryEnvironment.jsm
+++ b/toolkit/components/telemetry/app/TelemetryEnvironment.jsm
@@ -823,17 +823,17 @@ EnvironmentAddonBuilder.prototype = {
       return [{
         name: "dummy", version: "0.1", description: "Blocklist unavailable",
         blocklisted: false, disabled: true, clicktoplay: false,
         mimeTypes: ["text/there.is.only.blocklist"],
         updateDay: Utils.millisecondsToDays(Date.now()),
       }];
     }
     let pluginTags =
-      Cc["@mozilla.org/plugin/host;1"].getService(Ci.nsIPluginHost).getPluginTags({});
+      Cc["@mozilla.org/plugin/host;1"].getService(Ci.nsIPluginHost).getPluginTags();
 
     let activePlugins = [];
     for (let tag of pluginTags) {
       // Skip plugins which are not active.
       if (tag.disabled) {
         continue;
       }
 
@@ -843,17 +843,17 @@ EnvironmentAddonBuilder.prototype = {
 
         activePlugins.push({
           name: limitStringToLength(tag.name, MAX_ADDON_STRING_LENGTH),
           version: limitStringToLength(tag.version, MAX_ADDON_STRING_LENGTH),
           description: limitStringToLength(tag.description, MAX_ADDON_STRING_LENGTH),
           blocklisted: tag.blocklisted,
           disabled: tag.disabled,
           clicktoplay: tag.clicktoplay,
-          mimeTypes: tag.getMimeTypes({}),
+          mimeTypes: tag.getMimeTypes(),
           updateDay: Utils.millisecondsToDays(updateDate.getTime()),
         });
       } catch (ex) {
         this._environment._log.error("_getActivePlugins - A plugin was discarded due to an error", ex);
         continue;
       }
     }
 
--- a/toolkit/components/telemetry/tests/marionette/harness/telemetry_harness/testcase.py
+++ b/toolkit/components/telemetry/tests/marionette/harness/telemetry_harness/testcase.py
@@ -1,16 +1,17 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import contextlib
 import os
 import re
 import textwrap
+import time
 
 from marionette_driver.addons import Addons
 from marionette_driver.errors import MarionetteException
 from marionette_driver.wait import Wait
 from marionette_driver import By, keys
 from marionette_harness import MarionetteTestCase
 from marionette_harness.runner.mixins.window_manager import WindowManagerMixin
 
@@ -61,16 +62,20 @@ class TelemetryTestCase(WindowManagerMix
     def search(self, text):
         """Perform a search via the browser's URL bar."""
 
         with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
             urlbar = self.marionette.find_element(By.ID, "urlbar")
             urlbar.send_keys(keys.Keys.DELETE)
             urlbar.send_keys(text + keys.Keys.ENTER)
 
+        # Wait 0.1 seconds before proceeding, to reduce the chance that
+        # Firefox shuts down before the Telemetry is recorded.
+        time.sleep(0.1)
+
     def search_in_new_tab(self, text):
         """Open a new tab and perform a search via the browser's URL bar,
         then close the new tab."""
 
         with self.new_tab():
             self.search(text)
 
     def assertIsValidUUID(self, value):
--- a/toolkit/components/telemetry/tests/unit/test_TelemetryEnvironment.js
+++ b/toolkit/components/telemetry/tests/unit/test_TelemetryEnvironment.js
@@ -109,32 +109,30 @@ PluginTag.prototype = {
     return this.pluginTag.enabledState == Ci.nsIPluginTag.STATE_DISABLED;
   },
   set disabled(val) {
     this.pluginTag.enabledState = Ci.nsIPluginTag[val ? "STATE_DISABLED" : "STATE_CLICKTOPLAY"];
   },
 
   mimeTypes: [ PLUGIN_MIME_TYPE1, PLUGIN_MIME_TYPE2 ],
 
-  getMimeTypes(count) {
-    count.value = this.mimeTypes.length;
+  getMimeTypes() {
     return this.mimeTypes;
   },
 };
 
 // A container for the plugins handled by the fake plugin host.
 var gInstalledPlugins = [
   new PluginTag("Java", "A mock Java plugin", "1.0", false /* Disabled */),
   new PluginTag(FLASH_PLUGIN_NAME, FLASH_PLUGIN_DESC, FLASH_PLUGIN_VERSION, true),
 ];
 
 // A fake plugin host for testing plugin telemetry environment.
 var PluginHost = {
-  getPluginTags(countRef) {
-    countRef.value = gInstalledPlugins.length;
+  getPluginTags() {
     return gInstalledPlugins.map(plugin => plugin.pluginTag);
   },
 
   QueryInterface: ChromeUtils.generateQI(["nsIPluginHost"]),
 };
 
 function registerFakePluginHost() {
   MockRegistrar.register("@mozilla.org/plugin/host;1", PluginHost);
--- a/toolkit/components/url-classifier/Entries.h
+++ b/toolkit/components/url-classifier/Entries.h
@@ -81,23 +81,30 @@ struct SafebrowsingHash {
     return Comparator::Compare(buf, aOther.buf) != 0;
   }
 
   bool operator<(const self_type& aOther) const {
     return Comparator::Compare(buf, aOther.buf) < 0;
   }
 
   void ToString(nsACString& aStr) const {
+    // Base64 encodes each 3-byte chunk of input as 4 output characters.
     uint32_t len = ((sHashSize + 2) / 3) * 4;
 
     aStr.SetLength(len);
     PL_Base64Encode((char*)buf, sHashSize, aStr.BeginWriting());
     MOZ_ASSERT(aStr.BeginReading()[len] == '\0');
   }
 
+  nsCString ToString() const {
+    nsAutoCString str;
+    ToString(str);
+    return std::move(str);
+  }
+
   void ToHexString(nsACString& aStr) const {
     static const char* const lut = "0123456789ABCDEF";
     // 32 bytes is the longest hash
     size_t len = 32;
 
     aStr.SetCapacity(2 * len);
     for (size_t i = 0; i < len; ++i) {
       const char c = static_cast<char>(buf[i]);
--- a/toolkit/components/url-classifier/UrlClassifierHashCompleter.jsm
+++ b/toolkit/components/url-classifier/UrlClassifierHashCompleter.jsm
@@ -525,19 +525,17 @@ HashCompleterRequest.prototype = {
     let prefixArray = Array.from(prefixSet).sort();
 
     log("Build v4 gethash request with " + JSON.stringify(tableNameArray) + ", "
                                          + JSON.stringify(stateArray) + ", "
                                          + JSON.stringify(prefixArray));
 
     return gUrlUtil.makeFindFullHashRequestV4(tableNameArray,
                                               stateArray,
-                                              prefixArray,
-                                              tableNameArray.length,
-                                              prefixArray.length);
+                                              prefixArray);
   },
 
   // Returns a string for the request body based on the contents of
   // this._requests.
   buildRequest: function HCR_buildRequest() {
     // Sometimes duplicate entries are sent to HashCompleter but we do not need
     // to propagate these to the server. (bug 633644)
     let prefixes = [];
--- a/toolkit/components/url-classifier/UrlClassifierListManager.jsm
+++ b/toolkit/components/url-classifier/UrlClassifierListManager.jsm
@@ -486,18 +486,17 @@ PROT_ListManager.prototype.makeUpdateReq
     });
 
     log("stateArray: " + stateArray);
 
     let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
                      .getService(Ci.nsIUrlClassifierUtils);
 
     streamerMap.requestPayload = urlUtils.makeUpdateRequestV4(tableArray,
-                                                              stateArray,
-                                                              tableArray.length);
+                                                              stateArray);
     streamerMap.isPostRequest = false;
   } else {
     // Build the request. For each table already in the database, include the
     // chunk data from the database
     var lines = tableData.split("\n");
     for (var i = 0; i < lines.length; i++) {
       var fields = lines[i].split(";");
       var name = fields[0];
--- a/toolkit/components/url-classifier/nsIUrlClassifierUtils.idl
+++ b/toolkit/components/url-classifier/nsIUrlClassifierUtils.idl
@@ -105,40 +105,38 @@ interface nsIUrlClassifierUtils : nsISup
    */
   uint32_t convertListNameToThreatType(in ACString listName);
 
   /**
    * Make update request for given lists and their states.
    *
    * @param aListNames An array of list name represented in string.
    * @param aState An array of states (encoded in base64 format) for each list.
-   * @param aCount The array length of aList and aState.
+   *
+   * The two argument arrays must be the same length.
    *
    * @returns A base64url encoded string.
    */
-  ACString makeUpdateRequestV4([array, size_is(aCount)] in string aListNames,
-                               [array, size_is(aCount)] in string aStatesBase64,
-                               in uint32_t aCount);
+  ACString makeUpdateRequestV4(in Array<ACString> aListNames,
+                               in Array<ACString> aStatesBase64);
 
     /**
    * Make "find full hash" request by for the given prefixes.
    *
    * @param aListNames An array of list names represented in string.
    * @param aListStatesBase64 An array of list states represented in base64.
    * @param aPrefixes An array of prefixes for which we'd like to find full hashes..
-   * @param aListCount The array length of aListNames
-   * @param aPrefixCount The array length of aPrefixes
+   *
+   * The aListNames and aListStatesBase64 arrays must be the same length.
    *
    * @returns A base64url encoded string.
    */
-  ACString makeFindFullHashRequestV4([array, size_is(aListCount)] in string aListNames,
-                                     [array, size_is(aListCount)] in string aListStatesBase64,
-                                     [array, size_is(aPrefixCount)] in string aPrefixes,
-                                     in uint32_t aListCount,
-                                     in uint32_t aPrefixCount);
+  ACString makeFindFullHashRequestV4(in Array<ACString> aListNames,
+                                     in Array<ACString> aListStatesBase64,
+                                     in Array<ACString> aPrefixes);
 
   /**
    * Make ThreatHit report request body.
    *
    * @param aChannel channel which encountered the threat.
    * @param aListName listname represented in string.
    * @param aHashBase64 hash-based hit represented in base64.
    *
--- a/toolkit/components/url-classifier/nsUrlClassifierUtils.cpp
+++ b/toolkit/components/url-classifier/nsUrlClassifierUtils.cpp
@@ -117,17 +117,17 @@ static PlatformType GetPlatformType() {
   return LINUX_PLATFORM;
 #endif
 }
 
 typedef FetchThreatListUpdatesRequest_ListUpdateRequest ListUpdateRequest;
 typedef FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints Constraints;
 
 static void InitListUpdateRequest(ThreatType aThreatType,
-                                  const char* aStateBase64,
+                                  const nsCString& aStateBase64,
                                   ListUpdateRequest* aListUpdateRequest) {
   aListUpdateRequest->set_threat_type(aThreatType);
   PlatformType platform = GetPlatformType();
 #if defined(ANDROID)
   // Temporary hack to fix bug 1441345.
   if ((aThreatType == SOCIAL_ENGINEERING_PUBLIC) ||
       (aThreatType == SOCIAL_ENGINEERING)) {
     platform = LINUX_PLATFORM;
@@ -136,19 +136,19 @@ static void InitListUpdateRequest(Threat
   aListUpdateRequest->set_platform_type(platform);
   aListUpdateRequest->set_threat_entry_type(URL);
 
   Constraints* contraints = new Constraints();
   contraints->add_supported_compressions(RICE);
   aListUpdateRequest->set_allocated_constraints(contraints);
 
   // Only set non-empty state.
-  if (aStateBase64[0] != '\0') {
+  if (!aStateBase64.IsEmpty()) {
     nsCString stateBinary;
-    nsresult rv = Base64Decode(nsDependentCString(aStateBase64), stateBinary);
+    nsresult rv = Base64Decode(aStateBase64, stateBinary);
     if (NS_SUCCEEDED(rv)) {
       aListUpdateRequest->set_state(stateBinary.get(), stateBinary.Length());
     }
   }
 }
 
 static ClientInfo* CreateClientInfo() {
   ClientInfo* c = new ClientInfo();
@@ -387,37 +387,39 @@ nsUrlClassifierUtils::GetProtocolVersion
   } else {
     aVersion = DEFAULT_PROTOCOL_VERSION;
   }
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
-nsUrlClassifierUtils::MakeUpdateRequestV4(const char** aListNames,
-                                          const char** aStatesBase64,
-                                          uint32_t aCount,
-                                          nsACString& aRequest) {
+nsUrlClassifierUtils::MakeUpdateRequestV4(
+    const nsTArray<nsCString>& aListNames,
+    const nsTArray<nsCString>& aStatesBase64, nsACString& aRequest) {
   using namespace mozilla::safebrowsing;
 
+  if (aListNames.Length() != aStatesBase64.Length()) {
+    return NS_ERROR_INVALID_ARG;
+  }
+
   FetchThreatListUpdatesRequest r;
   r.set_allocated_client(CreateClientInfo());
 
-  for (uint32_t i = 0; i < aCount; i++) {
-    nsCString listName(aListNames[i]);
+  for (uint32_t i = 0; i < aListNames.Length(); i++) {
     uint32_t threatType;
-    nsresult rv = ConvertListNameToThreatType(listName, &threatType);
+    nsresult rv = ConvertListNameToThreatType(aListNames[i], &threatType);
     if (NS_FAILED(rv)) {
       continue;  // Unknown list name.
     }
     if (!IsAllowedOnCurrentPlatform(threatType)) {
       NS_WARNING(
           nsPrintfCString(
               "Threat type %d (%s) is unsupported on current platform: %d",
-              threatType, aListNames[i], GetPlatformType())
+              threatType, aListNames[i].get(), GetPlatformType())
               .get());
       continue;  // Some threat types are not available on some platforms.
     }
     auto lur = r.mutable_list_update_requests()->Add();
     InitListUpdateRequest(static_cast<ThreatType>(threatType), aStatesBase64[i],
                           lur);
   }
 
@@ -431,76 +433,77 @@ nsUrlClassifierUtils::MakeUpdateRequestV
   NS_ENSURE_SUCCESS(rv, rv);
 
   aRequest = out;
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
-nsUrlClassifierUtils::MakeFindFullHashRequestV4(const char** aListNames,
-                                                const char** aListStatesBase64,
-                                                const char** aPrefixesBase64,
-                                                uint32_t aListCount,
-                                                uint32_t aPrefixCount,
-                                                nsACString& aRequest) {
+nsUrlClassifierUtils::MakeFindFullHashRequestV4(
+    const nsTArray<nsCString>& aListNames,
+    const nsTArray<nsCString>& aListStatesBase64,
+    const nsTArray<nsCString>& aPrefixesBase64, nsACString& aRequest) {
+  if (aListNames.Length() != aListStatesBase64.Length()) {
+    return NS_ERROR_INVALID_ARG;
+  }
+
   FindFullHashesRequest r;
   r.set_allocated_client(CreateClientInfo());
 
   nsresult rv;
 
   //-------------------------------------------------------------------
   // Set up FindFullHashesRequest.threat_info.
   auto threatInfo = r.mutable_threat_info();
 
   PlatformType platform = GetPlatformType();
 
   // 1) Set threat types.
-  for (uint32_t i = 0; i < aListCount; i++) {
+  for (uint32_t i = 0; i < aListNames.Length(); i++) {
     // Add threat types.
     uint32_t threatType;
-    rv = ConvertListNameToThreatType(nsDependentCString(aListNames[i]),
-                                     &threatType);
+    rv = ConvertListNameToThreatType(aListNames[i], &threatType);
     NS_ENSURE_SUCCESS(rv, rv);
     if (!IsAllowedOnCurrentPlatform(threatType)) {
       NS_WARNING(
           nsPrintfCString(
               "Threat type %d (%s) is unsupported on current platform: %d",
-              threatType, aListNames[i], GetPlatformType())
+              threatType, aListNames[i].get(), GetPlatformType())
               .get());
       continue;
     }
     threatInfo->add_threat_types((ThreatType)threatType);
 
 #if defined(ANDROID)
     // Temporary hack to fix bug 1441345.
     if (((ThreatType)threatType == SOCIAL_ENGINEERING_PUBLIC) ||
         ((ThreatType)threatType == SOCIAL_ENGINEERING)) {
       platform = LINUX_PLATFORM;
     }
 #endif
 
     // Add client states for index 'i' only when the threat type is available
     // on current platform.
     nsCString stateBinary;
-    rv = Base64Decode(nsDependentCString(aListStatesBase64[i]), stateBinary);
+    rv = Base64Decode(aListStatesBase64[i], stateBinary);
     NS_ENSURE_SUCCESS(rv, rv);
     r.add_client_states(stateBinary.get(), stateBinary.Length());
   }
 
   // 2) Set platform type.
   threatInfo->add_platform_types(platform);
 
   // 3) Set threat entry type.
   threatInfo->add_threat_entry_types(URL);
 
   // 4) Set threat entries.
-  for (uint32_t i = 0; i < aPrefixCount; i++) {
+  for (const nsCString& prefix : aPrefixesBase64) {
     nsCString prefixBinary;
-    rv = Base64Decode(nsDependentCString(aPrefixesBase64[i]), prefixBinary);
+    rv = Base64Decode(prefix, prefixBinary);
     threatInfo->add_threat_entries()->set_hash(prefixBinary.get(),
                                                prefixBinary.Length());
   }
   //-------------------------------------------------------------------
 
   // Then serialize.
   std::string s;
   r.SerializeToString(&s);
--- a/toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp
@@ -2,66 +2,46 @@
 #include "gtest/gtest.h"
 #include "nsUrlClassifierUtils.h"
 #include "mozilla/Base64.h"
 
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 namespace {
-
-// |Base64EncodedStringArray| and |MakeBase64EncodedStringArray|
-// works together to make us able to do things "literally" and easily.
-
-// Given a nsCString array, construct an object which can be implicitly
-// casted to |const char**|, where all owning c-style strings have been
-// base64 encoded. The memory life cycle of what the "cast operator"
-// returns is just as the object itself.
-class Base64EncodedStringArray {
- public:
-  Base64EncodedStringArray(nsCString aArray[], size_t N);
-  operator const char**() const { return (const char**)&mArray[0]; }
-
- private:
-  // Since we can't guarantee the layout of nsCString (can we?),
-  // an additional nsTArray<nsCString> is required to manage the
-  // allocated string.
-  nsTArray<const char*> mArray;
-  nsTArray<nsCString> mStringStorage;
-};
-
-// Simply used to infer the fixed-array size automatically.
 template <size_t N>
-Base64EncodedStringArray MakeBase64EncodedStringArray(nsCString (&aArray)[N]) {
-  return Base64EncodedStringArray(aArray, N);
-}
-
+void ToBase64EncodedStringArray(nsCString (&aInput)[N],
+                                nsTArray<nsCString>& aEncodedArray);
 }  // end of unnamed namespace.
 
 TEST(UrlClassifierFindFullHash, Request)
 {
   nsUrlClassifierUtils* urlUtil = nsUrlClassifierUtils::GetInstance();
 
-  const char* listNames[] = {"test-phish-proto", "test-unwanted-proto"};
+  nsTArray<nsCString> listNames;
+  listNames.AppendElement("test-phish-proto");
+  listNames.AppendElement("test-unwanted-proto");
 
   nsCString listStates[] = {nsCString("sta\x00te1", 7),
                             nsCString("sta\x00te2", 7)};
+  nsTArray<nsCString> listStateArray;
+  ToBase64EncodedStringArray(listStates, listStateArray);
 
   nsCString prefixes[] = {nsCString("\x00\x00\x00\x01", 4),
                           nsCString("\x00\x00\x00\x00\x01", 5),
                           nsCString("\x00\xFF\x00\x01", 4),
                           nsCString("\x00\xFF\x00\x01\x11\x23\xAA\xBC", 8),
                           nsCString("\x00\x00\x00\x01\x00\x01\x98", 7)};
+  nsTArray<nsCString> prefixArray;
+  ToBase64EncodedStringArray(prefixes, prefixArray);
 
   nsCString requestBase64;
   nsresult rv;
-  rv = urlUtil->MakeFindFullHashRequestV4(
-      listNames, MakeBase64EncodedStringArray(listStates),
-      MakeBase64EncodedStringArray(prefixes), ArrayLength(listNames),
-      ArrayLength(prefixes), requestBase64);
+  rv = urlUtil->MakeFindFullHashRequestV4(listNames, listStateArray,
+                                          prefixArray, requestBase64);
   ASSERT_TRUE(NS_SUCCEEDED(rv));
 
   // Base64 URL decode first.
   FallibleTArray<uint8_t> requestBinary;
   rv = Base64URLDecode(requestBase64, Base64URLDecodePaddingPolicy::Require,
                        requestBinary);
   ASSERT_TRUE(NS_SUCCEEDED(rv));
 
@@ -77,18 +57,18 @@ TEST(UrlClassifierFindFullHash, Request)
   }
 
   auto threatInfo = r.threat_info();
 
   // Compare threat types.
   ASSERT_EQ(threatInfo.threat_types_size(), (int)ArrayLength(listStates));
   for (int i = 0; i < threatInfo.threat_types_size(); i++) {
     uint32_t expectedThreatType;
-    rv = urlUtil->ConvertListNameToThreatType(nsCString(listNames[i]),
-                                              &expectedThreatType);
+    rv =
+        urlUtil->ConvertListNameToThreatType(listNames[i], &expectedThreatType);
     ASSERT_TRUE(NS_SUCCEEDED(rv));
     ASSERT_EQ(threatInfo.threat_types(i), expectedThreatType);
   }
 
   // Compare prefixes.
   ASSERT_EQ(threatInfo.threat_entries_size(), (int)ArrayLength(prefixes));
   for (int i = 0; i < threatInfo.threat_entries_size(); i++) {
     auto p = threatInfo.threat_entries(i).hash();
@@ -223,20 +203,20 @@ TEST(UrlClassifierFindFullHash, ParseReq
   NS_ENSURE_SUCCESS_VOID(rv);
 
   ASSERT_EQ(callbackCount, ArrayLength(EXPECTED_MATCH));
 }
 
 /////////////////////////////////////////////////////////////
 namespace {
 
-Base64EncodedStringArray::Base64EncodedStringArray(nsCString aArray[],
-                                                   size_t N) {
+template <size_t N>
+void ToBase64EncodedStringArray(nsCString (&aArray)[N],
+                                nsTArray<nsCString>& aEncodedArray) {
   for (size_t i = 0; i < N; i++) {
     nsCString encoded;
     nsresult rv = Base64Encode(aArray[i], encoded);
     NS_ENSURE_SUCCESS_VOID(rv);
-    mStringStorage.AppendElement(encoded);
-    mArray.AppendElement(encoded.get());
+    aEncodedArray.AppendElement(encoded);
   }
 }
 
 }  // namespace
--- a/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
@@ -77,19 +77,17 @@ add_test(function test_update_v4() {
   // Force table update.
   Services.prefs.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
   gListManager.maybeToggleUpdateChecking();
 });
 
 add_test(function test_getHashRequestV4() {
   let request = gUrlUtil.makeFindFullHashRequestV4([TEST_TABLE_DATA_V4.tableName],
                                                    [btoa(NEW_CLIENT_STATE)],
-                                                   [btoa("0123"), btoa("1234567"), btoa("1111")].sort(),
-                                                   1,
-                                                   3);
+                                                   [btoa("0123"), btoa("1234567"), btoa("1111")].sort());
   registerHandlerGethashV4("&$req=" + request);
   let completeFinishedCnt = 0;
 
   gCompleter.complete("0123", TEST_TABLE_DATA_V4.gethashUrl, TEST_TABLE_DATA_V4.tableName, {
     completionV4(hash, table, duration, fullhashes) {
       equal(hash, "0123");
       equal(table, TEST_TABLE_DATA_V4.tableName);
       equal(duration, 120);
@@ -182,19 +180,17 @@ add_test(function test_minWaitDuration()
         equal(status, Cr.NS_OK);
         run_next_test();
       },
     });
   };
 
   let request = gUrlUtil.makeFindFullHashRequestV4([TEST_TABLE_DATA_V4.tableName],
                                                    [btoa(NEW_CLIENT_STATE)],
-                                                   [btoa("1234567")],
-                                                   1,
-                                                   1);
+                                                   [btoa("1234567")]);
   registerHandlerGethashV4("&$req=" + request);
 
   // The last gethash response contained a min wait duration 12 secs 10 nano
   // So subsequent requests can happen only after the min wait duration
   do_timeout(1000, failedComplete);
   do_timeout(2000, failedComplete);
   do_timeout(4000, failedComplete);
   do_timeout(13000, successComplete);
--- a/toolkit/components/url-classifier/tests/unit/test_listmanager.js
+++ b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
@@ -147,18 +147,17 @@ const SERVER_INVOLVED_TEST_CASE_LIST = [
                              TEST_TABLE_DATA_LIST[1].tableName + ";\n" +
                              TEST_TABLE_DATA_LIST[2].tableName + ";\n";
     gUpdateResponse = "n:1000\n";
 
     // We test the request against the query string since v4 request
     // would be appened to the query string. The request is generated
     // by protobuf API (binary) then encoded to base64 format.
     let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
-                                                  [""],
-                                                  1);
+                                                  [""]);
     gExpectedQueryV4 = "&$req=" + requestV4;
 
     forceTableUpdate();
   },
 
 ];
 
 SERVER_INVOLVED_TEST_CASE_LIST.forEach(t => add_test(t));
@@ -167,18 +166,17 @@ add_test(function test_partialUpdateV4()
   disableAllUpdates();
 
   gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
 
   // Since the new client state has been responded and saved in
   // test_update_all_tables, this update request should send
   // a partial update to the server.
   let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
-                                                [btoa(NEW_CLIENT_STATE)],
-                                                1);
+                                                [btoa(NEW_CLIENT_STATE)]);
   gExpectedQueryV4 = "&$req=" + requestV4;
 
   forceTableUpdate();
 });
 
 // Tests nsIUrlListManager.getGethashUrl.
 add_test(function test_getGethashUrl() {
   TEST_TABLE_DATA_LIST.forEach(function(t) {
--- a/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
+++ b/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
@@ -8,42 +8,40 @@ let urlUtils = Cc["@mozilla.org/url-clas
 
 function testMobileOnlyThreats() {
   // Mobile-only threat type(s):
   //   - goog-harmful-proto (POTENTIALLY_HARMFUL_APPLICATION)
 
   (function testUpdateRequest() {
     let requestWithPHA =
       urlUtils.makeUpdateRequestV4(["goog-phish-proto", "goog-harmful-proto"],
-                                   ["AAAAAA", "AAAAAA"], 2);
+                                   ["AAAAAA", "AAAAAA"]);
 
     let requestNoPHA =
-      urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"], 1);
+      urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"]);
 
     if (AppConstants.platform === "android") {
       notEqual(requestWithPHA, requestNoPHA,
                "PHA (i.e. goog-harmful-proto) shouldn't be filtered on mobile platform.");
     } else {
       equal(requestWithPHA, requestNoPHA,
             "PHA (i.e. goog-harmful-proto) should be filtered on non-mobile platform.");
     }
   })();
 
   (function testFullHashRequest() {
     let requestWithPHA =
       urlUtils.makeFindFullHashRequestV4(["goog-phish-proto", "goog-harmful-proto"],
                                          ["", ""], // state.
-                                         [btoa("0123")], // prefix.
-                                         2, 1);
+                                         [btoa("0123")]); // prefix.
 
     let requestNoPHA =
       urlUtils.makeFindFullHashRequestV4(["goog-phish-proto"],
                                          [""], // state.
-                                         [btoa("0123")], // prefix.
-                                         1, 1);
+                                         [btoa("0123")]); // prefix.
 
     if (AppConstants.platform === "android") {
       notEqual(requestWithPHA, requestNoPHA,
                "PHA (i.e. goog-harmful-proto) shouldn't be filtered on mobile platform.");
     } else {
       equal(requestWithPHA, requestNoPHA,
             "PHA (i.e. goog-harmful-proto) should be filtered on non-mobile platform.");
     }
@@ -54,20 +52,20 @@ function testDesktopOnlyThreats() {
   // Desktop-only threats:
   //   - goog-downloadwhite-proto (CSD_WHITELIST)
   //   - goog-badbinurl-proto (MALICIOUS_BINARY)
 
   let requestWithDesktopOnlyThreats =
     urlUtils.makeUpdateRequestV4(["goog-phish-proto",
                                   "goog-downloadwhite-proto",
                                   "goog-badbinurl-proto"],
-                                 ["", "", ""], 3);
+                                 ["", "", ""]);
 
   let requestNoDesktopOnlyThreats =
-    urlUtils.makeUpdateRequestV4(["goog-phish-proto"], [""], 1);
+    urlUtils.makeUpdateRequestV4(["goog-phish-proto"], [""]);
 
   if (AppConstants.platform === "android") {
     equal(requestWithDesktopOnlyThreats, requestNoDesktopOnlyThreats,
           "Android shouldn't contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'.");
   } else {
     notEqual(requestWithDesktopOnlyThreats, requestNoDesktopOnlyThreats,
              "Desktop should contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'.");
   }
--- a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
+++ b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
@@ -1,23 +1,23 @@
 function run_test() {
   let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
                    .getService(Ci.nsIUrlClassifierUtils);
 
   // No list at all.
-  let requestNoList = urlUtils.makeUpdateRequestV4([], [], 0);
+  let requestNoList = urlUtils.makeUpdateRequestV4([], []);
 
   // Only one valid list name.
   let requestOneValid =
-    urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"], 1);
+    urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"]);
 
   // Only one invalid list name.
   let requestOneInvalid =
-    urlUtils.makeUpdateRequestV4(["bad-list-name"], ["AAAAAA"], 1);
+    urlUtils.makeUpdateRequestV4(["bad-list-name"], ["AAAAAA"]);
 
   // One valid and one invalid list name.
   let requestOneInvalidOneValid =
     urlUtils.makeUpdateRequestV4(["goog-phish-proto", "bad-list-name"],
-                                 ["AAAAAA", "AAAAAA"], 2);
+                                 ["AAAAAA", "AAAAAA"]);
 
   equal(requestNoList, requestOneInvalid);
   equal(requestOneValid, requestOneInvalidOneValid);
 }
--- a/toolkit/mozapps/extensions/AddonManager.jsm
+++ b/toolkit/mozapps/extensions/AddonManager.jsm
@@ -1789,37 +1789,37 @@ var AddonManagerInternal = {
     // main tab's browser). Check this by seeing if the browser we've been
     // passed is in a content type docshell and if so get the outer-browser.
     let topBrowser = aBrowser;
     let docShell = aBrowser.ownerGlobal.docShell;
     if (docShell.itemType == Ci.nsIDocShellTreeItem.typeContent)
       topBrowser = docShell.chromeEventHandler;
 
     try {
-      if (!this.isInstallEnabled(aMimetype)) {
+      if (topBrowser.ownerGlobal.fullScreen) {
+        // Addon installation and the resulting notifications should be blocked in fullscreen for security and usability reasons.
+        // Installation prompts in fullscreen can trick the user into installing unwanted addons.
+        // In fullscreen the notification box does not have a clear visual association with its parent anymore.
+        aInstall.cancel();
+
+        this.installNotifyObservers("addon-install-blocked-silent", topBrowser,
+                                    aInstallingPrincipal.URI, aInstall);
+        return;
+      } else if (!this.isInstallEnabled(aMimetype)) {
         aInstall.cancel();
 
         this.installNotifyObservers("addon-install-disabled", topBrowser,
                                     aInstallingPrincipal.URI, aInstall);
         return;
       } else if (aInstallingPrincipal.isNullPrincipal || !aBrowser.contentPrincipal || !aInstallingPrincipal.subsumes(aBrowser.contentPrincipal)) {
         aInstall.cancel();
 
         this.installNotifyObservers("addon-install-origin-blocked", topBrowser,
                                     aInstallingPrincipal.URI, aInstall);
         return;
-      } else if (topBrowser.ownerGlobal.fullScreen) {
-        // Addon installation and the resulting notifications should be blocked in fullscreen for security and usability reasons.
-        // Installation prompts in fullscreen can trick the user into installing unwanted addons.
-        // In fullscreen the notification box does not have a clear visual association with its parent anymore.
-        aInstall.cancel();
-
-        this.installNotifyObservers("addon-install-blocked-silent", topBrowser,
-                                    aInstallingPrincipal.URI, aInstall);
-        return;
       }
 
       // The install may start now depending on the web install listener,
       // listen for the browser navigating to a new origin and cancel the
       // install in that case.
       new BrowserListener(aBrowser, aInstallingPrincipal, aInstall);
 
       let startInstall = (source) => {
--- a/toolkit/mozapps/extensions/internal/PluginProvider.jsm
+++ b/toolkit/mozapps/extensions/internal/PluginProvider.jsm
@@ -135,17 +135,17 @@ var PluginProvider = {
   /**
    * Builds a list of the current plugins reported by the plugin host
    *
    * @return a dictionary of plugins indexed by our generated ID
    */
   getPluginList() {
     let tags = Cc["@mozilla.org/plugin/host;1"].
                getService(Ci.nsIPluginHost).
-               getPluginTags({});
+               getPluginTags();
 
     let list = {};
     let seenPlugins = {};
     for (let tag of tags) {
       if (!(tag.name in seenPlugins))
         seenPlugins[tag.name] = {};
       if (!(tag.description in seenPlugins[tag.name])) {
         let plugin = {
@@ -382,19 +382,19 @@ PluginWrapper.prototype = {
     for (let tag of pluginFor(this).tags)
       paths.push(tag.fullpath);
     return paths;
   },
 
   get pluginMimeTypes() {
     let types = [];
     for (let tag of pluginFor(this).tags) {
-      let mimeTypes = tag.getMimeTypes({});
-      let mimeDescriptions = tag.getMimeDescriptions({});
-      let extensions = tag.getExtensions({});
+      let mimeTypes = tag.getMimeTypes();
+      let mimeDescriptions = tag.getMimeDescriptions();
+      let extensions = tag.getExtensions();
       for (let i = 0; i < mimeTypes.length; i++) {
         let type = {};
         type.type = mimeTypes[i];
         type.description = mimeDescriptions[i];
         type.suffixes = extensions[i];
 
         types.push(type);
       }
--- a/toolkit/mozapps/extensions/test/xpcshell/head_addons.js
+++ b/toolkit/mozapps/extensions/test/xpcshell/head_addons.js
@@ -1065,18 +1065,17 @@ class MockPluginTag {
   }
   set enabledState(val) {
     this.pluginTag.enabledState = val;
   }
 }
 
 function mockPluginHost(plugins) {
   let PluginHost = {
-    getPluginTags(count) {
-      count.value = plugins.length;
+    getPluginTags() {
       return plugins.map(p => p.pluginTag);
     },
 
     QueryInterface: ChromeUtils.generateQI(["nsIPluginHost"]),
   };
 
   MockRegistrar.register("@mozilla.org/plugin/host;1", PluginHost);
 }
--- a/toolkit/mozapps/extensions/test/xpinstall/browser_block_fullscreen_prompt.js
+++ b/toolkit/mozapps/extensions/test/xpinstall/browser_block_fullscreen_prompt.js
@@ -5,104 +5,85 @@
 "use strict";
 
 // This test tends to trigger a race in the fullscreen time telemetry,
 // where the fullscreen enter and fullscreen exit events (which use the
 // same histogram ID) overlap. That causes TelemetryStopwatch to log an
 // error.
 SimpleTest.ignoreAllUncaughtExceptions(true);
 
-const ADDON_FILE_URI = "http://example.com/browser/toolkit/mozapps/extensions/test/xpinstall/amosigned.xpi";
-
-const ADDON_EVENTS = [
-  "addon-install-blocked",
-  "addon-install-blocked-silent",
-  "addon-install-complete",
-  "addon-install-confirmation",
-  "addon-install-disabled",
-  "addon-install-failed",
-  "addon-install-origin-blocked",
-  "addon-install-started",
-  "addon-progress",
-  "addon-webext-permissions",
-  "xpinstall-disabled",
-];
-
-/**
- * Registers observers for addon installation events and resolves promise on first matching event
- */
-function waitForNextAddonEvent() {
-  return Promise.race(ADDON_EVENTS.map( async (eventStr) => {
-    await TestUtils.topicObserved(eventStr);
-    return eventStr;
-  }));
-}
-
 /**
  * Spawns content task in browser to enter / leave fullscreen
  * @param browser - Browser to use for JS fullscreen requests
  * @param {Boolean} fullscreenState - true to enter fullscreen, false to leave
  */
 function changeFullscreen(browser, fullscreenState) {
   return ContentTask.spawn(browser, fullscreenState, async function(state) {
     if (state) {
       await content.document.body.requestFullscreen();
     } else {
       await content.document.exitFullscreen();
     }
   });
 }
 
+function triggerInstall(browser, trigger) {
+  return ContentTask.spawn(browser, trigger, async function(trigger) {
+    content.InstallTrigger.install(trigger);
+  });
+}
+
 // This tests if addon installation is blocked when requested in fullscreen
 add_task(async function testFullscreenBlockAddonInstallPrompt() {
   // Open example.com
-  await BrowserTestUtils.withNewTab("http://example.com", async function(browser) {
-    await changeFullscreen(browser, true);
+  await BrowserTestUtils.openNewForegroundTab(gBrowser, TESTROOT);
 
-    // Navigate to addon file path
-    BrowserTestUtils.loadURI(browser, ADDON_FILE_URI);
+  // Enter and wait for fullscreen
+  await changeFullscreen(gBrowser.selectedBrowser, true);
+  await TestUtils.waitForCondition(() => window.fullScreen, "Waiting for window to enter fullscreen");
 
-    // Wait for addon manager event and check if installation has been blocked
-    let eventStr = await waitForNextAddonEvent();
-
-    Assert.equal(eventStr, "addon-install-blocked-silent", "Addon installation was blocked");
+  // Trigger addon installation and expect it to be blocked
+  let addonEventPromise = TestUtils.topicObserved("addon-install-blocked-silent");
+  await triggerInstall(gBrowser.selectedBrowser, {"XPI": "amosigned.xpi"});
+  await addonEventPromise;
 
-    // Test if addon installation prompt has been blocked
-    let panelOpened;
-    try {
-      panelOpened = await TestUtils.waitForCondition(() => PopupNotifications.isPanelOpen, 100, 10);
-    } catch (ex) {
-      panelOpened = false;
-    }
-    is(panelOpened, false, "Addon installation prompt not opened");
+  // Test if addon installation prompt has been blocked
+  let panelOpened;
+  try {
+    panelOpened = await TestUtils.waitForCondition(() => PopupNotifications.isPanelOpen, 100, 10);
+  } catch (ex) {
+    panelOpened = false;
+  }
+  is(panelOpened, false, "Addon installation prompt not opened");
 
-    window.fullScreen = false;
-  });
+  window.fullScreen = false;
+  await BrowserTestUtils.removeTab(gBrowser.selectedTab);
 });
 
 
 // This tests if the addon install prompt is closed when entering fullscreen
 add_task(async function testFullscreenCloseAddonInstallPrompt() {
-  // Open example.com
-  await BrowserTestUtils.withNewTab("http://example.com", async function(browser) {
-    // Navigate to addon file path
-    BrowserTestUtils.loadURI(browser, ADDON_FILE_URI);
+  let triggers = encodeURIComponent(JSON.stringify({
+    "XPI": "amosigned.xpi",
+  }));
+  let target = TESTROOT + "installtrigger.html?" + triggers;
 
-    // Test if addon installation started
-    let eventStr = await waitForNextAddonEvent();
-
-    Assert.ok(eventStr === "addon-install-started", "Addon installation started");
+  // Open example.com
+  await BrowserTestUtils.openNewForegroundTab(gBrowser, "http://example.com");
 
-    // Test if addon installation prompt is visible
-    await TestUtils.waitForCondition(() => PopupNotifications.isPanelOpen, "Waiting for addon installation prompt to open");
-    Assert.ok(ADDON_EVENTS.some(id => PopupNotifications.getNotification(id, browser) != null), "Opened notification is installation prompt");
-
-    // Test for addon installation prompt close
-    let panelClosePromise = TestUtils.waitForCondition(() => !PopupNotifications.isPanelOpen, "Waiting for addon installation prompt to close");
+  // Trigger addon installation
+  let addonEventPromise = TestUtils.topicObserved("addon-install-blocked");
+  BrowserTestUtils.loadURI(gBrowser.selectedBrowser, target);
+  // Wait for addon install event
+  await addonEventPromise;
 
-    // Switch to fullscreen
-    await changeFullscreen(browser, true);
+  // Test if addon installation prompt is visible
+  await TestUtils.waitForCondition(() => PopupNotifications.isPanelOpen, "Waiting for addon installation prompt to open");
+  Assert.ok(PopupNotifications.getNotification("addon-install-blocked", gBrowser.selectedBrowser) != null, "Opened notification is installation blocked prompt");
 
-    await panelClosePromise;
+  // Switch to fullscreen and test for addon installation prompt close
+  await changeFullscreen(gBrowser.selectedBrowser, true);
+  await TestUtils.waitForCondition(() => window.fullScreen, "Waiting for window to enter fullscreen");
+  await TestUtils.waitForCondition(() => !PopupNotifications.isPanelOpen, "Waiting for addon installation prompt to close");
 
-    window.fullScreen = false;
-  });
+  window.fullScreen = false;
+  await BrowserTestUtils.removeTab(gBrowser.selectedTab);
 });
--- a/toolkit/themes/windows/global/in-content/common.css
+++ b/toolkit/themes/windows/global/in-content/common.css
@@ -21,16 +21,21 @@ xul|radio {
 @media (-moz-windows-default-theme: 0) {
   xul|checkbox[checked] > xul|*.checkbox-check,
   xul|*.radio-check[selected] {
     fill: -moz-fieldText;
     background-color: -moz-field;
   }
 }
 
+/* Override menulist.css */
+xul|menulist[disabled="true"] {
+  background-color: var(--in-content-button-background);
+}
+
 xul|menulist:-moz-focusring > xul|*.menulist-label-box {
   outline: none !important;
 }
 
 html|input[type="checkbox"]:-moz-focusring + html|label:before {
   outline: 1px dotted;
 }
 
--- a/tools/clang-tidy/config.yaml
+++ b/tools/clang-tidy/config.yaml
@@ -35,16 +35,18 @@ clang_checkers:
   - name: bugprone-integer-division
     reliability: high
   - name: bugprone-macro-parentheses
     reliability: medium
   - name: bugprone-macro-repeated-side-effects
     reliability: high
   - name: bugprone-misplaced-widening-cast
     reliability: high
+  - name: bugprone-move-forwarding-reference
+    reliability: high
   - name: bugprone-multiple-statement-macro
     # Incompatible with our code base, see bug 1496379.
     publish: !!bool no
     reliability: high
   - name: bugprone-sizeof-expression
     reliability: high
   - name: bugprone-string-constructor
     reliability: high
new file mode 100644
--- /dev/null
+++ b/tools/clang-tidy/test/bugprone-move-forwarding-reference.cpp
@@ -0,0 +1,26 @@
+
+namespace std {
+template <typename> struct remove_reference;
+
+template <typename _Tp> struct remove_reference { typedef _Tp type; };
+
+template <typename _Tp> struct remove_reference<_Tp &> { typedef _Tp type; };
+
+template <typename _Tp> struct remove_reference<_Tp &&> { typedef _Tp type; };
+
+template <typename _Tp>
+constexpr typename std::remove_reference<_Tp>::type &&move(_Tp &&__t);
+
+} // namespace std
+
+// Standard case.
+template <typename T, typename U> void f1(U &&SomeU) {
+  T SomeT(std::move(SomeU));
+  // CHECK-MESSAGES: :[[@LINE-1]]:11: warning: forwarding reference passed to
+  // CHECK-FIXES: T SomeT(std::forward<U>(SomeU));
+}
+
+void foo() {
+  f1<int, int>(2);
+}
+
new file mode 100644
--- /dev/null
+++ b/tools/clang-tidy/test/bugprone-move-forwarding-reference.json
@@ -0,0 +1,1 @@
+[["warning", "forwarding reference passed to std::move(), which may unexpectedly cause lvalues to be moved; use std::forward() instead", "bugprone-move-forwarding-reference"], {"reliability": "high"}]
\ No newline at end of file
--- a/tools/clang-tidy/test/modernize-avoid-bind.json
+++ b/tools/clang-tidy/test/modernize-avoid-bind.json
@@ -1,1 +1,1 @@
-[["warning", "prefer a lambda to std::bind", "modernize-avoid-bind"]]
\ No newline at end of file
+[["warning", "prefer a lambda to std::bind", "modernize-avoid-bind"], {"reliability": "medium"}]
\ No newline at end of file
--- a/tools/profiler/core/ProfileBufferEntry.cpp
+++ b/tools/profiler/core/ProfileBufferEntry.cpp
@@ -153,17 +153,17 @@ class ForEachTrackedOptimizationTypeInfo
   nsTArray<TypeInfo> mTypesetForUpcomingEntry;
   LambdaT mLambda;
 };
 
 template <typename LambdaT>
 ForEachTrackedOptimizationTypeInfoLambdaOp<LambdaT>
 MakeForEachTrackedOptimizationTypeInfoLambdaOp(LambdaT&& aLambda) {
   return ForEachTrackedOptimizationTypeInfoLambdaOp<LambdaT>(
-      std::move(aLambda));
+      std::forward<LambdaT>(aLambda));
 }
 
 // As mentioned in ProfileBufferEntry.h, the JSON format contains many
 // arrays whose elements are laid out according to various schemas to help
 // de-duplication. This RAII class helps write these arrays by keeping track of
 // the last non-null element written and adding the appropriate number of null
 // elements when writing new non-null elements. It also automatically opens and
 // closes an array element on the given JSON writer.
--- a/tools/tryselect/try_presets.yml
+++ b/tools/tryselect/try_presets.yml
@@ -59,8 +59,29 @@ sample-suites:
         Runs one chunk of every test suite plus all suites that aren't chunked.
         It is useful for testing infrastructure changes that can affect the
         harnesses themselves but are unlikely to break specific tests.
     query:
         - ^test- -1$
         # Only run a single talos + raptor suite per platform
         - ^test- !1$ !2$ !3$ !4$ !5$ !6$ !7$ !8$ !9$ !0$ !raptor !talos
         - ^test- 'raptor-speedometer | 'talos-g1
+
+sm-shell:
+    selector: fuzzy
+    description: >-
+        Runs a set of tests aimed to give a reasonable level of confidence for
+        basic SpiderMonkey changes (shell only)
+    query:
+        - "'spidermonkey | 'shell-haz"
+        - "!shippable !android 'jittest"  # macosx64 jittests
+
+sm-all:
+    selector: fuzzy
+    description: >-
+        Runs a set of tests aimed to give a reasonable level of confidence for
+        basic SpiderMonkey changes, including those that would require a
+        browser build.
+    query:
+        - "'spidermonkey | 'hazard"
+        - "!android !asan !shippable 'xpcshell"
+        - "!android !asan !shippable 'jsreftest"
+        - "!shippable !android 'jittest"  # macosx64 jittests