Merge mozilla-central to mozilla-inbound.
authorCosmin Sabou <csabou@mozilla.com>
Wed, 28 Nov 2018 00:04:55 +0200
changeset 448385 e52f3a7ece193bdf93ac2c87796cad693bb1e882
parent 448317 95651672782a47a2031a4ea492f85053c98d5769 (current diff)
parent 448384 204cda7581188cfc8c8ef11dce4680dadf2b43bb (diff)
child 448386 07cdc29645eeffc12a018f5d4a89e61be6ed9529
push id110164
push usercsabou@mozilla.com
push dateTue, 27 Nov 2018 22:05:30 +0000
treeherdermozilla-inbound@e52f3a7ece19 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
milestone65.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge mozilla-central to mozilla-inbound.
devtools/shared/client/addon-client.js
dom/base/nsGlobalWindowOuter.cpp
modules/libpref/init/all.js
netwerk/protocol/http/DocumentAnalyticsTrackerFastBlocked.h
toolkit/components/url-classifier/tests/mochitest/fastblock.html
toolkit/components/url-classifier/tests/mochitest/fastblock_iframe.html
toolkit/components/url-classifier/tests/mochitest/fastblock_slow_iframe.html
toolkit/components/url-classifier/tests/mochitest/test_fastblock_bug1477046.html
--- a/.clang-format-ignore
+++ b/.clang-format-ignore
@@ -9,19 +9,18 @@ dom/media/gtest/.*
 gfx/testsd/.*
 .*/gtest/ExampleStylesheet.h
 image/test/.*
 ipc/ipdl/test/.*
 ipc/testshell/.*
 # Generated code
 js/src/builtin/intl/TimeZoneDataGenerated.h
 
-# Generated by js/src/util/make_unicode.py
-js/src/irregexp/RegExpCharacters-inl.h
-js/src/irregexp/RegExpCharacters.cpp
+# Don't want to reformat irregexp. bug 1510128
+js/src/irregexp/.*
 js/src/util/Unicode.cpp
 js/src/util/UnicodeNonBMP.h
 
 # Ignored because of bug 1506117 & 1342657
 layout/style/nsCSSAnonBoxList.h
 layout/style/nsCSSCounterDescList.h
 layout/style/nsCSSFontDescList.h
 layout/style/nsCSSKeywordList.h
--- a/.taskcluster.yml
+++ b/.taskcluster.yml
@@ -120,27 +120,27 @@ tasks:
         dependencies: []
         requires: all-completed
 
         priority:
           # Most times, there is plenty of worker capacity so everything runs
           # quickly, but sometimes a storm of action tasks lands.  Then we
           # want, from highest to lowest:
           # - cron tasks (time-sensitive) (low)
-          # - decision tasks (minimize user-visible delay) (very-low)
-          # - action tasks (avoid interfering with the other two) (lowest)
+          # - action tasks (avoid interfering with the other two) (very-low)
+          # - decision tasks (minimize user-visible delay) (lowest)
           # SCM levels all use different workerTypes, so there is no need for priority
           # between levels; "low" is the highest priority available at all levels, and
           # nothing runs at any higher priority on these workerTypes.
           $if: "tasks_for == 'cron'"
           then: low
           else:
-            $if: "tasks_for == 'hg-push'"
+            $if: "tasks_for == 'action'"
             then: very-low
-            else: lowest  # tasks_for == 'action'
+            else: lowest  # tasks_for == 'hg-push'
         retries: 5
 
         payload:
           env:
             # checkout-gecko uses these to check out the source; the inputs
             # to `mach taskgraph decision` are all on the command line.
             $merge:
               - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
--- a/browser/base/content/browser.js
+++ b/browser/base/content/browser.js
@@ -1685,17 +1685,17 @@ var gBrowserInit = {
         });
       } else {
         // If the initial browser is not remote, we can focus the browser
         // immediately with no paint performance impact.
         gBrowser.selectedBrowser.focus();
       }
     });
     // Delay removing the attribute using requestAnimationFrame to avoid
-    // invalidating styles multiple times in a row if _uriToLoadPromise
+    // invalidating styles multiple times in a row if uriToLoadPromise
     // resolves before first paint.
     if (shouldRemoveFocusedAttribute) {
       window.requestAnimationFrame(() => {
         if (shouldRemoveFocusedAttribute)
           gURLBar.removeAttribute("focused");
       });
     }
   },
@@ -1864,19 +1864,19 @@ var gBrowserInit = {
     scheduleIdleTask(() => {
       this.idleTasksFinished = true;
       Services.obs.notifyObservers(window, "browser-idle-startup-tasks-finished");
     });
   },
 
   // Returns the URI(s) to load at startup if it is immediately known, or a
   // promise resolving to the URI to load.
-  get _uriToLoadPromise() {
-    delete this._uriToLoadPromise;
-    return this._uriToLoadPromise = function() {
+  get uriToLoadPromise() {
+    delete this.uriToLoadPromise;
+    return this.uriToLoadPromise = function() {
       // window.arguments[0]: URI to load (string), or an nsIArray of
       //                      nsISupportsStrings to load, or a xul:tab of
       //                      a tabbrowser, which will be replaced by this
       //                      window (for this case, all other arguments are
       //                      ignored).
       if (!window.arguments || !window.arguments[0]) {
         return null;
       }
@@ -1898,23 +1898,24 @@ var gBrowserInit = {
         return willOverride ? null : uri;
       }
       return willOverride.then(willOverrideHomepage =>
                                  willOverrideHomepage ? null : uri);
     }();
   },
 
   // Calls the given callback with the URI to load at startup.
-  // Synchronously if possible, or after _uriToLoadPromise resolves otherwise.
+  // Synchronously if possible, or after uriToLoadPromise resolves otherwise.
   _callWithURIToLoad(callback) {
-    let uriToLoad = this._uriToLoadPromise;
-    if (!uriToLoad || !uriToLoad.then)
+    let uriToLoad = this.uriToLoadPromise;
+    if (uriToLoad && uriToLoad.then) {
+      uriToLoad.then(callback);
+    } else {
       callback(uriToLoad);
-    else
-      uriToLoad.then(callback);
+    }
   },
 
   onUnload() {
     gUIDensity.uninit();
 
     TabsInTitlebar.uninit();
 
     ToolbarIconColor.uninit();
--- a/browser/base/content/browser.xul
+++ b/browser/base/content/browser.xul
@@ -725,21 +725,22 @@ xmlns="http://www.w3.org/1999/xhtml"
                customizable="true"
                mode="icons"
 #ifdef MENUBAR_CAN_AUTOHIDE
                toolbarname="&menubarCmd.label;"
                accesskey="&menubarCmd.accesskey;"
                autohide="true"
 #endif
                context="toolbar-context-menu">
-        <toolbaritem id="menubar-items" align="start" flex="1">
+        <toolbaritem id="menubar-items" align="center">
 # The entire main menubar is placed into browser-menubar.inc, so that it can be
 # shared with other top level windows in macWindow.inc.xul.
 #include browser-menubar.inc
         </toolbaritem>
+        <spacer flex="1" />
 #include titlebar-items.inc.xul
       </toolbar>
 
       <toolbar id="TabsToolbar"
                class="browser-toolbar titlebar-color"
                fullscreentoolbar="true"
                customizable="true"
                customizationtarget="TabsToolbar-customization-target"
--- a/browser/base/content/tabbrowser.js
+++ b/browser/base/content/tabbrowser.js
@@ -280,44 +280,51 @@ window._gBrowser = {
     return this._selectedBrowser;
   },
 
   _setupInitialBrowserAndTab() {
     // See browser.js for the meaning of window.arguments.
     // Bug 1485961 covers making this more sane.
     let userContextId = window.arguments && window.arguments[6];
 
-    // We default to a remote content browser, except if:
-    // - e10s is disabled.
-    // - there's a parent process opener (e.g. parent process about: page) for
-    //   the content tab.
-    let remoteType;
-    if (gMultiProcessBrowser && !window.hasOpenerForInitialContentBrowser) {
-      remoteType = E10SUtils.DEFAULT_REMOTE_TYPE;
-    } else {
-      remoteType = E10SUtils.NOT_REMOTE;
-    }
+    let tabArgument = gBrowserInit.getTabToAdopt();
 
     // We only need sameProcessAsFrameLoader in the case where we're passed a tab
     let sameProcessAsFrameLoader;
-    let tabArgument = gBrowserInit.getTabToAdopt();
-    if (tabArgument) {
+    // If we have a tab argument with browser, we use its remoteType. Otherwise,
+    // if e10s is disabled or there's a parent process opener (e.g. parent
+    // process about: page) for the content tab, we use a parent
+    // process remoteType. Otherwise, we check the URI to determine
+    // what to do - if there isn't one, we default to the default remote type.
+    let remoteType;
+    if (tabArgument && tabArgument.linkedBrowser) {
+      remoteType = tabArgument.linkedBrowser.remoteType;
+      sameProcessAsFrameLoader = tabArgument.linkedBrowser.frameLoader;
+    } else if (!gMultiProcessBrowser || window.hasOpenerForInitialContentBrowser) {
+      remoteType = E10SUtils.NOT_REMOTE;
+    } else {
+      let uriToLoad = gBrowserInit.uriToLoadPromise;
+      if (uriToLoad && typeof uriToLoad == "string") {
+        remoteType = E10SUtils.getRemoteTypeForURI(
+          uriToLoad,
+          gMultiProcessBrowser,
+          E10SUtils.DEFAULT_REMOTE_TYPE
+        );
+      } else {
+        remoteType = E10SUtils.DEFAULT_REMOTE_TYPE;
+      }
+    }
+
+    if (tabArgument && tabArgument.hasAttribute("usercontextid")) {
       // The window's first argument is a tab if and only if we are swapping tabs.
       // We must set the browser's usercontextid so that the newly created remote
       // tab child has the correct usercontextid.
-      if (tabArgument.hasAttribute("usercontextid")) {
-        userContextId = parseInt(tabArgument.getAttribute("usercontextid"), 10);
-      }
-
-      let linkedBrowser = tabArgument.linkedBrowser;
-      if (linkedBrowser) {
-        remoteType = linkedBrowser.remoteType;
-        sameProcessAsFrameLoader = linkedBrowser.frameLoader;
-      }
-    }
+      userContextId = parseInt(tabArgument.getAttribute("usercontextid"), 10);
+    }
+
     let createOptions = {
       uriIsAboutBlank: false,
       userContextId,
       sameProcessAsFrameLoader,
       remoteType,
     };
     let browser = this._createBrowser(createOptions);
     browser.setAttribute("primary", "true");
--- a/browser/base/content/titlebar-items.inc.xul
+++ b/browser/base/content/titlebar-items.inc.xul
@@ -1,11 +1,11 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-<hbox class="titlebar-buttonbox-container" skipintoolbarset="true">
+<hbox class="titlebar-buttonbox-container">
   <hbox class="titlebar-buttonbox titlebar-color">
     <toolbarbutton class="titlebar-button titlebar-min" oncommand="window.minimize();"/>
     <toolbarbutton class="titlebar-button titlebar-max" oncommand="onTitlebarMaxClick();"/>
     <toolbarbutton class="titlebar-button titlebar-close" command="cmd_closeWindow"/>
   </hbox>
 </hbox>
--- a/browser/components/sessionstore/StartupPerformance.jsm
+++ b/browser/components/sessionstore/StartupPerformance.jsm
@@ -71,16 +71,17 @@ var StartupPerformance = {
   get isRestored() {
     return this._isRestored;
   },
 
   // Called when restoration starts.
   // Record the start timestamp, setup the timer and `this._promiseFinished`.
   // Behavior is unspecified if there was already an ongoing measure.
   _onRestorationStarts(isAutoRestore) {
+    Services.profiler.AddMarker("_onRestorationStarts");
     this._latestRestoredTimeStamp = this._startTimeStamp = Date.now();
     this._totalNumberOfEagerTabs = 0;
     this._totalNumberOfTabs = 0;
     this._totalNumberOfWindows = 0;
 
     // While we may restore several sessions in a single run of the browser,
     // that's a very unusual case, and not really worth measuring, so let's
     // stop listening for further restorations.
@@ -193,16 +194,17 @@ var StartupPerformance = {
 
           let observer = (event) => {
             // We don't care about tab restorations that are due to
             // a browser flipping from out-of-main-process to in-main-process
             // or vice-versa. We only care about restorations that are due
             // to the user switching to a lazily restored tab, or for tabs
             // that are restoring eagerly.
             if (!event.detail.isRemotenessUpdate) {
+              Services.profiler.AddMarker("SSTabRestored");
               this._latestRestoredTimeStamp = Date.now();
               this._totalNumberOfEagerTabs += 1;
             }
           };
           win.gBrowser.tabContainer.addEventListener("SSTabRestored", observer);
           this._totalNumberOfTabs += win.gBrowser.tabContainer.itemCount;
 
           // Once we have finished collecting the results, clean up the observers.
new file mode 100644
--- /dev/null
+++ b/browser/config/whats_new_page.yml
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+---
+- type: product-details
+  # %LOCALE% is automatically replaced by Balrog.
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/releasenotes/"
+- type: show-url
+  # yamllint disable-line rule:line-length
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/whatsnew/?oldversion=%OLD_VERSION%"
+  conditions:
+      blob-types: [wnp]
+      release-types: [release, release-rc]
+      products: [firefox]
+      update-channel: release
+      # e.g.: ["<61.0"]. {version.major_number} reflects the current version.
+      # This is done by taskgraph.
+      versions: ["<{version.major_number}.0"]
+      locales:
+          - az
+          - be
+          - cak
+          - cy
+          - da
+          - de
+          - dsb
+          - en-CA
+          - en-GB
+          - en-US
+          - es-AR
+          - et
+          - fr
+          - hi-IN
+          - hsb
+          - hu
+          - ia
+          - id
+          - it
+          - ka
+          - lij
+          - lt
+          - ms
+          - nb-NO
+          - nl
+          - pl
+          - pt-PT
+          - ro
+          - ru
+          - sk
+          - sl
+          - sq
+          - sv-SE
+          - th
+          - tr
+          - zh-CN
+          - zh-TW
--- a/browser/locales/en-US/browser/preferences/preferences.ftl
+++ b/browser/locales/en-US/browser/preferences/preferences.ftl
@@ -824,16 +824,17 @@ content-blocking-setting-standard =
   .accesskey = d
 content-blocking-setting-strict =
   .label = Strict
   .accesskey = r
 content-blocking-setting-custom =
   .label = Custom
   .accesskey = C
 
+content-blocking-standard-description = Only blocks known trackers in Private Windows.
 content-blocking-standard-desc = Balanced for protection and performance. Allows some trackers so websites function properly.
 content-blocking-strict-desc = Blocks all trackers { -brand-short-name } detects. May cause some sites to break.
 content-blocking-custom-desc = Choose what to block.
 
 content-blocking-private-trackers = Known trackers only in Private Windows
 content-blocking-third-party-cookies = Third-party tracking cookies
 content-blocking-all-windows-trackers = Known trackers in all windows
 content-blocking-all-third-party-cookies = All third-party cookies
--- a/browser/modules/test/browser/browser.ini
+++ b/browser/modules/test/browser/browser.ini
@@ -35,22 +35,18 @@ skip-if = os != win || (os == win && bit
 [browser_UnsubmittedCrashHandler.js]
 run-if = crashreporter
 [browser_urlBar_zoom.js]
 [browser_UsageTelemetry.js]
 [browser_UsageTelemetry_domains.js]
 [browser_UsageTelemetry_private_and_restore.js]
 skip-if = verify && debug
 [browser_UsageTelemetry_urlbar.js]
-disabled = bug 1496764 - Telemetry probe needs extension
 support-files =
   usageTelemetrySearchSuggestions.sjs
   usageTelemetrySearchSuggestions.xml
 [browser_UsageTelemetry_searchbar.js]
-disabled = bug 1496764 - Telemetry probe needs extension
 support-files =
   usageTelemetrySearchSuggestions.sjs
   usageTelemetrySearchSuggestions.xml
 [browser_UsageTelemetry_content.js]
-disabled = bug 1496764 - Telemetry probe needs extension
 [browser_UsageTelemetry_content_aboutHome.js]
-disabled = bug 1496764 - Telemetry probe needs extension
 [browser_UsageTelemetry_content_aboutRestartRequired.js]
--- a/build/sparse-profiles/taskgraph
+++ b/build/sparse-profiles/taskgraph
@@ -4,16 +4,17 @@
 # These files are read as part of generating the taskgraph.
 path:browser/locales/l10n-changesets.json
 path:browser/locales/l10n-onchange-changesets.json
 path:mobile/locales/l10n-changesets.json
 path:mobile/locales/l10n-onchange-changesets.json
 path:browser/locales/shipped-locales
 path:browser/config/version_display.txt
 path:browser/config/version.txt
+path:browser/config/whats_new_page.yml
 
 # Lots of random files in here are read. Just pull in the whole thing.
 path:build/
 
 # TODO remove once bug 1402010 is resolved and test manifests aren't
 # processed in Files() reading mode in moz.build files.
 path:layout/tools/reftest/
 path:testing/web-platform/tests/tools/
--- a/config/external/moz.build
+++ b/config/external/moz.build
@@ -33,16 +33,17 @@ if CONFIG['MOZ_TREMOR']:
 if CONFIG['MOZ_WEBM_ENCODER']:
     external_dirs += ['media/libmkv']
 
 if not CONFIG['MOZ_SYSTEM_LIBVPX']:
     external_dirs += ['media/libvpx']
 
 if CONFIG['MOZ_AV1']:
     external_dirs += ['media/libaom']
+    external_dirs += ['media/libdav1d']
 
 if not CONFIG['MOZ_SYSTEM_PNG']:
     external_dirs += ['media/libpng']
 
 if not CONFIG['MOZ_SYSTEM_WEBP']:
     external_dirs += ['media/libwebp']
 
 if CONFIG['CPU_ARCH'] == 'arm':
--- a/devtools/client/aboutdebugging-new/src/actions/debug-targets.js
+++ b/devtools/client/aboutdebugging-new/src/actions/debug-targets.js
@@ -98,22 +98,23 @@ function pushServiceWorker(actor) {
     try {
       await clientWrapper.request({ to: actor, type: "push" });
     } catch (e) {
       console.error(e);
     }
   };
 }
 
-function reloadTemporaryExtension(actor) {
+function reloadTemporaryExtension(id) {
   return async (_, getState) => {
     const clientWrapper = getCurrentClient(getState().runtimes);
 
     try {
-      await clientWrapper.request({ to: actor, type: "reload" });
+      const addonTargetFront = await clientWrapper.getAddon({ id });
+      await addonTargetFront.reload();
     } catch (e) {
       console.error(e);
     }
   };
 }
 
 function removeTemporaryExtension(id) {
   return async () => {
@@ -144,17 +145,17 @@ function requestTabs() {
 function requestExtensions() {
   return async (dispatch, getState) => {
     dispatch({ type: REQUEST_EXTENSIONS_START });
 
     const runtime = getCurrentRuntime(getState().runtimes);
     const clientWrapper = getCurrentClient(getState().runtimes);
 
     try {
-      const { addons } = await clientWrapper.listAddons();
+      const addons = await clientWrapper.listAddons();
       let extensions = addons.filter(a => a.debuggable);
 
       // Filter out system addons unless the dedicated preference is set to true.
       if (!getState().ui.showSystemAddons) {
         extensions = extensions.filter(e => !e.isSystem);
       }
 
       if (runtime.type !== RUNTIMES.THIS_FIREFOX) {
--- a/devtools/client/aboutdebugging-new/src/components/debugtarget/TemporaryExtensionAction.js
+++ b/devtools/client/aboutdebugging-new/src/components/debugtarget/TemporaryExtensionAction.js
@@ -24,17 +24,17 @@ class TemporaryExtensionAction extends P
     return {
       dispatch: PropTypes.func.isRequired,
       target: Types.debugTarget.isRequired,
     };
   }
 
   reload() {
     const { dispatch, target } = this.props;
-    dispatch(Actions.reloadTemporaryExtension(target.details.actor));
+    dispatch(Actions.reloadTemporaryExtension(target.id));
   }
 
   remove() {
     const { dispatch, target } = this.props;
     dispatch(Actions.removeTemporaryExtension(target.id));
   }
 
   render() {
--- a/devtools/client/aboutdebugging-new/src/modules/client-wrapper.js
+++ b/devtools/client/aboutdebugging-new/src/modules/client-wrapper.js
@@ -92,17 +92,21 @@ class ClientWrapper {
     }
   }
 
   async listTabs(options) {
     return this.client.listTabs(options);
   }
 
   async listAddons() {
-    return this.client.listAddons();
+    return this.client.mainRoot.listAddons();
+  }
+
+  async getAddon({ id }) {
+    return this.client.mainRoot.getAddon({ id });
   }
 
   async listWorkers() {
     const { other, service, shared } = await this.client.mainRoot.listAllWorkers();
 
     return {
       otherWorkers: other,
       serviceWorkers: service,
--- a/devtools/client/aboutdebugging-new/src/modules/extensions-helper.js
+++ b/devtools/client/aboutdebugging-new/src/modules/extensions-helper.js
@@ -48,24 +48,23 @@ exports.debugLocalAddon = async function
  * Start debugging an addon in a remote instance of Firefox.
  *
  * @param {String} id
  *        The addon id to debug.
  * @param {DebuggerClient} client
  *        Required for remote debugging.
  */
 exports.debugRemoteAddon = async function(id, client) {
-  const { addons } = await client.listAddons();
-  const addonForm = addons.find(addon => addon.id === id);
+  const addonTargetFront = await client.mainRoot.getAddon({ id });
 
   // Close previous addon debugging toolbox.
   closeToolbox();
 
   const options = {
-    form: addonForm,
+    activeTab: addonTargetFront,
     chrome: true,
     client,
   };
 
   const target = await TargetFactory.forRemoteTab(options);
 
   const hostType = Toolbox.HostType.WINDOW;
   remoteAddonToolbox = await gDevTools.showToolbox(target, null, hostType);
--- a/devtools/client/aboutdebugging-new/test/browser/browser_aboutdebugging_addons_usb_runtime.js
+++ b/devtools/client/aboutdebugging-new/test/browser/browser_aboutdebugging_addons_usb_runtime.js
@@ -29,31 +29,31 @@ add_task(async function() {
 
   const extensionPane = getDebugTargetPane("Extensions", document);
   info("Check an empty target pane message is displayed");
   ok(extensionPane.querySelector(".js-debug-target-list-empty"),
     "Extensions list is empty");
 
   info("Add an extension to the remote client");
   const addon = { name: "Test extension name", debuggable: true };
-  usbClient.listAddons = () => ({ addons: [addon] });
+  usbClient.listAddons = () => [addon];
   usbClient._eventEmitter.emit("addonListChanged");
 
   info("Wait until the extension appears");
   await waitUntil(() => !extensionPane.querySelector(".js-debug-target-list-empty"));
 
   const extensionTarget = findDebugTargetByText("Test extension name", document);
   ok(extensionTarget, "Extension target appeared for the USB runtime");
 
   // The goal here is to check that USB runtimes addons are only updated when the USB
   // runtime is sending addonListChanged events. The reason for this test is because the
   // previous implementation was updating the USB runtime extensions list when the _local_
   // AddonManager was updated.
   info("Remove the extension from the remote client WITHOUT sending an event");
-  usbClient.listAddons = () => ({ addons: [] });
+  usbClient.listAddons = () => [];
 
   info("Simulate an addon update on the ThisFirefox client");
   usbMocks.thisFirefoxClient._eventEmitter.emit("addonListChanged");
 
   // To avoid wait for a set period of time we trigger another async update, adding a new
   // tab. We assume that if the addon update mechanism had started, it would also be done
   // when the new tab was processed.
   info("Wait until the tab target for 'http://some.random/url.com' appears");
--- a/devtools/client/aboutdebugging-new/test/browser/browser_aboutdebugging_system_addons.js
+++ b/devtools/client/aboutdebugging-new/test/browser/browser_aboutdebugging_system_addons.js
@@ -15,34 +15,34 @@ Services.scriptloader.loadSubScript(
 
 const SYSTEM_ADDON =
   createAddonData({ id: "system", name: "System Addon", isSystem: true });
 const INSTALLED_ADDON =
   createAddonData({ id: "installed", name: "Installed Addon", isSystem: false });
 
 add_task(async function testShowSystemAddonsFalse() {
   const thisFirefoxClient = setupThisFirefoxMock();
-  thisFirefoxClient.listAddons = () => ({ addons: [SYSTEM_ADDON, INSTALLED_ADDON] });
+  thisFirefoxClient.listAddons = () => ([SYSTEM_ADDON, INSTALLED_ADDON]);
 
   info("Hide system addons in aboutdebugging via preference");
   await pushPref("devtools.aboutdebugging.showSystemAddons", false);
 
   const { document, tab } = await openAboutDebugging();
 
   const hasSystemAddon = !!findDebugTargetByText("System Addon", document);
   const hasInstalledAddon = !!findDebugTargetByText("Installed Addon", document);
   ok(!hasSystemAddon, "System addon is hidden when system addon pref is false");
   ok(hasInstalledAddon, "Installed addon is displayed when system addon pref is false");
 
   await removeTab(tab);
 });
 
 add_task(async function testShowSystemAddonsTrue() {
   const thisFirefoxClient = setupThisFirefoxMock();
-  thisFirefoxClient.listAddons = () => ({ addons: [SYSTEM_ADDON, INSTALLED_ADDON] });
+  thisFirefoxClient.listAddons = () => ([SYSTEM_ADDON, INSTALLED_ADDON]);
 
   info("Show system addons in aboutdebugging via preference");
   await pushPref("devtools.aboutdebugging.showSystemAddons", true);
 
   const { document, tab } = await openAboutDebugging();
   const hasSystemAddon = !!findDebugTargetByText("System Addon", document);
   const hasInstalledAddon = !!findDebugTargetByText("Installed Addon", document);
   ok(hasSystemAddon, "System addon is displayed when system addon pref is true");
--- a/devtools/client/aboutdebugging-new/test/browser/mocks/head-client-wrapper-mock.js
+++ b/devtools/client/aboutdebugging-new/test/browser/mocks/head-client-wrapper-mock.js
@@ -57,17 +57,17 @@ function createClientMock() {
     // Return default preference value or null if no match.
     getPreference: (prefName) => {
       if (prefName in DEFAULT_PREFERENCES) {
         return DEFAULT_PREFERENCES[prefName];
       }
       return null;
     },
     // Empty array of addons
-    listAddons: () => ({ addons: [] }),
+    listAddons: () => [],
     // Empty array of tabs
     listTabs: () => ({ tabs: []}),
     // Empty arrays of workers
     listWorkers: () => ({
       otherWorkers: [],
       serviceWorkers: [],
       sharedWorkers: [],
     }),
--- a/devtools/client/aboutdebugging/components/addons/Panel.js
+++ b/devtools/client/aboutdebugging/components/addons/Panel.js
@@ -95,24 +95,22 @@ class AddonsPanel extends Component {
   }
 
   updateShowSystemStatus() {
     const showSystemAddons = Services.prefs.getBoolPref(SYSTEM_ENABLED_PREF, false);
     this.setState({ showSystemAddons });
   }
 
   updateAddonsList() {
-    this.props.client.listAddons()
-      .then(({addons}) => {
+    this.props.client.mainRoot.listAddons()
+      .then(addons => {
         const extensions = addons.filter(addon => addon.debuggable).map(addon => {
           return {
-            addonTargetActor: addon.actor,
+            addonTargetFront: addon,
             addonID: addon.id,
-            // Forward the whole addon actor form for potential remote debugging.
-            form: addon,
             icon: addon.iconURL || ExtensionIcon,
             isSystem: addon.isSystem,
             manifestURL: addon.manifestURL,
             name: addon.name,
             temporarilyInstalled: addon.temporarilyInstalled,
             url: addon.url,
             warnings: addon.warnings,
           };
--- a/devtools/client/aboutdebugging/components/addons/Target.js
+++ b/devtools/client/aboutdebugging/components/addons/Target.js
@@ -8,17 +8,16 @@
 
 const { Component } = require("devtools/client/shared/vendor/react");
 const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
 const dom = require("devtools/client/shared/vendor/react-dom-factories");
 const {
   debugLocalAddon,
   debugRemoteAddon,
   getExtensionUuid,
-  isLegacyTemporaryExtension,
   isTemporaryID,
   parseFileUri,
   uninstallAddon,
 } = require("../../modules/addon");
 const Services = require("Services");
 
 loader.lazyRequireGetter(this, "DebuggerClient",
   "devtools/shared/client/debugger-client", true);
@@ -119,17 +118,17 @@ function infoMessages(target) {
   }
 
   return messages;
 }
 
 function warningMessages(target) {
   let messages = [];
 
-  if (isLegacyTemporaryExtension(target.form)) {
+  if (target.addonTargetFront.isLegacyTemporaryExtension()) {
     messages.push(dom.li(
       {
         className: "addon-target-warning-message addon-target-message",
       },
       Strings.GetStringFromName("legacyExtensionWarning"),
       " ",
       dom.a(
         {
@@ -152,19 +151,18 @@ function warningMessages(target) {
 
 class AddonTarget extends Component {
   static get propTypes() {
     return {
       client: PropTypes.instanceOf(DebuggerClient).isRequired,
       connect: PropTypes.object,
       debugDisabled: PropTypes.bool,
       target: PropTypes.shape({
-        addonTargetActor: PropTypes.string.isRequired,
+        addonTargetFront: PropTypes.object.isRequired,
         addonID: PropTypes.string.isRequired,
-        form: PropTypes.object.isRequired,
         icon: PropTypes.string,
         name: PropTypes.string.isRequired,
         temporarilyInstalled: PropTypes.bool,
         url: PropTypes.string,
         warnings: PropTypes.array,
       }).isRequired,
     };
   }
@@ -187,23 +185,20 @@ class AddonTarget extends Component {
   }
 
   uninstall() {
     const { target } = this.props;
     uninstallAddon(target.addonID);
   }
 
   async reload() {
-    const { client, target } = this.props;
+    const { target } = this.props;
     const { AboutDebugging } = window;
     try {
-      await client.request({
-        to: target.addonTargetActor,
-        type: "reload",
-      });
+      await target.addonTargetFront.reload();
       AboutDebugging.emit("addon-reload");
     } catch (e) {
       throw new Error("Error reloading addon " + target.addonID + ": " + e.message);
     }
   }
 
   render() {
     const { target, debugDisabled } = this.props;
--- a/devtools/client/aboutdebugging/modules/addon.js
+++ b/devtools/client/aboutdebugging/modules/addon.js
@@ -20,29 +20,16 @@ const {
  * devtools/client/aboutdebugging-new/src/modules/extensions-helper.js
  * The only methods implemented here are the ones used in the old aboutdebugging only.
  */
 
 exports.isTemporaryID = function(addonID) {
   return AddonManagerPrivate.isTemporaryInstallID(addonID);
 };
 
-exports.isLegacyTemporaryExtension = function(addonForm) {
-  if (!addonForm.type) {
-    // If about:debugging is connected to an older then 59 remote Firefox, and type is
-    // not available on the addon/webextension actors, return false to avoid showing
-    // irrelevant warning messages.
-    return false;
-  }
-  return addonForm.type == "extension" &&
-         addonForm.temporarilyInstalled &&
-         !addonForm.isWebExtension &&
-         !addonForm.isAPIExtension;
-};
-
 /**
  * See JSDoc in devtools/client/aboutdebugging-new/src/modules/extensions-helper for all
  * the methods exposed below.
  */
 
 exports.debugLocalAddon = debugLocalAddon;
 exports.debugRemoteAddon = debugRemoteAddon;
 exports.getExtensionUuid = getExtensionUuid;
--- a/devtools/client/debugger/test/mochitest/head.js
+++ b/devtools/client/debugger/test/mochitest/head.js
@@ -158,29 +158,16 @@ function getTargetActorForUrl(aClient, a
   aClient.listTabs().then(aResponse => {
     let targetActor = aResponse.tabs.filter(aGrip => aGrip.url == aUrl).pop();
     deferred.resolve(targetActor);
   });
 
   return deferred.promise;
 }
 
-function getAddonActorForId(aClient, aAddonId) {
-  info("Get addon actor for ID: " + aAddonId);
-  let deferred = promise.defer();
-
-  aClient.listAddons().then(aResponse => {
-    let addonTargetActor = aResponse.addons.filter(aGrip => aGrip.id == aAddonId).pop();
-    info("got addon actor for ID: " + aAddonId);
-    deferred.resolve(addonTargetActor);
-  });
-
-  return deferred.promise;
-}
-
 async function attachTargetActorForUrl(aClient, aUrl) {
   let grip = await getTargetActorForUrl(aClient, aUrl);
   let [ response, front ] = await aClient.attachTarget(grip.actor);
   return [grip, response, front];
 }
 
 async function attachThreadActorForUrl(aClient, aUrl) {
   let [grip, response] = await attachTargetActorForUrl(aClient, aUrl);
@@ -772,28 +759,16 @@ function hideVarPopupByScrollingEditor(a
   editor.setFirstVisibleLine(0);
   return popupHiding.then(waitForTick);
 }
 
 function reopenVarPopup(...aArgs) {
   return hideVarPopup.apply(this, aArgs).then(() => openVarPopup.apply(this, aArgs));
 }
 
-function attachAddonActorForId(aClient, aAddonId) {
-  let deferred = promise.defer();
-
-  getAddonActorForId(aClient, aAddonId).then(aGrip => {
-    aClient.attachAddon(aGrip).then(([aResponse]) => {
-      deferred.resolve([aGrip, aResponse]);
-    });
-  });
-
-  return deferred.promise;
-}
-
 function doResume(aPanel) {
   const threadClient = aPanel.panelWin.gThreadClient;
   return threadClient.resume();
 }
 
 function doInterrupt(aPanel) {
   const threadClient = aPanel.panelWin.gThreadClient;
   return threadClient.interrupt();
--- a/devtools/client/framework/connect/connect.js
+++ b/devtools/client/framework/connect/connect.js
@@ -75,20 +75,17 @@ var submit = async function() {
 /**
  * Connection is ready. List actors and build buttons.
  */
 var onConnectionReady = async function([aType, aTraits]) {
   clearTimeout(gConnectionTimeout);
 
   let addons = [];
   try {
-    const response = await gClient.listAddons();
-    if (!response.error && response.addons.length > 0) {
-      addons = response.addons;
-    }
+    addons = await gClient.mainRoot.listAddons();
   } catch (e) {
     // listAddons throws if the runtime doesn't support addons
   }
 
   let parent = document.getElementById("addonTargetActors");
   if (addons.length > 0) {
     // Add one entry for each add-on.
     for (const addon of addons) {
@@ -158,17 +155,17 @@ var onConnectionReady = async function([
 };
 
 /**
  * Build one button for an add-on.
  */
 function buildAddonLink(addon, parent) {
   const a = document.createElement("a");
   a.onclick = async function() {
-    openToolbox(addon, true, "webconsole");
+    openToolbox(null, true, "webconsole", addon);
   };
 
   a.textContent = addon.name;
   a.title = addon.id;
   a.href = "#";
 
   parent.appendChild(a);
 }
--- a/devtools/client/framework/target.js
+++ b/devtools/client/framework/target.js
@@ -557,38 +557,29 @@ Target.prototype = {
           this.client.mainRoot.traits.webExtensionAddonConnect) {
         // The addonTargetActor form is related to a WebExtensionActor instance,
         // which isn't a target actor on its own, it is an actor living in the parent
         // process with access to the addon metadata, it can control the addon (e.g.
         // reloading it) and listen to the AddonManager events related to the lifecycle of
         // the addon (e.g. when the addon is disabled or uninstalled).
         // To retrieve the target actor instance, we call its "connect" method, (which
         // fetches the target actor form from a WebExtensionTargetActor instance).
-        const {form} = await this._client.request({
-          to: this.form.actor, type: "connect",
-        });
-
-        this._form = form;
-        this._url = this.form.url;
-        this._title = this.form.title;
+        this.activeTab = await this.activeTab.connect();
       }
 
       // AddonTargetActor and ContentProcessTargetActor don't inherit from
       // BrowsingContextTargetActor (i.e. this.isBrowsingContext=false) and don't need
       // to be attached via DebuggerClient.attachTarget.
       if (this.isBrowsingContext) {
         await attachBrowsingContextTarget();
-      } else if (this.isLegacyAddon) {
-        const [, addonTargetFront] = await this._client.attachAddon(this.form);
-        this.activeTab = addonTargetFront;
 
-      // Worker and Content process targets are the first target to have their front already
-      // instantiated. The plan is to have all targets to have their front passed as
-      // constructor argument.
-      } else if (this.isWorkerTarget) {
+      // Addon, Worker and Content process targets are the first targets to have their
+      // front already instantiated. The plan is to have all targets to have their front
+      // passed as constructor argument.
+      } else if (this.isWorkerTarget || this.isLegacyAddon) {
         // Worker is the first front to be completely migrated to have only its attach
         // method being called from Target.attach. Other fronts should be refactored.
         await this.activeTab.attach();
       } else if (this.isContentProcess) {
         // ContentProcessTarget is the only one target without any attach request.
       } else {
         throw new Error(`Unsupported type of target. Expected target of one of the` +
           ` following types: BrowsingContext, ContentProcess, Worker or ` +
--- a/devtools/client/framework/toolbox-process-window.js
+++ b/devtools/client/framework/toolbox-process-window.js
@@ -84,19 +84,18 @@ var connect = async function() {
     webSocket,
   });
   gClient = new DebuggerClient(transport);
   appendStatusMessage("Start protocol client for connection");
   await gClient.connect();
 
   appendStatusMessage("Get root form for toolbox");
   if (addonID) {
-    const { addons } = await gClient.listAddons();
-    const addonTargetActor = addons.filter(addon => addon.id === addonID).pop();
-    await openToolbox({form: addonTargetActor, chrome: true});
+    const addonTargetFront = await gClient.mainRoot.getAddon({ id: addonID });
+    await openToolbox({activeTab: addonTargetFront, chrome: true});
   } else {
     const front = await gClient.mainRoot.getMainProcess();
     await openToolbox({activeTab: front, chrome: true});
   }
 };
 
 // Certain options should be toggled since we can assume chrome debugging here
 function setPrefDefaults() {
--- a/devtools/client/inspector/changes/components/ChangesApp.js
+++ b/devtools/client/inspector/changes/components/ChangesApp.js
@@ -5,16 +5,17 @@
 "use strict";
 
 const { createFactory, PureComponent } = require("devtools/client/shared/vendor/react");
 const dom = require("devtools/client/shared/vendor/react-dom-factories");
 const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
 const { connect } = require("devtools/client/shared/vendor/react-redux");
 
 const CSSDeclaration = createFactory(require("./CSSDeclaration"));
+const { getSourceForDisplay } = require("../utils/changes-utils");
 const { getStr } = require("../utils/l10n");
 
 class ChangesApp extends PureComponent {
   static get propTypes() {
     return {
       // Redux state slice assigned to Track Changes feature; passed as prop by connect()
       changes: PropTypes.object.isRequired,
     };
@@ -57,17 +58,17 @@ class ChangesApp extends PureComponent {
           property,
           value,
         });
       });
 
     return [removals, additions];
   }
 
-  renderRule(ruleId, rule, rules) {
+  renderRule(ruleId, rule, rules, level = 0) {
     const selector = rule.selector;
 
     if (this.renderedRules.includes(ruleId)) {
       return null;
     }
 
     // Mark this rule as rendered so we don't render it again.
     this.renderedRules.push(ruleId);
@@ -77,62 +78,76 @@ class ChangesApp extends PureComponent {
       diffClass = "diff-add";
     } else if (rule.changeType === "rule-remove") {
       diffClass = "diff-remove";
     }
 
     return dom.div(
       {
         key: ruleId,
-        className: "rule",
+        className: "rule devtools-monospace",
+        style: {
+          "--diff-level": level,
+        },
       },
       dom.div(
         {
           className: `level selector ${diffClass}`,
           title: selector,
         },
         selector,
         dom.span({ className: "bracket-open" }, "{")
       ),
-      // Render any nested child rules if they are present.
-      rule.children.length > 0 && rule.children.map(childRuleId => {
-        return this.renderRule(childRuleId, rules[childRuleId], rules);
+      // Render any nested child rules if they exist.
+      rule.children.map(childRuleId => {
+        return this.renderRule(childRuleId, rules[childRuleId], rules, level + 1);
       }),
       // Render any changed CSS declarations.
       this.renderDeclarations(rule.remove, rule.add),
       dom.div({ className: `level bracket-close ${diffClass}` }, "}")
     );
   }
 
   renderDiff(changes = {}) {
     // Render groups of style sources: stylesheets and element style attributes.
     return Object.entries(changes).map(([sourceId, source]) => {
-      const href = source.href || `inline stylesheet #${source.index}`;
-      const rules = source.rules;
+      const path = getSourceForDisplay(source);
+      const { href, rules, isFramed } = source;
 
-      return dom.details(
+      return dom.div(
         {
           key: sourceId,
-          className: "source devtools-monospace",
-          open: true,
+          className: "source",
         },
-        dom.summary(
+        dom.div(
           {
             className: "href",
             title: href,
           },
-          href),
+          dom.span({}, path),
+          isFramed && this.renderFrameBadge(href)
+        ),
         // Render changed rules within this source.
         Object.entries(rules).map(([ruleId, rule]) => {
           return this.renderRule(ruleId, rule, rules);
         })
       );
     });
   }
 
+  renderFrameBadge(href = "") {
+    return dom.span(
+      {
+        className: "inspector-badge",
+        title: href,
+      },
+      getStr("changes.iframeLabel")
+    );
+  }
+
   renderEmptyState() {
     return dom.div({ className: "devtools-sidepanel-no-result" },
       dom.p({}, getStr("changes.noChanges")),
       dom.p({}, getStr("changes.noChangesDescription"))
     );
   }
 
   render() {
--- a/devtools/client/inspector/changes/reducers/changes.js
+++ b/devtools/client/inspector/changes/reducers/changes.js
@@ -124,32 +124,33 @@ function removeRule(ruleId, rules) {
 }
 
 /**
  * Aggregated changes grouped by sources (stylesheet/element), which contain rules,
  * which contain collections of added and removed CSS declarations.
  *
  * Structure:
  *    <sourceId>: {
- *      type: // "stylesheet" or "element"
- *      href: // Stylesheet or document URL
+ *      type: // {String} One of: "stylesheet", "inline" or "element"
+ *      href: // {String|null} Stylesheet or document URL; null for inline stylesheets
  *      rules: {
  *        <ruleId>: {
- *          selector: "" // String CSS selector or CSS at-rule text
- *          changeType:  // Optional string: "rule-add" or "rule-remove"
- *          children: [] // Array of <ruleId> for child rules of this rule.
- *          parent:      // <ruleId> of the parent rule
- *          add: {
- *            <property>: <value> // CSS declaration
- *            ...
- *          },
- *          remove: {
- *            <property>: <value> // CSS declaration
- *           ...
- *          }
+ *          selector:    // {String} CSS selector or CSS at-rule text
+ *          changeType:  // {String} Optional; one of: "rule-add" or "rule-remove"
+ *          children: [] // {Array} of <ruleId> for child rules of this rule
+ *          parent:      // {String} <ruleId> of the parent rule
+ *          add: [       // {Array} of objects with CSS declarations
+ *            {
+ *              property:    // {String} CSS property name
+ *              value:       // {String} CSS property value
+ *              index:       // {Number} Position of the declaration within its CSS rule
+ *            }
+ *            ... // more declarations
+ *          ],
+ *          remove: []   // {Array} of objects with CSS declarations
  *        }
  *        ... // more rules
  *      }
  *    }
  *    ... // more sources
  */
 const INITIAL_STATE = {};
 
@@ -187,23 +188,23 @@ const reducers = {
       ancestors: [],
       add: [],
       remove: [],
     };
 
     change = { ...defaults, ...change };
     state = cloneState(state);
 
-    const { type, href, index } = change.source;
+    const { type, href, index, isFramed } = change.source;
     const { selector, ancestors, ruleIndex, type: changeType } = change;
     const sourceId = getSourceHash(change.source);
     const ruleId = getRuleHash({ selector, ancestors, ruleIndex });
 
     // Copy or create object identifying the source (styelsheet/element) for this change.
-    const source = Object.assign({}, state[sourceId], { type, href, index });
+    const source = Object.assign({}, state[sourceId], { type, href, index, isFramed });
     // Copy or create collection of all rules ever changed in this source.
     const rules = Object.assign({}, source.rules);
     // Refrence or create object identifying the rule for this change.
     let rule = rules[ruleId];
     if (!rule) {
       rule = createRule({ selector, ancestors, ruleIndex }, rules);
       if (changeType.startsWith("rule-")) {
         rule.changeType = changeType;
--- a/devtools/client/inspector/changes/utils/changes-utils.js
+++ b/devtools/client/inspector/changes/utils/changes-utils.js
@@ -1,34 +1,36 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
+const { getFormatStr, getStr } = require("./l10n");
+
 /**
 * Generate a hash that uniquely identifies a stylesheet or element style attribute.
 *
 * @param {Object} source
 *        Information about a stylesheet or element style attribute:
 *        {
 *          type:  {String}
-*                 One of "stylesheet" or "element".
+*                 One of "stylesheet", "inline" or "element".
 *          index: {Number|String}
 *                 Position of the styleshet in the list of stylesheets in the document.
 *                 If `type` is "element", `index` is the generated selector which
 *                 uniquely identifies the element in the document.
-*          href:  {String|null}
-*                 URL of the stylesheet or of the document when `type` is "element".
-*                 If the stylesheet is inline, `href` is null.
+*          href:  {String}
+*                 URL of the stylesheet or of the document when `type` is "element" or
+*                 "inline".
 *        }
 * @return {String}
 */
 function getSourceHash(source) {
-  const { type, index, href = "inline" } = source;
+  const { type, index, href } = source;
 
   return `${type}${index}${href}`;
 }
 
 /**
 * Generate a hash that uniquely identifies a CSS rule.
 *
 * @param {Object} ruleData
@@ -49,10 +51,43 @@ function getRuleHash(ruleData) {
   const atRules = ancestors.reduce((acc, rule) => {
     acc += `${rule.typeName} ${(rule.conditionText || rule.name || rule.keyText)}`;
     return acc;
   }, "");
 
   return `${atRules}${selector}${ruleIndex}`;
 }
 
+/**
+ * Get a human-friendly style source path to display in the Changes panel.
+ * For element inline styles, return a string indicating that.
+ * For inline stylesheets, return a string indicating that plus the stylesheet's index.
+ * For URLs, return just the stylesheet filename.
+ *
+ * @param {Object} source
+ *        Information about the style source. Contains:
+ *        - type: {String} One of "element" or "stylesheet"
+ *        - href: {String|null} Stylesheet URL or document URL for element inline styles
+ *        - index: {Number} Position of the stylesheet in its document's stylesheet list.
+ * @return {String}
+ */
+function getSourceForDisplay(source) {
+  let href;
+
+  switch (source.type) {
+    case "element":
+      href = getStr("changes.elementStyleLabel");
+      break;
+    case "inline":
+      href = getFormatStr("changes.inlineStyleSheetLabel", `#${source.index}`);
+      break;
+    case "stylesheet":
+      const url = new URL(source.href);
+      href = url.pathname.substring(url.pathname.lastIndexOf("/") + 1);
+      break;
+  }
+
+  return href;
+}
+
+module.exports.getSourceForDisplay = getSourceForDisplay;
 module.exports.getSourceHash = getSourceHash;
 module.exports.getRuleHash = getRuleHash;
--- a/devtools/client/inspector/changes/utils/l10n.js
+++ b/devtools/client/inspector/changes/utils/l10n.js
@@ -4,9 +4,10 @@
 
 "use strict";
 
 const { LocalizationHelper } = require("devtools/shared/l10n");
 const L10N = new LocalizationHelper("devtools/client/locales/changes.properties");
 
 module.exports = {
   getStr: (...args) => L10N.getStr(...args),
+  getFormatStr: (...args) => L10N.getFormatStr(...args),
 };
--- a/devtools/client/locales/en-US/changes.properties
+++ b/devtools/client/locales/en-US/changes.properties
@@ -6,8 +6,21 @@
 # the Inspector sidebar.
 
 # LOCALIZATION NOTE (changes.noChanges): This text is shown when no changes are available.
 changes.noChanges=No changes found.
 
 # LOCALIZATION NOTE (changes.noChangesDescription): This text is shown when no changes are
 # available and provides additional context for the purpose of the Changes panel.
 changes.noChangesDescription=Changes to CSS in Inspector will appear here.
+
+# LOCALIZATION NOTE (changes.inlineStyleSheetLabel): This label appears in the Changes
+# panel above changes done to inline stylesheets. The variable will be replaced with the
+# index of the stylesheet within its document like so: Inline #1
+changes.inlineStyleSheetLabel=Inline %S
+
+# LOCALIZATION NOTE (changes.elementStyleLabel): This label appears in the Changes
+# panel above changes done to element styles.
+changes.elementStyleLabel=Element
+
+# LOCALIZATION NOTE (changes.iframeLabel): This label appears next to URLs of stylesheets
+# and element inline styles hosted by iframes. Lowercase intentional.
+changes.iframeLabel=iframe
--- a/devtools/client/preferences/devtools-client.js
+++ b/devtools/client/preferences/devtools-client.js
@@ -58,24 +58,18 @@ pref("devtools.inspector.fonthighlighter
 // Enable tracking of style changes and the Changes panel in the Inspector
 #if defined(NIGHTLY_BUILD)
 pref("devtools.inspector.changes.enabled", true);
 #else
 pref("devtools.inspector.changes.enabled", false);
 #endif
 
 // Flexbox preferences
-// Enable the Flexbox highlighter and inspector panel in Nightly and DevEdition
-#if defined(NIGHTLY_BUILD) || defined(MOZ_DEV_EDITION)
 pref("devtools.inspector.flexboxHighlighter.enabled", true);
 pref("devtools.flexboxinspector.enabled", true);
-#else
-pref("devtools.inspector.flexboxHighlighter.enabled", false);
-pref("devtools.flexboxinspector.enabled", false);
-#endif
 
 // Grid highlighter preferences
 pref("devtools.gridinspector.gridOutlineMaxColumns", 50);
 pref("devtools.gridinspector.gridOutlineMaxRows", 50);
 pref("devtools.gridinspector.showGridAreas", false);
 pref("devtools.gridinspector.showGridLineNumbers", false);
 pref("devtools.gridinspector.showInfiniteLines", false);
 // Max number of grid highlighters that can be displayed
--- a/devtools/client/shared/test/browser_dbg_addon-console.js
+++ b/devtools/client/shared/test/browser_dbg_addon-console.js
@@ -58,20 +58,20 @@ AddonDebugger.prototype = {
     document.documentElement.appendChild(this.frame);
     window.addEventListener("message", this._onMessage);
 
     const transport = DebuggerServer.connectPipe();
     this.client = new DebuggerClient(transport);
 
     yield this.client.connect();
 
-    const addonTargetActor = yield getAddonActorForId(this.client, addonId);
+    const addonTargetFront = yield this.client.mainRoot.getAddon({ id: addonId });
 
     const targetOptions = {
-      form: addonTargetActor,
+      activeTab: addonTargetFront,
       client: this.client,
       chrome: true,
     };
 
     const toolboxOptions = {
       customIframe: this.frame,
     };
 
--- a/devtools/client/shared/test/browser_dbg_listaddons.js
+++ b/devtools/client/shared/test/browser_dbg_listaddons.js
@@ -16,17 +16,17 @@ var { DebuggerClient } = require("devtoo
 /**
  * Make sure the listAddons request works as specified.
  */
 const ADDON1_ID = "jid1-oBAwBoE5rSecNg@jetpack";
 const ADDON1_PATH = "addon1.xpi";
 const ADDON2_ID = "jid1-qjtzNGV8xw5h2A@jetpack";
 const ADDON2_PATH = "addon2.xpi";
 
-var gAddon1, gAddon1Actor, gAddon2, gClient;
+var gAddon1, gAddon1Front, gAddon2, gClient;
 
 function test() {
   DebuggerServer.init();
   DebuggerServer.registerAllActors();
 
   const transport = DebuggerServer.connectPipe();
   gClient = new DebuggerClient(transport);
   gClient.connect().then(([aType, aTraits]) => {
@@ -50,69 +50,69 @@ function testFirstAddon() {
   let addonListChanged = false;
   gClient.mainRoot.once("addonListChanged").then(() => {
     addonListChanged = true;
   });
 
   return addTemporaryAddon(ADDON1_PATH).then(addon => {
     gAddon1 = addon;
 
-    return getAddonActorForId(gClient, ADDON1_ID).then(grip => {
+    return gClient.mainRoot.getAddon({ id: ADDON1_ID }).then(front => {
       ok(!addonListChanged, "Should not yet be notified that list of addons changed.");
-      ok(grip, "Should find an addon actor for addon1.");
-      gAddon1Actor = grip.actor;
+      ok(front, "Should find an addon actor for addon1.");
+      gAddon1Front = front;
     });
   });
 }
 
 function testSecondAddon() {
   let addonListChanged = false;
   gClient.mainRoot.once("addonListChanged").then(() => {
     addonListChanged = true;
   });
 
   return addTemporaryAddon(ADDON2_PATH).then(addon => {
     gAddon2 = addon;
 
-    return getAddonActorForId(gClient, ADDON1_ID).then(fistGrip => {
-      return getAddonActorForId(gClient, ADDON2_ID).then(secondGrip => {
+    return gClient.mainRoot.getAddon({ id: ADDON1_ID }).then(front1 => {
+      return gClient.mainRoot.getAddon({ id: ADDON2_ID }).then(front2 => {
         ok(addonListChanged, "Should be notified that list of addons changed.");
-        is(fistGrip.actor, gAddon1Actor, "First addon's actor shouldn't have changed.");
-        ok(secondGrip, "Should find a addon actor for the second addon.");
+        is(front1, gAddon1Front, "First addon's actor shouldn't have changed.");
+        ok(front2, "Should find a addon actor for the second addon.");
       });
     });
   });
 }
 
 function testRemoveFirstAddon() {
   let addonListChanged = false;
   gClient.mainRoot.once("addonListChanged").then(() => {
     addonListChanged = true;
   });
 
   return removeAddon(gAddon1).then(() => {
-    return getAddonActorForId(gClient, ADDON1_ID).then(grip => {
+    return gClient.mainRoot.getAddon({ id: ADDON1_ID }).then(front => {
       ok(addonListChanged, "Should be notified that list of addons changed.");
-      ok(!grip, "Shouldn't find a addon actor for the first addon anymore.");
+      ok(!front, "Shouldn't find a addon actor for the first addon anymore.");
     });
   });
 }
 
 function testRemoveSecondAddon() {
   let addonListChanged = false;
   gClient.mainRoot.once("addonListChanged").then(() => {
     addonListChanged = true;
   });
 
   return removeAddon(gAddon2).then(() => {
-    return getAddonActorForId(gClient, ADDON2_ID).then(grip => {
+    return gClient.mainRoot.getAddon({ id: ADDON2_ID }).then(front => {
       ok(addonListChanged, "Should be notified that list of addons changed.");
-      ok(!grip, "Shouldn't find a addon actor for the second addon anymore.");
+      ok(!front, "Shouldn't find a addon actor for the second addon anymore.");
     });
   });
 }
 
 registerCleanupFunction(function() {
   gAddon1 = null;
-  gAddon1Actor = null;
+  gAddon1Front = null;
   gAddon2 = null;
   gClient = null;
 });
--- a/devtools/client/shared/test/helper_addons.js
+++ b/devtools/client/shared/test/helper_addons.js
@@ -30,29 +30,16 @@ function getAddonURIFromPath(path) {
 
 function addTemporaryAddon(path) {
   const addonFile = getAddonURIFromPath(path).file;
   info("Installing addon: " + addonFile.path);
 
   return AddonManager.installTemporaryAddon(addonFile);
 }
 
-function getAddonActorForId(client, addonId) {
-  info("Get addon actor for ID: " + addonId);
-  const deferred = getDeferredPromise().defer();
-
-  client.listAddons().then(response => {
-    const addonTargetActor = response.addons.filter(grip => grip.id == addonId).pop();
-    info("got addon actor for ID: " + addonId);
-    deferred.resolve(addonTargetActor);
-  });
-
-  return deferred.promise;
-}
-
 function removeAddon(addon) {
   info("Removing addon.");
 
   const deferred = getDeferredPromise().defer();
 
   const listener = {
     onUninstalled: function(uninstalledAddon) {
       if (uninstalledAddon != addon) {
--- a/devtools/client/themes/changes.css
+++ b/devtools/client/themes/changes.css
@@ -3,82 +3,85 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
  /* CSS Variables specific to the Changes panel that aren't defined by the themes */
  :root {
    --diff-add-background-color: #f1feec;
    --diff-add-text-color: #54983f;
    --diff-remove-background-color: #fbf2f5;
    --diff-remove-text-color: #bf7173;
-   --diff-level-offset: 15px;
+   --diff-level: 0;
+   --diff-level-offset: 10px;
+   /*
+    Minimum padding so content on the first level (zero) isn't touching the edge. Added
+    and removed lines will re-declare this to add extra padding to clear the +/- icons.
+   */
+   --diff-level-min-offset: 5px;
  }
 
 #sidebar-panel-changes {
   margin: 0;
   padding: 0;
   width: 100%;
   height: 100%;
   overflow: auto;
 }
 
-#sidebar-panel-changes .source[open] {
-  padding-bottom: 10px;
+#sidebar-panel-changes .href {
+  display: flex;
+  align-items: center;
+  color: var(--theme-toolbar-color);
+  background: var(--theme-toolbar-background);
+  border-top: 1px solid var(--theme-splitter-color);
+  border-bottom: 1px solid var(--theme-splitter-color);
+  padding: 4px;
+  font-size: 12px;
 }
 
-#sidebar-panel-changes .href {
-  background: var(--theme-sidebar-background);
-  border-top: 1px solid var(--theme-splitter-color);
-  border-bottom: 1px solid var(--theme-splitter-color);
-  padding: 5px;
+#sidebar-panel-changes .href span {
+  /* Allows trimming of flex item with overflow ellipsis within the flex container */
+  min-width: 0;
   white-space: nowrap;
   text-overflow: ellipsis;
   overflow: hidden;
-  cursor: pointer;
 }
 
-#sidebar-panel-changes .rule .level {
+#sidebar-panel-changes .level {
   padding-top: 3px;
   padding-right: 5px;
   padding-bottom: 3px;
-  padding-left: var(--diff-level-offset);
+  padding-left: calc(var(--diff-level-min-offset) +
+                     var(--diff-level-offset) * var(--diff-level));
   position: relative;
 }
 
-#sidebar-panel-changes .rule > .rule .level {
-  padding-left: calc(var(--diff-level-offset) * 2);
-}
-
-#sidebar-panel-changes .rule > .rule > .rule .level {
-  padding-left: calc(var(--diff-level-offset) * 3);
-}
-
-#sidebar-panel-changes .rule > .rule > .rule > .rule .level {
-  padding-left: calc(var(--diff-level-offset) * 4);
-}
-
-#sidebar-panel-changes .rule .selector:not(.diff-remove):not(.diff-add),
-#sidebar-panel-changes .rule .bracket-close:not(.diff-remove):not(.diff-add) {
-  margin-left: calc(-1 * var(--diff-level-offset) + 5px);
+#sidebar-panel-changes .selector {
+  word-wrap: break-word;
 }
 
 #sidebar-panel-changes .rule .bracket-open {
   display: inline-block;
   margin-left: 5px;
 }
 
 #sidebar-panel-changes .declaration-name {
   margin-left: 10px;
 }
 
 #sidebar-panel-changes .declaration-value {
   margin-left: 5px;
 }
 
+.diff-add,
+.diff-remove {
+  --diff-level-min-offset: 15px;
+}
+
 .diff-add::before,
-.diff-remove::before{
+.diff-remove::before {
   position: absolute;
   left: 5px;
 }
 
 .diff-add {
   background-color: var(--diff-add-background-color);
 }
 
--- a/devtools/client/webconsole/components/JSTerm.js
+++ b/devtools/client/webconsole/components/JSTerm.js
@@ -167,16 +167,23 @@ class JSTerm extends Component {
           }
 
           if (!inputUpdated) {
             return "CodeMirror.Pass";
           }
           return null;
         };
 
+        const onArrowLeft = () => {
+          if (this.autocompletePopup.isOpen || this.getAutoCompletionText()) {
+            this.clearCompletion();
+          }
+          return "CodeMirror.Pass";
+        };
+
         this.editor = new Editor({
           autofocus: true,
           enableCodeFolding: false,
           autoCloseBrackets: false,
           gutters: [],
           lineWrapping: true,
           mode: Editor.modes.js,
           styleActiveLine: false,
@@ -225,22 +232,19 @@ class JSTerm extends Component {
             },
 
             "Up": onArrowUp,
             "Cmd-Up": onArrowUp,
 
             "Down": onArrowDown,
             "Cmd-Down": onArrowDown,
 
-            "Left": () => {
-              if (this.autocompletePopup.isOpen || this.getAutoCompletionText()) {
-                this.clearCompletion();
-              }
-              return "CodeMirror.Pass";
-            },
+            "Left": onArrowLeft,
+            "Ctrl-Left": onArrowLeft,
+            "Cmd-Left": onArrowLeft,
 
             "Right": () => {
               // We only want to complete on Right arrow if the completion text is
               // displayed.
               if (this.getAutoCompletionText()) {
                 this.acceptProposedCompletion();
                 return null;
               }
@@ -313,31 +317,39 @@ class JSTerm extends Component {
                 return null;
               }
 
               if (!this.getInputValue()) {
                 this.hud.outputScroller.scrollTop = 0;
                 return null;
               }
 
+              if (this.getAutoCompletionText()) {
+                this.clearCompletion();
+              }
+
               return "CodeMirror.Pass";
             },
 
             "End": () => {
               if (this.autocompletePopup.isOpen) {
                 this.autocompletePopup.selectItemAtIndex(
                   this.autocompletePopup.itemCount - 1);
                 return null;
               }
 
               if (!this.getInputValue()) {
                 this.hud.outputScroller.scrollTop = this.hud.outputScroller.scrollHeight;
                 return null;
               }
 
+              if (this.getAutoCompletionText()) {
+                this.clearCompletion();
+              }
+
               return "CodeMirror.Pass";
             },
 
             "Ctrl-Space": () => {
               if (!this.autocompletePopup.isOpen) {
                 this.fetchAutocompletionProperties(true);
                 return null;
               }
@@ -860,16 +872,22 @@ class JSTerm extends Component {
       }
 
       if (event.key === " " && !this.autocompletePopup.isOpen) {
         // Open the autocompletion popup on Ctrl-Space (if it wasn't displayed).
         this.fetchAutocompletionProperties(true);
         event.preventDefault();
       }
 
+      if (event.keyCode === KeyCodes.DOM_VK_LEFT &&
+        (this.autocompletePopup.isOpen || this.getAutoCompletionText())
+      ) {
+        this.clearCompletion();
+      }
+
       return;
     } else if (event.keyCode == KeyCodes.DOM_VK_RETURN) {
       if (!this.autocompletePopup.isOpen && (
         event.shiftKey || !Debugger.isCompilableUnit(this.getInputValue())
       )) {
         // shift return or incomplete statement
         return;
       }
@@ -942,26 +960,30 @@ class JSTerm extends Component {
         }
         event.preventDefault();
         break;
 
       case KeyCodes.DOM_VK_HOME:
         if (this.autocompletePopup.isOpen) {
           this.autocompletePopup.selectItemAtIndex(0);
           event.preventDefault();
+        } else if (this.getAutoCompletionText()) {
+          this.clearCompletion();
         } else if (inputValue.length <= 0) {
           this.hud.outputScroller.scrollTop = 0;
           event.preventDefault();
         }
         break;
 
       case KeyCodes.DOM_VK_END:
         if (this.autocompletePopup.isOpen) {
           this.autocompletePopup.selectItemAtIndex(this.autocompletePopup.itemCount - 1);
           event.preventDefault();
+        } else if (this.getAutoCompletionText()) {
+          this.clearCompletion();
         } else if (inputValue.length <= 0) {
           this.hud.outputScroller.scrollTop = this.hud.outputScroller.scrollHeight;
           event.preventDefault();
         }
         break;
 
       case KeyCodes.DOM_VK_LEFT:
         if (this.autocompletePopup.isOpen || this.getAutoCompletionText()) {
--- a/devtools/client/webconsole/test/mochitest/browser_jsterm_autocomplete_arrow_keys.js
+++ b/devtools/client/webconsole/test/mochitest/browser_jsterm_autocomplete_arrow_keys.js
@@ -62,9 +62,20 @@ async function performTests() {
 
   info("Test that arrow right selects selected autocomplete item");
   onPopUpClose = popup.once("popup-closed");
   EventUtils.synthesizeKey("KEY_ArrowRight");
   await onPopUpClose;
   checkInput("window.foo.aa|");
   is(popup.isOpen, false, "popup is closed");
   checkJsTermCompletionValue(jsterm, "", "completeNode is empty");
+
+  await setInputValueForAutocompletion(jsterm, "window.foo.a");
+  const prefix = jsterm.getInputValue().replace(/[\S]/g, " ");
+  checkJsTermCompletionValue(jsterm, prefix + "a", "completeNode has expected value");
+
+  const isOSX = Services.appinfo.OS == "Darwin";
+  EventUtils.synthesizeKey("KEY_ArrowLeft", {
+    [isOSX ? "metaKey" : "ctrlKey"]: true,
+  });
+  checkJsTermCompletionValue(jsterm, "",
+    "completeNode was cleared after Ctrl/Cmd + left");
 }
--- a/devtools/client/webconsole/test/mochitest/browser_jsterm_autocomplete_nav_and_tab_key.js
+++ b/devtools/client/webconsole/test/mochitest/browser_jsterm_autocomplete_nav_and_tab_key.js
@@ -8,23 +8,22 @@
 // See Bug 585991.
 
 const TEST_URI = `data:text/html;charset=utf-8,
 <head>
   <script>
     /* Create a prototype-less object so popup does not contain native
      * Object prototype properties.
      */
-    window.foo = Object.create(null);
-    Object.assign(window.foo, {
-      item0: "value0",
+    window.foo = Object.create(null, Object.getOwnPropertyDescriptors({
+      item00: "value0",
       item1: "value1",
       item2: "value2",
       item3: "value3",
-    });
+    }));
   </script>
 </head>
 <body>bug 585991 - autocomplete popup navigation and tab key usage test</body>`;
 
 add_task(async function() {
   // Run test with legacy JsTerm
   await pushPref("devtools.webconsole.jsterm.codeMirror", false);
   await performTests();
@@ -47,30 +46,30 @@ async function performTests() {
   // Shows the popup
   EventUtils.sendString(".");
   await onPopUpOpen;
 
   ok(popup.isOpen, "popup is open");
 
   const popupItems = popup.getItems().map(e => e.label);
   const expectedPopupItems = [
-    "item0",
+    "item00",
     "item1",
     "item2",
     "item3",
   ];
 
   is(popup.itemCount, expectedPopupItems.length, "popup.itemCount is correct");
   is(popupItems.join("-"), expectedPopupItems.join("-"),
     "getItems returns the items we expect");
   is(popup.selectedIndex, 0, "Index of the first item is selected.");
 
   EventUtils.synthesizeKey("KEY_ArrowUp");
 
-  const prefix = jsterm.getInputValue().replace(/[\S]/g, " ");
+  let prefix = jsterm.getInputValue().replace(/[\S]/g, " ");
   is(popup.selectedIndex, 3, "index 3 is selected");
   is(popup.selectedItem.label, "item3", "item3 is selected");
   checkJsTermCompletionValue(jsterm, prefix + "item3", "completeNode.value holds item3");
 
   EventUtils.synthesizeKey("KEY_ArrowUp");
 
   is(popup.selectedIndex, 2, "index 2 is selected");
   is(popup.selectedItem.label, "item2", "item2 is selected");
@@ -103,9 +102,23 @@ async function performTests() {
 
   await onPopupClose;
 
   // At this point the completion suggestion should be accepted.
   ok(!popup.isOpen, "popup is not open");
   is(jsterm.getInputValue(), "window.foo.item3",
      "completion was successful after KEY_Tab");
   ok(!getJsTermCompletionValue(jsterm), "completeNode is empty");
+
+  info("Check that hitting Home hides the completion text when the popup is hidden");
+  await setInputValueForAutocompletion(jsterm, "window.foo.item0");
+  prefix = jsterm.getInputValue().replace(/[\S]/g, " ");
+  checkJsTermCompletionValue(jsterm, prefix + "0", "completeNode has expected value");
+  EventUtils.synthesizeKey("KEY_Home");
+  checkJsTermCompletionValue(jsterm, "", "completeNode was cleared after hitting Home");
+
+  info("Check that hitting End hides the completion text when the popup is hidden");
+  await setInputValueForAutocompletion(jsterm, "window.foo.item0");
+  prefix = jsterm.getInputValue().replace(/[\S]/g, " ");
+  checkJsTermCompletionValue(jsterm, prefix + "0", "completeNode has expected value");
+  EventUtils.synthesizeKey("KEY_End");
+  checkJsTermCompletionValue(jsterm, "", "completeNode was cleared after hitting End");
 }
--- a/devtools/server/actors/styles.js
+++ b/devtools/server/actors/styles.js
@@ -105,16 +105,20 @@ var PageStyleActor = protocol.ActorClass
     }
     this._watchedSheets.clear();
   },
 
   get conn() {
     return this.inspector.conn;
   },
 
+  get ownerWindow() {
+    return this.inspector.targetActor.window;
+  },
+
   form: function(detail) {
     if (detail === "actorid") {
       return this.actorID;
     }
 
     // We need to use CSS from the inspected window in order to use CSS.supports() and
     // detect the right platform features from there.
     const CSS = this.inspector.targetActor.window.CSS;
@@ -1122,26 +1126,31 @@ var StyleRuleActor = protocol.ActorClass
 
       data.source = {
         type: "element",
         // Used to differentiate between elements which match the same generated selector
         // but live in different documents (ex: host document and iframe).
         href: this.rawNode.baseURI,
         // Element style attributes don't have a rule index; use the generated selector.
         index: data.selector,
+        // Whether the element lives in a different frame than the host document.
+        isFramed: this.rawNode.ownerGlobal !== this.pageStyle.ownerWindow,
       };
       data.ruleIndex = 0;
     } else {
       data.selector = (this.type === CSSRule.KEYFRAME_RULE)
         ? this.rawRule.keyText
         : this.rawRule.selectorText;
       data.source = {
-        type: "stylesheet",
-        href: this.sheetActor.href,
+        // Inline stylesheets have a null href; Use window URL instead.
+        type: this.sheetActor.href ? "stylesheet" : "inline",
+        href: this.sheetActor.href || this.sheetActor.window.location.toString(),
         index: this.sheetActor.styleSheetIndex,
+        // Whether the stylesheet lives in a different frame than the host document.
+        isFramed: this.sheetActor.ownerWindow !== this.sheetActor.window,
       };
       // Used to differentiate between changes to rules with identical selectors.
       data.ruleIndex = this._ruleIndex;
     }
 
     return data;
   },
 
--- a/devtools/server/tests/mochitest/test_webextension-addon-debugging-connect.html
+++ b/devtools/server/tests/mochitest/test_webextension-addon-debugging-connect.html
@@ -32,23 +32,22 @@ async function test_connect_addon(oopMod
   await extension.awaitMessage("background page ready");
 
   // Connect a DebuggerClient.
   const transport = DebuggerServer.connectPipe();
   const client = new DebuggerClient(transport);
   await client.connect();
 
   // List addons and assertions on the expected addon actor.
-  const {addons} = await client.mainRoot.listAddons();
-  const addonTargetActor = addons.filter(actor => actor.id === extension.id).pop();
-  ok(addonTargetActor, "The expected webextension addon actor has been found");
+  const addonTargetFront = await client.mainRoot.getAddon({ id: extension.id });
+  ok(addonTargetFront, "The expected webextension addon actor has been found");
 
   // Connect to the target addon actor and wait for the updated list of frames.
   const addonTarget = await TargetFactory.forRemoteTab({
-    form: addonTargetActor,
+    activeTab: addonTargetFront,
     client,
     chrome: true,
   });
   is(addonTarget.form.isOOP, oopMode,
      "Got the expected oop mode in the webextension actor form");
   const frames = await waitForFramesUpdated(addonTarget);
   const backgroundPageFrame = frames.filter((frame) => {
     return frame.url && frame.url.endsWith("/_generated_background_page.html");
--- a/devtools/server/tests/mochitest/webextension-helpers.js
+++ b/devtools/server/tests/mochitest/webextension-helpers.js
@@ -93,46 +93,41 @@ function collectFrameUpdates({client}, m
 }
 
 async function attachAddon(addonId) {
   const transport = DebuggerServer.connectPipe();
   const client = new DebuggerClient(transport);
 
   await client.connect();
 
-  const {addons} = await client.mainRoot.listAddons();
-  const addonTargetActor = addons.filter(actor => actor.id === addonId).pop();
+  const addonTargetFront = await client.mainRoot.getAddon({ id: addonId });
 
-  if (!addonTargetActor) {
+  if (!addonTargetFront) {
     client.close();
     throw new Error(`No WebExtension Actor found for ${addonId}`);
   }
 
   const addonTarget = await TargetFactory.forRemoteTab({
-    form: addonTargetActor,
+    activeTab: addonTargetFront,
     client,
     chrome: true,
   });
 
   return addonTarget;
 }
 
 async function reloadAddon({client}, addonId) {
-  const {addons} = await client.mainRoot.listAddons();
-  const addonTargetActor = addons.filter(actor => actor.id === addonId).pop();
+  const addonTargetFront = await client.mainRoot.getAddon({ id: addonId });
 
-  if (!addonTargetActor) {
+  if (!addonTargetFront) {
     client.close();
     throw new Error(`No WebExtension Actor found for ${addonId}`);
   }
 
-  await client.request({
-    to: addonTargetActor.actor,
-    type: "reload",
-  });
+  await addonTargetFront.reload();
 }
 
 // Test helpers related to the AddonManager.
 
 function generateWebExtensionXPI(extDetails) {
   const addonFile = Extension.generateXPI(extDetails);
 
   flushJarCache(addonFile.path);
--- a/devtools/server/tests/unit/test_addon_events.js
+++ b/devtools/server/tests/unit/test_addon_events.js
@@ -13,17 +13,17 @@ add_task(async function testReloadExited
   DebuggerServer.registerAllActors();
 
   const client = new DebuggerClient(DebuggerServer.connectPipe());
   await client.connect();
 
   // Retrieve the current list of addons to be notified of the next list update.
   // We will also call listAddons every time we receive the event "addonListChanged" for
   // the same reason.
-  await client.listAddons();
+  await client.mainRoot.listAddons();
 
   info("Install the addon");
   const addonFile = do_get_file("addons/web-extension", false);
 
   let installedAddon;
   await expectAddonListChanged(client, async () => {
     installedAddon = await AddonManager.installTemporaryAddon(addonFile);
   });
@@ -51,10 +51,10 @@ add_task(async function testReloadExited
 
   await close(client);
 });
 
 async function expectAddonListChanged(client, predicate) {
   const onAddonListChanged = client.mainRoot.once("addonListChanged");
   await predicate();
   await onAddonListChanged;
-  await client.listAddons();
+  await client.mainRoot.listAddons();
 }
--- a/devtools/server/tests/unit/test_addon_reload.js
+++ b/devtools/server/tests/unit/test_addon_reload.js
@@ -29,27 +29,20 @@ function promiseWebExtensionStartup() {
       Management.off("ready", listener);
       resolve(extension);
     };
 
     Management.on("ready", listener);
   });
 }
 
-async function findAddonInRootList(client, addonId) {
-  const result = await client.listAddons();
-  const addonTargetActor = result.addons.filter(addon => addon.id === addonId)[0];
-  ok(addonTargetActor, `Found add-on actor for ${addonId}`);
-  return addonTargetActor;
-}
-
-async function reloadAddon(client, addonTargetActor) {
+async function reloadAddon(addonTargetFront) {
   // The add-on will be re-installed after a successful reload.
   const onInstalled = promiseAddonEvent("onInstalled");
-  await client.request({to: addonTargetActor.actor, type: "reload"});
+  await addonTargetFront.reload();
   await onInstalled;
 }
 
 function getSupportFile(path) {
   const allowMissing = false;
   return do_get_file(path, allowMissing);
 }
 
@@ -69,51 +62,51 @@ add_task(async function testReloadExited
 
   // Install a decoy add-on.
   const addonFile2 = getSupportFile("addons/web-extension2");
   const [installedAddon2] = await Promise.all([
     AddonManager.installTemporaryAddon(addonFile2),
     promiseWebExtensionStartup(),
   ]);
 
-  const addonTargetActor = await findAddonInRootList(client, installedAddon.id);
+  const addonTargetFront = await client.mainRoot.getAddon({ id: installedAddon.id });
 
   await Promise.all([
-    reloadAddon(client, addonTargetActor),
+    reloadAddon(addonTargetFront),
     promiseWebExtensionStartup(),
   ]);
 
   // Uninstall the decoy add-on, which should cause its actor to exit.
   const onUninstalled = promiseAddonEvent("onUninstalled");
   installedAddon2.uninstall();
   await onUninstalled;
 
   // Try to re-list all add-ons after a reload.
   // This was throwing an exception because of the exited actor.
-  const newAddonActor = await findAddonInRootList(client, installedAddon.id);
-  equal(newAddonActor.id, addonTargetActor.id);
+  const newAddonFront = await client.mainRoot.getAddon({ id: installedAddon.id });
+  equal(newAddonFront.id, addonTargetFront.id);
 
-  // The actor id should be the same after the reload
-  equal(newAddonActor.actor, addonTargetActor.actor);
+  // The fronts should be the same after the reload
+  equal(newAddonFront, addonTargetFront);
 
   const onAddonListChanged = client.mainRoot.once("addonListChanged");
 
   // Install an upgrade version of the first add-on.
   const addonUpgradeFile = getSupportFile("addons/web-extension-upgrade");
   const [upgradedAddon] = await Promise.all([
     AddonManager.installTemporaryAddon(addonUpgradeFile),
     promiseWebExtensionStartup(),
   ]);
 
   // Waiting for addonListChanged unsolicited event
   await onAddonListChanged;
 
   // re-list all add-ons after an upgrade.
-  const upgradedAddonActor = await findAddonInRootList(client, upgradedAddon.id);
-  equal(upgradedAddonActor.id, addonTargetActor.id);
-  // The actor id should be the same after the upgrade.
-  equal(upgradedAddonActor.actor, addonTargetActor.actor);
+  const upgradedAddonFront = await client.mainRoot.getAddon({ id: upgradedAddon.id });
+  equal(upgradedAddonFront.id, addonTargetFront.id);
+  // The fronts should be the same after the upgrade.
+  equal(upgradedAddonFront, addonTargetFront);
 
   // The addon metadata has been updated.
-  equal(upgradedAddonActor.name, "Test Addons Actor Upgrade");
+  equal(upgradedAddonFront.name, "Test Addons Actor Upgrade");
 
   await close(client);
 });
--- a/devtools/server/tests/unit/test_addons_actor.js
+++ b/devtools/server/tests/unit/test_addons_actor.js
@@ -24,20 +24,20 @@ add_task(async function testSuccessfulIn
   const usePlatformSeparator = true;
   const addonPath = getFilePath("addons/web-extension",
                                 allowMissing, usePlatformSeparator);
   const installedAddon = await addons.installTemporaryAddon(addonPath);
   equal(installedAddon.id, "test-addons-actor@mozilla.org");
   // The returned object is currently not a proper actor.
   equal(installedAddon.actor, false);
 
-  const addonList = await client.listAddons();
-  ok(addonList && addonList.addons && addonList.addons.map(a => a.name),
+  const addonList = await client.mainRoot.listAddons();
+  ok(addonList && addonList.map(a => a.name),
      "Received list of add-ons");
-  const addon = addonList.addons.filter(a => a.id === installedAddon.id)[0];
+  const addon = addonList.find(a => a.id === installedAddon.id);
   ok(addon, "Test add-on appeared in root install list");
 
   await close(client);
 });
 
 add_task(async function testNonExistantPath() {
   const [client, addons] = await connect();
 
deleted file mode 100644
--- a/devtools/shared/client/addon-client.js
+++ /dev/null
@@ -1,43 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-const {DebuggerClient} = require("devtools/shared/client/debugger-client");
-
-function AddonClient(client, actor) {
-  this._client = client;
-  this._actor = actor;
-  this.request = this._client.request;
-  this.events = [];
-}
-
-AddonClient.prototype = {
-  get actor() {
-    return this._actor;
-  },
-  get _transport() {
-    return this._client._transport;
-  },
-
-  /**
-   * Detach the client from the addon actor.
-   *
-   * @param function onResponse
-   *        Called with the response packet.
-   */
-  detach: DebuggerClient.requester({
-    type: "detach",
-  }, {
-    after: function(response) {
-      if (this._client.activeAddon === this) {
-        this._client.activeAddon = null;
-      }
-      this._client.unregisterClient(this);
-      return response;
-    },
-  }),
-};
-
-module.exports = AddonClient;
--- a/devtools/shared/client/debugger-client.js
+++ b/devtools/shared/client/debugger-client.js
@@ -17,17 +17,16 @@ const {
   UnsolicitedPauses,
 } = require("./constants");
 
 loader.lazyRequireGetter(this, "Authentication", "devtools/shared/security/auth");
 loader.lazyRequireGetter(this, "DebuggerSocket", "devtools/shared/security/socket", true);
 loader.lazyRequireGetter(this, "EventEmitter", "devtools/shared/event-emitter");
 
 loader.lazyRequireGetter(this, "WebConsoleClient", "devtools/shared/webconsole/client", true);
-loader.lazyRequireGetter(this, "AddonTargetFront", "devtools/shared/fronts/targets/addon", true);
 loader.lazyRequireGetter(this, "RootFront", "devtools/shared/fronts/root", true);
 loader.lazyRequireGetter(this, "BrowsingContextTargetFront", "devtools/shared/fronts/targets/browsing-context", true);
 loader.lazyRequireGetter(this, "ThreadClient", "devtools/shared/client/thread-client");
 loader.lazyRequireGetter(this, "ObjectClient", "devtools/shared/client/object-client");
 loader.lazyRequireGetter(this, "Pool", "devtools/shared/protocol", true);
 loader.lazyRequireGetter(this, "Front", "devtools/shared/protocol", true);
 
 // Retrieve the major platform version, i.e. if we are on Firefox 64.0a1, it will be 64.
@@ -340,24 +339,16 @@ DebuggerClient.prototype = {
   /*
    * This function exists only to preserve DebuggerClient's interface;
    * new code should say 'client.mainRoot.listTabs()'.
    */
   listTabs: function(options) {
     return this.mainRoot.listTabs(options);
   },
 
-  /*
-   * This function exists only to preserve DebuggerClient's interface;
-   * new code should say 'client.mainRoot.listAddons()'.
-   */
-  listAddons: function() {
-    return this.mainRoot.listAddons();
-  },
-
   getTab: function(filter) {
     return this.mainRoot.getTab(filter);
   },
 
   /**
    * Attach to a target actor:
    *
    *  - start watching for new documents (emits `tabNativated` messages)
@@ -376,33 +367,16 @@ DebuggerClient.prototype = {
       this._frontPool.manage(front);
     }
 
     const response = await front.attach();
     return [response, front];
   },
 
   /**
-   * Attach to an addon target actor.
-   *
-   * @param string addonTargetActor
-   *        The actor ID for the addon to attach.
-   */
-  attachAddon: async function(form) {
-    let front = this._frontPool.actor(form.actor);
-    if (!front) {
-      front = new AddonTargetFront(this, form);
-      this._frontPool.manage(front);
-    }
-
-    const response = await front.attach();
-    return [response, front];
-  },
-
-  /**
    * Attach to a Web Console actor. Depending on the listeners being passed as second
    * arguments, starts listening for:
    * - PageError:
    *   Javascript error happening in the debugged context
    * - ConsoleAPI:
    *   Calls made to console.* API
    * - NetworkActivity:
    *   Http requests made in the debugged context
--- a/devtools/shared/client/moz.build
+++ b/devtools/shared/client/moz.build
@@ -1,16 +1,15 @@
 # -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 DevToolsModules(
-    'addon-client.js',
     'array-buffer-client.js',
     'breakpoint-client.js',
     'connection-manager.js',
     'constants.js',
     'debugger-client.js',
     'environment-client.js',
     'event-source.js',
     'long-string-client.js',
--- a/devtools/shared/fronts/root.js
+++ b/devtools/shared/fronts/root.js
@@ -211,16 +211,30 @@ const RootFront = protocol.FrontClassWit
     }
 
     return this._getTab(packet);
   }, {
     impl: "_getTab",
   }),
 
   /**
+   * Fetch the target front for a given add-on.
+   * This is just a helper on top of `listAddons` request.
+   *
+   * @param object filter
+   *        A dictionary object with following attribute:
+   *         - id: used to match the add-on to connect to.
+   */
+  async getAddon({ id }) {
+    const addons = await this.listAddons();
+    const addonTargetFront = addons.find(addon => addon.id === id);
+    return addonTargetFront;
+  },
+
+  /**
    * Test request that returns the object passed as first argument.
    *
    * `echo` is special as all the property of the given object have to be passed
    * on the packet object. That's not something that can be achieve by requester helper.
    */
 
   echo(packet) {
     packet.type = "echo";
--- a/devtools/shared/fronts/targets/addon.js
+++ b/devtools/shared/fronts/targets/addon.js
@@ -1,26 +1,75 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
 const {addonTargetSpec} = require("devtools/shared/specs/targets/addon");
 const protocol = require("devtools/shared/protocol");
 const {custom} = protocol;
+loader.lazyRequireGetter(this, "BrowsingContextTargetFront", "devtools/shared/fronts/targets/browsing-context", true);
 
 const AddonTargetFront = protocol.FrontClassWithSpec(addonTargetSpec, {
-  initialize: function(client, form) {
-    protocol.Front.prototype.initialize.call(this, client, form);
+  initialize: function(client) {
+    protocol.Front.prototype.initialize.call(this, client);
 
     this.client = client;
 
     this.traits = {};
   },
 
+  form(json) {
+    this.actorID = json.actor;
+
+    // Save the full form for Target class usage.
+    // Do not use `form` name to avoid colliding with protocol.js's `form` method
+    this.targetForm = json;
+
+    // We used to manipulate the form rather than the front itself.
+    // Expose all form attributes to ease accessing them.
+    for (const name in json) {
+      if (name == "actor") {
+        continue;
+      }
+      this[name] = json[name];
+    }
+  },
+
+  isLegacyTemporaryExtension() {
+    if (!this.type) {
+      // If about:debugging is connected to a remote Firefox older than 59, and type is
+      // not available on the addon/webextension actors, return false to avoid showing
+      // irrelevant warning messages.
+      return false;
+    }
+    return this.type == "extension" &&
+           this.temporarilyInstalled &&
+           !this.isWebExtension &&
+           !this.isAPIExtension;
+  },
+
+  /**
+   * Returns the actual target front for web extensions.
+   *
+   * AddonTargetActor is used for WebExtensions, but this isn't the final target actor
+   * we want to use for it. AddonTargetActor only exposes metadata about the Add-on, like
+   * its name, type, ... Instead, we want to use a WebExtensionTargetActor, which
+   * inherits from BrowsingContextTargetActor. This connect method is used to retrieve
+   * the final target actor to use.
+   */
+  connect: custom(async function() {
+    const { form } = await this._connect();
+    const front = new BrowsingContextTargetFront(this.client, form);
+    this.manage(front);
+    return front;
+  }, {
+    impl: "_connect",
+  }),
+
   attach: custom(async function() {
     const response = await this._attach();
 
     this.threadActor = response.threadActor;
 
     return response;
   }, {
     impl: "_attach",
--- a/devtools/shared/specs/index.js
+++ b/devtools/shared/specs/index.js
@@ -235,16 +235,21 @@ const Types = exports.__TypesForTests = 
     front: null,
   },
   {
     types: ["symbolIterator"],
     spec: "devtools/shared/specs/symbol-iterator",
     front: null,
   },
   {
+    types: ["addonTarget"],
+    spec: "devtools/shared/specs/targets/addon",
+    front: "devtools/shared/fronts/targets/addon",
+  },
+  {
     types: ["browsingContextTarget"],
     spec: "devtools/shared/specs/targets/browsing-context",
     front: null,
   },
   {
     types: ["chromeWindowTarget"],
     spec: "devtools/shared/specs/targets/chrome-window",
     front: null,
--- a/devtools/shared/specs/root.js
+++ b/devtools/shared/specs/root.js
@@ -6,19 +6,16 @@
 const { types, generateActorSpec, RetVal, Arg, Option } = require("devtools/shared/protocol");
 
 types.addDictType("root.getTab", {
   tab: "json",
 });
 types.addDictType("root.getWindow", {
   window: "json",
 });
-types.addDictType("root.listAddons", {
-  addons: "array:json",
-});
 types.addDictType("root.listWorkers", {
   workers: "array:workerTarget",
 });
 types.addDictType("root.listServiceWorkerRegistrations", {
   registrations: "array:json",
 });
 types.addDictType("root.listProcesses", {
   processes: "array:json",
@@ -52,17 +49,19 @@ const rootSpecPrototype = {
       request: {
         outerWindowID: Option(0, "number"),
       },
       response: RetVal("root.getWindow"),
     },
 
     listAddons: {
       request: {},
-      response: RetVal("root.listAddons"),
+      response: {
+        addons: RetVal("array:addonTarget"),
+      },
     },
 
     listWorkers: {
       request: {},
       response: RetVal("root.listWorkers"),
     },
 
     listServiceWorkerRegistrations: {
--- a/devtools/shared/specs/targets/addon.js
+++ b/devtools/shared/specs/targets/addon.js
@@ -1,33 +1,35 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
-const {Arg, Option, RetVal, generateActorSpec} = require("devtools/shared/protocol");
+const {Option, RetVal, generateActorSpec} = require("devtools/shared/protocol");
 
 const addonTargetSpec = generateActorSpec({
   typeName: "addonTarget",
 
   methods: {
     attach: {
       request: {},
       response: RetVal("json"),
     },
     detach: {
       request: {},
       response: RetVal("json"),
     },
     connect: {
-      request: {
-        options: Arg(0, "json"),
-      },
+      request: {},
       response: RetVal("json"),
     },
+    reload: {
+      request: {},
+      response: {},
+    },
     push: {
       request: {},
       response: RetVal("json"),
     },
   },
 
   events: {
     // newSource is being sent by ThreadActor in the name of its parent,
--- a/devtools/shared/webconsole/js-property-provider.js
+++ b/devtools/shared/webconsole/js-property-provider.js
@@ -6,16 +6,17 @@
 
 "use strict";
 
 const DevToolsUtils = require("devtools/shared/DevToolsUtils");
 
 if (!isWorker) {
   loader.lazyImporter(this, "Parser", "resource://devtools/shared/Parser.jsm");
 }
+loader.lazyRequireGetter(this, "Reflect", "resource://gre/modules/reflect.jsm", true);
 
 // Provide an easy way to bail out of even attempting an autocompletion
 // if an object has way too many properties. Protects against large objects
 // with numeric values that wouldn't be tallied towards MAX_AUTOCOMPLETIONS.
 const MAX_AUTOCOMPLETE_ATTEMPTS = exports.MAX_AUTOCOMPLETE_ATTEMPTS = 100000;
 // Prevent iterating over too many properties during autocomplete suggestions.
 const MAX_AUTOCOMPLETIONS = exports.MAX_AUTOCOMPLETIONS = 1500;
 
@@ -112,22 +113,23 @@ function analyzeInputString(str) {
           const nextNonSpaceCharIndex = after.indexOf(nextNonSpaceChar);
           const previousNonSpaceChar = trimmedBefore[trimmedBefore.length - 1];
 
           // There's only spaces after that, so we can return.
           if (!nextNonSpaceChar) {
             return buildReturnObject();
           }
 
-          // If the previous char in't a dot, and the next one isn't a dot either,
-          // and the current computed statement is not a variable/function/class
-          // declaration, update the start position.
+          // If the previous char isn't a dot or opening bracket, and the next one isn't
+          // one either, and the current computed statement is not a
+          // variable/function/class declaration, update the start position.
           if (
-            previousNonSpaceChar !== "." && nextNonSpaceChar !== "."
-            && !NO_AUTOCOMPLETE_PREFIXES.includes(currentLastStatement)
+            previousNonSpaceChar !== "." && nextNonSpaceChar !== "." &&
+            previousNonSpaceChar !== "[" && nextNonSpaceChar !== "[" &&
+            !NO_AUTOCOMPLETE_PREFIXES.includes(currentLastStatement)
           ) {
             start = i + nextNonSpaceCharIndex;
           }
 
           // Let's jump to handle the next non-space char.
           i = i + nextNonSpaceCharIndex;
         } else if (OPEN_BODY.includes(c)) {
           bodyStack.push({
@@ -474,17 +476,34 @@ function JSPropertyProvider({
     }
   }
 
   const prepareReturnedObject = matches => {
     if (isElementAccess) {
       // If it's an element access, we need to wrap properties in quotes (either the one
       // the user already typed, or `"`).
       matches = wrapMatchesInQuotes(matches, elementAccessQuote);
+    } else if (!isWorker) {
+      // If we're not performing an element access, we need to check that the property
+      // are suited for a dot access. (Reflect.jsm is not available in worker context yet,
+      // see Bug 1507181).
+      matches = new Set([...matches].filter(propertyName => {
+        let valid = true;
+        try {
+          // In order to know if the property is suited for dot notation, we use Reflect
+          // to parse an expression where we try to access the property with a dot. If it
+          // throws, this means that we need to do an element access instead.
+          Reflect.parse(`({${propertyName}: true})`);
+        } catch (e) {
+          valid = false;
+        }
+        return valid;
+      }));
     }
+
     return {isElementAccess, matchProp, matches};
   };
 
   // If the final property is a primitive
   if (typeof obj != "object") {
     return prepareReturnedObject(getMatchedProps(obj, search));
   }
 
--- a/devtools/shared/webconsole/test/test_jsterm_autocomplete.html
+++ b/devtools/shared/webconsole/test/test_jsterm_autocomplete.html
@@ -106,21 +106,23 @@
       doAutocompleteAfterOperator,
       dontAutocompleteAfterDeclaration,
       doKeywordsAutocomplete,
     ];
 
     if (!isWorker) {
       // `Cu` is not defined in workers, then we can't test `Cu.Sandbox`
       tests.push(doAutocompleteSandbox);
-      // Array literal, string and commands completion aren't handled in Workers yet.
+      // Some cases aren't handled in worker context because we can't use Parser.jsm.
+      // See Bug 1507181.
       tests.push(
         doAutocompleteArray,
         doAutocompleteString,
         doAutocompleteCommands,
+        doAutocompleteBracketSurroundedBySpaces,
       );
     }
 
     for (const test of tests) {
       await test(state.client);
     }
 
     await closeDebugger(state);
@@ -294,21 +296,41 @@
     is(matches.length, 7);
     checkObject(matches,
       ["foo", "foobar", "foobaz", "omg", "omgfoo", "omgstr", "strfoo"]);
 
     matches =
       (await client.autocomplete("window.foobarObject.  foo ; window.foo")).matches;
     is(matches.length, 1);
     checkObject(matches, ["foobarObject"]);
+  }
 
-    matches =
-      (await client.autocomplete("window.emojiObject  .  ")).matches;
+  async function doAutocompleteBracketSurroundedBySpaces(client) {
+    const wrap = (arr, quote = `"`) => arr.map(x => `${quote}${x}${quote}`);
+    let matches = await getAutocompleteMatches(client, "window.foobarObject\n  [");
+    is(matches.length, 7);
+    checkObject(matches,
+      wrap(["foo", "foobar", "foobaz", "omg", "omgfoo", "omgstr", "strfoo"]));
+
+    matches = await getAutocompleteMatches(client, "window.foobarObject\n  ['o");
+    is(matches.length, 3);
+    checkObject(matches, wrap(["omg", "omgfoo", "omgstr"], "'"));
+
+    matches = await getAutocompleteMatches(client, "window.foobarObject\n  [\n  s");
     is(matches.length, 1);
-    checkObject(matches, ["šŸ˜Ž"]);
+    checkObject(matches, [`"strfoo"`]);
+
+    matches = await getAutocompleteMatches(client, "window.foobarObject\n  [  ");
+    is(matches.length, 7);
+    checkObject(matches,
+      wrap(["foo", "foobar", "foobaz", "omg", "omgfoo", "omgstr", "strfoo"]));
+
+    matches = await getAutocompleteMatches(client, "window.emojiObject  [   '");
+    is(matches.length, 1);
+    checkObject(matches, [`'šŸ˜Ž'`]);
   }
 
   async function doAutocompleteAfterOr(client) {
     info("test autocomplete for 'true || foo'");
     const {matches} = await client.autocomplete("true || foobar");
     is(matches.length, 1, "autocomplete returns expected results");
     is(matches.join("-"), "foobarObject");
   }
@@ -537,11 +559,17 @@ async function doKeywordsAutocomplete(cl
   ok(!matches.includes("function"),
     "'function' is not returned when doing a property access");
 
   info("test autocomplete for 'window[func'");
   matches = (await client.autocomplete("window[func")).matches;
   ok(!matches.includes("function"),
     "'function' is not returned when doing an element access");
   }
+
+  async function getAutocompleteMatches(client, input) {
+    info(`test autocomplete for "${input}"`);
+    const res = (await client.autocomplete(input));
+    return res.matches;
+  }
 </script>
 </body>
 </html>
--- a/devtools/shared/webconsole/test/unit/test_js_property_provider.js
+++ b/devtools/shared/webconsole/test/unit/test_js_property_provider.js
@@ -343,16 +343,24 @@ function runChecks(dbgObject, environmen
   results = propertyProvider("(1.1)[");
   test_has_result(results, `"toFixed"`);
 
   results = propertyProvider("(1)[toFixed");
   test_has_exact_results(results, [`"toFixed"`]);
 
   results = propertyProvider("(1)['toFixed");
   test_has_exact_results(results, ["'toFixed'"]);
+
+  info("Test access on dot-notation invalid property name");
+  results = propertyProvider("testHyphenated.prop");
+  Assert.ok(!results.matches.has("prop-A"),
+    "Does not return invalid property name on dot access");
+
+  results = propertyProvider("testHyphenated['prop");
+  test_has_result(results, `'prop-A'`);
 }
 
 /**
  * A helper that ensures an empty array of results were found.
  * @param Object results
  *        The results returned by JSPropertyProvider.
  */
 function test_has_no_results(results) {
--- a/docshell/base/ChromeBrowsingContext.cpp
+++ b/docshell/base/ChromeBrowsingContext.cpp
@@ -1,18 +1,16 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "ChromeBrowsingContext.h"
 
-#include "mozilla/dom/ContentParent.h"
-
 namespace mozilla {
 namespace dom {
 
 ChromeBrowsingContext::ChromeBrowsingContext(BrowsingContext* aParent,
                                              BrowsingContext* aOpener,
                                              const nsAString& aName,
                                              uint64_t aBrowsingContextId,
                                              uint64_t aProcessId,
--- a/docshell/base/nsDocShell.cpp
+++ b/docshell/base/nsDocShell.cpp
@@ -1454,24 +1454,16 @@ NS_IMETHODIMP
 nsDocShell::GetHasTrackingContentBlocked(bool* aHasTrackingContentBlocked)
 {
   nsCOMPtr<nsIDocument> doc(GetDocument());
   *aHasTrackingContentBlocked = doc && doc->GetHasTrackingContentBlocked();
   return NS_OK;
 }
 
 NS_IMETHODIMP
-nsDocShell::GetHasSlowTrackingContentBlocked(bool* aHasSlowTrackingContentBlocked)
-{
-  nsCOMPtr<nsIDocument> doc(GetDocument());
-  *aHasSlowTrackingContentBlocked = doc && doc->GetHasSlowTrackingContentBlocked();
-  return NS_OK;
-}
-
-NS_IMETHODIMP
 nsDocShell::GetHasTrackingContentLoaded(bool* aHasTrackingContentLoaded)
 {
   nsCOMPtr<nsIDocument> doc(GetDocument());
   *aHasTrackingContentLoaded = doc && doc->GetHasTrackingContentLoaded();
   return NS_OK;
 }
 
 NS_IMETHODIMP
@@ -4280,32 +4272,16 @@ nsDocShell::DisplayLoadError(nsresult aE
   nsAutoString formatStrs[kMaxFormatStrArgs];
   uint32_t formatStrCount = 0;
   bool addHostPort = false;
   nsresult rv = NS_OK;
   nsAutoString messageStr;
   nsAutoCString cssClass;
   nsAutoCString errorPage;
 
-  if (mLoadURIDelegate) {
-    nsCOMPtr<nsIURI> errorPageURI;
-    rv = mLoadURIDelegate->HandleLoadError(aURI, aError,
-                                           NS_ERROR_GET_MODULE(aError),
-                                           getter_AddRefs(errorPageURI));
-    if (NS_FAILED(rv)) {
-      *aDisplayedErrorPage = false;
-      return NS_OK;
-    }
-
-    if (errorPageURI) {
-      *aDisplayedErrorPage = NS_SUCCEEDED(LoadErrorPage(errorPageURI, aURI, aFailedChannel));
-      return NS_OK;
-    }
-  }
-
   errorPage.AssignLiteral("neterror");
 
   // Turn the error code into a human readable error message.
   if (NS_ERROR_UNKNOWN_PROTOCOL == aError) {
     NS_ENSURE_ARG_POINTER(aURI);
 
     // Extract the schemes into a comma delimited list.
     nsAutoCString scheme;
@@ -4611,16 +4587,32 @@ nsDocShell::DisplayLoadError(nsresult aE
     }
   }
 
   // Test if the error should be displayed
   if (!error) {
     return NS_OK;
   }
 
+  if (mLoadURIDelegate) {
+    nsCOMPtr<nsIURI> errorPageURI;
+    rv = mLoadURIDelegate->HandleLoadError(aURI, aError,
+                                           NS_ERROR_GET_MODULE(aError),
+                                           getter_AddRefs(errorPageURI));
+    if (NS_FAILED(rv)) {
+      *aDisplayedErrorPage = false;
+      return NS_OK;
+    }
+
+    if (errorPageURI) {
+      *aDisplayedErrorPage = NS_SUCCEEDED(LoadErrorPage(errorPageURI, aURI, aFailedChannel));
+      return NS_OK;
+    }
+  }
+
   if (!errorDescriptionID) {
     errorDescriptionID = error;
   }
 
   // Test if the error needs to be formatted
   if (!messageStr.IsEmpty()) {
     // already obtained message
   } else {
--- a/docshell/base/nsIDocShell.idl
+++ b/docshell/base/nsIDocShell.idl
@@ -619,22 +619,16 @@ interface nsIDocShell : nsIDocShellTreeI
    /**
    * This attribute determines whether Tracking Content is loaded on the
    * document. When it is true, tracking content was not blocked and has
    * loaded (or is about to load) on the page.
    */
   [infallible] readonly attribute boolean hasTrackingContentLoaded;
 
    /**
-   * This attribute determines whether a document has Slow Tracking Content
-   * that has been blocked from loading.
-   */
-   [infallible] readonly attribute boolean hasSlowTrackingContentBlocked;
-
-   /**
    * This attribute determines whether a document seen cookies or storage
    * blocked due to a site permission being denied.
    */
    [infallible] readonly attribute boolean hasCookiesBlockedByPermission;
 
    /**
    * This attribute determines whether a document seen cookies or storage
    * blocked due to a the request being made by a tracker.
--- a/docshell/test/mochitest/test_bug590573.html
+++ b/docshell/test/mochitest/test_bug590573.html
@@ -213,17 +213,18 @@ function* testBody()
   setTimeout(pageLoad, 0);
   yield;
 
   page2PageShowCallbackEnabled = true;
   popup.history.forward();
   yield;
 
   // Bug 821821, on Android tegras we get 299 instead of 300 sometimes
-  if (popup.scrollY >= 299 && popup.scrollY <= 300) {
+  const scrollY = Math.floor(popup.scrollY);
+  if (scrollY >= 299 && scrollY <= 300) {
     is(1, 1, "test 11");
   } else {
     is(1, 0, "test 11, got " + popup.scrollY + " for popup.scrollY instead of 299|300");
   }
   popup.close();
 }
 </script>
 
--- a/dom/asmjscache/AsmJSCache.cpp
+++ b/dom/asmjscache/AsmJSCache.cpp
@@ -10,17 +10,16 @@
 
 #include "js/RootingAPI.h"
 #include "jsfriendapi.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/CondVar.h"
 #include "mozilla/CycleCollectedJSRuntime.h"
 #include "mozilla/dom/asmjscache/PAsmJSCacheEntryChild.h"
 #include "mozilla/dom/asmjscache/PAsmJSCacheEntryParent.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/PermissionMessageUtils.h"
 #include "mozilla/dom/quota/Client.h"
 #include "mozilla/dom/quota/QuotaManager.h"
 #include "mozilla/dom/quota/QuotaObject.h"
 #include "mozilla/dom/quota/UsageInfo.h"
 #include "mozilla/HashFunctions.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/ipc/BackgroundParent.h"
--- a/dom/base/ChromeUtils.cpp
+++ b/dom/base/ChromeUtils.cpp
@@ -13,17 +13,16 @@
 
 #include "mozilla/Base64.h"
 #include "mozilla/BasePrincipal.h"
 #include "mozilla/CycleCollectedJSRuntime.h"
 #include "mozilla/PerformanceMetricsCollector.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/TimeStamp.h"
 #include "mozilla/dom/BrowsingContext.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/IdleDeadline.h"
 #include "mozilla/dom/UnionTypes.h"
 #include "mozilla/dom/WindowBinding.h" // For IdleRequestCallback/Options
 #include "IOActivityMonitor.h"
 #include "nsThreadUtils.h"
 #include "mozJSComponentLoader.h"
 #include "GeckoProfiler.h"
 
--- a/dom/base/Navigator.cpp
+++ b/dom/base/Navigator.cpp
@@ -88,17 +88,16 @@
 #include "nsIDocShell.h"
 
 #include "mozilla/dom/WorkerPrivate.h"
 #include "mozilla/dom/WorkerRunnable.h"
 
 #if defined(XP_LINUX)
 #include "mozilla/Hal.h"
 #endif
-#include "mozilla/dom/ContentChild.h"
 
 #include "mozilla/EMEUtils.h"
 #include "mozilla/DetailedPromise.h"
 #include "mozilla/Unused.h"
 
 namespace mozilla {
 namespace dom {
 
--- a/dom/base/TabGroup.cpp
+++ b/dom/base/TabGroup.cpp
@@ -1,17 +1,17 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/TabGroup.h"
 
-#include "mozilla/dom/ContentChild.h"
+#include "mozilla/dom/nsIContentChild.h"
 #include "mozilla/dom/TabChild.h"
 #include "mozilla/dom/DocGroup.h"
 #include "mozilla/dom/TimeoutManager.h"
 #include "mozilla/AbstractThread.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/StaticPtr.h"
 #include "mozilla/Telemetry.h"
 #include "mozilla/ThrottledEventQueue.h"
--- a/dom/base/nsContentUtils.cpp
+++ b/dom/base/nsContentUtils.cpp
@@ -9042,16 +9042,36 @@ nsContentUtils::IsTrackingResourceWindow
     do_QueryInterface(document->GetChannel());
   if (!httpChannel) {
     return false;
   }
 
   return httpChannel->GetIsTrackingResource();
 }
 
+// static public
+bool
+nsContentUtils::IsThirdPartyTrackingResourceWindow(nsPIDOMWindowInner* aWindow)
+{
+  MOZ_ASSERT(aWindow);
+
+  nsIDocument* document = aWindow->GetExtantDoc();
+  if (!document) {
+    return false;
+  }
+
+  nsCOMPtr<nsIHttpChannel> httpChannel =
+    do_QueryInterface(document->GetChannel());
+  if (!httpChannel) {
+    return false;
+  }
+
+  return httpChannel->GetIsThirdPartyTrackingResource();
+}
+
 static bool
 StorageDisabledByAntiTrackingInternal(nsPIDOMWindowInner* aWindow,
                                       nsIChannel* aChannel,
                                       nsIPrincipal* aPrincipal,
                                       nsIURI* aURI,
                                       uint32_t* aRejectedReason)
 {
   MOZ_ASSERT(aWindow || aChannel || aPrincipal);
--- a/dom/base/nsContentUtils.h
+++ b/dom/base/nsContentUtils.h
@@ -3120,16 +3120,22 @@ public:
 
   /*
    * Returns true if this window's channel has been marked as a tracking
    * resource.
    */
   static bool IsTrackingResourceWindow(nsPIDOMWindowInner* aWindow);
 
   /*
+   * Returns true if this window's channel has been marked as a third-party
+   * tracking resource.
+   */
+  static bool IsThirdPartyTrackingResourceWindow(nsPIDOMWindowInner* aWindow);
+
+  /*
    * Serializes a HTML nsINode into its markup representation.
    */
   static bool SerializeNodeToMarkup(nsINode* aRoot,
                                     bool aDescendentsOnly,
                                     nsAString& aOut);
 
   /*
    * Returns true iff the provided JSObject is a global, and its URI matches
--- a/dom/base/nsDocument.cpp
+++ b/dom/base/nsDocument.cpp
@@ -57,16 +57,17 @@
 #include "mozilla/AsyncEventDispatcher.h"
 #include "mozilla/BasicEvents.h"
 #include "mozilla/EventListenerManager.h"
 #include "mozilla/EventStateManager.h"
 #include "mozilla/FullscreenChange.h"
 
 #include "mozilla/dom/Attr.h"
 #include "mozilla/dom/BindingDeclarations.h"
+#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/Element.h"
 #include "mozilla/dom/Event.h"
 #include "mozilla/dom/FeaturePolicy.h"
 #include "mozilla/dom/FramingChecker.h"
 #include "mozilla/dom/HTMLSharedElement.h"
 #include "mozilla/dom/Navigator.h"
 #include "mozilla/dom/ServiceWorkerContainer.h"
 #include "mozilla/dom/SVGUseElement.h"
@@ -1490,18 +1491,16 @@ nsIDocument::nsIDocument()
     mHeaderData(nullptr),
     mFlashClassification(FlashClassification::Unclassified),
     mBoxObjectTable(nullptr),
     mCurrentOrientationAngle(0),
     mCurrentOrientationType(OrientationType::Portrait_primary),
     mServoRestyleRootDirtyBits(0),
     mThrowOnDynamicMarkupInsertionCounter(0),
     mIgnoreOpensDuringUnloadCounter(0),
-    mNumTrackersFound(0),
-    mNumTrackersBlocked(0),
     mDocLWTheme(Doc_Theme_Uninitialized),
     mSavedResolution(1.0f)
 {
   SetIsInDocument();
   SetIsConnected(true);
 
   if (StaticPrefs::layout_css_use_counters_enabled()) {
     mStyleUseCounters.reset(Servo_UseCounters_Create());
@@ -1661,29 +1660,16 @@ nsDocument::~nsDocument()
             Telemetry::AccumulateCategorical(Telemetry::LABELS_QUIRKS_MODE::NavQuirks);
             break;
           default:
             MOZ_ASSERT_UNREACHABLE("Unknown quirks mode");
             break;
         }
       }
     }
-
-    // Report the fastblock telemetry probes when the document is dying if
-    // fastblock is enabled and we're not a private document.  We always report
-    // the all probe, and for the rest, report each category's probe depending
-    // on whether the respective bit has been set in our enum set.
-    if (StaticPrefs::browser_fastblock_enabled() &&
-        !nsContentUtils::IsInPrivateBrowsing(this)) {
-      for (auto label : mTrackerBlockedReasons) {
-        AccumulateCategorical(label);
-      }
-      // Always accumulate the "all" probe since we will use it as a baseline counter.
-      AccumulateCategorical(Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all);
-    }
   }
 
   ReportUseCounters();
 
   mInDestructor = true;
   mInUnlinkOrDeletion = true;
 
   mozilla::DropJSObjects(this);
--- a/dom/base/nsGlobalWindowOuter.cpp
+++ b/dom/base/nsGlobalWindowOuter.cpp
@@ -5441,21 +5441,16 @@ nsGlobalWindowOuter::NotifyContentBlocki
     if (!aBlocked) {
       unblocked = !doc->GetHasTrackingContentBlocked();
     }
   } else if (aState == nsIWebProgressListener::STATE_LOADED_TRACKING_CONTENT) {
     doc->SetHasTrackingContentLoaded(aBlocked, origin);
     if (!aBlocked) {
       unblocked = !doc->GetHasTrackingContentLoaded();
     }
-  } else if (aState == nsIWebProgressListener::STATE_BLOCKED_SLOW_TRACKING_CONTENT) {
-    doc->SetHasSlowTrackingContentBlocked(aBlocked, origin);
-    if (!aBlocked) {
-      unblocked = !doc->GetHasSlowTrackingContentBlocked();
-    }
   } else if (aState == nsIWebProgressListener::STATE_COOKIES_BLOCKED_BY_PERMISSION) {
     doc->SetHasCookiesBlockedByPermission(aBlocked, origin);
     if (!aBlocked) {
       unblocked = !doc->GetHasCookiesBlockedByPermission();
     }
   } else if (aState == nsIWebProgressListener::STATE_COOKIES_BLOCKED_TRACKER) {
     doc->SetHasTrackingCookiesBlocked(aBlocked, origin);
     if (!aBlocked) {
--- a/dom/base/nsIContentPolicy.idl
+++ b/dom/base/nsIContentPolicy.idl
@@ -343,21 +343,20 @@ interface nsIContentPolicy : nsISupports
 
   /**
    * Indicates a speculative connection.
    */
   const nsContentPolicyType TYPE_SPECULATIVE = 44;
 
   /* When adding new content types, please update nsContentBlocker,
    * NS_CP_ContentTypeName, nsCSPContext, CSP_ContentTypeToDirective,
-   * DoContentSecurityChecks, IsContentPolicyTypeWhitelistedForFastBlock,
-   * all nsIContentPolicy implementations, the static_assert in
-   * dom/cache/DBSchema.cpp, ChannelWrapper.webidl, ChannelWrapper.cpp,
-   * nsPermissionManager.cpp, and other things that are not listed here
-   * that are related to nsIContentPolicy. */
+   * DoContentSecurityChecks, all nsIContentPolicy implementations, the
+   * static_assert in dom/cache/DBSchema.cpp, ChannelWrapper.webidl,
+   * ChannelWrapper.cpp, nsPermissionManager.cpp, and other things that are not
+   * listed here that are related to nsIContentPolicy. */
 
   //////////////////////////////////////////////////////////////////////
 
   /**
    * Returned from shouldLoad or shouldProcess if the load or process request
    * is rejected based on details of the request.
    */
   const short REJECT_REQUEST = -1;
--- a/dom/base/nsIDocument.h
+++ b/dom/base/nsIDocument.h
@@ -43,17 +43,16 @@
 #include "Units.h"
 #include "nsContentListDeclarations.h"
 #include "nsExpirationTracker.h"
 #include "nsClassHashtable.h"
 #include "mozilla/CORSMode.h"
 #include "mozilla/dom/ContentBlockingLog.h"
 #include "mozilla/dom/DispatcherTrait.h"
 #include "mozilla/dom/DocumentOrShadowRoot.h"
-#include "mozilla/EnumSet.h"
 #include "mozilla/LinkedList.h"
 #include "mozilla/NotNull.h"
 #include "mozilla/SegmentedVector.h"
 #include "mozilla/ServoBindingTypes.h"
 #include "mozilla/StyleSheet.h"
 #include "mozilla/TimeStamp.h"
 #include "mozilla/UniquePtr.h"
 #include <bitset>                        // for member
@@ -996,25 +995,16 @@ public:
    */
   bool GetHasTrackingContentBlocked()
   {
     return mContentBlockingLog.HasBlockedAnyOfType(
         nsIWebProgressListener::STATE_BLOCKED_TRACKING_CONTENT);
   }
 
   /**
-   * Get slow tracking content blocked flag for this document.
-   */
-  bool GetHasSlowTrackingContentBlocked()
-  {
-    return mContentBlockingLog.HasBlockedAnyOfType(
-        nsIWebProgressListener::STATE_BLOCKED_SLOW_TRACKING_CONTENT);
-  }
-
-  /**
    * Get all cookies blocked flag for this document.
    */
   bool GetHasAllCookiesBlocked()
   {
     return mContentBlockingLog.HasBlockedAnyOfType(
         nsIWebProgressListener::STATE_COOKIES_BLOCKED_ALL);
   }
 
@@ -1052,27 +1042,16 @@ public:
                                     const nsAString& aOriginBlocked)
   {
     RecordContentBlockingLog(aOriginBlocked,
                              nsIWebProgressListener::STATE_BLOCKED_TRACKING_CONTENT,
                              aHasTrackingContentBlocked);
   }
 
   /**
-   * Set the slow tracking content blocked flag for this document.
-   */
-  void SetHasSlowTrackingContentBlocked(bool aHasSlowTrackingContentBlocked,
-                                        const nsAString& aOriginBlocked)
-  {
-    RecordContentBlockingLog(aOriginBlocked,
-                             nsIWebProgressListener::STATE_BLOCKED_SLOW_TRACKING_CONTENT,
-                             aHasSlowTrackingContentBlocked);
-  }
-
-  /**
    * Set the all cookies blocked flag for this document.
    */
   void SetHasAllCookiesBlocked(bool aHasAllCookiesBlocked,
                                const nsAString& aOriginBlocked)
   {
     RecordContentBlockingLog(aOriginBlocked,
                              nsIWebProgressListener::STATE_COOKIES_BLOCKED_ALL,
                              aHasAllCookiesBlocked);
@@ -3768,49 +3747,16 @@ public:
   }
 
   void DecrementIgnoreOpensDuringUnloadCounter()
   {
     MOZ_ASSERT(mIgnoreOpensDuringUnloadCounter);
     --mIgnoreOpensDuringUnloadCounter;
   }
 
-  void IncrementTrackerCount()
-  {
-    MOZ_ASSERT(!GetSameTypeParentDocument());
-    ++mNumTrackersFound;
-  }
-
-  void IncrementTrackerBlockedCount()
-  {
-    MOZ_ASSERT(!GetSameTypeParentDocument());
-    ++mNumTrackersBlocked;
-  }
-
-  void NoteTrackerBlockedReason(
-    mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED aLabel)
-  {
-    MOZ_ASSERT(!GetSameTypeParentDocument());
-    mTrackerBlockedReasons += aLabel;
-  }
-
-  uint32_t NumTrackersFound()
-  {
-    MOZ_ASSERT(!GetSameTypeParentDocument() || mNumTrackersFound == 0);
-
-    return mNumTrackersFound;
-  }
-
-  uint32_t NumTrackersBlocked()
-  {
-    MOZ_ASSERT(!GetSameTypeParentDocument() || mNumTrackersBlocked == 0);
-
-    return mNumTrackersBlocked;
-  }
-
   bool AllowPaymentRequest() const
   {
     return mAllowPaymentRequest;
   }
 
   void SetAllowPaymentRequest(bool aAllowPaymentRequest)
   {
     mAllowPaymentRequest = aAllowPaymentRequest;
@@ -4765,25 +4711,16 @@ protected:
   // Count of unload/beforeunload/pagehide operations in progress.
   uint32_t mIgnoreOpensDuringUnloadCounter;
 
   nsCOMPtr<nsIDOMXULCommandDispatcher> mCommandDispatcher; // [OWNER] of the focus tracker
 
   RefPtr<mozilla::dom::XULBroadcastManager> mXULBroadcastManager;
   RefPtr<mozilla::dom::XULPersist> mXULPersist;
 
-  // At the moment, trackers might be blocked by Tracking Protection or FastBlock.
-  // In order to know the numbers of trackers detected and blocked, we add
-  // these two values here and those are shared by TP and FB.
-  uint32_t mNumTrackersFound;
-  uint32_t mNumTrackersBlocked;
-
-  mozilla::EnumSet<mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED>
-    mTrackerBlockedReasons;
-
   // document lightweight theme for use with :-moz-lwtheme, :-moz-lwtheme-brighttext
   // and :-moz-lwtheme-darktext
   DocumentTheme                         mDocLWTheme;
 
   // Pres shell resolution saved before entering fullscreen mode.
   float mSavedResolution;
 };
 
--- a/dom/base/nsRange.cpp
+++ b/dom/base/nsRange.cpp
@@ -1246,21 +1246,29 @@ nsRange::IsValidPoints(nsINode* aStartCo
 void
 nsRange::SetStartJS(nsINode& aNode, uint32_t aOffset, ErrorResult& aErr)
 {
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetStart(aNode, aOffset, aErr);
 }
 
+bool
+nsRange::CanAccess(const nsINode& aNode) const
+{
+  if (nsContentUtils::LegacyIsCallerNativeCode()) {
+    return true;
+  }
+  return nsContentUtils::CanCallerAccess(&aNode);
+}
+
 void
 nsRange::SetStart(nsINode& aNode, uint32_t aOffset, ErrorResult& aRv)
 {
- if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-     !nsContentUtils::CanCallerAccess(&aNode)) {
+ if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   AutoInvalidateSelection atEndOfBlock(this);
   SetStart(RawRangeBoundary(&aNode, aOffset), aRv);
 }
 
@@ -1295,18 +1303,17 @@ nsRange::SetStartBeforeJS(nsINode& aNode
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetStartBefore(aNode, aErr);
 }
 
 void
 nsRange::SetStartBefore(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   AutoInvalidateSelection atEndOfBlock(this);
   // If the node is being removed from its parent, GetContainerAndOffsetBefore()
   // returns nullptr.  Then, SetStart() will throw
   // NS_ERROR_DOM_INVALID_NODE_TYPE_ERR.
@@ -1321,18 +1328,17 @@ nsRange::SetStartAfterJS(nsINode& aNode,
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetStartAfter(aNode, aErr);
 }
 
 void
 nsRange::SetStartAfter(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   AutoInvalidateSelection atEndOfBlock(this);
   // If the node is being removed from its parent, GetContainerAndOffsetAfter()
   // returns nullptr.  Then, SetStart() will throw
   // NS_ERROR_DOM_INVALID_NODE_TYPE_ERR.
@@ -1347,18 +1353,17 @@ nsRange::SetEndJS(nsINode& aNode, uint32
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetEnd(aNode, aOffset, aErr);
 }
 
 void
 nsRange::SetEnd(nsINode& aNode, uint32_t aOffset, ErrorResult& aRv)
 {
- if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-     !nsContentUtils::CanCallerAccess(&aNode)) {
+ if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
   AutoInvalidateSelection atEndOfBlock(this);
   SetEnd(RawRangeBoundary(&aNode, aOffset), aRv);
 }
 
 void
@@ -1471,18 +1476,17 @@ nsRange::SetEndBeforeJS(nsINode& aNode, 
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetEndBefore(aNode, aErr);
 }
 
 void
 nsRange::SetEndBefore(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   AutoInvalidateSelection atEndOfBlock(this);
   // If the node is being removed from its parent, GetContainerAndOffsetBefore()
   // returns nullptr.  Then, SetEnd() will throw
   // NS_ERROR_DOM_INVALID_NODE_TYPE_ERR.
@@ -1497,18 +1501,17 @@ nsRange::SetEndAfterJS(nsINode& aNode, E
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SetEndAfter(aNode, aErr);
 }
 
 void
 nsRange::SetEndAfter(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   AutoInvalidateSelection atEndOfBlock(this);
   // If the node is being removed from its parent, GetContainerAndOffsetAfter()
   // returns nullptr.  Then, SetEnd() will throw
   // NS_ERROR_DOM_INVALID_NODE_TYPE_ERR.
@@ -1545,18 +1548,17 @@ nsRange::SelectNodeJS(nsINode& aNode, Er
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SelectNode(aNode, aErr);
 }
 
 void
 nsRange::SelectNode(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   nsINode* container = aNode.GetParentNode();
   nsINode* newRoot = IsValidBoundary(container);
   if (!newRoot) {
     aRv.Throw(NS_ERROR_DOM_INVALID_NODE_TYPE_ERR);
@@ -1586,18 +1588,17 @@ nsRange::SelectNodeContentsJS(nsINode& a
   AutoCalledByJSRestore calledByJSRestorer(*this);
   mCalledByJS = true;
   SelectNodeContents(aNode, aErr);
 }
 
 void
 nsRange::SelectNodeContents(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   nsINode* newRoot = IsValidBoundary(&aNode);
   if (!newRoot) {
     aRv.Throw(NS_ERROR_DOM_INVALID_NODE_TYPE_ERR);
     return;
@@ -1971,16 +1972,20 @@ ValidateCurrentNode(nsRange* aRange, Ran
 
 nsresult
 nsRange::CutContents(DocumentFragment** aFragment)
 {
   if (aFragment) {
     *aFragment = nullptr;
   }
 
+  if (!CanAccess(*mStart.Container()) || !CanAccess(*mEnd.Container())) {
+    return NS_ERROR_DOM_SECURITY_ERR;
+  }
+
   nsCOMPtr<nsIDocument> doc = mStart.Container()->OwnerDoc();
 
   ErrorResult res;
   nsCOMPtr<nsINode> commonAncestor = GetCommonAncestorContainer(res);
   NS_ENSURE_TRUE(!res.Failed(), res.StealNSResult());
 
   // If aFragment isn't null, create a temporary fragment to hold our return.
   RefPtr<DocumentFragment> retval;
@@ -2606,40 +2611,43 @@ nsRange::CloneRange() const
   range->DoSetRange(mStart.AsRaw(), mEnd.AsRaw(), mRoot);
 
   return range.forget();
 }
 
 void
 nsRange::InsertNode(nsINode& aNode, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNode)) {
+  if (!CanAccess(aNode)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   uint32_t tStartOffset = StartOffset();
 
   nsCOMPtr<nsINode> tStartContainer = GetStartContainer(aRv);
   if (aRv.Failed()) {
     return;
   }
 
+  if (!CanAccess(*tStartContainer)) {
+    aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
+    return;
+  }
+
   if (&aNode == tStartContainer) {
     aRv.Throw(NS_ERROR_DOM_HIERARCHY_REQUEST_ERR);
     return;
   }
 
   // This is the node we'll be inserting before, and its parent
   nsCOMPtr<nsINode> referenceNode;
   nsCOMPtr<nsINode> referenceParentNode = tStartContainer;
 
-  RefPtr<Text> startTextNode =
-    tStartContainer ? tStartContainer->GetAsText() : nullptr;
+  RefPtr<Text> startTextNode = tStartContainer->GetAsText();
   nsCOMPtr<nsINodeList> tChildList;
   if (startTextNode) {
     referenceParentNode = tStartContainer->GetParentNode();
     if (!referenceParentNode) {
       aRv.Throw(NS_ERROR_DOM_HIERARCHY_REQUEST_ERR);
       return;
     }
 
@@ -2699,18 +2707,17 @@ nsRange::InsertNode(nsINode& aNode, Erro
   if (Collapsed()) {
     aRv = SetEnd(referenceParentNode, newOffset);
   }
 }
 
 void
 nsRange::SurroundContents(nsINode& aNewParent, ErrorResult& aRv)
 {
-  if (!nsContentUtils::LegacyIsCallerNativeCode() &&
-      !nsContentUtils::CanCallerAccess(&aNewParent)) {
+  if (!CanAccess(aNewParent)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
   if (!mRoot) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
--- a/dom/base/nsRange.h
+++ b/dom/base/nsRange.h
@@ -387,16 +387,21 @@ private:
    */
   nsresult CutContents(mozilla::dom::DocumentFragment** frag);
 
   static nsresult CloneParentsBetween(nsINode* aAncestor,
                                       nsINode* aNode,
                                       nsINode** aClosestAncestor,
                                       nsINode** aFarthestAncestor);
 
+  /**
+   * Returns whether a node is safe to be accessed by the current caller.
+   */
+  bool CanAccess(const nsINode&) const;
+
 public:
   /**
    * Compute the root node of aNode for initializing range classes.
    * When aNode is in an anonymous subtree, this returns the shadow root or
    * binding parent.  Otherwise, the root node of the document or document
    * fragment.  If this returns nullptr, that means aNode can be neither the
    * start container nor end container of any range.
    */
--- a/dom/cache/CacheStorageParent.cpp
+++ b/dom/cache/CacheStorageParent.cpp
@@ -2,17 +2,16 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/cache/CacheStorageParent.h"
 
 #include "mozilla/Unused.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/cache/ActorUtils.h"
 #include "mozilla/dom/cache/CacheOpParent.h"
 #include "mozilla/dom/cache/ManagerId.h"
 #include "mozilla/ipc/PBackgroundParent.h"
 
 namespace mozilla {
 namespace dom {
 namespace cache {
--- a/dom/cache/PrincipalVerifier.h
+++ b/dom/cache/PrincipalVerifier.h
@@ -13,16 +13,19 @@
 
 namespace mozilla {
 
 namespace ipc {
   class PBackgroundParent;
 } // namespace ipc
 
 namespace dom {
+
+class ContentParent;
+
 namespace cache {
 
 class ManagerId;
 
 class PrincipalVerifier final : public Runnable
 {
 public:
   // An interface to be implemented by code wishing to use the
--- a/dom/canvas/CanvasRenderingContext2D.cpp
+++ b/dom/canvas/CanvasRenderingContext2D.cpp
@@ -68,17 +68,16 @@
 #include "jsfriendapi.h"
 #include "js/Conversions.h"
 #include "js/HeapAPI.h"
 
 #include "mozilla/Alignment.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/DebugOnly.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/ImageBitmap.h"
 #include "mozilla/dom/ImageData.h"
 #include "mozilla/dom/PBrowserParent.h"
 #include "mozilla/dom/ToJSValue.h"
 #include "mozilla/dom/TypedArray.h"
 #include "mozilla/EndianUtils.h"
 #include "mozilla/gfx/2D.h"
 #include "mozilla/gfx/Helpers.h"
--- a/dom/clients/manager/ClientManagerService.h
+++ b/dom/clients/manager/ClientManagerService.h
@@ -10,16 +10,17 @@
 #include "nsDataHashtable.h"
 
 namespace mozilla {
 
 namespace dom {
 
 class ClientManagerParent;
 class ClientSourceParent;
+class ContentParent;
 
 // Define a singleton service to manage client activity throughout the
 // browser.  This service runs on the PBackground thread.  To interact
 // it with it please use the ClientManager and ClientHandle classes.
 class ClientManagerService final
 {
   // Store the ClientSourceParent objects in a hash table.  We want to
   // optimize for insertion, removal, and lookup by UUID.
--- a/dom/events/Clipboard.cpp
+++ b/dom/events/Clipboard.cpp
@@ -6,17 +6,16 @@
 
 #include "mozilla/AbstractThread.h"
 #include "mozilla/dom/Clipboard.h"
 #include "mozilla/dom/ClipboardBinding.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/DataTransfer.h"
 #include "mozilla/dom/DataTransferItemList.h"
 #include "mozilla/dom/DataTransferItem.h"
-#include "mozilla/dom/ContentChild.h"
 #include "nsIClipboard.h"
 #include "nsISupportsPrimitives.h"
 #include "nsComponentManagerUtils.h"
 #include "nsITransferable.h"
 #include "nsArrayUtils.h"
 
 
 static mozilla::LazyLogModule gClipboardLog("Clipboard");
--- a/dom/html/nsGenericHTMLFrameElement.cpp
+++ b/dom/html/nsGenericHTMLFrameElement.cpp
@@ -1,17 +1,16 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsGenericHTMLFrameElement.h"
 
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/HTMLIFrameElement.h"
 #include "mozilla/dom/XULFrameElement.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/ErrorResult.h"
 #include "GeckoProfiler.h"
 #include "nsAttrValueInlines.h"
 #include "nsContentUtils.h"
 #include "nsIDocShell.h"
--- a/dom/indexedDB/IDBObjectStore.cpp
+++ b/dom/indexedDB/IDBObjectStore.cpp
@@ -25,18 +25,16 @@
 #include "KeyPath.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/EndianUtils.h"
 #include "mozilla/ErrorResult.h"
 #include "mozilla/JSObjectHolder.h"
 #include "mozilla/Move.h"
 #include "mozilla/NullPrincipal.h"
 #include "mozilla/dom/BindingUtils.h"
-#include "mozilla/dom/ContentChild.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/DOMStringList.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/FileBlobImpl.h"
 #include "mozilla/dom/IDBMutableFileBinding.h"
 #include "mozilla/dom/BlobBinding.h"
 #include "mozilla/dom/IDBObjectStoreBinding.h"
 #include "mozilla/dom/MemoryBlobImpl.h"
 #include "mozilla/dom/StreamBlobImpl.h"
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -761,18 +761,16 @@ class MediaDecoderStateMachine::LoopingD
     if (!mMaster->IsAudioDecoding()) {
       SLOG("audio has ended, request the data again.");
       RequestAudioDataFromStartPosition();
     }
     DecodingState::Enter();
   }
 
   void Exit() override {
-    mAudioDataRequest.DisconnectIfExists();
-    mAudioSeekRequest.DisconnectIfExists();
     if (ShouldDiscardLoopedAudioData()) {
       mMaster->mAudioDataRequest.DisconnectIfExists();
       DiscardLoopedAudioData();
     }
     if (HasDecodedLastAudioFrame()) {
       AudioQueue().Finish();
     }
     mAudioDataRequest.DisconnectIfExists();
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -38,17 +38,16 @@
 #include "nsPIDOMWindow.h"
 #include "mozilla/EventStateManager.h"
 #include "mozilla/MozPromise.h"
 #include "mozilla/NullPrincipal.h"
 #include "mozilla/Telemetry.h"
 #include "mozilla/Types.h"
 #include "mozilla/PeerIdentity.h"
 #include "mozilla/dom/BindingDeclarations.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/Element.h"
 #include "mozilla/dom/FeaturePolicyUtils.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/MediaStreamBinding.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "mozilla/dom/GetUserMediaRequestBinding.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/MediaDevices.h"
--- a/dom/media/gmp/GMPServiceChild.h
+++ b/dom/media/gmp/GMPServiceChild.h
@@ -4,20 +4,20 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef GMPServiceChild_h_
 #define GMPServiceChild_h_
 
 #include "GMPService.h"
 #include "MediaResult.h"
 #include "base/process.h"
+#include "mozilla/dom/PContent.h"
 #include "mozilla/ipc/Transport.h"
 #include "mozilla/gmp/PGMPServiceChild.h"
 #include "nsRefPtrHashtable.h"
-#include "mozilla/dom/ContentChild.h"
 
 namespace mozilla {
 namespace gmp {
 
 class GMPContentParent;
 class GMPServiceChild;
 
 class GeckoMediaPluginServiceChild : public GeckoMediaPluginService {
--- a/dom/media/ipc/RDDProcessManager.cpp
+++ b/dom/media/ipc/RDDProcessManager.cpp
@@ -1,19 +1,19 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "RDDProcessManager.h"
 
+#include "mozilla/MemoryReportingProcess.h"
 #include "mozilla/RemoteDecoderManagerChild.h"
 #include "mozilla/RemoteDecoderManagerParent.h"
 #include "mozilla/StaticPrefs.h"
-#include "mozilla/dom/ContentParent.h"
 
 #include "nsAppRunner.h"
 #include "nsContentUtils.h"
 #include "RDDChild.h"
 #include "RDDProcessHost.h"
 
 namespace mozilla {
 
--- a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
+++ b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
@@ -10,16 +10,17 @@
 #include "VPXDecoder.h"
 #include "VorbisDecoder.h"
 #include "WAVDecoder.h"
 #include "mozilla/Logging.h"
 #include "mozilla/StaticPrefs.h"
 
 #ifdef MOZ_AV1
 #include "AOMDecoder.h"
+#include "DAV1DDecoder.h"
 #endif
 
 namespace mozilla {
 
 bool AgnosticDecoderModule::SupportsMimeType(
     const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
   bool supports =
       VPXDecoder::IsVPX(aMimeType) || OpusDataDecoder::IsOpus(aMimeType) ||
@@ -41,17 +42,21 @@ already_AddRefed<MediaDataDecoder> Agnos
   RefPtr<MediaDataDecoder> m;
 
   if (VPXDecoder::IsVPX(aParams.mConfig.mMimeType)) {
     m = new VPXDecoder(aParams);
   }
 #ifdef MOZ_AV1
   else if (AOMDecoder::IsAV1(aParams.mConfig.mMimeType) &&
            StaticPrefs::MediaAv1Enabled()) {
-    m = new AOMDecoder(aParams);
+    if (StaticPrefs::MediaAv1UseDav1d()) {
+      m = new DAV1DDecoder(aParams);
+    } else {
+      m = new AOMDecoder(aParams);
+    }
   }
 #endif
   else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) {
     m = new TheoraDecoder(aParams);
   }
 
   return m.forget();
 }
new file mode 100644
--- /dev/null
+++ b/dom/media/platforms/agnostic/DAV1DDecoder.cpp
@@ -0,0 +1,252 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DAV1DDecoder.h"
+
+#undef LOG
+#define LOG(arg, ...)                                                  \
+  DDMOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, "::%s: " arg, __func__, \
+            ##__VA_ARGS__)
+
+namespace mozilla {
+
+DAV1DDecoder::DAV1DDecoder(const CreateDecoderParams& aParams)
+    : mInfo(aParams.VideoConfig()),
+      mTaskQueue(aParams.mTaskQueue),
+      mImageContainer(aParams.mImageContainer) {}
+
+DAV1DDecoder::~DAV1DDecoder() {}
+
+RefPtr<MediaDataDecoder::InitPromise> DAV1DDecoder::Init() {
+  Dav1dSettings settings;
+  dav1d_default_settings(&settings);
+  int decoder_threads = 2;
+  if (mInfo.mDisplay.width >= 2048) {
+    decoder_threads = 8;
+  } else if (mInfo.mDisplay.width >= 1024) {
+    decoder_threads = 4;
+  }
+  settings.n_frame_threads =
+      std::min(decoder_threads, PR_GetNumberOfProcessors());
+
+  int res = dav1d_open(&mContext, &settings);
+  if (res < 0) {
+    return DAV1DDecoder::InitPromise::CreateAndReject(
+        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                    RESULT_DETAIL("Couldn't get dAV1d decoder interface.")),
+        __func__);
+  }
+  return DAV1DDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
+                                                     __func__);
+}
+
+RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::Decode(
+    MediaRawData* aSample) {
+  return InvokeAsync<MediaRawData*>(mTaskQueue, this, __func__,
+                                    &DAV1DDecoder::InvokeDecode, aSample);
+}
+
+void ReleaseDataBuffer_s(const uint8_t* buf, void* user_data) {
+  MOZ_ASSERT(user_data);
+  MOZ_ASSERT(buf);
+  DAV1DDecoder* d = static_cast<DAV1DDecoder*>(user_data);
+  d->ReleaseDataBuffer(buf);
+}
+
+void DAV1DDecoder::ReleaseDataBuffer(const uint8_t* buf) {
+  // The release callback may be called on a
+  // different thread defined by third party
+  // dav1d execution. Post a task into TaskQueue
+  // to ensure mDecodingBuffers is only ever
+  // accessed on the TaskQueue
+  RefPtr<DAV1DDecoder> self = this;
+  nsresult rv = mTaskQueue->Dispatch(
+      NS_NewRunnableFunction("DAV1DDecoder::ReleaseDataBuffer", [self, buf]() {
+        DebugOnly<bool> found = self->mDecodingBuffers.Remove(buf);
+        MOZ_ASSERT(found);
+      }));
+  MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+  Unused << rv;
+}
+
+RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::InvokeDecode(
+    MediaRawData* aSample) {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+  MOZ_ASSERT(aSample);
+
+  // Save the last timing values to use in drain.
+  mLastTimecode = aSample->mTimecode;
+  mLastDuration = aSample->mDuration;
+  mLastOffset = aSample->mOffset;
 +  // Add the buffer to the hashtable in order to increase
 +  // the ref counter and keep it alive. When dav1d no longer
 +  // needs it, it will call its release callback; there we
 +  // remove the buffer to drop the ref counter and eventually
 +  // free it. We need a hashtable and not an array because the
 +  // release callbacks do not arrive in the same order in which
 +  // the buffers were given to the decoder (threading ordering
 +  // inside the decoder)
+  mDecodingBuffers.Put(aSample->Data(), aSample);
+  Dav1dData data;
+  int res = dav1d_data_wrap(&data, aSample->Data(), aSample->Size(),
+                            ReleaseDataBuffer_s, this);
+  if (res < 0) {
+    LOG("Create decoder data error.");
+    return DecodePromise::CreateAndReject(
+        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
+  }
+  DecodedData results;
+  do {
+    res = dav1d_send_data(mContext, &data);
+    if (res < 0 && res != -EAGAIN) {
+      LOG("Decode error: %d", res);
+      return DecodePromise::CreateAndReject(
+          MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, __func__), __func__);
+    }
 +    // Always consume the whole buffer on success.
+    // At this point only -EAGAIN error is expected.
+    MOZ_ASSERT((res == 0 && !data.sz) ||
+               (res == -EAGAIN && data.sz == aSample->Size()));
+
+    MediaResult rs(NS_OK);
+    res = GetPicture(aSample, results, rs);
+    if (res < 0) {
+      if (res == -EAGAIN) {
+        // No frames ready to return. This is not an
+        // error, in some circumstances, we need to
+        // feed it with a certain amount of frames
+        // before we get a picture.
+        continue;
+      }
+      return DecodePromise::CreateAndReject(rs, __func__);
+    }
+  } while (data.sz > 0);
+
+  return DecodePromise::CreateAndResolve(std::move(results), __func__);
+}
+
+int DAV1DDecoder::GetPicture(const MediaRawData* aSample, DecodedData& aData,
+                             MediaResult& aResult) {
+  class Dav1dPictureWrapper {
+   public:
+    Dav1dPicture* operator&() { return &p; }
+    const Dav1dPicture& operator*() const { return p; }
+    ~Dav1dPictureWrapper() { dav1d_picture_unref(&p); }
+
+   private:
+    Dav1dPicture p = Dav1dPicture();
+  };
+  Dav1dPictureWrapper picture;
+
+  int res = dav1d_get_picture(mContext, &picture);
+  if (res < 0) {
+    LOG("Decode error: %d", res);
+    aResult = MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, __func__);
+    return res;
+  }
+
+  if ((*picture).p.layout == DAV1D_PIXEL_LAYOUT_I400) {
+    return 0;
+  }
+
+  RefPtr<VideoData> v = ConstructImage(aSample, *picture);
+  if (!v) {
+    LOG("Image allocation error: %ux%u"
+        " display %ux%u picture %ux%u",
+        (*picture).p.w, (*picture).p.h, mInfo.mDisplay.width,
+        mInfo.mDisplay.height, mInfo.mImage.width, mInfo.mImage.height);
+    aResult = MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
+    return -1;
+  }
+  aData.AppendElement(std::move(v));
+  return 0;
+}
+
+already_AddRefed<VideoData> DAV1DDecoder::ConstructImage(
+    const MediaRawData* aSample, const Dav1dPicture& picture) {
+  VideoData::YCbCrBuffer b;
+  if (picture.p.bpc == 10) {
+    b.mColorDepth = ColorDepth::COLOR_10;
+  } else if (picture.p.bpc == 12) {
+    b.mColorDepth = ColorDepth::COLOR_12;
+  }
+  b.mPlanes[0].mData = static_cast<uint8_t*>(picture.data[0]);
+  b.mPlanes[0].mStride = picture.stride[0];
+  b.mPlanes[0].mHeight = picture.p.h;
+  b.mPlanes[0].mWidth = picture.p.w;
+  b.mPlanes[0].mOffset = 0;
+  b.mPlanes[0].mSkip = 0;
+
+  b.mPlanes[1].mData = static_cast<uint8_t*>(picture.data[1]);
+  b.mPlanes[1].mStride = picture.stride[1];
+  b.mPlanes[1].mOffset = 0;
+  b.mPlanes[1].mSkip = 0;
+
+  b.mPlanes[2].mData = static_cast<uint8_t*>(picture.data[2]);
+  b.mPlanes[2].mStride = picture.stride[1];
+  b.mPlanes[2].mOffset = 0;
+  b.mPlanes[2].mSkip = 0;
+
+  // https://code.videolan.org/videolan/dav1d/blob/master/tools/output/yuv.c#L67
+  const int ss_ver = picture.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+  const int ss_hor = picture.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+
+  b.mPlanes[1].mHeight = (picture.p.h + ss_ver) >> ss_ver;
+  b.mPlanes[1].mWidth = (picture.p.w + ss_hor) >> ss_hor;
+
+  b.mPlanes[2].mHeight = (picture.p.h + ss_ver) >> ss_ver;
+  b.mPlanes[2].mWidth = (picture.p.w + ss_hor) >> ss_hor;
+
+  // Timestamp, duration and offset used here are wrong.
+  // We need to take those values from the decoder. Latest
+  // dav1d version allows for that.
+  return VideoData::CreateAndCopyData(
+      mInfo, mImageContainer, aSample->mOffset, aSample->mTime,
+      aSample->mDuration, b, aSample->mKeyframe, aSample->mTimecode,
+      mInfo.ScaledImageRect(picture.p.w, picture.p.h));
+}
+
+RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::Drain() {
+  RefPtr<DAV1DDecoder> self = this;
+  return InvokeAsync(mTaskQueue, __func__, [self, this] {
+    int res = 0;
+    DecodedData results;
+    do {
+      RefPtr<MediaRawData> empty(new MediaRawData());
+      // Update last timecode in case we loop over.
+      empty->mTimecode = empty->mTime = mLastTimecode =
+          mLastTimecode + mLastDuration;
+      empty->mDuration = mLastDuration;
+      empty->mOffset = mLastOffset;
+
+      MediaResult rs(NS_OK);
+      res = GetPicture(empty, results, rs);
+      if (res < 0 && res != -EAGAIN) {
+        return DecodePromise::CreateAndReject(rs, __func__);
+      }
+    } while (res != -EAGAIN);
+    return DecodePromise::CreateAndResolve(results, __func__);
+  });
+}
+
+RefPtr<MediaDataDecoder::FlushPromise> DAV1DDecoder::Flush() {
+  RefPtr<DAV1DDecoder> self = this;
+  return InvokeAsync(mTaskQueue, __func__, [self]() {
+    dav1d_flush(self->mContext);
+    return FlushPromise::CreateAndResolve(true, __func__);
+  });
+}
+
+RefPtr<ShutdownPromise> DAV1DDecoder::Shutdown() {
+  RefPtr<DAV1DDecoder> self = this;
+  return InvokeAsync(mTaskQueue, __func__, [self]() {
+    dav1d_close(&self->mContext);
+    return ShutdownPromise::CreateAndResolve(true, __func__);
+  });
+}
+
+}  // namespace mozilla
+#undef LOG
new file mode 100644
--- /dev/null
+++ b/dom/media/platforms/agnostic/DAV1DDecoder.h
@@ -0,0 +1,62 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(DAV1DDecoder_h_)
+#define DAV1DDecoder_h_
+
+#include "PlatformDecoderModule.h"
+#include "dav1d/dav1d.h"
+
+namespace mozilla {
+
+DDLoggedTypeDeclNameAndBase(DAV1DDecoder, MediaDataDecoder);
+
+typedef nsRefPtrHashtable<nsPtrHashKey<const uint8_t>, MediaRawData>
+    MediaRawDataHashtable;
+
+class DAV1DDecoder : public MediaDataDecoder,
+                     public DecoderDoctorLifeLogger<DAV1DDecoder> {
+ public:
+  explicit DAV1DDecoder(const CreateDecoderParams& aParams);
+
+  RefPtr<InitPromise> Init() override;
+  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override;
+  RefPtr<DecodePromise> Drain() override;
+  RefPtr<FlushPromise> Flush() override;
+  RefPtr<ShutdownPromise> Shutdown() override;
+  nsCString GetDescriptionName() const override {
+    return NS_LITERAL_CSTRING("av1 libdav1d video decoder");
+  }
+
+  void ReleaseDataBuffer(const uint8_t* buf);
+
+ private:
+  ~DAV1DDecoder();
+  RefPtr<DecodePromise> InvokeDecode(MediaRawData* aSample);
+  int GetPicture(const MediaRawData* aSample, DecodedData& aData,
+                 MediaResult& aResult);
+  already_AddRefed<VideoData> ConstructImage(const MediaRawData* aSample,
+                                             const Dav1dPicture&);
+
+  Dav1dContext* mContext;
+
+  const VideoInfo& mInfo;
+  const RefPtr<TaskQueue> mTaskQueue;
+  const RefPtr<layers::ImageContainer> mImageContainer;
+
 +  // Keep the buffers alive until dav1d
 +  // no longer needs them.
+  MediaRawDataHashtable mDecodingBuffers;
+
+  // Store the last timing values to use
+  // them during drain.
+  media::TimeUnit mLastTimecode;
+  media::TimeUnit mLastDuration;
+  int64_t mLastOffset = 0;
+};
+
+}  // namespace mozilla
+
+#endif  // DAV1DDecoder_h_
--- a/dom/media/platforms/moz.build
+++ b/dom/media/platforms/moz.build
@@ -67,19 +67,21 @@ if CONFIG['MOZ_FFVPX']:
 if CONFIG['MOZ_FFMPEG']:
     DIRS += [
         'ffmpeg',
     ]
 
 if CONFIG['MOZ_AV1']:
     EXPORTS += [
         'agnostic/AOMDecoder.h',
+        'agnostic/DAV1DDecoder.h',
     ]
     UNIFIED_SOURCES += [
         'agnostic/AOMDecoder.cpp',
+        'agnostic/DAV1DDecoder.cpp',
     ]
 
 if CONFIG['MOZ_OMX']:
     EXPORTS += [
         'omx/OmxCoreLibLinker.h',
     ]
     UNIFIED_SOURCES += [
         'omx/OmxCoreLibLinker.cpp',
--- a/dom/media/systemservices/CamerasChild.h
+++ b/dom/media/systemservices/CamerasChild.h
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_CamerasChild_h
 #define mozilla_CamerasChild_h
 
 #include "mozilla/Move.h"
 #include "mozilla/Pair.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/camera/PCamerasChild.h"
 #include "mozilla/camera/PCamerasParent.h"
 #include "mozilla/media/DeviceChangeCallback.h"
 #include "mozilla/Mutex.h"
 #include "nsCOMPtr.h"
 
 // conflicts with #include of scoped_ptr.h
 #undef FF
--- a/dom/media/systemservices/CamerasParent.h
+++ b/dom/media/systemservices/CamerasParent.h
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_CamerasParent_h
 #define mozilla_CamerasParent_h
 
 #include "nsIObserver.h"
 #include "VideoEngine.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/camera/PCamerasParent.h"
 #include "mozilla/ipc/Shmem.h"
 #include "mozilla/ShmemPool.h"
 #include "mozilla/Atomics.h"
 #include "webrtc/modules/video_capture/video_capture.h"
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 #include "webrtc/common_video/include/incoming_video_stream.h"
 #include "webrtc/media/base/videosinkinterface.h"
--- a/dom/media/systemservices/MediaChild.cpp
+++ b/dom/media/systemservices/MediaChild.cpp
@@ -3,16 +3,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaChild.h"
 #include "MediaParent.h"
 
 #include "nsGlobalWindow.h"
+#include "mozilla/dom/ContentChild.h"
 #include "mozilla/MediaManager.h"
 #include "mozilla/Logging.h"
 #include "nsQueryObject.h"
 
 #undef LOG
 mozilla::LazyLogModule gMediaChildLog("MediaChild");
 #define LOG(args) MOZ_LOG(gMediaChildLog, mozilla::LogLevel::Debug, args)
 
--- a/dom/media/systemservices/MediaChild.h
+++ b/dom/media/systemservices/MediaChild.h
@@ -2,17 +2,16 @@
 /* vim: set sw=2 ts=8 et ft=cpp : */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_MediaChild_h
 #define mozilla_MediaChild_h
 
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/media/PMediaChild.h"
 #include "mozilla/media/PMediaParent.h"
 #include "MediaUtils.h"
 
 namespace mozilla {
 
 namespace ipc {
 class PrincipalInfo;
--- a/dom/media/systemservices/MediaParent.h
+++ b/dom/media/systemservices/MediaParent.h
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_MediaParent_h
 #define mozilla_MediaParent_h
 
 #include "MediaChild.h"
 
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/media/PMediaParent.h"
 
 namespace mozilla {
 namespace media {
 
 // media::Parent implements the chrome-process side of ipc for media::Child APIs
 // A same-process version may also be created to service non-e10s calls.
 
--- a/dom/media/test/manifest.js
+++ b/dom/media/test/manifest.js
@@ -49,16 +49,17 @@ var gSmallTests = [
 ];
 
 var gFrameCountTests = [
   { name:"bipbop.mp4", type:"video/mp4", totalFrameCount:297},
   { name:"gizmo.mp4", type:"video/mp4", totalFrameCount:166},
   { name:"seek-short.webm", type:"video/webm", totalFrameCount:8},
   { name:"seek.webm", type:"video/webm", totalFrameCount:120},
   { name:"320x240.ogv", type:"video/ogg", totalFrameCount:8},
+  { name:"av1.mp4", type:"video/mp4", totalFrameCount:24},
 ];
 
 if (SpecialPowers.Services.appinfo.name != "B2G") {
   // We only run mochitests on b2g desktop and b2g emulator. The 3gp codecs
   // aren't present on desktop, and the emulator codecs (which are different
   // from the real device codecs) don't pass all of our tests, so we need
   // to disable them.
 
--- a/dom/media/test/test_videoPlaybackQuality_totalFrames.html
+++ b/dom/media/test/test_videoPlaybackQuality_totalFrames.html
@@ -27,17 +27,18 @@ var startTest = function(test, token) {
   v.addEventListener("ended", ended);
   v.play();
 };
 
 SimpleTest.waitForExplicitFinish();
 SpecialPowers.pushPrefEnv(
   {
     "set": [
-      ["media.decoder.skip-to-next-key-frame.enabled", false]
+      ["media.decoder.skip-to-next-key-frame.enabled", false],
+      ["media.av1.use-dav1d", true]
     ]
   },
   function() {
     manager.runTests(getPlayableVideos(gFrameCountTests), startTest);
   });
 
 </script>
 </pre>
--- a/dom/media/webspeech/synth/SpeechSynthesis.cpp
+++ b/dom/media/webspeech/synth/SpeechSynthesis.cpp
@@ -3,17 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsISupportsPrimitives.h"
 #include "nsSpeechTask.h"
 #include "mozilla/Logging.h"
 
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/Element.h"
 
 #include "mozilla/dom/SpeechSynthesisBinding.h"
 #include "SpeechSynthesis.h"
 #include "nsContentUtils.h"
 #include "nsSynthVoiceRegistry.h"
 #include "nsIDocument.h"
 #include "nsIDocShell.h"
--- a/dom/network/TCPServerSocketParent.cpp
+++ b/dom/network/TCPServerSocketParent.cpp
@@ -5,17 +5,16 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsIScriptSecurityManager.h"
 #include "TCPServerSocket.h"
 #include "TCPServerSocketParent.h"
 #include "nsJSUtils.h"
 #include "TCPSocketParent.h"
 #include "mozilla/Unused.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/TabParent.h"
 
 namespace mozilla {
 namespace dom {
 
 NS_IMPL_CYCLE_COLLECTION(TCPServerSocketParent, mServerSocket)
 NS_IMPL_CYCLE_COLLECTING_ADDREF(TCPServerSocketParent)
 NS_IMPL_CYCLE_COLLECTING_RELEASE(TCPServerSocketParent)
--- a/dom/network/TCPSocketParent.cpp
+++ b/dom/network/TCPSocketParent.cpp
@@ -6,17 +6,16 @@
 
 #include "TCPSocketParent.h"
 #include "jsapi.h"
 #include "jsfriendapi.h"
 #include "nsJSUtils.h"
 #include "mozilla/Unused.h"
 #include "mozilla/net/NeckoCommon.h"
 #include "mozilla/net/PNeckoParent.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/ScriptSettings.h"
 #include "mozilla/dom/TabParent.h"
 #include "mozilla/HoldDropJSObjects.h"
 #include "nsISocketTransportService.h"
 #include "nsISocketTransport.h"
 #include "nsIScriptSecurityManager.h"
 #include "nsNetUtil.h"
 
--- a/dom/network/UDPSocketParent.cpp
+++ b/dom/network/UDPSocketParent.cpp
@@ -5,16 +5,17 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsIServiceManager.h"
 #include "UDPSocketParent.h"
 #include "nsComponentManagerUtils.h"
 #include "nsIUDPSocket.h"
 #include "nsINetAddr.h"
 #include "mozilla/Unused.h"
+#include "mozilla/dom/ContentParent.h"
 #include "mozilla/ipc/InputStreamUtils.h"
 #include "mozilla/net/DNS.h"
 #include "mozilla/net/NeckoCommon.h"
 #include "mozilla/net/PNeckoParent.h"
 #include "nsIPermissionManager.h"
 #include "nsIScriptSecurityManager.h"
 #include "mozilla/ipc/PBackgroundParent.h"
 #include "mtransport/runnable_utils.h"
--- a/dom/payments/PaymentRequestManager.cpp
+++ b/dom/payments/PaymentRequestManager.cpp
@@ -1,16 +1,15 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/ClearOnShutdown.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/PaymentRequestChild.h"
 #include "mozilla/dom/TabChild.h"
 #include "nsContentUtils.h"
 #include "nsString.h"
 #include "nsIPrincipal.h"
 #include "PaymentRequestManager.h"
 #include "PaymentRequestUtils.h"
 #include "PaymentResponse.h"
--- a/dom/plugins/base/nsPluginTags.cpp
+++ b/dom/plugins/base/nsPluginTags.cpp
@@ -14,17 +14,16 @@
 #include "nsPluginLogging.h"
 #include "nsNPAPIPlugin.h"
 #include "nsCharSeparatedTokenizer.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/Unused.h"
 #include "nsNetUtil.h"
 #include <cctype>
 #include "mozilla/Encoding.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/FakePluginTagInitBinding.h"
 
 #if defined(XP_MACOSX) && defined(MOZ_SANDBOX)
 #include "mozilla/SandboxSettings.h"
 #include "nsCocoaFeatures.h"
 #endif
 
 using mozilla::dom::FakePluginTagInit;
--- a/dom/serviceworkers/ServiceWorkerManager.h
+++ b/dom/serviceworkers/ServiceWorkerManager.h
@@ -40,16 +40,17 @@ namespace mozilla {
 class OriginAttributes;
 
 namespace ipc {
 class PrincipalInfo;
 } // namespace ipc
 
 namespace dom {
 
+class ContentParent;
 class ServiceWorkerInfo;
 class ServiceWorkerJobQueue;
 class ServiceWorkerManagerChild;
 class ServiceWorkerPrivate;
 class ServiceWorkerRegistrar;
 
 class ServiceWorkerUpdateFinishCallback
 {
--- a/dom/storage/LocalStorage.cpp
+++ b/dom/storage/LocalStorage.cpp
@@ -10,18 +10,16 @@
 #include "StorageUtils.h"
 
 #include "nsIObserverService.h"
 #include "nsIScriptSecurityManager.h"
 #include "nsIPermissionManager.h"
 #include "nsIPrincipal.h"
 #include "nsICookiePermission.h"
 
-#include "mozilla/dom/ContentChild.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/PermissionMessageUtils.h"
 #include "mozilla/dom/StorageBinding.h"
 #include "mozilla/dom/StorageEvent.h"
 #include "mozilla/dom/StorageEventBinding.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/ipc/PBackgroundChild.h"
 #include "mozilla/Services.h"
 #include "mozilla/Preferences.h"
--- a/dom/storage/StorageIPC.cpp
+++ b/dom/storage/StorageIPC.cpp
@@ -3,18 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "StorageIPC.h"
 
 #include "LocalStorageManager.h"
 
-#include "mozilla/dom/ContentChild.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/ipc/BackgroundParent.h"
 #include "mozilla/ipc/PBackgroundChild.h"
 #include "mozilla/ipc/PBackgroundParent.h"
 #include "mozilla/Unused.h"
 #include "nsThreadUtils.h"
 
 namespace mozilla {
--- a/dom/system/android/nsHapticFeedback.cpp
+++ b/dom/system/android/nsHapticFeedback.cpp
@@ -1,15 +1,14 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "mozilla/dom/ContentChild.h"
 #include "nsHapticFeedback.h"
 #include "GeneratedJNIWrappers.h"
 
 using namespace mozilla;
 
 NS_IMPL_ISUPPORTS(nsHapticFeedback, nsIHapticFeedback)
 
 NS_IMETHODIMP
--- a/dom/webbrowserpersist/WebBrowserPersistResourcesChild.cpp
+++ b/dom/webbrowserpersist/WebBrowserPersistResourcesChild.cpp
@@ -2,17 +2,16 @@
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "WebBrowserPersistResourcesChild.h"
 
 #include "WebBrowserPersistDocumentChild.h"
-#include "mozilla/dom/ContentChild.h"
 
 namespace mozilla {
 
 NS_IMPL_ISUPPORTS(WebBrowserPersistResourcesChild,
                   nsIWebBrowserPersistResourceVisitor)
 
 WebBrowserPersistResourcesChild::WebBrowserPersistResourcesChild()
 {
--- a/dom/webidl/Document.webidl
+++ b/dom/webidl/Document.webidl
@@ -522,23 +522,16 @@ enum FlashClassification {
   "allowed",        // Site is on the Flash whitelist
   "denied"          // Site is on the Flash blacklist
 };
 partial interface Document {
   [ChromeOnly]
   readonly attribute FlashClassification documentFlashClassification;
 };
 
-// Extension to obtain the number of trackers are detected and blocked in the
-// Document (and it's corresponding docshell sub-tree)
-partial interface Document {
-  [ChromeOnly] readonly attribute unsigned long numTrackersFound;
-  [ChromeOnly] readonly attribute unsigned long numTrackersBlocked;
-};
-
 partial interface Document {
   [Func="nsDocument::DocumentSupportsL10n"] readonly attribute DocumentL10n? l10n;
 };
 
 Document implements XPathEvaluator;
 Document implements GlobalEventHandlers;
 Document implements DocumentAndElementEventHandlers;
 Document implements TouchEventHandlers;
--- a/editor/libeditor/tests/mochitest.ini
+++ b/editor/libeditor/tests/mochitest.ini
@@ -265,17 +265,16 @@ skip-if = toolkit == 'android'
 [test_abs_positioner_positioning_elements.html]
 skip-if = android_version == '18' # bug 1147989
 [test_CF_HTML_clipboard.html]
 subsuite = clipboard
 [test_composition_event_created_in_chrome.html]
 [test_contenteditable_focus.html]
 [test_documentCharacterSet.html]
 [test_dom_input_event_on_htmleditor.html]
-skip-if = toolkit == 'android' # bug 1054087
 [test_dom_input_event_on_texteditor.html]
 [test_dragdrop.html]
 skip-if = os == 'android'
 [test_handle_new_lines.html]
 subsuite = clipboard
 skip-if = android_version == '24'
 [test_inline_style_cache.html]
 [test_inlineTableEditing.html]
--- a/extensions/cookie/nsPermissionManager.cpp
+++ b/extensions/cookie/nsPermissionManager.cpp
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/Attributes.h"
 #include "mozilla/AntiTrackingCommon.h"
 #include "mozilla/DebugOnly.h"
 
 #include "mozilla/dom/ContentParent.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/BasePrincipal.h"
 #include "mozilla/ContentPrincipal.h"
 #include "mozilla/Pair.h"
 #include "mozilla/Services.h"
 #include "mozilla/SystemGroup.h"
 #include "mozilla/Unused.h"
 #include "nsPermissionManager.h"
 #include "nsPermission.h"
@@ -2571,57 +2570,77 @@ nsPermissionManager::GetPermissionHashKe
   }
 
   // No entry, really...
   return nullptr;
 }
 
 NS_IMETHODIMP nsPermissionManager::GetEnumerator(nsISimpleEnumerator **aEnum)
 {
+  nsTArray<RefPtr<nsIPermission>> array;
+  nsresult rv = GetAllWithTypePrefix(NS_LITERAL_CSTRING(""), array);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  nsCOMArray<nsIPermission> comArray;
+  comArray.SetCapacity(array.Length());
+  for (size_t i = 0; i < array.Length(); i++) {
+    comArray.AppendElement(array[i].forget());
+  }
+
+  return NS_NewArrayEnumerator(aEnum, comArray, NS_GET_IID(nsIPermission));
+}
+
+NS_IMETHODIMP nsPermissionManager::GetAllWithTypePrefix(const nsACString& aPrefix,
+                                                        nsTArray<RefPtr<nsIPermission>>& aResult)
+{
+  aResult.Clear();
   if (XRE_IsContentProcess()) {
-    NS_WARNING("nsPermissionManager's enumerator is not available in the "
+    NS_WARNING("nsPermissionManager's getAllWithTypePrefix is not available in the "
                "content process, as not all permissions may be available.");
-    *aEnum = nullptr;
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  // roll an nsCOMArray of all our permissions, then hand out an enumerator
-  nsCOMArray<nsIPermission> array;
-
   for (auto iter = mPermissionTable.Iter(); !iter.Done(); iter.Next()) {
     PermissionHashKey* entry = iter.Get();
     for (const auto& permEntry : entry->GetPermissions()) {
       // Given how "default" permissions work and the possibility of them being
       // overridden with UNKNOWN_ACTION, we might see this value here - but we
       // do *not* want to return them via the enumerator.
       if (permEntry.mPermission == nsIPermissionManager::UNKNOWN_ACTION) {
         continue;
       }
 
+      if (!aPrefix.IsEmpty() &&
+          !StringBeginsWith(mTypeArray.ElementAt(permEntry.mType), aPrefix)) {
+        continue;
+      }
+
       nsCOMPtr<nsIPrincipal> principal;
       nsresult rv = GetPrincipalFromOrigin(entry->GetKey()->mOrigin,
                                            getter_AddRefs(principal));
       if (NS_FAILED(rv)) {
         continue;
       }
 
-      nsCOMPtr<nsIPermission> permission =
+      RefPtr<nsIPermission> permission =
         nsPermission::Create(principal,
                              mTypeArray.ElementAt(permEntry.mType),
                              permEntry.mPermission,
                              permEntry.mExpireType,
                              permEntry.mExpireTime);
       if (NS_WARN_IF(!permission)) {
         continue;
       }
-      array.AppendObject(permission);
+      aResult.AppendElement(std::move(permission));
     }
   }
 
-  return NS_NewArrayEnumerator(aEnum, array, NS_GET_IID(nsIPermission));
+  return NS_OK;
 }
 
 NS_IMETHODIMP nsPermissionManager::GetAllForURI(nsIURI* aURI, nsISimpleEnumerator **aEnum)
 {
   nsCOMPtr<nsIPrincipal> principal;
   nsresult rv = GetPrincipal(aURI, getter_AddRefs(principal));
   NS_ENSURE_SUCCESS(rv, rv);
 
new file mode 100644
--- /dev/null
+++ b/extensions/cookie/test/unit/test_permmanager_getAllWithTypePrefix.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+function check_enumerator(prefix, permissions) {
+  let pm = Cc["@mozilla.org/permissionmanager;1"]
+           .getService(Ci.nsIPermissionManager);
+
+  let array = pm.getAllWithTypePrefix(prefix);
+  for (let [uri, type, capability] of permissions) {
+    let perm = array.shift();
+    Assert.ok(perm != null);
+    Assert.ok(perm.principal.URI.equals(uri));
+    Assert.equal(perm.type, type);
+    Assert.equal(perm.capability, capability);
+    Assert.equal(perm.expireType, pm.EXPIRE_NEVER);
+  }
+  Assert.equal(array.length, 0);
+}
+
+function run_test() {
+  let pm = Cc["@mozilla.org/permissionmanager;1"]
+           .getService(Ci.nsIPermissionManager);
+
+  let uri = NetUtil.newURI("http://example.com");
+  let sub = NetUtil.newURI("http://sub.example.com");
+
+  check_enumerator("test/", [ ]);
+
+  pm.add(uri, "test/getallwithtypeprefix", pm.ALLOW_ACTION);
+  pm.add(sub, "other-test/getallwithtypeprefix", pm.PROMPT_ACTION);
+  check_enumerator("test/", [
+    [ uri, "test/getallwithtypeprefix", pm.ALLOW_ACTION ],
+  ]);
+
+  pm.add(sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION);
+  check_enumerator("test/", [
+    [ sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+    [ uri, "test/getallwithtypeprefix", pm.ALLOW_ACTION ],
+  ]);
+
+  check_enumerator("test/getallwithtypeprefix", [
+    [ sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+    [ uri, "test/getallwithtypeprefix", pm.ALLOW_ACTION ],
+  ]);
+
+  // check that UNKNOWN_ACTION permissions are ignored
+  pm.add(uri, "test/getallwithtypeprefix2", pm.UNKNOWN_ACTION);
+  check_enumerator("test/", [
+    [ sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+    [ uri, "test/getallwithtypeprefix", pm.ALLOW_ACTION ],
+  ]);
+
+  // check that permission updates are reflected
+  pm.add(uri, "test/getallwithtypeprefix", pm.PROMPT_ACTION);
+  check_enumerator("test/", [
+    [ sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+    [ uri, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+  ]);
+
+  // check that permission removals are reflected
+  pm.remove(uri, "test/getallwithtypeprefix");
+  check_enumerator("test/", [
+    [ sub, "test/getallwithtypeprefix", pm.PROMPT_ACTION ],
+  ]);
+
+  pm.removeAll();
+  check_enumerator("test/", [ ]);
+}
+
--- a/extensions/cookie/test/unit/xpcshell.ini
+++ b/extensions/cookie/test/unit/xpcshell.ini
@@ -16,16 +16,17 @@ skip-if = true # Bug 863738
 [test_cookies_thirdparty_nonsecure_session.js]
 [test_cookies_thirdparty_session.js]
 [test_domain_eviction.js]
 [test_eviction.js]
 [test_permmanager_default_pref.js]
 [test_permmanager_defaults.js]
 [test_permmanager_expiration.js]
 [test_permmanager_getAllForURI.js]
+[test_permmanager_getAllWithTypePrefix.js]
 [test_permmanager_getPermissionObject.js]
 [test_permmanager_notifications.js]
 [test_permmanager_removeall.js]
 [test_permmanager_removebytype.js]
 [test_permmanager_removesince.js]
 [test_permmanager_removeforapp.js]
 [test_permmanager_load_invalid_entries.js]
 skip-if = debug == true
--- a/gfx/layers/ipc/CompositorBridgeParent.cpp
+++ b/gfx/layers/ipc/CompositorBridgeParent.cpp
@@ -23,17 +23,16 @@
 #include "TreeTraversal.h"              // for ForEachNode
 #ifdef MOZ_WIDGET_GTK
 #include "gfxPlatformGtk.h"             // for gfxPlatform
 #endif
 #include "gfxPrefs.h"                   // for gfxPrefs
 #include "mozilla/AutoRestore.h"        // for AutoRestore
 #include "mozilla/ClearOnShutdown.h"    // for ClearOnShutdown
 #include "mozilla/DebugOnly.h"          // for DebugOnly
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/TabParent.h"
 #include "mozilla/gfx/2D.h"          // for DrawTarget
 #include "mozilla/gfx/GPUChild.h"       // for GfxPrefValue
 #include "mozilla/gfx/Point.h"          // for IntSize
 #include "mozilla/gfx/Rect.h"          // for IntSize
 #include "mozilla/gfx/gfxVars.h"        // for gfxVars
 #include "VRManager.h"                  // for VRManager
 #include "mozilla/ipc/Transport.h"      // for Transport
--- a/gfx/layers/ipc/LayerTreeOwnerTracker.cpp
+++ b/gfx/layers/ipc/LayerTreeOwnerTracker.cpp
@@ -2,17 +2,16 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "LayerTreeOwnerTracker.h"
 
 #include "mozilla/StaticPtr.h"              // for StaticAutoPtr
-#include "mozilla/dom/ContentParent.h"      // for ContentParent
 #include "mozilla/gfx/GPUChild.h"           // for GPUChild
 #include "mozilla/gfx/GPUProcessManager.h"  // for GPUProcessManager
 
 #include <functional>
 #include <utility> // for std::make_pair
 
 namespace mozilla {
 namespace layers {
--- a/gfx/layers/ipc/RemoteContentController.cpp
+++ b/gfx/layers/ipc/RemoteContentController.cpp
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/layers/RemoteContentController.h"
 
 #include "base/message_loop.h"
 #include "base/task.h"
 #include "MainThreadUtils.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/TabParent.h"
 #include "mozilla/layers/APZCCallbackHelper.h"
 #include "mozilla/layers/APZCTreeManagerParent.h"  // for APZCTreeManagerParent
 #include "mozilla/layers/APZThreadUtils.h"
 #include "mozilla/gfx/GPUProcessManager.h"
 #include "mozilla/Unused.h"
 #include "Units.h"
 
--- a/gfx/thebes/gfxAndroidPlatform.cpp
+++ b/gfx/thebes/gfxAndroidPlatform.cpp
@@ -12,17 +12,16 @@
 #include "mozilla/intl/LocaleService.h"
 #include "mozilla/intl/OSPreferences.h"
 #include "mozilla/Preferences.h"
 
 #include "gfx2DGlue.h"
 #include "gfxFT2FontList.h"
 #include "gfxImageSurface.h"
 #include "gfxTextRun.h"
-#include "mozilla/dom/ContentChild.h"
 #include "nsXULAppAPI.h"
 #include "nsIScreen.h"
 #include "nsIScreenManager.h"
 #include "nsServiceManagerUtils.h"
 #include "nsUnicodeProperties.h"
 #include "gfxPrefs.h"
 #include "cairo.h"
 #include "VsyncSource.h"
--- a/gfx/thebes/gfxDWriteFonts.cpp
+++ b/gfx/thebes/gfxDWriteFonts.cpp
@@ -8,17 +8,16 @@
 #include <algorithm>
 #include "gfxDWriteFontList.h"
 #include "gfxContext.h"
 #include "gfxTextRun.h"
 #include "mozilla/gfx/gfxVars.h"
 
 #include "harfbuzz/hb.h"
 #include "mozilla/FontPropertyTypes.h"
-#include "mozilla/dom/ContentParent.h"
 #include "cairo-win32.h"
 
 using namespace mozilla;
 using namespace mozilla::gfx;
 
 // This is also in gfxGDIFont.cpp. Would be nice to put it somewhere common,
 // but we can't declare it in the gfxFont.h or gfxFontUtils.h headers
 // because those are exported, and the cairo headers aren't.
--- a/gfx/webrender_bindings/revision.txt
+++ b/gfx/webrender_bindings/revision.txt
@@ -1,1 +1,1 @@
-ea8f4a922b2aa38c40de137d7f0ab6598d53e29a
+195582a8dc5b9c6d26d54ce8d70060ccc8f423d8
--- a/gfx/webrender_bindings/webrender_ffi_generated.h
+++ b/gfx/webrender_bindings/webrender_ffi_generated.h
@@ -130,16 +130,18 @@ enum class ImageFormat : uint32_t {
   BGRA8 = 3,
   // Four channels, float storage.
   RGBAF32 = 4,
   // Two-channels, byte storage. Similar to `R8`, this just means
   // "two channels" rather than "red and green".
   RG8 = 5,
   // Four channels, signed integer storage.
   RGBAI32 = 6,
+  // Four channels, byte storage.
+  RGBA8 = 7,
 
   Sentinel /* this must be last for serialization purposes. */
 };
 
 enum class ImageRendering : uint32_t {
   Auto = 0,
   CrispEdges = 1,
   Pixelated = 2,
--- a/gfx/wr/webrender/src/device/gl.rs
+++ b/gfx/wr/webrender/src/device/gl.rs
@@ -2622,16 +2622,23 @@ impl Device {
             },
             ImageFormat::BGRA8 => {
                 FormatDesc {
                     internal: self.bgra_format_internal,
                     external: self.bgra_format_external,
                     pixel_type: gl::UNSIGNED_BYTE,
                 }
             },
+            ImageFormat::RGBA8 => {
+                FormatDesc {
+                    internal: gl::RGBA8,
+                    external: gl::RGBA,
+                    pixel_type: gl::UNSIGNED_BYTE,
+                }
+            },
             ImageFormat::RGBAF32 => FormatDesc {
                 internal: gl::RGBA32F,
                 external: gl::RGBA,
                 pixel_type: gl::FLOAT,
             },
             ImageFormat::RGBAI32 => FormatDesc {
                 internal: gl::RGBA32I,
                 external: gl::RGBA_INTEGER,
@@ -2778,16 +2785,17 @@ impl<'a, T> TextureUploader<'a, T> {
 }
 
 impl<'a> UploadTarget<'a> {
     fn update_impl(&mut self, chunk: UploadChunk) {
         let (gl_format, bpp, data_type) = match self.texture.format {
             ImageFormat::R8 => (gl::RED, 1, gl::UNSIGNED_BYTE),
             ImageFormat::R16 => (gl::RED, 2, gl::UNSIGNED_SHORT),
             ImageFormat::BGRA8 => (self.bgra_format, 4, gl::UNSIGNED_BYTE),
+            ImageFormat::RGBA8 => (gl::RGBA, 4, gl::UNSIGNED_BYTE),
             ImageFormat::RG8 => (gl::RG, 2, gl::UNSIGNED_BYTE),
             ImageFormat::RGBAF32 => (gl::RGBA, 16, gl::FLOAT),
             ImageFormat::RGBAI32 => (gl::RGBA_INTEGER, 16, gl::INT),
         };
 
         let row_length = match chunk.stride {
             Some(value) => value / bpp,
             None => self.texture.size.width,
--- a/gfx/wr/webrender/src/texture_cache.rs
+++ b/gfx/wr/webrender/src/texture_cache.rs
@@ -948,16 +948,22 @@ impl TextureCache {
     // placed in the shared texture cache.
     pub fn is_allowed_in_shared_cache(
         &self,
         filter: TextureFilter,
         descriptor: &ImageDescriptor,
     ) -> bool {
         let mut allowed_in_shared_cache = true;
 
+        // TODO(sotaro): For now, anything that requests RGBA8 just fails to allocate
+        // in a texture page, and gets a standalone texture.
+        if descriptor.format == ImageFormat::RGBA8 {
+            allowed_in_shared_cache = false;
+        }
+
         // TODO(gw): For now, anything that requests nearest filtering and isn't BGRA8
         //           just fails to allocate in a texture page, and gets a standalone
         //           texture. This is probably rare enough that it can be fixed up later.
         if filter == TextureFilter::Nearest &&
            descriptor.format != ImageFormat::BGRA8 {
             allowed_in_shared_cache = false;
         }
 
--- a/gfx/wr/webrender_api/src/image.rs
+++ b/gfx/wr/webrender_api/src/image.rs
@@ -111,28 +111,31 @@ pub enum ImageFormat {
     BGRA8 = 3,
     /// Four channels, float storage.
     RGBAF32 = 4,
     /// Two-channels, byte storage. Similar to `R8`, this just means
     /// "two channels" rather than "red and green".
     RG8 = 5,
     /// Four channels, signed integer storage.
     RGBAI32 = 6,
+    /// Four channels, byte storage.
+    RGBA8 = 7,
 }
 
 impl ImageFormat {
     /// Returns the number of bytes per pixel for the given format.
     pub fn bytes_per_pixel(self) -> i32 {
         match self {
             ImageFormat::R8 => 1,
             ImageFormat::R16 => 2,
             ImageFormat::BGRA8 => 4,
             ImageFormat::RGBAF32 => 16,
             ImageFormat::RG8 => 2,
             ImageFormat::RGBAI32 => 16,
+            ImageFormat::RGBA8 => 4,
         }
     }
 }
 
 /// Specifies the color depth of an image. Currently only used for YUV images.
 #[repr(u8)]
 #[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
 pub enum ColorDepth {
--- a/gfx/wr/wrench/src/yaml_frame_reader.rs
+++ b/gfx/wr/wrench/src/yaml_frame_reader.rs
@@ -173,17 +173,18 @@ fn generate_solid_color_image(
         ImageData::new(pixels),
     )
 }
 
 
 
 fn is_image_opaque(format: ImageFormat, bytes: &[u8]) -> bool {
     match format {
-        ImageFormat::BGRA8 => {
+        ImageFormat::BGRA8 |
+        ImageFormat::RGBA8 => {
             let mut is_opaque = true;
             for i in 0 .. (bytes.len() / 4) {
                 if bytes[i * 4 + 3] != 255 {
                     is_opaque = false;
                     break;
                 }
             }
             is_opaque
--- a/hal/HalWakeLock.cpp
+++ b/hal/HalWakeLock.cpp
@@ -3,17 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "Hal.h"
 #include "mozilla/HalWakeLock.h"
 #include "mozilla/Services.h"
 #include "mozilla/StaticPtr.h"
-#include "mozilla/dom/ContentParent.h"
 #include "nsAutoPtr.h"
 #include "nsClassHashtable.h"
 #include "nsDataHashtable.h"
 #include "nsHashKeys.h"
 #include "nsIPropertyBag2.h"
 #include "nsIObserverService.h"
 
 using namespace mozilla;
--- a/hal/sandbox/SandboxHal.cpp
+++ b/hal/sandbox/SandboxHal.cpp
@@ -2,17 +2,16 @@
 /* vim: set sw=2 ts=8 et ft=cpp : */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "Hal.h"
 #include "HalLog.h"
 #include "mozilla/dom/ContentChild.h"
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/hal_sandbox/PHalChild.h"
 #include "mozilla/hal_sandbox/PHalParent.h"
 #include "mozilla/dom/TabParent.h"
 #include "mozilla/dom/TabChild.h"
 #include "mozilla/fallback/FallbackScreenConfiguration.h"
 #include "mozilla/EnumeratedRange.h"
 #include "mozilla/Observer.h"
 #include "mozilla/Unused.h"
--- a/ipc/glue/BackgroundUtils.cpp
+++ b/ipc/glue/BackgroundUtils.cpp
@@ -631,46 +631,36 @@ LoadInfoArgsToLoadInfo(const OptionalLoa
 void
 LoadInfoToParentLoadInfoForwarder(nsILoadInfo* aLoadInfo,
                                   ParentLoadInfoForwarderArgs* aForwarderArgsOut)
 {
   if (!aLoadInfo) {
     *aForwarderArgsOut = ParentLoadInfoForwarderArgs(false, void_t(),
                                                      nsILoadInfo::TAINTING_BASIC,
                                                      false, // serviceWorkerTaintingSynthesized
-                                                     false, // isTracker
-                                                     false, // isTrackerBlocked
-                                                     mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all, // trackerBlockedReason
                                                      false, // documentHasUserInteracted
                                                      false  // documentHasLoaded
                                                     );
     return;
   }
 
   OptionalIPCServiceWorkerDescriptor ipcController = void_t();
   Maybe<ServiceWorkerDescriptor> controller(aLoadInfo->GetController());
   if (controller.isSome()) {
     ipcController = controller.ref().ToIPC();
   }
 
   uint32_t tainting = nsILoadInfo::TAINTING_BASIC;
   Unused << aLoadInfo->GetTainting(&tainting);
 
-  mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED label =
-    mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all;
-  Unused << aLoadInfo->GetTrackerBlockedReason(&label);
-
   *aForwarderArgsOut = ParentLoadInfoForwarderArgs(
     aLoadInfo->GetAllowInsecureRedirectToDataURI(),
     ipcController,
     tainting,
     aLoadInfo->GetServiceWorkerTaintingSynthesized(),
-    aLoadInfo->GetIsTracker(),
-    aLoadInfo->GetIsTrackerBlocked(),
-    label,
     aLoadInfo->GetDocumentHasUserInteracted(),
     aLoadInfo->GetDocumentHasLoaded()
   );
 }
 
 nsresult
 MergeParentLoadInfoForwarder(ParentLoadInfoForwarderArgs const& aForwarderArgs,
                              nsILoadInfo* aLoadInfo)
@@ -694,19 +684,16 @@ MergeParentLoadInfoForwarder(ParentLoadI
 
   if (aForwarderArgs.serviceWorkerTaintingSynthesized()) {
     aLoadInfo->SynthesizeServiceWorkerTainting(
       static_cast<LoadTainting>(aForwarderArgs.tainting()));
   } else {
     aLoadInfo->MaybeIncreaseTainting(aForwarderArgs.tainting());
   }
 
-  MOZ_ALWAYS_SUCCEEDS(aLoadInfo->SetIsTracker(aForwarderArgs.isTracker()));
-  MOZ_ALWAYS_SUCCEEDS(aLoadInfo->SetIsTrackerBlocked(aForwarderArgs.isTrackerBlocked()));
-  MOZ_ALWAYS_SUCCEEDS(aLoadInfo->SetTrackerBlockedReason(aForwarderArgs.trackerBlockedReason()));
   MOZ_ALWAYS_SUCCEEDS(aLoadInfo->SetDocumentHasUserInteracted(aForwarderArgs.documentHasUserInteracted()));
   MOZ_ALWAYS_SUCCEEDS(aLoadInfo->SetDocumentHasLoaded(aForwarderArgs.documentHasLoaded()));
 
   return NS_OK;
 }
 
 void
 LoadInfoToChildLoadInfoForwarder(nsILoadInfo* aLoadInfo,
--- a/ipc/glue/ProtocolUtils.cpp
+++ b/ipc/glue/ProtocolUtils.cpp
@@ -10,17 +10,16 @@
 #ifdef OS_POSIX
 #include <errno.h>
 #endif
 
 #include "mozilla/IntegerPrintfMacros.h"
 
 #include "mozilla/ipc/ProtocolUtils.h"
 
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/ipc/MessageChannel.h"
 #include "mozilla/ipc/Transport.h"
 #include "mozilla/recordreplay/ChildIPC.h"
 #include "mozilla/recordreplay/ParentIPC.h"
 #include "mozilla/StaticMutex.h"
 #include "mozilla/SystemGroup.h"
 #include "mozilla/Unused.h"
 #include "nsPrintfCString.h"
--- a/ipc/mscom/Interceptor.cpp
+++ b/ipc/mscom/Interceptor.cpp
@@ -1,17 +1,16 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #define INITGUID
 
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/Move.h"
 #include "mozilla/mscom/DispatchForwarder.h"
 #include "mozilla/mscom/FastMarshaler.h"
 #include "mozilla/mscom/Interceptor.h"
 #include "mozilla/mscom/InterceptorLog.h"
 #include "mozilla/mscom/MainThreadInvoker.h"
 #include "mozilla/mscom/Objref.h"
 #include "mozilla/mscom/Registration.h"
--- a/ipc/testshell/TestShellParent.cpp
+++ b/ipc/testshell/TestShellParent.cpp
@@ -3,17 +3,16 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "TestShellParent.h"
 
 /* This must occur *after* TestShellParent.h to avoid typedefs conflicts. */
 #include "jsfriendapi.h"
 #include "mozilla/ArrayUtils.h"
 
-#include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/ScriptSettings.h"
 
 #include "xpcpublic.h"
 
 using namespace mozilla;
 using mozilla::ipc::TestShellParent;
 using mozilla::ipc::TestShellCommandParent;
 using mozilla::ipc::PTestShellCommandParent;
--- a/js/ipc/JavaScriptChild.cpp
+++ b/js/ipc/JavaScriptChild.cpp
@@ -1,17 +1,16 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=4 sw=4 et tw=80:
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "JavaScriptChild.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/BindingUtils.h"
 #include "mozilla/ipc/MessageChannel.h"
 #include "nsContentUtils.h"
 #include "xpcprivate.h"
 #include "jsfriendapi.h"
 #include "AccessCheck.h"
 
 using namespace JS;
--- a/js/ipc/WrapperAnswer.cpp
+++ b/js/ipc/WrapperAnswer.cpp
@@ -2,17 +2,16 @@
  * vim: set ts=4 sw=4 et tw=80:
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "WrapperAnswer.h"
 #include "JavaScriptLogging.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/BindingUtils.h"
 #include "mozilla/dom/ScriptSettings.h"
 #include "xpcprivate.h"
 #include "js/Class.h"
 #include "jsfriendapi.h"
 
 using namespace JS;
 using namespace mozilla;
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/cacheir/shape-teleporting-2.js
@@ -0,0 +1,47 @@
+function A(name) { this.name = name; }
+function B() { }
+function C() { }
+
+B.prototype = A0 = new A("0");
+C.prototype = B0 = new B();
+
+var A1 = new A("1");
+var A2 = new A("2");
+
+var B1 = new B();
+var B2 = new B();
+
+var C1 = new C();
+var C2 = new C();
+
+// Object <-+- A0 <-+- B0 <-+
+//          |       |       |
+//          +- A1   +- B1   +- C1
+//          |       |       |
+//          +- A2   +- B2   +- C2
+
+Object.setPrototypeOf(C1, B1);
+Object.setPrototypeOf(C2, B2);
+
+Object.setPrototypeOf(B1, A1);
+Object.setPrototypeOf(B2, A2);
+
+// Object <-+- A0 <--- B0
+//          |
+//          +- A1 <--- B1 <--- C1
+//          |
+//          +- A2 <--- B2 <--- C2
+
+
+function getName(o) { return o.name; }
+
+// Warm up JIT
+for (var i = 0; i < 100; i++) {
+    getName(C1);
+}
+
+assertEq(B1.name, "1");
+assertEq(B2.name, "2");
+
+assertEq(getName(B1), "1");
+assertEq(getName(B2), "2");
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -711,16 +711,36 @@ GeneratePrototypeGuardsForReceiver(Cache
     } else if (obj->is<TypedObject>()) {
         MOZ_ASSERT(!obj->group()->hasUncacheableProto());
     } else if (obj->is<ProxyObject>()) {
         MOZ_ASSERT(!obj->hasUncacheableProto());
     }
 #endif // DEBUG
 }
 
+static bool
+ProtoChainSupportsTeleporting(JSObject* obj, JSObject* holder)
+{
+    // Any non-delegate should already have been handled since its checks are
+    // always required.
+    MOZ_ASSERT(obj->isDelegate());
+
+    // Prototype chain must have cacheable prototypes to ensure the cached
+    // holder is the current holder.
+    for (JSObject* tmp = obj; tmp != holder; tmp = tmp->staticPrototype()) {
+        if (tmp->hasUncacheableProto()) {
+            return false;
+        }
+    }
+
+    // The holder itself only gets reshaped by teleportation if it is not
+    // marked UNCACHEABLE_PROTO. See: ReshapeForProtoMutation.
+    return !holder->hasUncacheableProto();
+}
+
 static void
 GeneratePrototypeGuards(CacheIRWriter& writer, JSObject* obj, JSObject* holder, ObjOperandId objId)
 {
     // Assuming target property is on |holder|, generate appropriate guards to
     // ensure |holder| is still on the prototype chain of |obj| and we haven't
     // introduced any shadowing definitions.
     //
     // For each item in the proto chain before holder, we must ensure that
@@ -751,28 +771,21 @@ GeneratePrototypeGuards(CacheIRWriter& w
     // change since the lookup of 'x' will stop at B.
     //
     // The second condition we must verify is that the prototype chain was not
     // mutated. The same mechanism as above is used. When the prototype link is
     // changed, we generate a new shape for the object. If the object whose
     // link we are mutating is itself a prototype, we regenerate shapes down
     // the chain. This means the same two shape checks as above are sufficient.
     //
-    // Unfortunately we don't stop there and add further caveats. We may set
-    // the UNCACHEABLE_PROTO flag on the shape of an object to indicate that it
-    // will not generate a new shape if its prototype link is modified. If the
-    // object is itself a prototype we follow the shape chain and regenerate
-    // shapes (if they aren't themselves uncacheable).
-    //
-    // Let's consider the effect of the UNCACHEABLE_PROTO flag on our example:
-    // - D is uncacheable: Add check that D still links to C
-    // - C is uncacheable: Modifying C.__proto__ will still reshape B (if B is
-    //                     not uncacheable)
-    // - B is uncacheable: Add shape check C since B will not reshape OR check
-    //                     proto of D and C
+    // An additional wrinkle is the UNCACHEABLE_PROTO shape flag. This
+    // indicates that the shape no longer implies any specific prototype. As
+    // well, the shape will not be updated by the teleporting optimization.
+    // If any shape from receiver to holder (inclusive) is UNCACHEABLE_PROTO,
+    // we don't apply the optimization.
     //
     // See:
     //  - ReshapeForProtoMutation
     //  - ReshapeForShadowedProp
 
     MOZ_ASSERT(holder);
     MOZ_ASSERT(obj != holder);
 
@@ -783,19 +796,18 @@ GeneratePrototypeGuards(CacheIRWriter& w
         // TestMatchingReceiver does not always ensure the prototype is
         // unchanged, so generate extra guards as needed.
         GeneratePrototypeGuardsForReceiver(writer, obj, objId);
 
         pobj = obj->staticPrototype();
     }
     MOZ_ASSERT(pobj->isDelegate());
 
-    // In the common case, holder has a cacheable prototype and will regenerate
-    // its shape if any (delegate) objects in the proto chain are updated.
-    if (!holder->hasUncacheableProto()) {
+    // If teleporting is supported for this prototype chain, we are done.
+    if (ProtoChainSupportsTeleporting(pobj, holder)) {
         return;
     }
 
     // If already at the holder, no further proto checks are needed.
     if (pobj == holder) {
         return;
     }
 
--- a/js/src/jit/CacheIRCompiler.h
+++ b/js/src/jit/CacheIRCompiler.h
@@ -116,16 +116,87 @@ namespace jit {
     _(MegamorphicLoadSlotByValueResult)   \
     _(MegamorphicStoreSlot)               \
     _(MegamorphicHasPropResult)           \
     _(CallObjectHasSparseElementResult)   \
     _(CallInt32ToString)                  \
     _(CallNumberToString)                 \
     _(WrapResult)
 
+
+// [SMDOC] CacheIR Value Representation and Tracking
+//
+// While compiling an IC stub the CacheIR compiler needs to keep track of the
+// physical location for each logical piece of data we care about, as well as
+// ensure that in the case of a stub failing, we are able to restore the input
+// state so that a subsequent stub can attempt to provide a value.
+//
+// OperandIds are created in the CacheIR front-end to keep track of values that
+// are passed between CacheIR ops during the execution of a given CacheIR stub.
+// In the CacheRegisterAllocator these OperandIds are given OperandLocations,
+// that represent the physical location of the OperandId at a given point in
+// time during CacheRegister allocation.
+//
+// In the CacheRegisterAllocator physical locations include the stack, and
+// registers, as well as whether or not the value has been unboxed or not.
+// Constants are also represented separately to provide for on-demand
+// materialization.
+//
+// Intra-op Register allocation:
+//
+// During the emission of a CacheIR op, code can ask the CacheRegisterAllocator
+// for access to a particular OperandId, and the register allocator will
+// generate the required code to fill that request.
+//
+// There are also a number of RAII classes that interact with the register
+// allocator, in order to provide access to more registers than just those
+// provided for by the OperandIds.
+//
+// - AutoOutputReg: The register which will hold the output value of the stub.
+// - AutoScratchReg: By default, an arbitrary scratch register, however a
+//   specific register can be requested.
+// - AutoScratchRegMaybeOutput: Any arbitrary scratch register, but the output
+//   register may be used as well.
+//
+// These RAII classes take ownership of a register for the duration of their
+// lifetime so they can be used for computation or output. The register
+// allocator can spill values with OperandLocations in order to try to ensure
+// that a register is made available for use.
+//
+// If a specific register is required (via AutoScratchRegister), it should be
+// the first register acquired, as the register allocator will be unable to
+// allocate the fixed register if the current op is using it for something else.
+//
+// If no register can be provided after attempting to spill, a
+// MOZ_RELEASE_ASSERT ensures the browser will crash. The register allocator is
+// not provided enough information in its current design to insert spills and
+// fills at arbitrary locations, and so it can fail to find an allocation
+// solution. However, this will only happen within the implementation of an
+// operand emitter, and because the cache register allocator is mostly
+// deterministic, so long as the operand id emitter is tested, this won't
+// suddenly crop up in an arbitrary webpage. It's worth noting the most
+// difficult platform to support is x86-32, because it has the least number of
+// registers available.
+//
+// FailurePaths checkpoint the state of the register allocator so that the input
+// state can be recomputed from the current state before jumping to the next
+// stub in the IC chain. An important invariant is that the FailurePath must be
+// allocated for each op after all the manipulation of OperandLocations has
+// happened, so that its recording is correct.
+//
+// Inter-op Register Allocation:
+//
+// The RAII register management classes are RAII because all register state
+// outside the OperandLocations is reset before the compilation of each
+// individual CacheIR op. This means that you cannot rely on a value surviving
+// between ops, even if you use the ability of AutoScratchRegister to name a
+// specific register. Values that need to be preserved between ops must be given
+// an OperandId.
+
+
 // Represents a Value on the Baseline frame's expression stack. Slot 0 is the
 // value on top of the stack (the most recently pushed value), slot 1 is the
 // value pushed before that, etc.
 class BaselineFrameSlot
 {
     uint32_t slot_;
 
   public:
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -494,17 +494,95 @@ class MacroAssembler : public MacroAssem
     // Emit a nop that can be patched to and from a nop and a call with int32
     // relative displacement.
     CodeOffset nopPatchableToCall(const wasm::CallSiteDesc& desc) PER_SHARED_ARCH;
     static void patchNopToCall(uint8_t* callsite, uint8_t* target) PER_SHARED_ARCH;
     static void patchCallToNop(uint8_t* callsite) PER_SHARED_ARCH;
 
   public:
     // ===============================================================
-    // ABI function calls.
+    // [SMDOC] JIT-to-C++ Function Calls (callWithABI)
+    //
+    // callWithABI is used to make a call using the standard C/C++ system ABI.
+    //
+    // callWithABI is a low level interface for making calls, as such every call
+    // made with callWithABI should be organized with 6 steps: spilling live
+    // registers, aligning the stack, listing arguments of the called function,
+    // calling a function pointer, extracting the returned value and restoring
+    // live registers.
+    //
+    // A more detailed example of the six stages:
+    //
+    // 1) Saving of registers that are live. This will vary depending on which
+    //    SpiderMonkey compiler you are working on. Registers that shouldn't be
+    //    restored can be excluded.
+    //
+    //      LiveRegisterSet volatileRegs(...);
+    //      volatileRegs.take(scratch);
+    //      masm.PushRegsInMask(volatileRegs);
+    //
+    // 2) Align the stack to perform the call with the correct stack alignment.
+    //
+    //    When the stack pointer alignment is unknown and cannot be corrected
+    //    when generating the code, setupUnalignedABICall must be used to
+    //    dynamically align the stack pointer to the expectation of the ABI.
+    //    When the stack pointer is known at JIT compilation time, the stack can
+    //    be fixed manually and setupAlignedABICall and setupWasmABICall can be
+    //    used.
+    //
+    //    setupWasmABICall is a special case of setupAlignedABICall as
+    //    SpiderMonkey's WebAssembly implementation mostly follows the system
+    //    ABI, except for float/double arguments, which always use floating
+    //    point registers, even if this is not supported by the system ABI.
+    //
+    //      masm.setupUnalignedABICall(scratch);
+    //
+    // 3) Passing arguments. Arguments are passed left-to-right.
+    //
+    //      masm.passABIArg(scratch);
+    //      masm.passABIArg(FloatOp0, MoveOp::Double);
+    //
+    //    Note how float register arguments are annotated with MoveOp::Double.
+    //
+    //    Concerning stack-relative address, see the note on passABIArg.
+    //
+    // 4) Make the call:
+    //
+    //      masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Callee));
+    //
+    //    In the case where the call returns a double, that needs to be
+    //    indicated to the callWithABI like this:
+    //
+    //      masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, ...), MoveOp::DOUBLE);
+    //
+    //    There are overloads to allow calls to registers and addresses.
+    //
+    // 5) Take care of the ReturnReg or ReturnDoubleReg
+    //
+    //      masm.mov(ReturnReg, scratch1);
+    //
+    // 6) Restore the potentially clobbered volatile registers
+    //
+    //      masm.PopRegsInMask(volatileRegs);
+    //
+    //    If expecting a returned value, this call should use
+    //    PopRegsInMaskIgnore to filter out the registers which are containing
+    //    the returned value.
+    //
+    // Unless an exit frame is pushed prior to the setupABICall, the callee
+    // should not GC. To ensure this is the case callWithABI is instrumented to
+    // make sure that in the default case callees are annotated with an
+    // AutoUnsafeCallWithABI on the stack.
+    //
+    // A callWithABI can opt out of checking, if for example it is known there
+    // is an exit frame, or the callee is known not to GC.
+    //
+    // If your callee needs to be able to GC, consider using a VMFunction, or
+    // create a fake exit frame, and instrument the TraceJitExitFrame
+    // accordingly.
 
     // Setup a call to C/C++ code, given the assumption that the framePushed
     // accruately define the state of the stack, and that the top of the stack
     // was properly aligned. Note that this only supports cdecl.
     void setupAlignedABICall(); // CRASH_ON(arm64)
 
     // As setupAlignedABICall, but for WebAssembly native ABI calls, which pass
     // through a builtin thunk that uses the wasm ABI. All the wasm ABI calls
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -53,27 +53,86 @@ struct PopValues
     { }
 };
 
 enum MaybeTailCall : bool {
     TailCall,
     NonTailCall
 };
 
-// Contains information about a virtual machine function that can be called
-// from JIT code. Functions described in this manner must conform to a simple
+// [SMDOC] JIT-to-C++ Function Calls (callVM)
+//
+// Sometimes it is easier to reuse C++ code by calling VM's functions. Calling a
+// function from the VM can be achieved with the use of callWithABI but this is
+// discouraged when the called functions might trigger exceptions and/or
+// garbage collections which are expecting to walk the stack. VMFunctions and
+// callVM are interfaces provided to handle the exception handling and register
+// the stack end (JITActivation) such that walking the stack is made possible.
+//
+// A VMFunction is a structure which contains the necessary information needed
+// for generating a trampoline function to make a call (with generateVMWrapper)
+// and to root the arguments of the function (in TraceJitExitFrame). VMFunctions
+// are created with the FunctionInfo template, which infers the fields of the
+// VMFunction from the function signature. The rooting and trampoline code is
+// therefore determined by the arguments of a function and their locations in
+// the signature of a function.
+//
+// VMFunctions all expect a JSContext* as first argument. This argument is
+// implicitly provided by the trampoline code (in generateVMWrapper) and used
+// for creating new objects or reporting errors. If your function does not make
+// use of a JSContext* argument, then you should probably use a callWithABI
+// call.
+//
+// Functions described using the VMFunction system must conform to a simple
 // protocol: the return type must have a special "failure" value (for example,
 // false for bool, or nullptr for Objects). If the function is designed to
 // return a value that does not meet this requirement - such as
 // object-or-nullptr, or an integer, an optional, final outParam can be
 // specified. In this case, the return type must be boolean to indicate
 // failure.
 //
-// All functions described by VMFunction take a JSContext * as a first
-// argument, and are treated as re-entrant into the VM and therefore fallible.
+// JIT Code usage:
+//
+// Different JIT compilers in SpiderMonkey have their own implementations of
+// callVM to consume VMFunctions. However, the general shape of them is that
+// arguments that don't include the JIT Context or trailing out-param are pushed
+// on to the stack from right to left (rightmost argument is pushed first).
+//
+// Regardless of return value protocol being used (final outParam, or return
+// value) the generated trampolines ensure the return value ends up in
+// JSReturnReg, ReturnReg or ReturnDoubleReg.
+//
+// Example:
+//
+// The details will differ slightly between the different compilers in
+// SpiderMonkey, but the general shape of our usage looks like this:
+//
+// Suppose we have a function Foo:
+//
+//      bool Foo(JSContext* cx, HandleObject x, HandleId y,
+//               MutableHandleValue z);
+//
+// This function returns true on success, and z is the outparam return value.
+//
+// A VMFunction for this can be created using FunctionInfo. The typical pattern
+// used is:
+//
+//      typedef bool (*FooFn)(JSContext*, HandleObject, HandleId,
+//                            MutableHandleValue);
+//      const VMFunction FooInfo = FunctionInfo<FooFn>(Foo, "Foo");
+//
+// In the compiler code the call would then be issued like this:
+//
+//      masm.Push(id);
+//      masm.Push(obj);
+//      if (!callVM(FooInfo)) {
+//          return false;
+//      }
+//
+// After this, the result value is in the return value register.
 struct VMFunction
 {
     // Global linked list of all VMFunctions.
     static VMFunction* functions;
     VMFunction* next;
 
     // Address of the C function.
     void* wrapped;
--- a/js/xpconnect/loader/URLPreloader.cpp
+++ b/js/xpconnect/loader/URLPreloader.cpp
@@ -26,18 +26,16 @@
 #include "nsNetUtil.h"
 #include "nsPromiseFlatString.h"
 #include "nsProxyRelease.h"
 #include "nsThreadUtils.h"
 #include "nsXULAppAPI.h"
 #include "nsZipArchive.h"
 #include "xpcpublic.h"
 
-#include "mozilla/dom/ContentChild.h"
-
 #undef DELAYED_STARTUP_TOPIC
 #define DELAYED_STARTUP_TOPIC "sessionstore-windows-restored"
 
 namespace mozilla {
 namespace {
 static LazyLogModule gURLLog("URLPreloader");
 
 #define LOG(level, ...) MOZ_LOG(gURLLog, LogLevel::level, (__VA_ARGS__))
--- a/layout/base/PresShell.cpp
+++ b/layout/base/PresShell.cpp
@@ -2096,19 +2096,22 @@ PresShell::FireResizeEvent()
 }
 
 static nsIContent* GetNativeAnonymousSubtreeRoot(nsIContent* aContent)
 {
   if (!aContent || !aContent->IsInNativeAnonymousSubtree()) {
     return nullptr;
   }
   auto* current = aContent;
+  // FIXME(emilio): This should not need to worry about current being null, but
+  // editor removes nodes in native anonymous subtrees, and we don't clean nodes
+  // from the current event content stack from ContentRemoved, so it can
+  // actually happen, see bug 1510208.
   while (current && !current->IsRootOfNativeAnonymousSubtree()) {
     current = current->GetFlattenedTreeParent();
-    MOZ_DIAGNOSTIC_ASSERT(current, "How?");
   }
   return current;
 }
 
 void
 nsIPresShell::NativeAnonymousContentRemoved(nsIContent* aAnonContent)
 {
   MOZ_ASSERT(aAnonContent->IsRootOfNativeAnonymousSubtree());
--- a/layout/base/nsLayoutUtils.cpp
+++ b/layout/base/nsLayoutUtils.cpp
@@ -15,17 +15,16 @@
 #include "mozilla/EventStateManager.h"
 #include "mozilla/FloatingPoint.h"
 #include "mozilla/gfx/gfxVars.h"
 #include "mozilla/gfx/PathHelpers.h"
 #include "mozilla/layers/PAPZ.h"
 #include "mozilla/Likely.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/MemoryReporting.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/ServoStyleSetInlines.h"
 #include "mozilla/StaticPrefs.h"
 #include "mozilla/Unused.h"
 #include "nsCharTraits.h"
 #include "nsDocument.h"
 #include "nsFontMetrics.h"
 #include "nsPresContext.h"
 #include "nsIContent.h"
--- a/layout/reftests/bugs/reftest.list
+++ b/layout/reftests/bugs/reftest.list
@@ -2072,17 +2072,17 @@ fuzzy-if(!(webrender&&gtkWidget),1-2,175
 test-pref(font.size.systemFontScale,200) == 1412743.html 1412743-ref.html
 == 1419820-1.html 1419820-1-ref.html
 == 1420946-1.html 1420946-1-ref.html
 == 1422393.html 1422393-ref.html
 == 1424177.html 1424177-ref.html
 == 1424680.html 1424680-ref.html
 == 1424798-1.html 1424798-ref.html
 fuzzy-if(!webrender,0-74,0-2234) == 1425243-1.html 1425243-1-ref.html
-fuzzy-if(Android,0-66,0-574) fuzzy-if(d2d,0-89,0-777) fuzzy-if(!Android&&!d2d,0-1,0-31341) fuzzy-if(webrender&&winWidget,1-1,31284-31284) == 1425243-2.html 1425243-2-ref.html
+fuzzy-if(Android,0-66,0-574) fuzzy-if(d2d,0-89,0-777) fuzzy-if(!Android&&!d2d,0-1,0-31341) fuzzy-if(webrender&&winWidget,1-1,31320-31320) == 1425243-2.html 1425243-2-ref.html
 == 1430869.html 1430869-ref.html
 == 1432541.html 1432541-ref.html
 pref(layout.css.moz-document.url-prefix-hack.enabled,true) == 1446470.html 1035091-ref.html
 pref(layout.css.moz-document.url-prefix-hack.enabled,false) == 1446470-2.html 1035091-ref.html
 test-pref(layout.css.prefixes.gradients,false) == 1451874.html 1451874-ref.html
 == 1456111-1.html about:blank
 test-pref(layout.css.contain.enabled,false) == 1466008.html 1466008-ref.html
 fuzzy(0-1,0-625) == 1466638-1.html 1466638-1-ref.html
--- a/layout/reftests/svg/as-image/reftest.list
+++ b/layout/reftests/svg/as-image/reftest.list
@@ -39,17 +39,17 @@ include zoom/reftest.list
 
 == canvas-drawImage-scale-1a.html lime100x100-ref.html
 == canvas-drawImage-scale-1b.html lime100x100-ref.html
 == canvas-drawImage-scale-1c.html lime100x100-ref.html
 
 fuzzy(0-1,0-2) fuzzy-if(skiaContent,0-1,0-529) == canvas-drawImage-scale-2a.html canvas-drawImage-scale-2-ref.html
 fuzzy(0-1,0-2) fuzzy-if(skiaContent,0-1,0-529) == canvas-drawImage-scale-2b.html canvas-drawImage-scale-2-ref.html
 
-fuzzy-if(winWidget&&!d2d,0-1,0-10000) fuzzy-if(azureSkia,0-1,0-10000) fuzzy-if(Android,0-1,0-10000) fuzzy-if(webrender&&winWidget,1-1,10000-10000) == canvas-drawImage-alpha-1.html canvas-drawImage-alpha-1-ref.html
+fuzzy-if(winWidget&&!d2d,0-1,0-10000) fuzzy-if(azureSkia,0-1,0-10000) fuzzy-if(Android,0-1,0-10000) == canvas-drawImage-alpha-1.html canvas-drawImage-alpha-1-ref.html
 #Same as scale-2a but with globalAlpha:
 fuzzy(0-1,0-2) fuzzy-if(/^Windows\x20NT\x2010\.0/.test(http.oscpu),0-1,0-7018) fuzzy-if(azureSkia,0-1,0-40000) fuzzy-if(webrender&&winWidget,1-1,32679-39743) == canvas-drawImage-alpha-2.html canvas-drawImage-alpha-2-ref.html
 
 == canvas-drawImage-slice-1a.html lime100x100-ref.html
 == canvas-drawImage-slice-1b.html lime100x100-ref.html
 
 == canvas-drawImage-origin-clean-1.html lime100x100-ref.html
 == canvas-drawImage-transform-restored.html canvas-drawImage-transform-restored-ref.html
--- a/layout/reftests/table-background/reftest.list
+++ b/layout/reftests/table-background/reftest.list
@@ -36,18 +36,18 @@ asserts-if(gtkWidget,0-6) != backgr_bord
 == border-separate-table.html border-separate-table-ref.html
 == border-collapse-table-cell.html border-collapse-table-cell-ref.html
 == border-collapse-table-column-group.html border-collapse-table-column-group-ref.html
 == border-collapse-table-column.html border-collapse-table-column-ref.html
 == border-collapse-table-row-group.html border-collapse-table-row-group-ref.html
 == border-collapse-table-row.html border-collapse-table-row-ref.html
 == border-collapse-table.html border-collapse-table-ref.html
 fuzzy-if(d2d,0-1,0-1083) fuzzy-if(skiaContent,0-1,0-2200) == border-collapse-opacity-table-cell.html border-collapse-opacity-table-cell-ref.html
-fuzzy-if(d2d,0-1,0-33174) fuzzy-if(skiaContent,0-1,0-16587) == border-collapse-opacity-table-column-group.html border-collapse-opacity-table-column-group-ref.html
-fuzzy-if(d2d,0-1,0-11058) fuzzy-if(skiaContent,0-1,0-5529) == border-collapse-opacity-table-column.html border-collapse-opacity-table-column-ref.html
+fuzzy-if(d2d,0-1,0-33174) fuzzy-if(skiaContent,0-1,0-16863) == border-collapse-opacity-table-column-group.html border-collapse-opacity-table-column-group-ref.html
+fuzzy-if(d2d,0-1,0-11058) fuzzy-if(skiaContent,0-1,0-5625) == border-collapse-opacity-table-column.html border-collapse-opacity-table-column-ref.html
 fuzzy-if(d2d,0-1,0-24606) fuzzy-if(skiaContent,0-1,0-17000) == border-collapse-opacity-table-row-group.html border-collapse-opacity-table-row-group-ref.html
 fuzzy-if(d2d,0-1,0-11000) fuzzy-if(skiaContent,0-1,0-11000) == border-collapse-opacity-table-row.html border-collapse-opacity-table-row-ref.html
 fuzzy-if(d2d||skiaContent,0-1,0-60000) == border-collapse-opacity-table.html border-collapse-opacity-table-ref.html
 fuzzy-if(d2d,0-1,0-2478) fuzzy-if(skiaContent,0-1,0-2500) == border-separate-opacity-table-cell.html border-separate-opacity-table-cell-ref.html
 fuzzy-if(d2d,0-1,0-38000) fuzzy-if(webrender&&gtkWidget,1-1,4078-4078) == border-separate-opacity-table-column-group.html border-separate-opacity-table-column-group-ref.html
 fuzzy-if(d2d,0-1,0-13000) fuzzy-if(webrender&&gtkWidget,1-1,1329-1329) == border-separate-opacity-table-column.html border-separate-opacity-table-column-ref.html
 fuzzy-if(d2d,0-1,0-37170) fuzzy-if(skiaContent,0-1,0-38000) == border-separate-opacity-table-row-group.html border-separate-opacity-table-row-group-ref.html
 fuzzy-if(d2d,0-1,0-12390) fuzzy-if(skiaContent,0-1,0-13000) == border-separate-opacity-table-row.html border-separate-opacity-table-row-ref.html
new file mode 100644
--- /dev/null
+++ b/layout/style/crashtests/1509989.html
@@ -0,0 +1,11 @@
+<script>
+function go() {
+  window.getSelection().getRangeAt(0).insertNode(a);
+}
+</script>
+<body onload=go()>
+<dl>
+<dd id="a">
+<video>
+</dd>
+<input type="number" autofocus="">
--- a/layout/style/crashtests/crashtests.list
+++ b/layout/style/crashtests/crashtests.list
@@ -290,8 +290,9 @@ load 1457288.html
 load 1457985.html
 load 1468640.html
 load 1469076.html
 load 1475003.html
 load 1479681.html
 load 1488817.html
 load 1490012.html
 load 1502893.html
+load 1509989.html
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/README_MOZILLA
@@ -0,0 +1,24 @@
+This directory contains build files for dav1d. The actual library
+source is in $TOPSRCDIR/third_party/dav1d/
+
+Any patches or additional configuration to be applied to the
+upstream source should be kept here in the media/libdav1d
+directory.
+
+To update the library source and build config files, execute
+
+  ./mach vendor dav1d
+
+To update to a specific upstream git tag or commit, use
+
+  ./mach vendor dav1d -r <commit>
+
+The upstream git repository is https://code.videolan.org/videolan/dav1d
+
+To update to a fork, use
+
+  ./mach vendor dav1d --repo <repository url> [-r <commit>]
+
+The last update was pulled from https://code.videolan.org/videolan/dav1d
+
+The git commit ID used was 46e2a2d0cc451e1d6bb929f80088f8a7b8940dd0 (2018-10-25T16:51:31.000Z).
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/aarch64/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 1
+
+#define ARCH_ARM 0
+
+#define ARCH_X86 0
+
+#define ARCH_X86_32 0
+
+#define ARCH_X86_64 0
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_POSIX_MEMALIGN 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/arm/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 0
+
+#define ARCH_ARM 1
+
+#define ARCH_X86 0
+
+#define ARCH_X86_32 0
+
+#define ARCH_X86_64 0
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_MEMALIGN 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/x86_32/android/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 0
+
+#define ARCH_ARM 0
+
+#define ARCH_X86 1
+
+#define ARCH_X86_32 1
+
+#define ARCH_X86_64 0
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_MEMALIGN 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/x86_32/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 0
+
+#define ARCH_ARM 0
+
+#define ARCH_X86 1
+
+#define ARCH_X86_32 1
+
+#define ARCH_X86_64 0
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_POSIX_MEMALIGN 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/x86_64/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 0
+
+#define ARCH_ARM 0
+
+#define ARCH_X86 1
+
+#define ARCH_X86_32 0
+
+#define ARCH_X86_64 1
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_POSIX_MEMALIGN 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/config/x86_64/win_asan/config.h
@@ -0,0 +1,29 @@
+/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+#define ARCH_AARCH64 0
+
+#define ARCH_ARM 0
+
+#define ARCH_X86 1
+
+#define ARCH_X86_32 0
+
+#define ARCH_X86_64 1
+
+#define CONFIG_10BPC 1
+
+#define CONFIG_8BPC 1
+
+#define HAVE_ASM 0
+
+#define HAVE_ALIGNED_MALLOC 1
+
+#define HAVE_UNISTD_H 1
+
+#define STACK_ALIGNMENT 32
+
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/dav1d.rc
@@ -0,0 +1,30 @@
+#define VERSION_NUMBER 0,0,1,0
+#define VERSION_NUMBER_STR "0.0.1.0"
+
+#include <windows.h>
+
+1 VERSIONINFO
+FILETYPE VFT_DLL
+FILEOS VOS_NT_WINDOWS32
+PRODUCTVERSION VERSION_NUMBER
+FILEVERSION VERSION_NUMBER
+BEGIN
+  BLOCK "StringFileInfo"
+  BEGIN
+    BLOCK "040904E4"
+    BEGIN
+      VALUE "CompanyName", "VideoLAN"
+      VALUE "ProductName", "dav1d"
+      VALUE "ProductVersion", VERSION_NUMBER_STR
+      VALUE "FileVersion", VERSION_NUMBER_STR
+      VALUE "FileDescription", "dav1d AV1 decoder"
+      VALUE "InternalName", "dav1d"
+      VALUE "OriginalFilename", "libdav1d.dll"
+      VALUE "LegalCopyright", "Copyright \251 2018 VideoLAN and dav1d Authors"
+    END
+  END
+  BLOCK "VarFileInfo"
+  BEGIN
+    VALUE "Translation", 0x409, 1252
+  END
+END
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/generate_source.py
@@ -0,0 +1,11 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def add_define(out_file, in_path, expr, num = ''):
+    out_file.write('#define %s %s\n' % (expr, num))
+    with open(in_path, 'r') as fh:
+        out_file.write(fh.read())
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/moz.build
@@ -0,0 +1,181 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+Library('dav1d')
+
+LOCAL_INCLUDES += [
+    '/third_party/dav1d',
+    '/third_party/dav1d/include',
+    '/third_party/dav1d/include/dav1d',
+]
+
+if CONFIG['CPU_ARCH'] == 'x86':
+    if CONFIG['OS_TARGET'] == 'Android':
+        LOCAL_INCLUDES += ['/media/libdav1d/config/x86_32/android/']
+        EXPORTS.dav1d += ['config/x86_32/android/config.h']
+    else:
+        LOCAL_INCLUDES += ['/media/libdav1d/config/x86_32/']
+        EXPORTS.dav1d += ['config/x86_32/config.h']
+elif CONFIG['CPU_ARCH'] == 'x86_64':
+    if CONFIG['MOZ_ASAN'] and CONFIG['OS_TARGET'] == 'WINNT':
+        LOCAL_INCLUDES += ['/media/libdav1d/config/x86_64/win_asan/']
+        EXPORTS.dav1d += ['config/x86_64/win_asan/config.h']
+    else:
+        LOCAL_INCLUDES += ['/media/libdav1d/config/x86_64/']
+        EXPORTS.dav1d += ['config/x86_64/config.h']
+elif CONFIG['CPU_ARCH'] == 'arm':
+    LOCAL_INCLUDES += ['/media/libdav1d/config/arm/']
+    EXPORTS.dav1d += ['config/arm/config.h']
+elif CONFIG['CPU_ARCH'] == 'aarch64':
+    LOCAL_INCLUDES += ['/media/libdav1d/config/aarch64/']
+    EXPORTS.dav1d += ['config/aarch64/config.h']
+
+EXPORTS.dav1d += [
+    'version.h',
+]
+
+# entrypoint source files
+SOURCES += [
+    '../../third_party/dav1d/src/lib.c',
+    '../../third_party/dav1d/src/thread_task.c',
+]
+
+# common sources
+SOURCES += [
+    '../../third_party/dav1d/src/cdf.c',
+    '../../third_party/dav1d/src/cpu.c',
+    '../../third_party/dav1d/src/data.c',
+    '../../third_party/dav1d/src/decode.c',
+    '../../third_party/dav1d/src/dequant_tables.c',
+    '../../third_party/dav1d/src/getbits.c',
+    '../../third_party/dav1d/src/intra_edge.c',
+    '../../third_party/dav1d/src/lf_mask.c',
+    '../../third_party/dav1d/src/msac.c',
+    '../../third_party/dav1d/src/obu.c',
+    '../../third_party/dav1d/src/picture.c',
+    '../../third_party/dav1d/src/qm.c',
+    '../../third_party/dav1d/src/ref.c',
+    '../../third_party/dav1d/src/ref_mvs.c',
+    '../../third_party/dav1d/src/scan.c',
+    '../../third_party/dav1d/src/tables.c',
+    '../../third_party/dav1d/src/warpmv.c',
+    '../../third_party/dav1d/src/wedge.c',
+]
+
+# includes src
+EXPORTS.dav1d.src += [
+    '../../third_party/dav1d/src/cdf.h',
+    '../../third_party/dav1d/src/cpu.h',
+    '../../third_party/dav1d/src/ctx.h',
+    '../../third_party/dav1d/src/data.h',
+    '../../third_party/dav1d/src/decode.h',
+    '../../third_party/dav1d/src/dequant_tables.h',
+    '../../third_party/dav1d/src/getbits.h',
+    '../../third_party/dav1d/src/intra_edge.h',
+    '../../third_party/dav1d/src/lf_mask.h',
+    '../../third_party/dav1d/src/msac.h',
+    '../../third_party/dav1d/src/obu.h',
+    '../../third_party/dav1d/src/picture.h',
+    '../../third_party/dav1d/src/qm.h',
+    '../../third_party/dav1d/src/ref.h',
+    '../../third_party/dav1d/src/ref_mvs.h',
+    '../../third_party/dav1d/src/scan.h',
+    '../../third_party/dav1d/src/tables.h',
+    '../../third_party/dav1d/src/thread.h',
+    '../../third_party/dav1d/src/warpmv.h',
+    '../../third_party/dav1d/src/wedge.h',
+]
+
+# common BITDEPTH 8, 10
+relative_path = '../../third_party/dav1d/src/'
+bitdepth_basenames = [
+    'cdef_apply_tmpl.c',
+    'cdef_tmpl.c',
+    'ipred_prepare_tmpl.c',
+    'ipred_tmpl.c',
+    'itx_tmpl.c',
+    'lf_apply_tmpl.c',
+    'loopfilter_tmpl.c',
+    'looprestoration_tmpl.c',
+    'lr_apply_tmpl.c',
+    'mc_tmpl.c',
+    'recon_tmpl.c'
+]
+
+GENERATED_FILES += [
+    '10bd_%s' % p for p in bitdepth_basenames
+]
+
+for f in bitdepth_basenames:
+    a = GENERATED_FILES['10bd_%s' % f]
+    a.script = 'generate_source.py:add_define'
+    a.inputs = [relative_path + f]
+    a.flags = ['BITDEPTH', '10']
+
+GENERATED_FILES += [
+    '8bd_%s' % p for p in bitdepth_basenames
+]
+
+for f in bitdepth_basenames:
+    a = GENERATED_FILES['8bd_%s' % f]
+    a.script = 'generate_source.py:add_define'
+    a.inputs = [relative_path + f]
+    a.flags = ['BITDEPTH', '8']
+
+SOURCES += [
+    '!%s' % p for p in GENERATED_FILES if p.endswith('.c')
+]
+
+EXPORTS.dav1d.src += [
+    '../../third_party/dav1d/src/cdef.h',
+    '../../third_party/dav1d/src/cdef_apply.h',
+    '../../third_party/dav1d/src/ipred.h',
+    '../../third_party/dav1d/src/ipred_prepare.h',
+    '../../third_party/dav1d/src/itx.h',
+    '../../third_party/dav1d/src/lf_apply.h',
+    '../../third_party/dav1d/src/loopfilter.h',
+    '../../third_party/dav1d/src/looprestoration.h',
+    '../../third_party/dav1d/src/lr_apply.h',
+    '../../third_party/dav1d/src/mc.h',
+    '../../third_party/dav1d/src/recon.h',
+]
+
+# include/common
+EXPORTS.dav1d += [
+    '../../third_party/dav1d/include/common/attributes.h',
+    '../../third_party/dav1d/include/common/bitdepth.h',
+    '../../third_party/dav1d/include/common/dump.h',
+    '../../third_party/dav1d/include/common/intops.h',
+    '../../third_party/dav1d/include/common/mem.h',
+    '../../third_party/dav1d/include/common/validate.h',
+]
+
+# include/dav1d
+EXPORTS.dav1d += [
+   '../../third_party/dav1d/include/dav1d/common.h',
+   '../../third_party/dav1d/include/dav1d/data.h',
+   '../../third_party/dav1d/include/dav1d/dav1d.h',
+   '../../third_party/dav1d/include/dav1d/picture.h',
+]
+
+if CONFIG['OS_TARGET'] == 'WINNT':
+    RCFILE = 'dav1d.rc'
+    SOURCES += [
+        '../../third_party/dav1d/src/win32/thread.c'
+    ]
+
+if CONFIG['CC_TYPE'] == 'msvc':
+    LOCAL_INCLUDES += ['../../third_party/dav1d/include/compat/msvc/']
+    EXPORTS.dav1d += ['../../third_party/dav1d/include/compat/msvc/stdatomic.h']
+
+if CONFIG['CC_TYPE'] == 'gcc':
+    LOCAL_INCLUDES += ['../../third_party/dav1d/include/compat/gcc/']
+    EXPORTS.dav1d += ['../../third_party/dav1d/include/compat/gcc/stdatomic.h']
+
+FINAL_LIBRARY = 'gkmedias'
+
+# We allow warnings for third-party code that can be updated from upstream.
+AllowCompilerWarnings()
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/moz.yaml
@@ -0,0 +1,29 @@
+# Version of this schema
+schema: 1
+
+bugzilla:
+  # Bugzilla product and component for this directory and subdirectories
+  product: Core
+  component: Audio/Video: Playback
+
+# Document the source of externally hosted code
+origin:
+
+  # Short name of the package/library
+  name: dav1d
+
+  description: dav1d, a fast AV1 decoder
+
+  # Full URL for the package's homepage/etc
+  # Usually different from repository url
+  url: https://code.videolan.org/videolan/dav1d
+
+  # Human-readable identifier for this version/release
+  # Generally "version NNN", "tag SSS", "bookmark SSS"
+  release: commit 46e2a2d0cc451e1d6bb929f80088f8a7b8940dd0
+
+  # The package's license, where possible using the mnemonic from
+  # https://spdx.org/licenses/
+  # Multiple licenses can be specified (as a YAML list)
+  # A "LICENSE" file must exist containing the full license text
+  license: BSD-2-Clause
new file mode 100644
--- /dev/null
+++ b/media/libdav1d/version.h
@@ -0,0 +1,2 @@
+/* auto-generated, do not edit */
+#define DAV1D_VERSION "0.0.1"
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -105,63 +105,33 @@ bool WebrtcAudioConduit::SetLocalSSRCs(c
     return true;
   }
   // Update the value of the ssrcs in the config structure.
   mRecvStreamConfig.rtp.local_ssrc = aSSRCs[0];
   mSendStreamConfig.rtp.ssrc = aSSRCs[0];
 
   mRecvChannelProxy->SetLocalSSRC(aSSRCs[0]);
 
-  bool wasTransmitting = mEngineTransmitting;
-  if (StopTransmitting() != kMediaConduitNoError) {
-    return false;
-  }
-
-  if (wasTransmitting) {
-    if (StartTransmitting() != kMediaConduitNoError) {
-      return false;
-    }
-  }
-  return true;
+  return RecreateSendStreamIfExists();
 }
 
 std::vector<unsigned int> WebrtcAudioConduit::GetLocalSSRCs() {
   MutexAutoLock lock(mMutex);
   return std::vector<unsigned int>(1, mRecvStreamConfig.rtp.local_ssrc);
 }
 
 bool WebrtcAudioConduit::SetRemoteSSRC(unsigned int ssrc) {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (mRecvStreamConfig.rtp.remote_ssrc == ssrc) {
     return true;
   }
   mRecvStreamConfig.rtp.remote_ssrc = ssrc;
 
-  bool wasReceiving = mEngineReceiving;
-  if (StopReceiving() != kMediaConduitNoError) {
-    return false;
-  }
-
-  {
-    MutexAutoLock lock(mMutex);
-    // On the next StartReceiving() or ConfigureRecvMediaCodec, force
-    // building a new RecvStream to switch SSRCs.
-    DeleteRecvStream();
-    if (!wasReceiving) {
-      return true;
-    }
-    MediaConduitErrorCode rval = CreateRecvStream();
-    if (rval != kMediaConduitNoError) {
-      CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
-      return false;
-    }
-  }
-  return (StartReceiving() == kMediaConduitNoError);
-
+  return RecreateRecvStreamIfExists();
 }
 
 bool WebrtcAudioConduit::GetRemoteSSRC(unsigned int* ssrc) {
   {
     MutexAutoLock lock(mMutex);
     if (!mRecvStream) {
       return false;
     }
@@ -192,17 +162,17 @@ void WebrtcAudioConduit::SetSyncGroup(co
   MOZ_ASSERT(NS_IsMainThread());
   mRecvStreamConfig.sync_group = group;
 }
 
 bool WebrtcAudioConduit::GetSendPacketTypeStats(
   webrtc::RtcpPacketTypeCounter* aPacketCounts)
 {
   ASSERT_ON_THREAD(mStsThread);
-  if (!mEngineTransmitting) {
+  if (!mSendStream) {
     return false;
   }
   return mSendChannelProxy->GetRTCPPacketTypeCounters(*aPacketCounts);
 }
 
 bool WebrtcAudioConduit::GetRecvPacketTypeStats(
   webrtc::RtcpPacketTypeCounter* aPacketCounts)
 {
@@ -529,58 +499,86 @@ WebrtcAudioConduit::ConfigureRecvMediaCo
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcAudioConduit::SetLocalRTPExtensions(LocalDirection aDirection,
                                           const RtpExtList& extensions)
 {
+  MOZ_ASSERT(NS_IsMainThread());
   CSFLogDebug(LOGTAG, "%s direction: %s", __FUNCTION__,
               MediaSessionConduit::LocalDirectionToString(aDirection).c_str());
-  MOZ_ASSERT(NS_IsMainThread());
 
   bool isSend = aDirection == LocalDirection::kSend;
-  if (isSend) {
-    mSendStreamConfig.rtp.extensions.clear();
-  } else {
-    mRecvStreamConfig.rtp.extensions.clear();
-  }
+  RtpExtList filteredExtensions;
+
+  int ssrcAudioLevelId = -1;
+  int csrcAudioLevelId = -1;
+  int midId = -1;
+
   for(const auto& extension : extensions) {
     // ssrc-audio-level RTP header extension
     if (extension.uri == webrtc::RtpExtension::kAudioLevelUri) {
-      if (isSend) {
-        mSendStreamConfig.rtp.extensions.push_back(
-          webrtc::RtpExtension(extension.uri, extension.id));
-        mSendChannelProxy->SetSendAudioLevelIndicationStatus(true, extension.id);
-      } else {
-        mRecvStreamConfig.rtp.extensions.push_back(
-          webrtc::RtpExtension(extension.uri, extension.id));
-        mRecvChannelProxy->SetReceiveAudioLevelIndicationStatus(true, extension.id);
-      }
+      ssrcAudioLevelId = extension.id;
+      filteredExtensions.push_back(webrtc::RtpExtension(
+            extension.uri, extension.id));
     }
+
     // csrc-audio-level RTP header extension
     if (extension.uri == webrtc::RtpExtension::kCsrcAudioLevelUri) {
       if (isSend) {
         CSFLogError(LOGTAG, "%s SetSendAudioLevelIndicationStatus Failed"
                     " can not send CSRC audio levels.", __FUNCTION__);
         return kMediaConduitMalformedArgument;
       }
-      mRecvStreamConfig.rtp.extensions.push_back(
-        webrtc::RtpExtension(extension.uri, extension.id));
-      mRecvChannelProxy->SetReceiveCsrcAudioLevelIndicationStatus(true, extension.id);
+      csrcAudioLevelId = extension.id;
+      filteredExtensions.push_back(webrtc::RtpExtension(
+            extension.uri, extension.id));
     }
+
     // MID RTP header extension
-    if (aDirection == LocalDirection::kSend &&
-        extension.uri == webrtc::RtpExtension::kMIdUri) {
-        mSendStreamConfig.rtp.extensions.push_back(
-          webrtc::RtpExtension(extension.uri, extension.id));
-        mSendChannelProxy->SetSendMIDStatus(true, extension.id);
+    if (extension.uri == webrtc::RtpExtension::kMIdUri) {
+      if (!isSend) {
+        // TODO(bug 1405495): Why do we error out for csrc-audio-level, but not
+        // mid?
+        continue;
+      }
+      midId = extension.id;
+      filteredExtensions.push_back(webrtc::RtpExtension(
+            extension.uri, extension.id));
     }
   }
+
+  auto& currentExtensions = isSend ?
+    mSendStreamConfig.rtp.extensions : mRecvStreamConfig.rtp.extensions;
+  if (filteredExtensions == currentExtensions) {
+    return kMediaConduitNoError;
+  }
+
+  currentExtensions = filteredExtensions;
+
+  if (isSend) {
+    mSendChannelProxy->SetSendAudioLevelIndicationStatus(
+        ssrcAudioLevelId != -1, ssrcAudioLevelId);
+    mSendChannelProxy->SetSendMIDStatus(midId != -1, midId);
+  } else {
+    mRecvChannelProxy->SetReceiveAudioLevelIndicationStatus(
+        ssrcAudioLevelId != -1, ssrcAudioLevelId);
+    mRecvChannelProxy->SetReceiveCsrcAudioLevelIndicationStatus(
+        csrcAudioLevelId != -1, csrcAudioLevelId);
+    // TODO(bug 1405495): recv mid support
+  }
+
+  if (isSend) {
+    RecreateSendStreamIfExists();
+  } else {
+    RecreateRecvStreamIfExists();
+  }
+
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
                                    int32_t lengthSamples, // per channel
                                    int32_t samplingFreqHz,
                                    uint32_t channels,
@@ -833,47 +831,54 @@ WebrtcAudioConduit::StartReceiving()
 MediaConduitErrorCode
 WebrtcAudioConduit::StopTransmittingLocked()
 {
   MOZ_ASSERT(NS_IsMainThread());
   mMutex.AssertCurrentThreadOwns();
 
   if(mEngineTransmitting)
   {
+    MOZ_ASSERT(mSendStream);
     CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
-    DeleteSendStream();
+    mSendStream->Stop();
     mEngineTransmitting = false;
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcAudioConduit::StartTransmittingLocked()
 {
   MOZ_ASSERT(NS_IsMainThread());
   mMutex.AssertCurrentThreadOwns();
 
-  if (!mEngineTransmitting) {
+  if (mEngineTransmitting) {
+    return kMediaConduitNoError;
+  }
+
+  if (!mSendStream) {
     CreateSendStream();
-    mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
-    mSendStream->Start();
-    mEngineTransmitting = true;
   }
 
+  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
+  mSendStream->Start();
+  mEngineTransmitting = true;
+
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcAudioConduit::StopReceivingLocked()
 {
   MOZ_ASSERT(NS_IsMainThread());
   mMutex.AssertCurrentThreadOwns();
 
-  if(mEngineReceiving && mRecvStream) {
+  if(mEngineReceiving) {
+    MOZ_ASSERT(mRecvStream);
     mRecvStream->Stop();
     mEngineReceiving = false;
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
@@ -881,17 +886,20 @@ WebrtcAudioConduit::StartReceivingLocked
 {
   MOZ_ASSERT(NS_IsMainThread());
   mMutex.AssertCurrentThreadOwns();
 
   if (mEngineReceiving) {
     return kMediaConduitNoError;
   }
 
-  CreateRecvStream();
+  if (!mRecvStream) {
+    CreateRecvStream();
+  }
+
   mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
   mRecvStream->Start();
   mEngineReceiving = true;
 
   return kMediaConduitNoError;
 }
 
 //WebRTC::RTP Callback Implementation
@@ -1058,16 +1066,17 @@ WebrtcAudioConduit::CreateSendStream()
 }
 
 void
 WebrtcAudioConduit::DeleteRecvStream()
 {
   mMutex.AssertCurrentThreadOwns();
   if (mRecvStream) {
     mRecvStream->Stop();
+    mEngineReceiving = false;
     mCall->Call()->DestroyAudioReceiveStream(mRecvStream);
     mRecvStream = nullptr;
   }
   // Destroying the stream unregisters the transport
   mRecvChannelProxy->RegisterTransport(nullptr);
 }
 
 MediaConduitErrorCode
@@ -1079,16 +1088,56 @@ WebrtcAudioConduit::CreateRecvStream()
   mRecvStream = mCall->Call()->CreateAudioReceiveStream(mRecvStreamConfig);
   if (!mRecvStream) {
     return kMediaConduitUnknownError;
   }
 
   return kMediaConduitNoError;
 }
 
+bool
+WebrtcAudioConduit::RecreateSendStreamIfExists()
+{
+  MutexAutoLock lock(mMutex);
+  bool wasTransmitting = mEngineTransmitting;
+  bool hadSendStream = mSendStream;
+  DeleteSendStream();
+
+  if (wasTransmitting) {
+    if (StartTransmittingLocked() != kMediaConduitNoError) {
+      return false;
+    }
+  } else if (hadSendStream) {
+    if (CreateSendStream() != kMediaConduitNoError) {
+      return false;
+    }
+  }
+  return true;
+}
+
+bool
+WebrtcAudioConduit::RecreateRecvStreamIfExists()
+{
+  MutexAutoLock lock(mMutex);
+  bool wasReceiving = mEngineReceiving;
+  bool hadRecvStream = mRecvStream;
+  DeleteRecvStream();
+
+  if (wasReceiving) {
+    if (StartReceivingLocked() != kMediaConduitNoError) {
+      return false;
+    }
+  } else if (hadRecvStream) {
+    if (CreateRecvStream() != kMediaConduitNoError) {
+      return false;
+    }
+  }
+  return true;
+}
+
 MediaConduitErrorCode
 WebrtcAudioConduit::DeliverPacket(const void *data, int len)
 {
   // Bug 1499796 - we need to get passed the time the packet was received
   webrtc::PacketReceiver::DeliveryStatus status =
     mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
                                              static_cast<const uint8_t*>(data),
                                              len, webrtc::PacketTime());
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -292,16 +292,19 @@ private:
   //Checks the codec to be applied
   MediaConduitErrorCode ValidateCodecConfig(const AudioCodecConfig* codecInfo, bool send);
 
   MediaConduitErrorCode CreateSendStream();
   void DeleteSendStream();
   MediaConduitErrorCode CreateRecvStream();
   void DeleteRecvStream();
 
+  bool RecreateSendStreamIfExists();
+  bool RecreateRecvStreamIfExists();
+
   MediaConduitErrorCode CreateChannels();
   virtual void DeleteChannels();
 
   UniquePtr<webrtc::FakeAudioDeviceModule> mFakeAudioDevice;
   mozilla::ReentrantMonitor mTransportMonitor;
   RefPtr<TransportInterface> mTransmitterTransport;
   RefPtr<TransportInterface> mReceiverTransport;
   ScopedCustomReleasePtr<webrtc::VoEBase> mPtrVoEBase;
--- a/mobile/android/components/geckoview/GeckoViewHistory.cpp
+++ b/mobile/android/components/geckoview/GeckoViewHistory.cpp
@@ -6,17 +6,16 @@
 
 #include "JavaBuiltins.h"
 #include "jsapi.h"
 #include "nsIURI.h"
 #include "nsXULAppAPI.h"
 
 #include "mozilla/ClearOnShutdown.h"
 
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/Element.h"
 #include "mozilla/dom/Link.h"
 #include "mozilla/dom/TabChild.h"
 
 #include "mozilla/ipc/URIUtils.h"
 
 #include "mozilla/widget/EventDispatcher.h"
--- a/mobile/android/geckoview/src/androidTest/java/org/mozilla/geckoview/test/TestRunnerActivity.java
+++ b/mobile/android/geckoview/src/androidTest/java/org/mozilla/geckoview/test/TestRunnerActivity.java
@@ -21,16 +21,18 @@ import android.graphics.SurfaceTexture;
 import android.net.Uri;
 import android.os.Bundle;
 import android.view.Surface;
 
 import java.util.HashMap;
 
 public class TestRunnerActivity extends Activity {
     private static final String LOGTAG = "TestRunnerActivity";
+    private static final String ERROR_PAGE =
+            "<!DOCTYPE html><head><title>Error</title></head><body>Error!</body></html>";
 
     static GeckoRuntime sRuntime;
 
     private GeckoSession mSession;
     private GeckoView mView;
     private boolean mKillProcessOnDestroy;
 
     private HashMap<GeckoSession, GeckoDisplay> mDisplays = new HashMap<>();
@@ -60,17 +62,18 @@ public class TestRunnerActivity extends 
 
         @Override
         public GeckoResult<GeckoSession> onNewSession(GeckoSession session, String uri) {
             return GeckoResult.fromValue(createBackgroundSession(session.getSettings()));
         }
 
         @Override
         public GeckoResult<String> onLoadError(GeckoSession session, String uri, WebRequestError error) {
-            return null;
+
+            return GeckoResult.fromValue("data:text/html," + ERROR_PAGE);
         }
     };
 
     private GeckoSession.ContentDelegate mContentDelegate = new GeckoSession.ContentDelegate() {
         @Override
         public void onTitleChange(GeckoSession session, String title) {
 
         }
@@ -164,22 +167,19 @@ public class TestRunnerActivity extends 
                 runtimeSettingsBuilder.extras(extras);
             }
 
             runtimeSettingsBuilder
                     .consoleOutput(true)
                     .crashHandler(TestCrashHandler.class);
 
             sRuntime = GeckoRuntime.create(this, runtimeSettingsBuilder.build());
-            sRuntime.setDelegate(new GeckoRuntime.Delegate() {
-                @Override
-                public void onShutdown() {
-                    mKillProcessOnDestroy = true;
-                    finish();
-                }
+            sRuntime.setDelegate(() -> {
+                mKillProcessOnDestroy = true;
+                finish();
             });
         }
 
         mSession = createSession();
         mSession.open(sRuntime);
 
         // If we were passed a URI in the Intent, open it
         final Uri uri = intent.getData();
--- a/modules/libmar/src/mar.h
+++ b/modules/libmar/src/mar.h
@@ -22,143 +22,171 @@ extern "C" {
 */
 #define MAX_SIGNATURES 8
 #ifdef __cplusplus
 static_assert(MAX_SIGNATURES <= 9, "too many signatures");
 #else
 MOZ_STATIC_ASSERT(MAX_SIGNATURES <= 9, "too many signatures");
 #endif
 
-struct ProductInformationBlock {
-  const char *MARChannelID;
-  const char *productVersion;
+struct ProductInformationBlock
+{
+  const char* MARChannelID;
+  const char* productVersion;
 };
 
 /**
  * The MAR item data structure.
  */
-typedef struct MarItem_ {
-  struct MarItem_ *next;  /* private field */
+typedef struct MarItem_
+{
+  struct MarItem_* next;  /* private field */
   uint32_t offset;        /* offset into archive */
   uint32_t length;        /* length of data in bytes */
   uint32_t flags;         /* contains file mode bits */
   char name[1];           /* file path */
 } MarItem;
 
+/**
+ * File offset and length for tracking access of byte indexes
+ */
+typedef struct SeenIndex_
+{
+  struct SeenIndex_* next; /* private field */
+  uint32_t offset;         /* offset into archive */
+  uint32_t length;         /* length of the data in bytes */
+} SeenIndex;
+
 #define TABLESIZE 256
 
-struct MarFile_ {
-  FILE *fp;
-  MarItem *item_table[TABLESIZE];
-  int item_table_is_valid;
+/**
+ * Mozilla ARchive (MAR) file data structure
+ */
+struct MarFile_
+{
+  FILE* fp;                       /* file pointer to the archive */
+  MarItem* item_table[TABLESIZE]; /* hash table of files in the archive */
+  SeenIndex* index_list;          /* file indexes processed */
+  int item_table_is_valid;        /* header and index validation flag */
 };
 
 typedef struct MarFile_ MarFile;
 
 /**
  * Signature of callback function passed to mar_enum_items.
  * @param mar       The MAR file being visited.
  * @param item      The MAR item being visited.
  * @param data      The data parameter passed by the caller of mar_enum_items.
  * @return          A non-zero value to stop enumerating.
  */
-typedef int (* MarItemCallback)(MarFile *mar, const MarItem *item, void *data);
+typedef int (*MarItemCallback)(MarFile* mar, const MarItem* item, void* data);
 
 /**
  * Open a MAR file for reading.
  * @param path      Specifies the path to the MAR file to open.  This path must
  *                  be compatible with fopen.
  * @return          NULL if an error occurs.
  */
-MarFile *mar_open(const char *path);
+MarFile*
+mar_open(const char* path);
 
 #ifdef XP_WIN
 MarFile *mar_wopen(const wchar_t *path);
 #endif
 
 /**
  * Close a MAR file that was opened using mar_open.
  * @param mar       The MarFile object to close.
  */
-void mar_close(MarFile *mar);
+void
+mar_close(MarFile* mar);
 
 /**
  * Find an item in the MAR file by name.
  * @param mar       The MarFile object to query.
  * @param item      The name of the item to query.
  * @return          A const reference to a MAR item or NULL if not found.
  */
-const MarItem *mar_find_item(MarFile *mar, const char *item);
+const MarItem*
+mar_find_item(MarFile* mar, const char* item);
 
 /**
  * Enumerate all MAR items via callback function.
  * @param mar       The MAR file to enumerate.
  * @param callback  The function to call for each MAR item.
  * @param data      A caller specified value that is passed along to the
  *                  callback function.
  * @return          0 if the enumeration ran to completion.  Otherwise, any
  *                  non-zero return value from the callback is returned.
  */
-int mar_enum_items(MarFile *mar, MarItemCallback callback, void *data);
+int
+mar_enum_items(MarFile* mar, MarItemCallback callback, void* data);
 
 /**
  * Read from MAR item at given offset up to bufsize bytes.
  * @param mar       The MAR file to read.
  * @param item      The MAR item to read.
  * @param offset    The byte offset relative to the start of the item.
  * @param buf       A pointer to a buffer to copy the data into.
  * @param bufsize   The length of the buffer to copy the data into.
  * @return          The number of bytes written or a negative value if an
  *                  error occurs.
  */
-int mar_read(MarFile *mar, const MarItem *item, int offset, uint8_t *buf,
-             int bufsize);
+int
+mar_read(MarFile* mar,
+         const MarItem* item,
+         int offset,
+         uint8_t* buf,
+         int bufsize);
 
 /**
  * Create a MAR file from a set of files.
  * @param dest      The path to the file to create.  This path must be
  *                  compatible with fopen.
  * @param numfiles  The number of files to store in the archive.
  * @param files     The list of null-terminated file paths.  Each file
  *                  path must be compatible with fopen.
  * @param infoBlock The information to store in the product information block.
  * @return          A non-zero value if an error occurs.
  */
-int mar_create(const char *dest,
-               int numfiles,
-               char **files,
-               struct ProductInformationBlock *infoBlock);
+int
+mar_create(const char* dest,
+           int numfiles,
+           char** files,
+           struct ProductInformationBlock* infoBlock);
 
 /**
  * Extract a MAR file to the current working directory.
  * @param path      The path to the MAR file to extract.  This path must be
  *                  compatible with fopen.
  * @return          A non-zero value if an error occurs.
  */
-int mar_extract(const char *path);
+int
+mar_extract(const char* path);
 
 #define MAR_MAX_CERT_SIZE (16*1024) // Way larger than necessary
 
 /* Read the entire file (not a MAR file) into a newly-allocated buffer.
  * This function does not write to stderr. Instead, the caller should
  * write whatever error messages it sees fit. The caller must free the returned
  * buffer using free().
  *
  * @param filePath The path to the file that should be read.
  * @param maxSize  The maximum valid file size.
  * @param data     On success, *data will point to a newly-allocated buffer
  *                 with the file's contents in it.
  * @param size     On success, *size will be the size of the created buffer.
  *
  * @return 0 on success, -1 on error
  */
-int mar_read_entire_file(const char * filePath,
-                         uint32_t maxSize,
-                         /*out*/ const uint8_t * *data,
-                         /*out*/ uint32_t *size);
+int
+mar_read_entire_file(const char* filePath,
+                     uint32_t maxSize,
+                     /*out*/ const uint8_t** data,
+                     /*out*/ uint32_t* size);
 
 /**
  * Verifies a MAR file by verifying each signature with the corresponding
  * certificate. That is, the first signature will be verified using the first
  * certificate given, the second signature will be verified using the second
  * certificate given, etc. The signature count must exactly match the number of
  * certificates given, and all signature verifications must succeed.
  * We do not check that the certificate was issued by any trusted authority.
@@ -170,30 +198,31 @@ int mar_read_entire_file(const char * fi
  *                       file data.
  * @param certDataSizes  Pointer to the first element in an array for size of
  *                       the cert data.
  * @param certCount      The number of elements in certData and certDataSizes
  * @return 0 on success
  *         a negative number if there was an error
  *         a positive number if the signature does not verify
  */
-int mar_verify_signatures(MarFile *mar,
-                          const uint8_t * const *certData,
-                          const uint32_t *certDataSizes,
-                          uint32_t certCount);
+int
+mar_verify_signatures(MarFile* mar,
+                      const uint8_t* const* certData,
+                      const uint32_t* certDataSizes,
+                      uint32_t certCount);
 
 /**
  * Reads the product info block from the MAR file's additional block section.
  * The caller is responsible for freeing the fields in infoBlock
  * if the return is successful.
  *
  * @param infoBlock Out parameter for where to store the result to
  * @return 0 on success, -1 on failure
 */
 int
-mar_read_product_info_block(MarFile *mar,
-                            struct ProductInformationBlock *infoBlock);
+mar_read_product_info_block(MarFile* mar,
+                            struct ProductInformationBlock* infoBlock);
 
 #ifdef __cplusplus
 }
 #endif
 
 #endif  /* MAR_H__ */
--- a/modules/libmar/src/mar_read.c
+++ b/modules/libmar/src/mar_read.c
@@ -14,22 +14,30 @@
 
 /* This block must be at most 104 bytes.
    MAR channel name < 64 bytes, and product version < 32 bytes + 3 NULL
    terminator bytes. We only check for 96 though because we remove 8
    bytes above from the additionalBlockSize: We subtract
    sizeof(additionalBlockSize) and sizeof(additionalBlockID) */
 #define MAXADDITIONALBLOCKSIZE 96
 
-static uint32_t mar_hash_name(const char *name) {
+static uint32_t
+mar_hash_name(const char* name)
+{
   return CityHash64(name, strlen(name)) % TABLESIZE;
 }
 
-static int mar_insert_item(MarFile *mar, const char *name, int namelen,
-                           uint32_t offset, uint32_t length, uint32_t flags) {
+static int
+mar_insert_item(MarFile* mar,
+                const char* name,
+                int namelen,
+                uint32_t offset,
+                uint32_t length,
+                uint32_t flags)
+{
   MarItem *item, *root;
   uint32_t hash;
 
   item = (MarItem *) malloc(sizeof(MarItem) + namelen);
   if (!item)
     return -1;
   item->next = NULL;
   item->offset = offset;
@@ -46,17 +54,19 @@ static int mar_insert_item(MarFile *mar,
     /* append item */
     while (root->next)
       root = root->next;
     root->next = item;
   }
   return 0;
 }
 
-static int mar_consume_index(MarFile *mar, char **buf, const char *buf_end) {
+static int
+mar_consume_index(MarFile* mar, char** buf, const char* buf_end)
+{
   /*
    * Each item has the following structure:
    *   uint32_t offset      (network byte order)
    *   uint32_t length      (network byte order)
    *   uint32_t flags       (network byte order)
    *   char     name[N]     (where N >= 1)
    *   char     null_byte;
    */
@@ -98,17 +108,19 @@ static int mar_consume_index(MarFile *ma
   /* consume null byte */
   if (*buf == buf_end)
     return -1;
   ++(*buf);
 
   return mar_insert_item(mar, name, namelen, offset, length, flags);
 }
 
-static int mar_read_index(MarFile *mar) {
+static int
+mar_read_index(MarFile* mar)
+{
   char id[MAR_ID_SIZE], *buf, *bufptr, *bufend;
   uint32_t offset_to_index, size_of_index;
 
   /* verify MAR ID */
   fseek(mar->fp, 0, SEEK_SET);
   if (fread(id, MAR_ID_SIZE, 1, mar->fp) != 1)
     return -1;
   if (memcmp(id, MAR_ID, MAR_ID_SIZE) != 0)
@@ -136,79 +148,151 @@ static int mar_read_index(MarFile *mar) 
   bufend = buf + size_of_index;
   while (bufptr < bufend && mar_consume_index(mar, &bufptr, bufend) == 0);
 
   free(buf);
   return (bufptr == bufend) ? 0 : -1;
 }
 
 /**
+ * Adds an offset and length to the MarFile's index_list
+ * @param mar     The MarFile that owns this offset length pair
+ * @param offset  The byte offset in the archive to be marked as processed
+ * @param length  The length corresponding to this byte offset
+ * @return int    1 on success, 0 if offset has been previously processed
+ *                -1 if unable to allocate space for the SeenIndexes
+ */
+static int
+mar_insert_offset(MarFile* mar, uint32_t offset, uint32_t length)
+{
+  /* Ignore files with no length */
+  if (length == 0) {
+    return 1;
+  }
+
+  SeenIndex* index = (SeenIndex*)malloc(sizeof(SeenIndex));
+  if (!index) {
+    return -1;
+  }
+  index->next = NULL;
+  index->offset = offset;
+  index->length = length;
+  uint32_t index_end = index->offset + index->length - 1;
+
+  /* If this is our first index store it at the front */
+  if (mar->index_list == NULL) {
+    mar->index_list = index;
+    return 1;
+  }
+
+  /* Search for matching indexes in the list of those previously visited */
+  SeenIndex* previous;
+  SeenIndex* current = mar->index_list;
+  while (current != NULL) {
+    uint32_t current_end = current->offset + current->length - 1;
+
+    /* If index has collided with the front or end of current or if current has
+       collided with the front or end of index return false */
+    if ((index->offset >= current->offset && index->offset <= current_end) ||
+        (index_end >= current->offset && index_end <= current_end) ||
+        (current->offset >= index->offset && current->offset <= index_end) ||
+        (current_end >= index->offset && current_end <= index_end)) {
+      free(index);
+      return 0;
+    }
+
+    /* else move to the next in the list */
+    previous = current;
+    current = current->next;
+  }
+
+  /* These indexes are valid, track them */
+  previous->next = index;
+  return 1;
+}
+
+/**
  * Internal shared code for mar_open and mar_wopen.
  * On failure, will fclose(fp).
  */
-static MarFile *mar_fpopen(FILE *fp)
+static MarFile*
+mar_fpopen(FILE* fp)
 {
-  MarFile *mar;
+  MarFile* mar;
 
-  mar = (MarFile *) malloc(sizeof(*mar));
+  mar = (MarFile*)malloc(sizeof(*mar));
   if (!mar) {
     fclose(fp);
     return NULL;
   }
 
   mar->fp = fp;
   mar->item_table_is_valid = 0;
   memset(mar->item_table, 0, sizeof(mar->item_table));
+  mar->index_list = NULL;
 
   return mar;
 }
 
-MarFile *mar_open(const char *path) {
+MarFile*
+mar_open(const char* path)
+{
   FILE *fp;
 
   fp = fopen(path, "rb");
   if (!fp) {
     fprintf(stderr, "ERROR: could not open file in mar_open()\n");
     perror(path);
     return NULL;
   }
 
   return mar_fpopen(fp);
 }
 
 #ifdef XP_WIN
-MarFile *mar_wopen(const wchar_t *path) {
+MarFile*
+mar_wopen(const wchar_t* path)
+{
   FILE *fp;
 
   _wfopen_s(&fp, path, L"rb");
   if (!fp) {
     fprintf(stderr, "ERROR: could not open file in mar_wopen()\n");
     _wperror(path);
     return NULL;
   }
 
   return mar_fpopen(fp);
 }
 #endif
 
-void mar_close(MarFile *mar) {
-  MarItem *item;
+void
+mar_close(MarFile* mar)
+{
+  MarItem* item;
+  SeenIndex* index;
   int i;
 
   fclose(mar->fp);
 
   for (i = 0; i < TABLESIZE; ++i) {
     item = mar->item_table[i];
     while (item) {
-      MarItem *temp = item;
+      MarItem* temp = item;
       item = item->next;
       free(temp);
     }
   }
 
+  while (mar->index_list != NULL) {
+    index = mar->index_list;
+    mar->index_list = index->next;
+    free(index);
+  }
+
   free(mar);
 }
 
 /**
  * Determines the MAR file information.
  *
  * @param fp                     An opened MAR file in read mode.
  * @param hasSignatureBlock      Optional out parameter specifying if the MAR
@@ -220,22 +304,23 @@ void mar_close(MarFile *mar) {
  * @param offsetAdditionalBlocks Optional out parameter for the offset to the
  *                               first additional block. Value is only valid if
  *                               hasAdditionalBlocks is not equal to 0.
  * @param numAdditionalBlocks    Optional out parameter for the number of
  *                               additional blocks.  Value is only valid if
  *                               hasAdditionalBlocks is not equal to 0.
  * @return 0 on success and non-zero on failure.
  */
-int get_mar_file_info_fp(FILE *fp,
-                         int *hasSignatureBlock,
-                         uint32_t *numSignatures,
-                         int *hasAdditionalBlocks,
-                         uint32_t *offsetAdditionalBlocks,
-                         uint32_t *numAdditionalBlocks)
+int
+get_mar_file_info_fp(FILE* fp,
+                     int* hasSignatureBlock,
+                     uint32_t* numSignatures,
+                     int* hasAdditionalBlocks,
+                     uint32_t* offsetAdditionalBlocks,
+                     uint32_t* numAdditionalBlocks)
 {
   uint32_t offsetToIndex, offsetToContent, signatureCount, signatureLen, i;
 
   /* One of hasSignatureBlock or hasAdditionalBlocks must be non NULL */
   if (!hasSignatureBlock && !hasAdditionalBlocks) {
     return -1;
   }
 
@@ -358,18 +443,17 @@ int get_mar_file_info_fp(FILE *fp,
  * Reads the product info block from the MAR file's additional block section.
  * The caller is responsible for freeing the fields in infoBlock
  * if the return is successful.
  *
  * @param infoBlock Out parameter for where to store the result to
  * @return 0 on success, -1 on failure
 */
 int
-read_product_info_block(char *path,
-                        struct ProductInformationBlock *infoBlock)
+read_product_info_block(char* path, struct ProductInformationBlock* infoBlock)
 {
   int rv;
   MarFile mar;
   mar.fp = fopen(path, "rb");
   if (!mar.fp) {
     fprintf(stderr, "ERROR: could not open file in read_product_info_block()\n");
     perror(path);
     return -1;
@@ -383,18 +467,18 @@ read_product_info_block(char *path,
  * Reads the product info block from the MAR file's additional block section.
  * The caller is responsible for freeing the fields in infoBlock
  * if the return is successful.
  *
  * @param infoBlock Out parameter for where to store the result to
  * @return 0 on success, -1 on failure
 */
 int
-mar_read_product_info_block(MarFile *mar,
-                            struct ProductInformationBlock *infoBlock)
+mar_read_product_info_block(MarFile* mar,
+                            struct ProductInformationBlock* infoBlock)
 {
   uint32_t offsetAdditionalBlocks, numAdditionalBlocks,
     additionalBlockSize, additionalBlockID;
   int hasAdditionalBlocks;
 
   /* The buffer size is 97 bytes because the MAR channel name < 64 bytes, and
      product version < 32 bytes + 3 NULL terminator bytes. */
   char buf[MAXADDITIONALBLOCKSIZE + 1] = { '\0' };
@@ -471,64 +555,87 @@ mar_read_product_info_block(MarFile *mar
       }
     }
   }
 
   /* If we had a product info block we would have already returned */
   return -1;
 }
 
-const MarItem *mar_find_item(MarFile *mar, const char *name) {
+const MarItem*
+mar_find_item(MarFile* mar, const char* name)
+{
   uint32_t hash;
-  const MarItem *item;
+  const MarItem* item;
 
   if (!mar->item_table_is_valid) {
     if (mar_read_index(mar)) {
       return NULL;
     } else {
       mar->item_table_is_valid = 1;
     }
   }
 
   hash = mar_hash_name(name);
 
   item = mar->item_table[hash];
-  while (item && strcmp(item->name, name) != 0)
+  while (item && strcmp(item->name, name) != 0) {
     item = item->next;
+  }
 
-  return item;
+  /* If this is the first time seeing this item's indexes, return it */
+  if (mar_insert_offset(mar, item->offset, item->length) == 1) {
+    return item;
+  } else {
+    fprintf(stderr, "ERROR: file content collision in mar_find_item()\n");
+    return NULL;
+  }
 }
 
-int mar_enum_items(MarFile *mar, MarItemCallback callback, void *closure) {
-  MarItem *item;
-  int i;
+int
+mar_enum_items(MarFile* mar, MarItemCallback callback, void* closure)
+{
+  MarItem* item;
+  int i, rv;
 
   if (!mar->item_table_is_valid) {
     if (mar_read_index(mar)) {
       return -1;
     } else {
       mar->item_table_is_valid = 1;
     }
   }
 
   for (i = 0; i < TABLESIZE; ++i) {
     item = mar->item_table[i];
     while (item) {
-      int rv = callback(mar, item, closure);
-      if (rv)
-        return rv;
+      /* if this is the first time seeing this item's indexes, process it */
+      if (mar_insert_offset(mar, item->offset, item->length) == 1) {
+        rv = callback(mar, item, closure);
+        if (rv) {
+          return rv;
+        }
+      } else {
+        fprintf(stderr, "ERROR: file content collision in mar_enum_items()\n");
+        return 1;
+      }
       item = item->next;
     }
   }
 
   return 0;
 }
 
-int mar_read(MarFile *mar, const MarItem *item, int offset, uint8_t *buf,
-             int bufsize) {
+int
+mar_read(MarFile* mar,
+         const MarItem* item,
+         int offset,
+         uint8_t* buf,
+         int bufsize)
+{
   int nr;
 
   if (offset == (int) item->length)
     return 0;
   if (offset > (int) item->length)
     return -1;
 
   nr = item->length - offset;
@@ -554,22 +661,23 @@ int mar_read(MarFile *mar, const MarItem
  * @param offsetAdditionalBlocks Optional out parameter for the offset to the
  *                               first additional block. Value is only valid if
  *                               hasAdditionalBlocks is not equal to 0.
  * @param numAdditionalBlocks    Optional out parameter for the number of
  *                               additional blocks.  Value is only valid if
  *                               has_additional_blocks is not equal to 0.
  * @return 0 on success and non-zero on failure.
  */
-int get_mar_file_info(const char *path,
-                      int *hasSignatureBlock,
-                      uint32_t *numSignatures,
-                      int *hasAdditionalBlocks,
-                      uint32_t *offsetAdditionalBlocks,
-                      uint32_t *numAdditionalBlocks)
+int
+get_mar_file_info(const char* path,
+                  int* hasSignatureBlock,
+                  uint32_t* numSignatures,
+                  int* hasAdditionalBlocks,
+                  uint32_t* offsetAdditionalBlocks,
+                  uint32_t* numAdditionalBlocks)
 {
   int rv;
   FILE *fp = fopen(path, "rb");
   if (!fp) {
     fprintf(stderr, "ERROR: could not open file in get_mar_file_info()\n");
     perror(path);
     return -1;
   }
new file mode 100644
index 0000000000000000000000000000000000000000..41d4f78482848d279230c35db097f81bcf21bb44
GIT binary patch
literal 210
zc%1Wf3^HV3U|7Kb0hgdOBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9Z2QZ*oM1Y!}OvzDY~=A_0a
Mf=p;Y5lUhJ0KAtN(*OVf
new file mode 100644
index 0000000000000000000000000000000000000000..582af58b59b13ceebc261dd0fe142471361ce241
GIT binary patch
literal 210
zc%1Wf3^HV3U|7Kb0hgdOBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9Z2QZ*oM0AdlKvzDY~=A_0a
Mf=p;c5lUhJ0K9}4(*OVf
new file mode 100644
index 0000000000000000000000000000000000000000..d51b23587d0b49f6282bcaf5f77697f1e5bad3ea
GIT binary patch
literal 210
zc%1Wf3^HV3U|7Kb0hgdOBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9Z2QZ*oM0b&uLvzDY~=A_0a
Of=p-tVhy-Z5(5Ce@EGa<
new file mode 100644
index 0000000000000000000000000000000000000000..98b33ce9e5a895acf90a5ebf3ef9a68f7672f3bc
GIT binary patch
literal 210
zc%1Wf3^HV3U|7Kb0hgdOBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9Z2QZ*oM0AdZGvzDY~=A_0a
Of=p-xViCAd5(5Cf@EGR+
new file mode 100644
index 0000000000000000000000000000000000000000..7e0a3dd72458417ce0f7f38b44abd868a902883d
GIT binary patch
literal 249
zc%1Wf3^HV3VA#U|0Y9NMBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9X{1qTIX1r-HVg*XK@1uX>)
m1x*E@c1s{`0AdlKQ<tP==A_0af=r!)B9sIYYDE!BW&i*pyC2{H
new file mode 100644
index 0000000000000000000000000000000000000000..a10d3eb53b30e247f9e4fae42a40f6b2c2c7b230
GIT binary patch
literal 249
zc%1Wf3^HV3VA#U|0Y9NMBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9X{1qTIX1r-HVg*XK@1uX>)
m1x*E@c1s{`0AdlKQ<tP==A_0af=r!)B9sIYT8<)=%m4r++aLk}
new file mode 100644
index 0000000000000000000000000000000000000000..bfbb9ba8535739329d560a6d84b3ddbe5aef2ed9
GIT binary patch
literal 249
zc%1Wf3^HV3VA#U|0Y9NMBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9X{1qTIX1r-HVg*XK@1uX>)
m1x*E@c1s{`0AdlKQ<tP==A_0af=r!^B9sIYT7e>z%m4r+$RGj$
new file mode 100644
index 0000000000000000000000000000000000000000..1326d1afd8d56e0311930d6e93b529ac93765a02
GIT binary patch
literal 210
zc%1Wf3^HV3U|7Kb0hgdOBM@hRXa`@%pm=8wM?XJTpLkCf2L>}!J%dC;25JFA1tSGx
z1rr5R1v3S61q%gB1p|ddg(QV!g%pKUg*1h9g$#vEg)9Z2QZ*oM0AdlKvzDY~=A_0a
KGGGWLF#rI(bQsbA
--- a/modules/libmar/tests/unit/head_libmar.js
+++ b/modules/libmar/tests/unit/head_libmar.js
@@ -1,28 +1,28 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
-'use strict';
+"use strict";
 
 const BIN_SUFFIX = mozinfo.bin_suffix;
 const tempDir = do_get_tempdir();
 
 /**
  * Compares binary data of 2 arrays and throws if they aren't the same.
  * Throws on mismatch, does nothing on match.
  *
  * @param arr1 The first array to compare
  * @param arr2 The second array to compare
  */
 function compareBinaryData(arr1, arr2) {
   Assert.equal(arr1.length, arr2.length);
   for (let i = 0; i < arr1.length; i++) {
     if (arr1[i] != arr2[i]) {
-      throw "Data differs at index " + i + 
+      throw "Data differs at index " + i +
             ", arr1: " + arr1[i] + ", arr2: " + arr2[i];
     }
   }
 }
 
 /**
  * Reads a file's data and returns it
  *
@@ -108,17 +108,17 @@ function createMAR(outMAR, dataDir, file
     f.permissions = 0o664;
   }
 
   // Setup the command line arguments to create the MAR.
   let args = ["-C", dataDir.path, "-H", "\@MAR_CHANNEL_ID\@",
               "-V", "13.0a1", "-c", outMAR.path];
   args = args.concat(files);
 
-  info('Running: ' + signmarBin.path + " " + args.join(" "));
+  info("Running: " + signmarBin.path + " " + args.join(" "));
   process.init(signmarBin);
   process.run(true, args, args.length);
 
   // Verify signmar returned 0 for success.
   Assert.equal(process.exitValue, 0);
 
   // Verify the out MAR file actually exists.
   Assert.ok(outMAR.exists());
@@ -135,18 +135,17 @@ function extractMAR(mar, dataDir) {
   let process = Cc["@mozilla.org/process/util;1"].
                 createInstance(Ci.nsIProcess);
   let signmarBin = do_get_file("signmar" + BIN_SUFFIX);
 
   // Make sure the signmar binary exists and is an executable.
   Assert.ok(signmarBin.exists());
   Assert.ok(signmarBin.isExecutable());
 
-  // Setup the command line arguments to create the MAR.
+  // Setup the command line arguments to extract the MAR.
   let args = ["-C", dataDir.path, "-x", mar.path];
 
-  info('Running: ' + signmarBin.path + " " + args.join(" "));
+  info("Running: " + signmarBin.path + " " + args.join(" "));
   process.init(signmarBin);
   process.run(true, args, args.length);
 
-  // Verify signmar returned 0 for success.
-  Assert.equal(process.exitValue, 0);
+  return process.exitValue;
 }
--- a/modules/libmar/tests/unit/test_extract.js
+++ b/modules/libmar/tests/unit/test_extract.js
@@ -4,17 +4,17 @@
 function run_test() {
 
   /**
    * Extracts a MAR and makes sure each file matches the reference files.
    *
    * @param marFileName The name of the MAR file to extract
    * @param files       The files that the extracted MAR should contain
    */
-  function run_one_test(marFileName, files) {
+  function extract_and_compare(marFileName, files) {
     // Get the MAR file that we will be extracting
     let mar = do_get_file("data/" + marFileName);
 
     // Get the path that we will extract to
     let outDir = tempDir.clone();
     outDir.append("out");
     Assert.ok(!outDir.exists());
     outDir.create(Ci.nsIFile.DIRECTORY_TYPE, 0o777);
@@ -26,62 +26,114 @@ function run_test() {
       let outFile = outDir.clone();
       outFile.append(files[i]);
       Assert.ok(!outFile.exists());
 
       outFiles.push(outFile);
       refFiles.push(do_get_file("data/" + files[i]));
     }
 
-    // Extract the MAR contents into the ./out dir.
-    extractMAR(mar, outDir);
+    // Extract the MAR contents to ./out dir and verify 0 for success.
+    Assert.equal(extractMAR(mar, outDir), 0);
 
     // Compare to make sure the extracted files are the same.
     for (let i = 0; i < files.length; i++) {
       Assert.ok(outFiles[i].exists());
       let refFileData = getBinaryFileData(refFiles[i]);
       let outFileData = getBinaryFileData(outFiles[i]);
       compareBinaryData(refFileData, outFileData);
     }
   }
 
+  /**
+   * Attempts to extract a MAR and expects a failure
+   *
+   * @param marFileName The name of the MAR file to extract
+   */
+  function extract_and_fail(marFileName) {
+    // Get the MAR file that we will be extracting
+    let mar = do_get_file("data/" + marFileName);
+
+    // Get the path that we will extract to
+    let outDir = tempDir.clone();
+    outDir.append("out");
+    Assert.ok(!outDir.exists());
+    outDir.create(Ci.nsIFile.DIRECTORY_TYPE, 0o777);
+
+    // Extract the MAR contents to ./out dir and verify -1 (255 from the
+    // nsIprocess) for failure
+    Assert.equal(extractMAR(mar, outDir), 1);
+  }
+
   // Define the unit tests to run.
   let tests = {
     // Test extracting a MAR file with a 0 byte file.
     test_zero_sized: function _test_zero_sized() {
-      return run_one_test("0_sized.mar", ["0_sized_file"]);
+      return extract_and_compare("0_sized.mar", ["0_sized_file"]);
     },
     // Test extracting a MAR file with a 1 byte file.
     test_one_byte: function _test_one_byte() {
-      return run_one_test("1_byte.mar", ["1_byte_file"]);
+      return extract_and_compare("1_byte.mar", ["1_byte_file"]);
     },
     // Test extracting a MAR file with binary data.
     test_binary_data: function _test_binary_data() {
-      return run_one_test("binary_data.mar", ["binary_data_file"]);
+      return extract_and_compare("binary_data.mar", ["binary_data_file"]);
     },
     // Test extracting a MAR without a product information block (PIB) which
     // contains binary data.
     test_no_pib: function _test_no_pib() {
-      return run_one_test("no_pib.mar", ["binary_data_file"]);
+      return extract_and_compare("no_pib.mar", ["binary_data_file"]);
     },
     // Test extracting a MAR without a product information block (PIB) that is
     // signed and which contains binary data.
     test_no_pib_signed: function _test_no_pib_signed() {
-      return run_one_test("signed_no_pib.mar", ["binary_data_file"]);
+      return extract_and_compare("signed_no_pib.mar", ["binary_data_file"]);
     },
     // Test extracting a MAR with a product information block (PIB) that is
     // signed and which contains binary data.
     test_pib_signed: function _test_pib_signed() {
-      return run_one_test("signed_pib.mar", ["binary_data_file"]);
+      return extract_and_compare("signed_pib.mar", ["binary_data_file"]);
     },
     // Test extracting a MAR file with multiple files inside of it.
     test_multiple_file: function _test_multiple_file() {
-      return run_one_test("multiple_file.mar",
+      return extract_and_compare("multiple_file.mar",
                           ["0_sized_file", "1_byte_file", "binary_data_file"]);
     },
+    // Test collision detection where file A + B are the same offset
+    test_collision_same_offset: function test_collision_same_offset() {
+      return extract_and_fail("manipulated_same_offset.mar");
+    },
+    // Test collision detection where file A's indexes are a subset of file B's
+    test_collision_is_contained: function test_collision_is_contained() {
+      return extract_and_fail("manipulated_is_container.mar");
+    },
+    // Test collision detection where file B's indexes are a subset of file A's
+    test_collision_contained_by: function test_collision_contained_by() {
+      return extract_and_fail("manipulated_is_contained.mar");
+    },
+    // Test collision detection where file A ends in file B's indexes
+    test_collision_a_onto_b: function test_collision_a_onto_b() {
+      return extract_and_fail("manipulated_frontend_collision.mar");
+    },
+    // Test collision detection where file B ends in file A's indexes
+    test_collsion_b_onto_a: function test_collsion_b_onto_a()  {
+      return extract_and_fail("manipulated_backend_collision.mar");
+    },
+    // Test collision detection where file C shares indexes with both file A & B
+    test_collision_multiple: function test_collision_multiple() {
+      return extract_and_fail("manipulated_multiple_collision.mar");
+    },
+    // Test collision detection where A is the last file in the list
+    test_collision_last: function test_collision_multiple_last() {
+      return extract_and_fail("manipulated_multiple_collision_last.mar");
+    },
+    // Test collision detection where A is the first file in the list
+    test_collision_first: function test_collision_multiple_first() {
+      return extract_and_fail("manipulated_multiple_collision_first.mar");
+    },
     // Between each test make sure the out directory and its subfiles do
     // not exist.
     cleanup_per_test: function _cleanup_per_test() {
       let outDir = tempDir.clone();
       outDir.append("out");
       if (outDir.exists()) {
         outDir.remove(true);
       }
--- a/modules/libpref/init/StaticPrefList.h
+++ b/modules/libpref/init/StaticPrefList.h
@@ -1392,16 +1392,21 @@ VARCACHE_PREF(
 )
 
 // AV1
 VARCACHE_PREF(
   "media.av1.enabled",
    MediaAv1Enabled,
   RelaxedAtomicBool, false
 )
+VARCACHE_PREF(
+  "media.av1.use-dav1d",
+   MediaAv1UseDav1d,
+  RelaxedAtomicBool, false
+)
 
 VARCACHE_PREF(
   "media.flac.enabled",
    MediaFlacEnabled,
   bool, true
 )
 
 // Hls
@@ -1706,23 +1711,16 @@ VARCACHE_PREF(
 // How many recent block/unblock actions per origins we remember in the
 // Content Blocking log for each top-level window.
 VARCACHE_PREF(
   "browser.contentblocking.originlog.length",
    browser_contentblocking_originlog_length,
   uint32_t, 32
 )
 
-// Whether FastBlock has been enabled.
-VARCACHE_PREF(
-  "browser.fastblock.enabled",
-  browser_fastblock_enabled,
-  bool, false
-)
-
 // Anti-tracking permission expiration
 VARCACHE_PREF(
   "privacy.restrict3rdpartystorage.expiration",
    privacy_restrict3rdpartystorage_expiration,
   uint32_t, 2592000 // 30 days (in seconds)
 )
 
 // Anti-tracking user-interaction expiration
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -652,16 +652,18 @@ pref("media.audioipc.pool_size", 2);
 // 64 * 4 kB stack per pool thread.
 pref("media.audioipc.stack_size", 262144);
 #else
 pref("media.cubeb.sandbox", false);
 #endif
 
 #ifdef MOZ_AV1
 pref("media.av1.enabled", false);
+// Use libdav1d instead of libaom
+pref("media.av1.use-dav1d", false);
 #endif
 
 pref("media.webaudio.audiocontextoptions-samplerate.enabled", true);
 
 // setSinkId expected to be unconditionally enabled in 63. Till then the
 // implementation will remain hidden behind this pref (Bug 1152401, Bug 934425).
 pref("media.setsinkid.enabled", false);
 
@@ -5555,17 +5557,17 @@ pref("urlclassifier.trackingAnnotationTa
 #else
 pref("urlclassifier.trackingAnnotationTable", "test-track-simple,base-track-digest256");
 #endif
 pref("urlclassifier.trackingAnnotationWhitelistTable", "test-trackwhite-simple,mozstd-trackwhite-digest256");
 pref("urlclassifier.trackingTable", "test-track-simple,base-track-digest256");
 pref("urlclassifier.trackingWhitelistTable", "test-trackwhite-simple,mozstd-trackwhite-digest256");
 
 // These tables will never trigger a gethash call.
-pref("urlclassifier.disallow_completions", "test-malware-simple,test-harmful-simple,test-phish-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple,goog-downloadwhite-digest256,base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,goog-passwordwhite-proto,ads-track-digest256,social-track-digest256,analytics-track-digest256,fastblock1-track-digest256,fastblock1-trackwhite-digest256,fastblock2-track-digest256,fastblock2-trackwhite-digest256,fastblock3-track-digest256");
+pref("urlclassifier.disallow_completions", "test-malware-simple,test-harmful-simple,test-phish-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple,goog-downloadwhite-digest256,base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,goog-passwordwhite-proto,ads-track-digest256,social-track-digest256,analytics-track-digest256");
 
 // Number of random entries to send with a gethash request
 pref("urlclassifier.gethashnoise", 4);
 
 // Gethash timeout for Safe Browsing
 pref("urlclassifier.gethash.timeout_ms", 5000);
 // Update server response timeout for Safe Browsing
 pref("urlclassifier.update.response_timeout_ms", 30000);
@@ -5627,17 +5629,17 @@ pref("browser.safebrowsing.provider.goog
 pref("browser.safebrowsing.provider.google4.advisoryName", "Google Safe Browsing");
 pref("browser.safebrowsing.provider.google4.dataSharingURL", "https://safebrowsing.googleapis.com/v4/threatHits?$ct=application/x-protobuf&key=%GOOGLE_API_KEY%&$httpMethod=POST");
 pref("browser.safebrowsing.provider.google4.dataSharing.enabled", false);
 
 pref("browser.safebrowsing.reportPhishURL", "https://%LOCALE%.phish-report.mozilla.com/?hl=%LOCALE%&url=");
 
 // Mozilla Safe Browsing provider (for tracking protection and plugin blocking)
 pref("browser.safebrowsing.provider.mozilla.pver", "2.2");
-pref("browser.safebrowsing.provider.mozilla.lists", "base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,ads-track-digest256,social-track-digest256,analytics-track-digest256,fastblock1-track-digest256,fastblock1-trackwhite-digest256,fastblock2-track-digest256,fastblock2-trackwhite-digest256,fastblock3-track-digest256");
+pref("browser.safebrowsing.provider.mozilla.lists", "base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,ads-track-digest256,social-track-digest256,analytics-track-digest256");
 pref("browser.safebrowsing.provider.mozilla.updateURL", "https://shavar.services.mozilla.com/downloads?client=SAFEBROWSING_ID&appver=%MAJOR_VERSION%&pver=2.2");
 pref("browser.safebrowsing.provider.mozilla.gethashURL", "https://shavar.services.mozilla.com/gethash?client=SAFEBROWSING_ID&appver=%MAJOR_VERSION%&pver=2.2");
 // Set to a date in the past to force immediate download in new profiles.
 pref("browser.safebrowsing.provider.mozilla.nextupdatetime", "1");
 // Block lists for tracking protection. The name values will be used as the keys
 // to lookup the localized name in preferences.properties.
 pref("browser.safebrowsing.provider.mozilla.lists.base", "moz-std");
 pref("browser.safebrowsing.provider.mozilla.lists.content", "moz-full");
@@ -5967,23 +5969,16 @@ pref("layers.omtp.dump-capture", false);
 // a content to view.  This is mostly intended to prevent infinite
 // loops with faulty converters involved.
 pref("general.document_open_conversion_depth_limit", 20);
 
 // If true, touchstart and touchmove listeners on window, document,
 // documentElement and document.body are passive by default.
 pref("dom.event.default_to_passive_touch_listeners", true);
 
-// The amount of time (ms) since navigation start after which all
-// tracker connections will be cancelled.
-pref("browser.fastblock.timeout", 5000);
-// The amount of time (ms) since navigation start after which
-// we'll stop blocking tracker connections (0 = no limit).
-pref("browser.fastblock.limit", 20000);
-
 // Enable clipboard readText() and writeText() by default
 pref("dom.events.asyncClipboard", true);
 // Disable clipboard read() and write() by default
 pref("dom.events.asyncClipboard.dataTransfer", false);
 // Should only be enabled in tests
 pref("dom.events.testing.asyncClipboard", false);
 
 #ifdef NIGHTLY_BUILD
--- a/netwerk/base/LoadInfo.cpp
+++ b/netwerk/base/LoadInfo.cpp
@@ -80,23 +80,20 @@ LoadInfo::LoadInfo(nsIPrincipal* aLoadin
   , mParentOuterWindowID(0)
   , mTopOuterWindowID(0)
   , mFrameOuterWindowID(0)
   , mEnforceSecurity(false)
   , mInitialSecurityCheckDone(false)
   , mIsThirdPartyContext(false)
   , mIsDocshellReload(false)
   , mSendCSPViolationEvents(true)
-  , mTrackerBlockedReason(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all)
   , mForcePreflight(false)
   , mIsPreflight(false)
   , mLoadTriggeredFromExternal(false)
   , mServiceWorkerTaintingSynthesized(false)
-  , mIsTracker(false)
-  , mIsTrackerBlocked(false)
   , mDocumentHasUserInteracted(false)
   , mDocumentHasLoaded(false)
   , mIsFromProcessingFrameAttributes(false)
 {
   MOZ_ASSERT(mLoadingPrincipal);
   MOZ_ASSERT(mTriggeringPrincipal);
 
 #ifdef DEBUG
@@ -353,23 +350,20 @@ LoadInfo::LoadInfo(nsPIDOMWindowOuter* a
   , mParentOuterWindowID(0)
   , mTopOuterWindowID(0)
   , mFrameOuterWindowID(0)
   , mEnforceSecurity(false)
   , mInitialSecurityCheckDone(false)
   , mIsThirdPartyContext(false) // NB: TYPE_DOCUMENT implies not third-party.
   , mIsDocshellReload(false)
   , mSendCSPViolationEvents(true)
-  , mTrackerBlockedReason(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all)
   , mForcePreflight(false)
   , mIsPreflight(false)
   , mLoadTriggeredFromExternal(false)
   , mServiceWorkerTaintingSynthesized(false)
-  , mIsTracker(false)
-  , mIsTrackerBlocked(false)
   , mDocumentHasUserInteracted(false)
   , mDocumentHasLoaded(false)
   , mIsFromProcessingFrameAttributes(false)
 {
   // Top-level loads are never third-party
   // Grab the information we can out of the window.
   MOZ_ASSERT(aOuterWindow);
   MOZ_ASSERT(mTriggeringPrincipal);
@@ -455,24 +449,21 @@ LoadInfo::LoadInfo(const LoadInfo& rhs)
   , mSendCSPViolationEvents(rhs.mSendCSPViolationEvents)
   , mOriginAttributes(rhs.mOriginAttributes)
   , mRedirectChainIncludingInternalRedirects(
       rhs.mRedirectChainIncludingInternalRedirects)
   , mRedirectChain(rhs.mRedirectChain)
   , mAncestorPrincipals(rhs.mAncestorPrincipals)
   , mAncestorOuterWindowIDs(rhs.mAncestorOuterWindowIDs)
   , mCorsUnsafeHeaders(rhs.mCorsUnsafeHeaders)
-  , mTrackerBlockedReason(rhs.mTrackerBlockedReason)
   , mForcePreflight(rhs.mForcePreflight)
   , mIsPreflight(rhs.mIsPreflight)
   , mLoadTriggeredFromExternal(rhs.mLoadTriggeredFromExternal)
   // mServiceWorkerTaintingSynthesized must be handled specially during redirect
   , mServiceWorkerTaintingSynthesized(false)
-  , mIsTracker(rhs.mIsTracker)
-  , mIsTrackerBlocked(rhs.mIsTrackerBlocked)
   , mDocumentHasUserInteracted(rhs.mDocumentHasUserInteracted)
   , mDocumentHasLoaded(rhs.mDocumentHasLoaded)
   , mIsFromProcessingFrameAttributes(rhs.mIsFromProcessingFrameAttributes)
 {
 }
 
 LoadInfo::LoadInfo(nsIPrincipal* aLoadingPrincipal,
                    nsIPrincipal* aTriggeringPrincipal,
@@ -551,23 +542,20 @@ LoadInfo::LoadInfo(nsIPrincipal* aLoadin
   , mInitialSecurityCheckDone(aInitialSecurityCheckDone)
   , mIsThirdPartyContext(aIsThirdPartyContext)
   , mIsDocshellReload(aIsDocshellReload)
   , mSendCSPViolationEvents(aSendCSPViolationEvents)
   , mOriginAttributes(aOriginAttributes)
   , mAncestorPrincipals(std::move(aAncestorPrincipals))
   , mAncestorOuterWindowIDs(aAncestorOuterWindowIDs)
   , mCorsUnsafeHeaders(aCorsUnsafeHeaders)
-  , mTrackerBlockedReason(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all)
   , mForcePreflight(aForcePreflight)
   , mIsPreflight(aIsPreflight)
   , mLoadTriggeredFromExternal(aLoadTriggeredFromExternal)
   , mServiceWorkerTaintingSynthesized(aServiceWorkerTaintingSynthesized)
-  , mIsTracker(false)
-  , mIsTrackerBlocked(false)
   , mDocumentHasUserInteracted(aDocumentHasUserInteracted)
   , mDocumentHasLoaded(aDocumentHasLoaded)
   , mIsFromProcessingFrameAttributes(false)
 {
   // Only top level TYPE_DOCUMENT loads can have a null loadingPrincipal
   MOZ_ASSERT(mLoadingPrincipal || aContentPolicyType == nsIContentPolicy::TYPE_DOCUMENT);
   MOZ_ASSERT(mTriggeringPrincipal);
 
@@ -1367,61 +1355,16 @@ LoadInfo::SynthesizeServiceWorkerTaintin
   MOZ_DIAGNOSTIC_ASSERT(aTainting <= LoadTainting::Opaque);
   mTainting = aTainting;
 
   // Flag to prevent the tainting from being increased.
   mServiceWorkerTaintingSynthesized = true;
 }
 
 NS_IMETHODIMP
-LoadInfo::GetIsTracker(bool *aIsTracker)
-{
-  MOZ_ASSERT(aIsTracker);
-  *aIsTracker = mIsTracker;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-LoadInfo::SetIsTracker(bool aIsTracker)
-{
-  mIsTracker = aIsTracker;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-LoadInfo::GetIsTrackerBlocked(bool *aIsTrackerBlocked)
-{
-  MOZ_ASSERT(aIsTrackerBlocked);
-  *aIsTrackerBlocked = mIsTrackerBlocked;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-LoadInfo::SetIsTrackerBlocked(bool aIsTrackerBlocked)
-{
-  mIsTrackerBlocked = aIsTrackerBlocked;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-LoadInfo::GetTrackerBlockedReason(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED *aLabel)
-{
-  MOZ_ASSERT(aLabel);
-  *aLabel = mTrackerBlockedReason;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-LoadInfo::SetTrackerBlockedReason(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED aLabel)
-{
-  mTrackerBlockedReason = aLabel;
-  return NS_OK;
-}
-
-NS_IMETHODIMP
 LoadInfo::GetDocumentHasUserInteracted(bool *aDocumentHasUserInteracted)
 {
   MOZ_ASSERT(aDocumentHasUserInteracted);
   *aDocumentHasUserInteracted = mDocumentHasUserInteracted;
   return NS_OK;
 }
 
 NS_IMETHODIMP
--- a/netwerk/base/LoadInfo.h
+++ b/netwerk/base/LoadInfo.h
@@ -203,26 +203,20 @@ private:
   bool                             mIsDocshellReload;
   bool                             mSendCSPViolationEvents;
   OriginAttributes                 mOriginAttributes;
   RedirectHistoryArray             mRedirectChainIncludingInternalRedirects;
   RedirectHistoryArray             mRedirectChain;
   nsTArray<nsCOMPtr<nsIPrincipal>> mAncestorPrincipals;
   nsTArray<uint64_t>               mAncestorOuterWindowIDs;
   nsTArray<nsCString>              mCorsUnsafeHeaders;
-
-  mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED mTrackerBlockedReason;
-
   bool                             mForcePreflight;
   bool                             mIsPreflight;
   bool                             mLoadTriggeredFromExternal;
   bool                             mServiceWorkerTaintingSynthesized;
-
-  bool                             mIsTracker;
-  bool                             mIsTrackerBlocked;
   bool                             mDocumentHasUserInteracted;
   bool                             mDocumentHasLoaded;
 
   // Is true if this load was triggered by processing the attributes of the
   // browsing context container.
   // See nsILoadInfo.isFromProcessingFrameAttributes
   bool                             mIsFromProcessingFrameAttributes;
 };
--- a/netwerk/base/nsILoadInfo.idl
+++ b/netwerk/base/nsILoadInfo.idl
@@ -15,30 +15,28 @@ interface nsIRedirectHistoryEntry;
 interface nsIURI;
 webidl Document;
 native LoadContextRef(already_AddRefed<nsISupports>);
 %{C++
 #include "nsTArray.h"
 #include "mozilla/BasePrincipal.h"
 #include "mozilla/LoadTainting.h"
 #include "mozilla/UniquePtr.h"
-#include "mozilla/TelemetryHistogramEnums.h"
 #include "nsStringFwd.h"
 
 namespace mozilla {
 namespace dom {
 class ClientInfo;
 class ClientSource;
 class PerformanceStorage;
 class ServiceWorkerDescriptor;
 } // namespace dom
 } // namespace mozilla
 %}
 
-native AnalyticsProvider(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED);
 [ref] native nsIRedirectHistoryEntryArray(const nsTArray<nsCOMPtr<nsIRedirectHistoryEntry>>);
 native OriginAttributes(mozilla::OriginAttributes);
 [ref] native const_OriginAttributesRef(const mozilla::OriginAttributes);
 [ref] native CStringArrayRef(const nsTArray<nsCString>);
 [ref] native StringArrayRef(const nsTArray<nsString>);
 [ref] native Uint64ArrayRef(const nsTArray<uint64_t>);
 [ref] native PrincipalArrayRef(const nsTArray<nsCOMPtr<nsIPrincipal>>);
 [ref] native const_ClientInfoRef(const mozilla::dom::ClientInfo);
@@ -1039,24 +1037,16 @@ interface nsILoadInfo : nsISupports
     *
     * NOTE: This should not be used outside of service worker code! Use
     *       nsILoadInfo::MaybeIncreaseTainting() instead.
    */
   [noscript, nostdcall, notxpcom]
   void SynthesizeServiceWorkerTainting(in LoadTainting aTainting);
 
   /**
-    * These flags are used for FastBlock statistics to see if a resource is a
-    * tracker and whether it was blocked by the FastBlock mechanism or not.
-    */
-  [infallible] attribute boolean isTracker;
-  [infallible] attribute boolean isTrackerBlocked;
-  attribute AnalyticsProvider trackerBlockedReason;
-
-  /**
     * The top-level document has been user-interacted.
     */
   [infallible] attribute boolean documentHasUserInteracted;
 
   /**
     * This attribute represents whether the document to which this
     * load belongs had finished loading when the load was initiated.
     */
--- a/netwerk/base/nsIPermissionManager.idl
+++ b/netwerk/base/nsIPermissionManager.idl
@@ -121,16 +121,26 @@ interface nsIPermissionManager : nsISupp
    * enumerator of all permissions which are not set to default and which
    * belong to the matching principal of the given nsIPrincipal.
    *
    * @param principal  the URI to get all permissions for
    */
   nsISimpleEnumerator getAllForPrincipal(in nsIPrincipal principal);
 
   /**
+   * Get all custom permissions of a specific type, specified with a prefix
+   * string.  This will return an array of all permissions which are not set to
+   * default.  Also the passed type argument is either equal to or a prefix of
+   * the type of the returned permissions.
+   *
+   * @param prefix  the type prefix string
+   */
+  Array<nsIPermission> getAllWithTypePrefix(in ACString prefix);
+
+  /**
    * Add permission information for a given principal.
    * It is internally calling the other add() method using the nsIURI from the
    * principal.
    * Passing a system principal will be a no-op because they will always be
    * granted permissions.
    */
   void addFromPrincipal(in nsIPrincipal principal, in string type,
                         in uint32_t permission,
--- a/netwerk/ipc/NeckoChannelParams.ipdlh
+++ b/netwerk/ipc/NeckoChannelParams.ipdlh
@@ -17,17 +17,16 @@ include PBackgroundSharedTypes;
 
 using mozilla::OriginAttributes from "mozilla/ipc/BackgroundUtils.h";
 using struct mozilla::void_t from "ipc/IPCMessageUtils.h";
 using RequestHeaderTuples from "mozilla/net/PHttpChannelParams.h";
 using ArrayOfStringPairs from "mozilla/net/PHttpChannelParams.h";
 using struct nsHttpAtom from "nsHttp.h";
 using class mozilla::net::nsHttpResponseHead from "nsHttpResponseHead.h";
 using class mozilla::TimeStamp from "mozilla/TimeStamp.h";
-using Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED from "DocumentAnalyticsTrackerFastBlocked.h";
 
 namespace mozilla {
 namespace net {
 
 //-----------------------------------------------------------------------------
 // LoadInfo IPDL structs
 //-----------------------------------------------------------------------------
 
@@ -143,20 +142,16 @@ struct ParentLoadInfoForwarderArgs
   // The service worker may synthesize a Response with a particular
   // tainting value.
   uint32_t tainting;
 
   // We must also note that the tainting value was explicitly set
   // by the service worker.
   bool serviceWorkerTaintingSynthesized;
 
-  // Tracker information, currently used by FastBlock
-  bool isTracker;
-  bool isTrackerBlocked;
-  LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED trackerBlockedReason;
   bool documentHasUserInteracted;
   bool documentHasLoaded;
 
   // IMPORTANT: when you add new properites here you must also update
   // LoadInfoToParentLoadInfoForwarder and MergeParentLoadInfoForwarder
   // in BackgroundUtils.cpp/.h!
 };
 
--- a/netwerk/locales/en-US/necko.properties
+++ b/netwerk/locales/en-US/necko.properties
@@ -41,17 +41,16 @@ SuperfluousAuth=You are about to log in to the site ā€œ%1$Sā€ with the username ā€œ%2$Sā€, but the website does not require authentication. This may be an attempt to trick you.\n\nIs ā€œ%1$Sā€ the site you want to visit?
 AutomaticAuth=You are about to log in to the site ā€œ%1$Sā€ with the username ā€œ%2$Sā€.
 
 TrackerUriBlocked=The resource at ā€œ%1$Sā€ was blocked because content blocking is enabled.
 UnsafeUriBlocked=The resource at ā€œ%1$Sā€ was blocked by Safe Browsing.
 CookieBlockedByPermission=Request to access cookies or storage on ā€œ%1$Sā€ was blocked because of custom cookie permission.
 CookieBlockedTracker=Request to access cookie or storage on ā€œ%1$Sā€ was blocked because it came from a tracker and content blocking is enabled.
 CookieBlockedAll=Request to access cookie or storage on ā€œ%1$Sā€ was blocked because we are blocking all storage access requests.
 CookieBlockedForeign=Request to access cookie or storage on ā€œ%1$Sā€ was blocked because we are blocking all third-party storage access requests and content blocking is enabled.
-CookieBlockedSlowTrackingContent=The resource at ā€œ%1$Sā€ was blocked because content blocking is enabled and the resource was classified as a slow tracking resource.
 
 # LOCALIZATION NOTE (CookieAllowedForOriginOnTrackerByStorageAccessAPI): %3$S, %2$S and %1$S are URLs.
 CookieAllowedForOriginOnTrackerByStorageAccessAPI=Storage access granted for ā€œ%3$Sā€ opened by tracker ā€œ%2$Sā€ on ā€œ%1$Sā€.
 # LOCALIZATION NOTE (CookieAllowedForTrackerByStorageAccessAPI): %2$S and %1$S are URLs.
 CookieAllowedForTrackerByStorageAccessAPI=Storage access granted for tracker ā€œ%2$Sā€ on ā€œ%1$Sā€.
 # LOCALIZATION NOTE (CookieAllowedForOriginOnTrackerByHeuristic): %3$S, %2$S and %1$S are URLs.
 CookieAllowedForOriginOnTrackerByHeuristic=Storage access automatically granted for ā€œ%3$Sā€ opened by tracker ā€œ%2$Sā€ on ā€œ%1$Sā€.
 # LOCALIZATION NOTE (CookieAllowedForTrackerByHeuristic): %2$S and %1$S are URLs.
deleted file mode 100644
--- a/netwerk/protocol/http/DocumentAnalyticsTrackerFastBlocked.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
-/* vim:set ts=4 sw=4 sts=4 et cin: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef DocumentAnalyticsTrackerFastBlocked_h__
-#define DocumentAnalyticsTrackerFastBlocked_h__
-
-#include "ipc/IPCMessageUtils.h"
-#include "mozilla/TelemetryHistogramEnums.h"
-
-namespace IPC {
-
-  template <>
-  struct ParamTraits<mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED> :
-    public ContiguousEnumSerializer<mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED,
-                                    mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::other,
-                                    (mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED)
-                                      (uint32_t(mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::all) + 1)>
-  {};
-
-}
-
-#endif
--- a/netwerk/protocol/http/HttpBaseChannel.cpp
+++ b/netwerk/protocol/http/HttpBaseChannel.cpp
@@ -327,20 +327,16 @@ HttpBaseChannel::SetIsTrackingResource(b
 
   if (aIsThirdParty) {
     MOZ_ASSERT(!mIsFirstPartyTrackingResource);
     mIsThirdPartyTrackingResource = true;
   } else {
     MOZ_ASSERT(!mIsThirdPartyTrackingResource);
     mIsFirstPartyTrackingResource = true;
   }
-
-  if (mLoadInfo) {
-    MOZ_ALWAYS_SUCCEEDS(mLoadInfo->SetIsTracker(true));
-  }
 }
 
 nsresult
 HttpBaseChannel::Init(nsIURI *aURI,
                       uint32_t aCaps,
                       nsProxyInfo *aProxyInfo,
                       uint32_t aProxyResolveFlags,
                       nsIURI *aProxyURI,
--- a/netwerk/protocol/http/HttpChannelChild.cpp
+++ b/netwerk/protocol/http/HttpChannelChild.cpp
@@ -696,48 +696,16 @@ public:
     return NS_OK;
   }
 
   NS_DECL_ISUPPORTS
 };
 
 NS_IMPL_ISUPPORTS(SyntheticDiversionListener, nsIStreamListener);
 
-static nsresult
-GetTopDocument(nsIChannel* aChannel, nsIDocument** aResult)
-{
-  nsresult rv;
-
-  nsCOMPtr<mozIThirdPartyUtil> thirdPartyUtil = services::GetThirdPartyUtil();
-  if (NS_WARN_IF(!thirdPartyUtil)) {
-    return NS_ERROR_FAILURE;
-  }
-
-  nsCOMPtr<mozIDOMWindowProxy> win;
-  rv = thirdPartyUtil->GetTopWindowForChannel(aChannel,
-                                              getter_AddRefs(win));
-  if (NS_WARN_IF(NS_FAILED(rv))) {
-    return rv;
-  }
-
-  auto* pwin = nsPIDOMWindowOuter::From(win);
-  nsCOMPtr<nsIDocShell> docShell = pwin->GetDocShell();
-  if (!docShell) {
-    return NS_ERROR_FAILURE;
-  }
-
-  nsCOMPtr<nsIDocument> doc = docShell->GetDocument();
-  if (!doc) {
-    return NS_ERROR_FAILURE;
-  }
-
-  doc.forget(aResult);
-  return NS_OK;
-}
-
 void
 HttpChannelChild::DoOnStartRequest(nsIRequest* aRequest, nsISupports* aContext)
 {
   LOG(("HttpChannelChild::DoOnStartRequest [this=%p]\n", this));
 
   // In theory mListener should not be null, but in practice sometimes it is.
   MOZ_ASSERT(mListener);
   if (!mListener) {
@@ -745,40 +713,16 @@ HttpChannelChild::DoOnStartRequest(nsIRe
     return;
   }
 
   if (mSynthesizedResponsePump && mLoadFlags & LOAD_CALL_CONTENT_SNIFFERS) {
     mSynthesizedResponsePump->PeekStream(CallTypeSniffers,
                                          static_cast<nsIChannel*>(this));
   }
 
-  bool isTracker;
-  MOZ_ALWAYS_SUCCEEDS(mLoadInfo->GetIsTracker(&isTracker));
-  if (isTracker) {
-    bool isTrackerBlocked;
-    MOZ_ALWAYS_SUCCEEDS(mLoadInfo->GetIsTrackerBlocked(&isTrackerBlocked));
-    LOG(("HttpChannelChild::DoOnStartRequest FastBlock %d [this=%p]\n",
-         isTrackerBlocked,
-         this));
-
-    nsCOMPtr<nsIDocument> doc;
-    if (!NS_WARN_IF(NS_FAILED(GetTopDocument(this,
-                                             getter_AddRefs(doc))))) {
-      doc->IncrementTrackerCount();
-      if (isTrackerBlocked) {
-        doc->IncrementTrackerBlockedCount();
-
-        Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED label =
-          Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::other;
-        MOZ_ALWAYS_SUCCEEDS(mLoadInfo->GetTrackerBlockedReason(&label));
-        doc->NoteTrackerBlockedReason(label);
-      }
-    }
-  }
-
   nsresult rv = mListener->OnStartRequest(aRequest, aContext);
   if (NS_FAILED(rv)) {
     Cancel(rv);
     return;
   }
 
   if (mDivertingToParent) {
     mListener = nullptr;
--- a/netwerk/protocol/http/moz.build
+++ b/netwerk/protocol/http/moz.build
@@ -22,17 +22,16 @@ XPIDL_SOURCES += [
     'nsIRaceCacheWithNetwork.idl',
     'nsIRedirectProcessChooser.idl',
     'nsIWellKnownOpportunisticUtils.idl',
 ]
 
 XPIDL_MODULE = 'necko_http'
 
 EXPORTS += [
-    'DocumentAnalyticsTrackerFastBlocked.h',
     'nsCORSListenerProxy.h',
     'nsHttp.h',
     'nsHttpAtomList.h',
     'nsHttpHeaderArray.h',
     'nsHttpRequestHead.h',
     'nsHttpResponseHead.h',
 ]
 
--- a/netwerk/protocol/http/nsHttpChannel.cpp
+++ b/netwerk/protocol/http/nsHttpChannel.cpp
@@ -157,36 +157,16 @@ static uint32_t sRCWNMaxWaitMs = 500;
 
 #define WRONG_RACING_RESPONSE_SOURCE(req)                                                  \
     (mRaceCacheWithNetwork &&                                                                 \
         (((mFirstResponseSource == RESPONSE_FROM_CACHE) && (req != mCachePump)) ||         \
          ((mFirstResponseSource == RESPONSE_FROM_NETWORK) && (req != mTransactionPump))))
 
 static NS_DEFINE_CID(kStreamListenerTeeCID, NS_STREAMLISTENERTEE_CID);
 
-using mozilla::Telemetry::LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED;
-
-static const struct {
-  LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED mTelemetryLabel;
-  const char* mHostName;
-} gFastBlockAnalyticsProviders[] = {
-  // clang-format off
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::googleanalytics, "google-analytics.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::scorecardresearch, "scorecardresearch.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::hotjar, "hotjar.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::newrelic, "newrelic.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::nrdata, "nr-data.net" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::crwdcntrl, "crwdcntrl.net" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::eyeota, "eyeota.net" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::yahooanalytics, "analytics.yahoo.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::statcounter, "statcounter.com" },
-  { LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::v12group, "v12group.com" }
-  // clang-format on
-};
-
 void
 AccumulateCacheHitTelemetry(CacheDisposition hitOrMiss)
 {
     Telemetry::Accumulate(Telemetry::HTTP_CACHE_DISPOSITION_2_V2, hitOrMiss);
 }
 
 // Computes and returns a SHA1 hash of the input buffer. The input buffer
 // must be a null-terminated string.
@@ -683,174 +663,23 @@ nsHttpChannel::Connect()
         MOZ_DIAGNOSTIC_ASSERT(!mOnTailUnblock);
         mOnTailUnblock = &nsHttpChannel::ConnectOnTailUnblock;
         return NS_OK;
     }
 
     return ConnectOnTailUnblock();
 }
 
-static bool
-IsContentPolicyTypeWhitelistedForFastBlock(nsILoadInfo* aLoadInfo)
-{
-  nsContentPolicyType type = aLoadInfo ?
-                             aLoadInfo->GetExternalContentPolicyType() :
-                             nsIContentPolicy::TYPE_OTHER;
-  switch (type) {
-  // images
-  case nsIContentPolicy::TYPE_IMAGE:
-  case nsIContentPolicy::TYPE_IMAGESET:
-  case nsIContentPolicy::TYPE_INTERNAL_IMAGE:
-  case nsIContentPolicy::TYPE_INTERNAL_IMAGE_PRELOAD:
-  case nsIContentPolicy::TYPE_INTERNAL_IMAGE_FAVICON:
-  // fonts
-  case nsIContentPolicy::TYPE_FONT:
-  // stylesheets
-  case nsIContentPolicy::TYPE_STYLESHEET:
-  case nsIContentPolicy::TYPE_INTERNAL_STYLESHEET:
-  case nsIContentPolicy::TYPE_INTERNAL_STYLESHEET_PRELOAD:
-    return true;
-  default:
-    return false;
-  }
-}
-
-bool
-nsHttpChannel::CheckFastBlocked()
-{
-    LOG(("nsHttpChannel::CheckFastBlocked [this=%p, url=%s]",
-         this, mSpec.get()));
-    MOZ_ASSERT(mIsThirdPartyTrackingResource);
-
-    static bool sFastBlockInited = false;
-    static uint32_t sFastBlockTimeout = 0;
-    static uint32_t sFastBlockLimit = 0;
-
-    if (!sFastBlockInited) {
-        sFastBlockInited = true;
-        Preferences::AddUintVarCache(&sFastBlockTimeout, "browser.fastblock.timeout");
-        Preferences::AddUintVarCache(&sFastBlockLimit, "browser.fastblock.limit");
-    }
-
-    if (!StaticPrefs::browser_fastblock_enabled()) {
-        LOG(("FastBlock disabled by pref [this=%p]\n", this));
-
-        return false;
-    }
-
-    TimeStamp timestamp;
-    if (NS_FAILED(GetNavigationStartTimeStamp(&timestamp)) || !timestamp) {
-        LOG(("FastBlock passed (no timestamp) [this=%p]\n", this));
-
-        return false;
-    }
-
-    bool engageFastBlock = false;
-
-    TimeDuration duration = TimeStamp::NowLoRes() - timestamp;
-    if (IsContentPolicyTypeWhitelistedForFastBlock(mLoadInfo)) {
-        LOG(("FastBlock passed (whitelisted content type %u) (%lf) [this=%p]\n",
-             mLoadInfo ? mLoadInfo->GetExternalContentPolicyType() : nsIContentPolicy::TYPE_OTHER,
-             duration.ToMilliseconds(), this));
-    } else if (mLoadInfo && mLoadInfo->GetDocumentHasUserInteracted()) {
-        LOG(("FastBlock passed (user interaction) (%lf) [this=%p]\n",
-             duration.ToMilliseconds(), this));
-    } else if (mLoadInfo && mLoadInfo->GetDocumentHasLoaded()) {
-        LOG(("FastBlock passed (document loaded) (%lf) [this=%p]\n",
-             duration.ToMilliseconds(), this));
-    } else {
-            bool hasFastBlockStarted = duration.ToMilliseconds() >= sFastBlockTimeout;
-        bool hasFastBlockStopped = false;
-        if ((sFastBlockLimit != 0) && (sFastBlockLimit > sFastBlockTimeout)) {
-            hasFastBlockStopped = duration.ToMilliseconds() > sFastBlockLimit;
-        }
-        LOG(("FastBlock started=%d stopped=%d (%lf) [this=%p]\n",
-             static_cast<int>(hasFastBlockStarted),
-             static_cast<int>(hasFastBlockStopped),
-             duration.ToMilliseconds(),
-             this));
-        engageFastBlock = hasFastBlockStarted && !hasFastBlockStopped;
-    }
-
-    // Remember the data needed for fastblock telemetry in case fastblock is
-    // enabled, we have decided to block the channel, and the channel isn't
-    // marked as private.
-    if (engageFastBlock && !NS_UsePrivateBrowsing(this)) {
-        nsCOMPtr<nsIURI> uri;
-        nsresult rv = GetURI(getter_AddRefs(uri));
-        NS_ENSURE_SUCCESS(rv, false);
-
-        nsAutoCString host;
-        rv = uri->GetHost(host);
-        NS_ENSURE_SUCCESS(rv, false);
-
-        nsCOMPtr<nsIEffectiveTLDService> tldService =
-            do_GetService(NS_EFFECTIVETLDSERVICE_CONTRACTID);
-        NS_ENSURE_TRUE(tldService, false);
-
-        LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED label =
-            LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::other;
-        for (const auto& entry : gFastBlockAnalyticsProviders) {
-          // For each entry in the list of our analytics providers, use the
-          // effective TLD service to look up subdomains to make sure we find a
-          // potential match if one is available.
-          while (true) {
-            if (host == entry.mHostName) {
-              label = entry.mTelemetryLabel;
-              break;
-            }
-
-            nsAutoCString newHost;
-            rv = tldService->GetNextSubDomain(host, newHost);
-            if (rv == NS_ERROR_INSUFFICIENT_DOMAIN_LEVELS) {
-              // we're done searching this entry.
-              break;
-            }
-            NS_ENSURE_SUCCESS(rv, false);
-
-            host = newHost;
-          }
-
-          if (label != LABELS_DOCUMENT_ANALYTICS_TRACKER_FASTBLOCKED::other) {
-            // We have found a label in the previous loop, bail out now!
-            break;
-          }
-        }
-
-        if (mLoadInfo) {
-          MOZ_ALWAYS_SUCCEEDS(mLoadInfo->SetIsTrackerBlocked(true));
-          MOZ_ALWAYS_SUCCEEDS(mLoadInfo->SetTrackerBlockedReason(label));
-        }
-    }
-
-    return engageFastBlock;
-}
-
 nsresult
 nsHttpChannel::ConnectOnTailUnblock()
 {
     nsresult rv;
 
     LOG(("nsHttpChannel::ConnectOnTailUnblock [this=%p]\n", this));
 
-    bool isTrackingResource = mIsThirdPartyTrackingResource; // is atomic
-    if (isTrackingResource) {
-        bool engageFastBlock = CheckFastBlocked();
-        AntiTrackingCommon::NotifyBlockingDecision(this,
-                                                   engageFastBlock ?
-                                                     AntiTrackingCommon::BlockingDecision::eBlock :
-                                                     AntiTrackingCommon::BlockingDecision::eAllow,
-                                                   nsIWebProgressListener::STATE_BLOCKED_SLOW_TRACKING_CONTENT);
-        if (engageFastBlock) {
-          Unused << AsyncAbort(NS_ERROR_TRACKING_ANNOTATION_URI);
-          CloseCacheEntry(false);
-          return NS_OK;
-        }
-    }
-
     // Consider opening a TCP connection right away.
     SpeculativeConnect();
 
     // open a cache entry for this channel...
     bool isHttps = false;
     rv = mURI->SchemeIs("https", &isHttps);
     NS_ENSURE_SUCCESS(rv,rv);
     rv = OpenCacheEntry(isHttps);
@@ -6229,20 +6058,16 @@ nsHttpChannel::ContinueCancelledByTracki
 
 nsresult
 nsHttpChannel::CancelInternal(nsresult status)
 {
     bool trackingProtectionCancellationPending =
       !!mTrackingProtectionCancellationPending;
     if (status == NS_ERROR_TRACKING_URI) {
       mTrackingProtectionCancellationPending = 0;
-      if (mLoadInfo) {
-        MOZ_ALWAYS_SUCCEEDS(mLoadInfo->SetIsTracker(true));
-        MOZ_ALWAYS_SUCCEEDS(mLoadInfo->SetIsTrackerBlocked(true));
-      }
     }
 
     mCanceled = true;
     mStatus = status;
     if (mProxyRequest)
         mProxyRequest->Cancel(status);
     CancelNetworkRequest(status);
     mCacheInputStream.CloseAndRelease();
--- a/netwerk/protocol/http/nsHttpChannel.h
+++ b/netwerk/protocol/http/nsHttpChannel.h
@@ -683,19 +683,16 @@ private:
     // A function we trigger when untail callback is triggered by our request
     // context in case this channel was tail-blocked.
     nsresult (nsHttpChannel::*mOnTailUnblock)();
     // Called on untail when tailed during AsyncOpen execution.
     nsresult AsyncOpenOnTailUnblock();
     // Called on untail when tailed because of being a tracking resource.
     nsresult ConnectOnTailUnblock();
 
-    // Check if current channel should be canceled by FastBlock rules.
-    bool CheckFastBlocked();
-
     nsCString mUsername;
 
     // If non-null, warnings should be reported to this object.
     RefPtr<HttpChannelSecurityWarningReporter> mWarningReporter;
 
     RefPtr<ADivertableParentChannel> mParentChannel;
 
     // True if the channel is reading from cache.
--- a/netwerk/protocol/wyciwyg/nsWyciwygProtocolHandler.cpp
+++ b/netwerk/protocol/wyciwyg/nsWyciwygProtocolHandler.cpp
@@ -10,16 +10,17 @@
 #include "nsWyciwygProtocolHandler.h"
 #include "nsNetCID.h"
 #include "nsServiceManagerUtils.h"
 #include "plstr.h"
 #include "nsIObserverService.h"
 #include "nsIURI.h"
 #include "nsIURIMutator.h"
 
+#include "mozilla/dom/ContentChild.h"
 #include "mozilla/net/NeckoChild.h"
 
 using namespace mozilla::net;
 #include "mozilla/net/WyciwygChannelChild.h"
 
 ////////////////////////////////////////////////////////////////////////////////
 
 nsWyciwygProtocolHandler::nsWyciwygProtocolHandler()
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -2901,16 +2901,29 @@ class Vendor(MachCommandBase):
         help='Repository url to pull a snapshot from. Supports github and googlesource.')
     @CommandArgument('--ignore-modified', action='store_true',
         help='Ignore modified files in current checkout',
         default=False)
     def vendor_aom(self, **kwargs):
         from mozbuild.vendor_aom import VendorAOM
         vendor_command = self._spawn(VendorAOM)
         vendor_command.vendor(**kwargs)
+    @SubCommand('vendor', 'dav1d',
+                description='Vendor dav1d implementation of AV1 into the source repository.')
+    @CommandArgument('-r', '--revision',
+        help='Repository tag or commit to update to.')
+    @CommandArgument('--repo',
+        help='Repository url to pull a snapshot from. Supports gitlab.')
+    @CommandArgument('--ignore-modified', action='store_true',
+        help='Ignore modified files in current checkout',
+        default=False)
+    def vendor_dav1d(self, **kwargs):
+        from mozbuild.vendor_dav1d import VendorDav1d
+        vendor_command = self._spawn(VendorDav1d)
+        vendor_command.vendor(**kwargs)
 
     @SubCommand('vendor', 'python',
                 description='Vendor Python packages from pypi.org into third_party/python')
     @CommandArgument('--with-windows-wheel', action='store_true',
         help='Vendor a wheel for Windows along with the source package',
         default=False)
     @CommandArgument('packages', default=None, nargs='*', help='Packages to vendor. If omitted, packages and their dependencies defined in Pipfile.lock will be vendored. If Pipfile has been modified, then Pipfile.lock will be regenerated. Note that transient dependencies may be updated when running this command.')
     def vendor_python(self, **kwargs):
--- a/python/mozbuild/mozbuild/nodeutil.py
+++ b/python/mozbuild/mozbuild/nodeutil.py
@@ -42,22 +42,32 @@ def find_node_paths():
             os.path.join(os.environ.get("ProgramFiles"), "nodejs"),
             os.path.join(os.environ.get("PROGRAMW6432"), "nodejs"),
             os.path.join(os.environ.get("PROGRAMFILES"), "nodejs")
         ]
 
     return paths
 
 
-def check_executable_version(exe):
+def check_executable_version(exe, wrap_call_with_node=False):
     """Determine the version of a Node executable by invoking it.
 
     May raise ``subprocess.CalledProcessError`` or ``ValueError`` on failure.
     """
-    out = subprocess.check_output([exe, "--version"]).lstrip('v').rstrip()
+    out = None
+    # npm may be a script, so we must call it with node.
+    if wrap_call_with_node:
+        binary, _ = find_node_executable()
+        if binary:
+            out = subprocess.check_output([binary, exe, "--version"]).lstrip('v').rstrip()
+
+    # If we can't find node, or we don't need to wrap it, fallback to calling
+    # direct.
+    if not out:
+        out = subprocess.check_output([exe, "--version"]).lstrip('v').rstrip()
     return StrictVersion(out)
 
 
 def simple_which(filename, path=None):
     # Note: On windows, npm uses ".cmd"
     exts = [".cmd", ".exe", ""] if platform.system() == "Windows" else [""]
 
     for ext in exts:
@@ -97,20 +107,20 @@ def find_node_executable(nodejs_exe=os.e
 
 def find_npm_executable(min_version=NPM_MIN_VERSION):
     """Find a Node executable from the mozbuild directory.
 
     Returns a tuple containing the the path to an executable binary and a
     version tuple. Both tuple entries will be None if a Node executable
     could not be resolved.
     """
-    return find_executable(["npm"], min_version)
+    return find_executable(["npm"], min_version, True)
 
 
-def find_executable(names, min_version):
+def find_executable(names, min_version, use_node_for_version_check=False):
     paths = find_node_paths()
 
     found_exe = None
     for name in names:
         try:
             exe = simple_which(name, paths)
         except which.WhichError:
             continue
@@ -119,16 +129,16 @@ def find_executable(names, min_version):
             continue
 
         if not found_exe:
             found_exe = exe
 
         # We always verify we can invoke the executable and its version is
         # sane.
         try:
-            version = check_executable_version(exe)
+            version = check_executable_version(exe, use_node_for_version_check)
         except (subprocess.CalledProcessError, ValueError):
             continue
 
         if version >= min_version:
             return exe, version.version
 
     return found_exe, None
new file mode 100644
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor_dav1d.py
@@ -0,0 +1,163 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from distutils.version import LooseVersion
+import logging
+from mozbuild.base import (
+    BuildEnvironmentNotFoundException,
+    MozbuildObject,
+)
+import mozfile
+import mozpack.path as mozpath
+import os
+import requests
+import re
+import sys
+import tarfile
+from urlparse import urlparse
+
+class VendorDav1d(MozbuildObject):
+    def upstream_snapshot(self, revision):
+        '''Construct a url for a tarball snapshot of the given revision.'''
+        if 'code.videolan.org' in self.repo_url:
+            return mozpath.join(self.repo_url, '-', 'archive', revision + '.tar.gz')
+        else:
+            raise ValueError('Unknown git host, no snapshot lookup method')
+
+    def upstream_commit(self, revision):
+        '''Convert a revision to a git commit and timestamp.
+
+        Ask the upstream repo to convert the requested revision to
+        a git commit id and timestamp, so we can be precise in
+        what we're vendoring.'''
+        if 'code.videolan.org' in self.repo_url:
+            return self.upstream_gitlab_commit(revision)
+        else:
+            raise ValueError('Unknown git host, no commit lookup method')
+
+    def upstream_validate(self, url):
+        '''Validate repository urls to make sure we can handle them.'''
+        host = urlparse(url).netloc
+        valid_domains = ('code.videolan.org',)
+        if not any(filter(lambda domain: domain in host, valid_domains)):
+            self.log(logging.ERROR, 'upstream_url', {},
+                     '''Unsupported git host %s; cannot fetch snapshots.
+
+Please set a repository url with --repo pointing at code.videolan.org.''' % host)
+            sys.exit(1)
+
+    def upstream_gitlab_commit(self, revision):
+        '''Query the gitlab api for a git commit id and timestamp.'''
+        gitlab_api = 'https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits'
+        url = mozpath.join(gitlab_api, revision)
+        self.log(logging.INFO, 'fetch', {'url': url},
+                 'Fetching commit id from {url}')
+        req = requests.get(url)
+        req.raise_for_status()
+        info = req.json()
+        return (info['id'], info['committed_date'])
+
+    def fetch_and_unpack(self, revision, target):
+        '''Fetch and unpack upstream source'''
+        url = self.upstream_snapshot(revision)
+        self.log(logging.INFO, 'fetch', {'url': url}, 'Fetching {url}')
+        prefix = 'dav1d-' + revision
+        filename = prefix + '.tar.gz'
+        with open(filename, 'wb') as f:
+            req = requests.get(url, stream=True)
+            for data in req.iter_content(4096):
+                f.write(data)
+        tar = tarfile.open(filename)
+        bad_paths = filter(lambda name: name.startswith('/') or '..' in name,
+                           tar.getnames())
+        if any(bad_paths):
+            raise Exception("Tar archive contains non-local paths,"
+                            "e.g. '%s'" % bad_paths[0])
+        self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % target)
+        mozfile.remove(target)
+        self.log(logging.INFO, 'unpack', {}, 'Unpacking upstream files.')
+        tar.extractall(target)
+        # The snapshot tarball puts everything down a prefix directory; move it up.
+        if all(map(lambda name: name.startswith(prefix), tar.getnames())):
+            tardir = mozpath.join(target, prefix)
+            os.system('mv %s/* %s/.* %s' % (tardir, tardir, target))
+            os.rmdir(tardir)
+        # Remove the tarball.
+        mozfile.remove(filename)
+
+    def update_yaml(self, revision, timestamp, target):
+        '''Record the vendored commit id and timestamp in moz.yaml.'''
+        filename = mozpath.join(target, 'moz.yaml')
+        with open(filename) as f:
+            yaml = f.read()
+        prefix = '  release: commit'
+        if prefix in yaml:
+            # MULTILINE lets '$' anchor at the end of a mid-file release line.
+            new_yaml = re.sub(prefix + ' [v\.a-f0-9]+.*$',
+                              prefix + ' %s (%s).' % (revision, timestamp),
+                              yaml, flags=re.MULTILINE)
+        else:
+            new_yaml = '%s\n\n%s %s.' % (yaml, prefix, revision)
+        if yaml != new_yaml:
+            with open(filename, 'w') as f:
+                f.write(new_yaml)
+
+    def clean_upstream(self, target):
+        '''Remove files we don't want to import.'''
+        mozfile.remove(mozpath.join(target, '.gitattributes'))
+        mozfile.remove(mozpath.join(target, '.gitignore'))
+        mozfile.remove(mozpath.join(target, 'build', '.gitattributes'))
+        mozfile.remove(mozpath.join(target, 'build' ,'.gitignore'))
+
+    def check_modified_files(self):
+        '''
+        Ensure that there aren't any uncommitted changes to files
+        in the working copy, since we're going to change some state
+        on the user.
+        '''
+        modified = self.repository.get_changed_files('M')
+        if modified:
+            self.log(logging.ERROR, 'modified_files', {},
+                     '''You have uncommitted changes to the following files:
+
+{files}
+
+Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+'''.format(files='\n'.join(sorted(modified))))
+            sys.exit(1)
+
+    def vendor(self, revision, repo, ignore_modified=False):
+        '''Fetch, unpack, and register an upstream dav1d snapshot.'''
+        self.populate_logger()
+        self.log_manager.enable_unstructured()
+        if not ignore_modified:
+            self.check_modified_files()
+        if not revision:
+            revision = 'master'
+        if repo:
+            self.repo_url = repo
+        else:
+            self.repo_url = 'https://code.videolan.org/videolan/dav1d'
+        self.upstream_validate(self.repo_url)
+
+        commit, timestamp = self.upstream_commit(revision)
+
+        vendor_dir = mozpath.join(self.topsrcdir, 'third_party/dav1d')
+        self.fetch_and_unpack(commit, vendor_dir)
+        self.log(logging.INFO, 'clean_upstream', {},
+                 '''Removing unnecessary files.''')
+        self.clean_upstream(vendor_dir)
+        glue_dir = mozpath.join(self.topsrcdir, 'media/libdav1d')
+        self.log(logging.INFO, 'update_moz.yaml', {},
+                 '''Updating moz.yaml.''')
+        self.update_yaml(commit, timestamp, glue_dir)
+        # Stage the vendored and glue changes exactly once per directory.
+        self.log(logging.INFO, 'add_remove_files', {},
+                 '''Registering changes with version control.''')
+        self.repository.add_remove_files(vendor_dir)
+        self.repository.add_remove_files(glue_dir)
+        self.log(logging.INFO, 'done', {'revision': revision},
+                 '''Update to dav1d version '{revision}' ready to commit.''')
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/mozrelease/balrog.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+
+def _generate_show_url(context, entry):
+    url = entry['url']
+    return {
+        "actions": "showURL",
+        "openURL": url.format(**context),
+    }
+
+
+def _generate_product_details(context, entry):
+    url = entry['url']
+    return {
+        'detailsURL': url.format(**context),
+        'type': 'minor',
+    }
+
+
+_FIELD_TYPES = {
+    'show-url': _generate_show_url,
+    'product-details': _generate_product_details,
+}
+
+
+def _generate_conditions(context, entry):
+    if 'release-types' in entry and context['release-type'] not in entry['release-types']:
+        return None
+    if 'blob-types' in entry and context['blob-type'] not in entry['blob-types']:
+        return None
+    if 'products' in entry and context['product'] not in entry['products']:
+        return None
+
+    conditions = {}
+    if 'locales' in entry:
+        conditions['locales'] = entry['locales']
+    if 'versions' in entry:
+        conditions['versions'] = [
+            version.format(**context)
+            for version in entry['versions']
+        ]
+    if 'update-channel' in entry:
+        conditions['channels'] = [
+            entry['update-channel'] + suffix
+            for suffix in ('', '-localtest', '-cdntest')
+        ]
+    return conditions
+
+
+def generate_update_properties(context, config):
+    result = []
+    for entry in config:
+        fields = _FIELD_TYPES[entry['type']](context, entry)
+        conditions = _generate_conditions(context, entry.get('conditions', {}))
+
+        if conditions is not None:
+            result.append({
+                'fields': fields,
+                'for': conditions,
+            })
+    return result
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/mozrelease/util.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from yaml.loader import SafeLoader
+
+
+class UnicodeLoader(SafeLoader):
+    def construct_yaml_str(self, node):
+        return self.construct_scalar(node)
+
+
+UnicodeLoader.add_constructor(
+    'tag:yaml.org,2002:str',
+    UnicodeLoader.construct_yaml_str)
+
+
+def load(stream):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+    """
+    loader = UnicodeLoader(stream)
+    try:
+        return loader.get_single_data()
+    finally:
+        loader.dispose()
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-62.0.3.update.json
@@ -0,0 +1,74 @@
+[
+    {
+        "fields": {
+            "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/releasenotes/",
+            "type": "minor"
+        },
+        "for": {}
+    },
+    {
+        "fields": {
+            "actions": "showURL",
+            "openURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/whatsnew/?oldversion=%OLD_VERSION%"
+        },
+        "for": {
+            "channels": [
+                "release",
+                "release-localtest",
+                "release-cdntest"
+            ],
+            "locales": [
+                "cak",
+                "cy",
+                "da",
+                "de",
+                "dsb",
+                "en-CA",
+                "en-US",
+                "es-AR",
+                "es-CL",
+                "es-ES",
+                "es-MX",
+                "et",
+                "fa",
+                "fi",
+                "fr",
+                "fy-NL",
+                "gn",
+                "gu-IN",
+                "hsb",
+                "hu",
+                "ia",
+                "id",
+                "it",
+                "ja",
+                "ja-JP-mac",
+                "ka",
+                "kab",
+                "ko",
+                "lij",
+                "lt",
+                "ms",
+                "nb-NO",
+                "nl",
+                "nn-NO",
+                "pl",
+                "pt-BR",
+                "pt-PT",
+                "sk",
+                "sl",
+                "sq",
+                "sr",
+                "sv-SE",
+                "tr",
+                "uk",
+                "vi",
+                "zh-CN",
+                "zh-TW"
+            ],
+            "versions": [
+                "<62.0"
+            ]
+        }
+    }
+]
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-62.0b11-update.json
@@ -0,0 +1,74 @@
+[
+    {
+        "fields": {
+            "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/releasenotes/",
+            "type": "minor"
+        },
+        "for": {}
+    },
+    {
+        "fields": {
+            "actions": "showURL",
+            "openURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/whatsnew/?oldversion=%OLD_VERSION%"
+        },
+        "for": {
+            "channels": [
+                "release",
+                "release-localtest",
+                "release-cdntest"
+            ],
+            "locales": [
+                "cak",
+                "cy",
+                "da",
+                "de",
+                "dsb",
+                "en-CA",
+                "en-US",
+                "es-AR",
+                "es-CL",
+                "es-ES",
+                "es-MX",
+                "et",
+                "fa",
+                "fi",
+                "fr",
+                "fy-NL",
+                "gn",
+                "gu-IN",
+                "hsb",
+                "hu",
+                "ia",
+                "id",
+                "it",
+                "ja",
+                "ja-JP-mac",
+                "ka",
+                "kab",
+                "ko",
+                "lij",
+                "lt",
+                "ms",
+                "nb-NO",
+                "nl",
+                "nn-NO",
+                "pl",
+                "pt-BR",
+                "pt-PT",
+                "sk",
+                "sl",
+                "sq",
+                "sr",
+                "sv-SE",
+                "tr",
+                "uk",
+                "vi",
+                "zh-CN",
+                "zh-TW"
+            ],
+            "versions": [
+                "<62.0"
+            ]
+        }
+    }
+]
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-62.0b11.update.json
@@ -0,0 +1,9 @@
+[
+    {
+        "fields": {
+            "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0b11/releasenotes/",
+            "type": "minor"
+        },
+        "for": {}
+    }
+]
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/data/whatsnew-62.0.3.yml
@@ -0,0 +1,65 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+---
+- type: product-details
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/releasenotes/"
+# %LOCALE% is automatically replaced by Balrog.
+- type: show-url
+  # yamllint disable-line rule:line-length
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/whatsnew/?oldversion=%OLD_VERSION%"
+  conditions:
+      release-types: [release]
+      products: [firefox]
+      update-channel: release
+      # e.g.: ["<61.0"]. {version.major_number} reflects the current version.
+      # This is done by taskgraph.
+      versions: ["<{version.major_number}.0"]
+      locales:
+          - cak
+          - cy
+          - da
+          - de
+          - dsb
+          - en-CA
+          - en-US
+          - es-AR
+          - es-CL
+          - es-ES
+          - es-MX
+          - et
+          - fa
+          - fi
+          - fr
+          - fy-NL
+          - gn
+          - gu-IN
+          - hsb
+          - hu
+          - ia
+          - id
+          - it
+          - ja
+          - ja-JP-mac
+          - ka
+          - kab
+          - ko
+          - lij
+          - lt
+          - ms
+          - nb-NO
+          - nl
+          - nn-NO
+          - pl
+          - pt-BR
+          - pt-PT
+          - sk
+          - sl
+          - sq
+          - sr
+          - sv-SE
+          - tr
+          - uk
+          - vi
+          - zh-CN
+          - zh-TW
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/data/whatsnew-release.yml
@@ -0,0 +1,65 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+---
+- type: product-details
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/releasenotes/"
+# %LOCALE% is automatically replaced by Balrog.
+- type: show-url
+  # yamllint disable-line rule:line-length
+  url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/whatsnew/?oldversion=%OLD_VERSION%"
+  conditions:
+      release-types: [release]
+      products: [firefox]
+      update-channel: release
+      # e.g.: ["<61.0"]. {version.major_number} reflects the current version.
+      # This is done by taskgraph.
+      versions: ["<{version.major_number}.0"]
+      locales:
+          - cak
+          - cy
+          - da
+          - de
+          - dsb
+          - en-CA
+          - en-US
+          - es-AR
+          - es-CL
+          - es-ES
+          - es-MX
+          - et
+          - fa
+          - fi
+          - fr
+          - fy-NL
+          - gn
+          - gu-IN
+          - hsb
+          - hu
+          - ia
+          - id
+          - it
+          - ja
+          - ja-JP-mac
+          - ka
+          - kab
+          - ko
+          - lij
+          - lt
+          - ms
+          - nb-NO
+          - nl
+          - nn-NO
+          - pl
+          - pt-BR
+          - pt-PT
+          - sk
+          - sl
+          - sq
+          - sr
+          - sv-SE
+          - tr
+          - uk
+          - vi
+          - zh-CN
+          - zh-TW
--- a/python/mozrelease/test/python.ini
+++ b/python/mozrelease/test/python.ini
@@ -1,6 +1,8 @@
 [DEFAULT]
 subsuite=mozrelease
-skip-if = python == 3
 
 [test_versions.py]
+skip-if = python == 3
+[test_balrog.py]
 [test_buglist_creator.py]
+skip-if = python == 3
new file mode 100644
--- /dev/null
+++ b/python/mozrelease/test/test_balrog.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import mozunit
+import pytest
+
+import json
+from mozrelease.util import load as yaml_load
+from pathlib2 import Path
+
+from mozrelease.balrog import generate_update_properties
+from mozilla_version.gecko import GeckoVersion
+
+DATA_PATH = Path(__file__).parent.joinpath('data')
+
+
+@pytest.mark.parametrize('context,config_file,output_file', [
+    ({
+        'release-type': 'release',
+        'product': 'firefox',
+        'version': GeckoVersion.parse('62.0.3'),
+    }, 'whatsnew-62.0.3.yml', 'Firefox-62.0.3.update.json'),
+    ({
+        'release-type': 'beta',
+        'product': 'firefox',
+        'version': GeckoVersion.parse('62.0b11'),
+    }, 'whatsnew-62.0.3.yml', 'Firefox-62.0b11.update.json'),
+])
+def test_update_properties(context, config_file, output_file):
+    with DATA_PATH.joinpath(config_file).open('r', encoding='utf-8') as f:
+        config = yaml_load(f)
+
+    update_line = generate_update_properties(context, config)
+
+    assert update_line == json.load(DATA_PATH.joinpath(output_file).open('r', encoding='utf-8'))
+
+
+if __name__ == '__main__':
+    mozunit.main()
--- a/security/manager/ssl/DataStorage.cpp
+++ b/security/manager/ssl/DataStorage.cpp
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "DataStorage.h"
 
 #include "mozilla/Assertions.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/dom/PContent.h"
-#include "mozilla/dom/ContentChild.h"
 #include "mozilla/dom/ContentParent.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/Services.h"
 #include "mozilla/StaticMutex.h"
 #include "mozilla/Telemetry.h"
 #include "mozilla/Unused.h"
 #include "nsAppDirectoryServiceDefs.h"
 #include "nsDirectoryServiceUtils.h"
--- a/security/manager/ssl/nsSecureBrowserUIImpl.cpp
+++ b/security/manager/ssl/nsSecureBrowserUIImpl.cpp
@@ -180,20 +180,16 @@ nsSecureBrowserUIImpl::CheckForBlockedCo
     }
   }
 
   // Has tracking content been blocked or loaded?
   if (docShell->GetHasTrackingContentBlocked()) {
     mState |= STATE_BLOCKED_TRACKING_CONTENT;
   }
 
-  if (docShell->GetHasSlowTrackingContentBlocked()) {
-    mState |= STATE_BLOCKED_SLOW_TRACKING_CONTENT;
-  }
-
   if (docShell->GetHasTrackingContentLoaded()) {
     mState |= STATE_LOADED_TRACKING_CONTENT;
   }
 
   if (docShell->GetHasCookiesBlockedByPermission()) {
     mState |= STATE_COOKIES_BLOCKED_BY_PERMISSION;
   }
 
new file mode 100644
--- /dev/null
+++ b/services/settings/RemoteSettingsClient.jsm
@@ -0,0 +1,478 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+var EXPORTED_SYMBOLS = [
+  "RemoteSettingsClient",
+];
+
+ChromeUtils.import("resource://gre/modules/Services.jsm");
+ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
+
+ChromeUtils.defineModuleGetter(this, "Kinto",
+                               "resource://services-common/kinto-offline-client.js");
+ChromeUtils.defineModuleGetter(this, "KintoHttpClient",
+                               "resource://services-common/kinto-http-client.js");
+ChromeUtils.defineModuleGetter(this, "UptakeTelemetry",
+                               "resource://services-common/uptake-telemetry.js");
+ChromeUtils.defineModuleGetter(this, "ClientEnvironmentBase",
+                               "resource://gre/modules/components-utils/ClientEnvironment.jsm");
+ChromeUtils.defineModuleGetter(this, "RemoteSettingsWorker",
+                               "resource://services-settings/RemoteSettingsWorker.jsm");
+
+XPCOMUtils.defineLazyGlobalGetters(this, ["fetch"]);
+
+// IndexedDB name.
+const DB_NAME = "remote-settings";
+
+const INVALID_SIGNATURE = "Invalid content signature";
+const MISSING_SIGNATURE = "Missing signature";
+
+XPCOMUtils.defineLazyPreferenceGetter(this, "gServerURL",
+                                      "services.settings.server");
+XPCOMUtils.defineLazyPreferenceGetter(this, "gVerifySignature",
+                                      "services.settings.verify_signature", true);
+
+/**
+ * cacheProxy returns an object Proxy that will memoize properties of the target.
+ * @param {Object} target the object to wrap.
+ * @returns {Proxy}
+ */
+function cacheProxy(target) {
+  const cache = new Map();
+  return new Proxy(target, {
+    get(target, prop, receiver) {
+      if (!cache.has(prop)) {
+        cache.set(prop, target[prop]);
+      }
+      return cache.get(prop);
+    },
+  });
+}
+
+class ClientEnvironment extends ClientEnvironmentBase {
+  static get appID() {
+    // eg. Firefox is "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}".
+    Services.appinfo.QueryInterface(Ci.nsIXULAppInfo);
+    return Services.appinfo.ID;
+  }
+
+  static get toolkitVersion() {
+    Services.appinfo.QueryInterface(Ci.nsIPlatformInfo);
+    return Services.appinfo.platformVersion;
+  }
+}
+
+/**
+ * Retrieve the Autograph signature information from the collection metadata.
+ *
+ * @param {String} bucket Bucket name.
+ * @param {String} collection Collection name.
+ * @param {int} expectedTimestamp Timestamp to be used for cache busting.
+ * @returns {Promise<{String, String}>}
+ */
+async function fetchCollectionSignature(bucket, collection, expectedTimestamp) {
+  const client = new KintoHttpClient(gServerURL);
+  const { signature: signaturePayload } = await client.bucket(bucket)
+    .collection(collection)
+    .getData({ query: { _expected: expectedTimestamp } });
+  if (!signaturePayload) {
+    throw new Error(MISSING_SIGNATURE);
+  }
+  const { x5u, signature } = signaturePayload;
+  const certChainResponse = await fetch(x5u);
+  const certChain = await certChainResponse.text();
+
+  return { signature, certChain };
+}
+
+/**
+ * Retrieve the current list of remote records.
+ *
+ * @param {String} bucket Bucket name.
+ * @param {String} collection Collection name.
+ * @param {int} expectedTimestamp Timestamp to be used for cache busting.
+ */
+async function fetchRemoteRecords(bucket, collection, expectedTimestamp) {
+  const client = new KintoHttpClient(gServerURL);
+  return client.bucket(bucket)
+    .collection(collection)
+    .listRecords({ sort: "id", filters: { _expected: expectedTimestamp } });
+}
+
+/**
+ * Minimalist event emitter.
+ *
+ * Note: we don't use `toolkit/modules/EventEmitter` because **we want** to throw
+ * an error when a listener fails to execute.
+ */
+class EventEmitter {
+
+  constructor(events) {
+    this._listeners = new Map();
+    for (const event of events) {
+      this._listeners.set(event, []);
+    }
+  }
+
+  /**
+   * Emit an event: executes the registered listeners sequentially,
+   * in registration order.
+   *
+   * @param {string} event    the event name
+   * @param {Object} payload  the event payload to call the listeners with
+   */
+  async emit(event, payload) {
+    const callbacks = this._listeners.get("sync");
+    let lastError;
+    for (const cb of callbacks) {
+      try {
+        await cb(payload);
+      } catch (e) {
+        lastError = e;
+      }
+    }
+    if (lastError) {
+      throw lastError;
+    }
+  }
+
+  on(event, callback) {
+    if (!this._listeners.has(event)) {
+      throw new Error(`Unknown event type ${event}`);
+    }
+    this._listeners.get(event).push(callback);
+  }
+
+  off(event, callback) {
+    if (!this._listeners.has(event)) {
+      throw new Error(`Unknown event type ${event}`);
+    }
+    const callbacks = this._listeners.get(event);
+    const i = callbacks.indexOf(callback);
+    if (i < 0) {
+      throw new Error(`Unknown callback`);
+    } else {
+      callbacks.splice(i, 1);
+    }
+  }
+}
+
+
+class RemoteSettingsClient extends EventEmitter {
+
+  constructor(collectionName, { bucketNamePref, signerName, filterFunc, localFields = [], lastCheckTimePref }) {
+    super(["sync"]);
+
+    this.collectionName = collectionName;
+    this.signerName = signerName;
+    this.filterFunc = filterFunc;
+    this.localFields = localFields;
+    this._lastCheckTimePref = lastCheckTimePref;
+
+    // The bucket preference value can be changed (eg. `main` to `main-preview`) in order
+    // to preview the changes to be approved in a real client.
+    this.bucketNamePref = bucketNamePref;
+    XPCOMUtils.defineLazyPreferenceGetter(this, "bucketName", this.bucketNamePref);
+
+    XPCOMUtils.defineLazyGetter(this, "_kinto", () => new Kinto({
+      bucket: this.bucketName,
+      adapter: Kinto.adapters.IDB,
+      adapterOptions: { dbName: DB_NAME, migrateOldData: false },
+    }));
+  }
+
+  get identifier() {
+    return `${this.bucketName}/${this.collectionName}`;
+  }
+
+  get lastCheckTimePref() {
+    return this._lastCheckTimePref || `services.settings.${this.bucketName}.${this.collectionName}.last_check`;
+  }
+
+  /**
+   * Open the underlying Kinto collection, using the appropriate adapter and options.
+   */
+  async openCollection() {
+    const options = {
+      localFields: this.localFields,
+      bucket: this.bucketName,
+    };
+    return this._kinto.collection(this.collectionName, options);
+  }
+
+  /**
+   * Lists settings.
+   *
+   * @param  {Object} options         The options object.
+   * @param  {Object} options.filters Filter the results (default: `{}`).
+   * @param  {Object} options.order   The order to apply (eg. `-last_modified`).
+   * @return {Promise}
+   */
+  async get(options = {}) {
+    const {
+      filters = {},
+      order = "", // not sorted by default.
+    } = options;
+
+    const c = await this.openCollection();
+
+    const timestamp = await c.db.getLastModified();
+    if (timestamp == null) {
+      // The local database for this collection was never synchronized.
+      // Before returning an empty list, we attempt to load a packaged JSON dump.
+      try {
+        // Load JSON dump if there is one.
+        await RemoteSettingsWorker.importJSONDump(this.bucketName, this.collectionName);
+      } catch (e) {
+        // Report but return an empty list since there will be no data anyway.
+        Cu.reportError(e);
+        return [];
+      }
+    }
+
+    // Read from the local DB.
+    const { data } = await c.list({ filters, order });
+    // Filter the records based on `this.filterFunc` results.
+    return this._filterEntries(data);
+  }
+
+  /**
+   * Synchronize from Kinto server, if necessary.
+   *
+   * @param {int}    expectedTimestamp the lastModified date (on the server) for the remote collection.
+   *                                   This will be compared to the local timestamp, and will be used for
+   *                                   cache busting if local data is out of date.
+   * @param {int}   serverTimeMillis   the current date returned by the server.
+   *                                   This is only used to track the last check or synchronization.
+   * @param {Object} options           additional advanced options.
+   * @param {bool}   options.loadDump  load initial dump from disk on first sync (default: true)
+   * @return {Promise}                 which rejects on sync or process failure.
+   */
+  async maybeSync(expectedTimestamp, serverTimeMillis, options = { loadDump: true }) {
+    const { loadDump } = options;
+
+    let reportStatus = null;
+    try {
+      const collection = await this.openCollection();
+      // Synchronize remote data into a local Sqlite DB.
+      let collectionLastModified = await collection.db.getLastModified();
+
+      // If there is no data currently in the collection, attempt to import
+      // initial data from the application defaults.
+      // This allows to avoid synchronizing the whole collection content on
+      // cold start.
+      if (!collectionLastModified && loadDump) {
+        try {
+          await RemoteSettingsWorker.importJSONDump(this.bucketName, this.collectionName);
+          collectionLastModified = await collection.db.getLastModified();
+        } catch (e) {
+          // Report but go-on.
+          Cu.reportError(e);
+        }
+      }
+
+      // If the data is up to date, there's no need to sync. We still need
+      // to record the fact that a check happened.
+      if (expectedTimestamp <= collectionLastModified) {
+        this._updateLastCheck(serverTimeMillis);
+        reportStatus = UptakeTelemetry.STATUS.UP_TO_DATE;
+        return;
+      }
+
+      // If there is a `signerName` and collection signing is enforced, add a
+      // hook for incoming changes that validates the signature.
+      if (this.signerName && gVerifySignature) {
+        collection.hooks["incoming-changes"] = [async (payload, collection) => {
+          await this._validateCollectionSignature(payload.changes,
+                                                  payload.lastModified,
+                                                  collection,
+                                                  { expectedTimestamp });
+          // In case the signature is valid, apply the changes locally.
+          return payload;
+        }];
+      }
+
+      // Fetch changes from server.
+      let syncResult;
+      try {
+        // Server changes have priority during synchronization.
+        const strategy = Kinto.syncStrategy.SERVER_WINS;
+        syncResult = await collection.sync({ remote: gServerURL, strategy, expectedTimestamp });
+        const { ok } = syncResult;
+        if (!ok) {
+          // Some synchronization conflicts occurred.
+          reportStatus = UptakeTelemetry.STATUS.CONFLICT_ERROR;
+          throw new Error("Sync failed");
+        }
+      } catch (e) {
+        if (e.message.includes(INVALID_SIGNATURE)) {
+          // Signature verification failed during synchronization.
+          reportStatus = UptakeTelemetry.STATUS.SIGNATURE_ERROR;
+          // if sync fails with a signature error, it's likely that our
+          // local data has been modified in some way.
+          // We will attempt to fix this by retrieving the whole
+          // remote collection.
+          const payload = await fetchRemoteRecords(collection.bucket, collection.name, expectedTimestamp);
+          try {
+            await this._validateCollectionSignature(payload.data,
+                                                    payload.last_modified,
+                                                    collection,
+                                                    { expectedTimestamp, ignoreLocal: true });
+          } catch (e) {
+            reportStatus = UptakeTelemetry.STATUS.SIGNATURE_RETRY_ERROR;
+            throw e;
+          }
+
+          // The signature is good (we haven't thrown).
+          // Now we will inspect what we had locally.
+          const { data: oldData } = await collection.list({ order: "" }); // no need to sort.
+
+          // We build a sync result as if a diff-based sync was performed.
+          syncResult = { created: [], updated: [], deleted: [] };
+
+          // If the remote last_modified is newer than the local last_modified,
+          // replace the local data
+          const localLastModified = await collection.db.getLastModified();
+          if (payload.last_modified >= localLastModified) {
+            const { data: newData } = payload;
+            await collection.clear();
+            await collection.loadDump(newData);
+
+            // Compare local and remote to populate the sync result
+            const oldById = new Map(oldData.map(e => [e.id, e]));
+            for (const r of newData) {
+              const old = oldById.get(r.id);
+              if (old) {
+                if (old.last_modified != r.last_modified) {
+                  syncResult.updated.push({ old, new: r });
+                }
+                oldById.delete(r.id);
+              } else {
+                syncResult.created.push(r);
+              }
+            }
+            // Records that remain in our map now are those missing from remote
+            syncResult.deleted = Array.from(oldById.values());
+          }
+
+        } else {
+          // The sync has thrown, it can be related to metadata, network or a general error.
+          if (e.message == MISSING_SIGNATURE) {
+            // Collection metadata has no signature info, no need to retry.
+            reportStatus = UptakeTelemetry.STATUS.SIGNATURE_ERROR;
+          } else if (/NetworkError/.test(e.message)) {
+            reportStatus = UptakeTelemetry.STATUS.NETWORK_ERROR;
+          } else if (/Backoff/.test(e.message)) {
+            reportStatus = UptakeTelemetry.STATUS.BACKOFF;
+          } else {
+            reportStatus = UptakeTelemetry.STATUS.SYNC_ERROR;
+          }
+          throw e;
+        }
+      }
+
+      // Handle the obtained records (ie. apply locally through events).
+      // Build the event data list. It should be filtered (ie. by application target)
+      const { created: allCreated, updated: allUpdated, deleted: allDeleted } = syncResult;
+      const [created, deleted, updatedFiltered] = await Promise.all(
+          [allCreated, allDeleted, allUpdated.map(e => e.new)].map(this._filterEntries.bind(this))
+        );
+      // For updates, keep entries whose updated form matches the target.
+      const updatedFilteredIds = new Set(updatedFiltered.map(e => e.id));
+      const updated = allUpdated.filter(({ new: { id } }) => updatedFilteredIds.has(id));
+
+      // If every changed entry is filtered, we don't even fire the event.
+      if (created.length || updated.length || deleted.length) {
+        // Read local collection of records (also filtered).
+        const { data: allData } = await collection.list({ order: "" }); // no need to sort.
+        const current = await this._filterEntries(allData);
+        const payload = { data: { current, created, updated, deleted } };
+        try {
+          await this.emit("sync", payload);
+        } catch (e) {
+          reportStatus = UptakeTelemetry.STATUS.APPLY_ERROR;
+          throw e;
+        }
+      }
+
+      // Track last update.
+      this._updateLastCheck(serverTimeMillis);
+
+    } catch (e) {
+      // No specific error was tracked, mark it as unknown.
+      if (reportStatus === null) {
+        reportStatus = UptakeTelemetry.STATUS.UNKNOWN_ERROR;
+      }
+      throw e;
+    } finally {
+      // No error was reported, this is a success!
+      if (reportStatus === null) {
+        reportStatus = UptakeTelemetry.STATUS.SUCCESS;
+      }
+      // Report success/error status to Telemetry.
+      UptakeTelemetry.report(this.identifier, reportStatus);
+    }
+  }
+
+  /**
+   *
+   * @param {Array<Object>} remoteRecords
+   * @param {int} timestamp
+   * @param {Collection} collection
+   * @param {Object} options
+   * @returns {Promise}
+   */
+  async _validateCollectionSignature(remoteRecords, timestamp, kintoCollection, options = {}) {
+    const { expectedTimestamp, ignoreLocal = false } = options;
+    // this is a content-signature field from an autograph response.
+    const { name: collection, bucket } = kintoCollection;
+    const { signature, certChain } = await fetchCollectionSignature(bucket, collection, expectedTimestamp);
+
+    let localRecords = [];
+    if (!ignoreLocal) {
+      const { data } = await kintoCollection.list({ order: "" }); // no need to sort.
+      // Local fields are stripped to compute the collection signature (server does not have them).
+      localRecords = data.map(r => kintoCollection.cleanLocalFields(r));
+    }
+
+    const serialized = await RemoteSettingsWorker.canonicalStringify(localRecords,
+                                                                     remoteRecords,
+                                                                     timestamp);
+    const verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
+      .createInstance(Ci.nsIContentSignatureVerifier);
+    if (!verifier.verifyContentSignature(serialized,
+                                         "p384ecdsa=" + signature,
+                                         certChain,
+                                         this.signerName)) {
+      throw new Error(INVALID_SIGNATURE + ` (${bucket}/${collection})`);
+    }
+  }
+
+  /**
+   * Save last time server was checked in users prefs.
+   *
+   * @param {int} serverTimeMillis   the current date returned by the server.
+   */
+  _updateLastCheck(serverTimeMillis) {
+    const checkedServerTimeInSeconds = Math.round(serverTimeMillis / 1000);
+    Services.prefs.setIntPref(this.lastCheckTimePref, checkedServerTimeInSeconds);
+  }
+
+  /**
+   *
+   * @param {Array<Object>} data
+   */
+  async _filterEntries(data) {
+    // Filter entries for which calls to `this.filterFunc` returns null.
+    if (!this.filterFunc) {
+      return data;
+    }
+    const environment = cacheProxy(ClientEnvironment);
+    const dataPromises = data.map(e => this.filterFunc(e, environment));
+    const results = await Promise.all(dataPromises);
+    return results.filter(Boolean);
+  }
+}
--- a/services/settings/moz.build
+++ b/services/settings/moz.build
@@ -11,13 +11,14 @@ DIRS += [
 
 EXTRA_COMPONENTS += [
     'RemoteSettingsComponents.js',
     'servicesSettings.manifest',
 ]
 
 EXTRA_JS_MODULES['services-settings'] += [
     'remote-settings.js',
+    'RemoteSettingsClient.jsm',
     'RemoteSettingsWorker.js',
     'RemoteSettingsWorker.jsm',
 ]
 
 XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini']
--- a/services/settings/remote-settings.js
+++ b/services/settings/remote-settings.js
@@ -9,95 +9,57 @@
 var EXPORTED_SYMBOLS = [
   "RemoteSettings",
   "jexlFilterFunc",
   "remoteSettingsBroadcastHandler",
 ];
 
 ChromeUtils.import("resource://gre/modules/Services.jsm");
 ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
-const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm", {});
-XPCOMUtils.defineLazyGlobalGetters(this, ["fetch", "indexedDB"]);
 
-ChromeUtils.defineModuleGetter(this, "Kinto",
-                               "resource://services-common/kinto-offline-client.js");
-ChromeUtils.defineModuleGetter(this, "KintoHttpClient",
-                               "resource://services-common/kinto-http-client.js");
-ChromeUtils.defineModuleGetter(this, "CanonicalJSON",
-                               "resource://gre/modules/CanonicalJSON.jsm");
 ChromeUtils.defineModuleGetter(this, "UptakeTelemetry",
                                "resource://services-common/uptake-telemetry.js");
-ChromeUtils.defineModuleGetter(this, "ClientEnvironmentBase",
-                               "resource://gre/modules/components-utils/ClientEnvironment.jsm");
+ChromeUtils.defineModuleGetter(this, "pushBroadcastService",
+                               "resource://gre/modules/PushBroadcastService.jsm");
+ChromeUtils.defineModuleGetter(this, "RemoteSettingsClient",
+                               "resource://services-settings/RemoteSettingsClient.jsm");
 ChromeUtils.defineModuleGetter(this, "FilterExpressions",
                                "resource://gre/modules/components-utils/FilterExpressions.jsm");
-ChromeUtils.defineModuleGetter(this, "pushBroadcastService",
-                               "resource://gre/modules/PushBroadcastService.jsm");
-ChromeUtils.defineModuleGetter(this, "RemoteSettingsWorker",
-                               "resource://services-settings/RemoteSettingsWorker.jsm");
+
+XPCOMUtils.defineLazyGlobalGetters(this, ["fetch"]);
 
 const PREF_SETTINGS_DEFAULT_BUCKET     = "services.settings.default_bucket";
 const PREF_SETTINGS_BRANCH             = "services.settings.";
 const PREF_SETTINGS_SERVER             = "server";
 const PREF_SETTINGS_DEFAULT_SIGNER     = "default_signer";
-const PREF_SETTINGS_VERIFY_SIGNATURE   = "verify_signature";
 const PREF_SETTINGS_SERVER_BACKOFF     = "server.backoff";
 const PREF_SETTINGS_CHANGES_PATH       = "changes.path";
 const PREF_SETTINGS_LAST_UPDATE        = "last_update_seconds";
 const PREF_SETTINGS_LAST_ETAG          = "last_etag";
 const PREF_SETTINGS_CLOCK_SKEW_SECONDS = "clock_skew_seconds";
 const PREF_SETTINGS_LOAD_DUMP          = "load_dump";
 
-// IndexedDB name.
-const DB_NAME = "remote-settings";
 
 // Telemetry update source identifier.
 const TELEMETRY_HISTOGRAM_KEY = "settings-changes-monitoring";
-
-const INVALID_SIGNATURE = "Invalid content signature";
-const MISSING_SIGNATURE = "Missing signature";
+// Push broadcast id.
+const BROADCAST_ID = "remote-settings/monitor_changes";
 
 XPCOMUtils.defineLazyGetter(this, "gPrefs", () => {
   return Services.prefs.getBranch(PREF_SETTINGS_BRANCH);
 });
-XPCOMUtils.defineLazyPreferenceGetter(this, "gVerifySignature", PREF_SETTINGS_BRANCH + PREF_SETTINGS_VERIFY_SIGNATURE, true);
 XPCOMUtils.defineLazyPreferenceGetter(this, "gServerURL", PREF_SETTINGS_BRANCH + PREF_SETTINGS_SERVER);
 XPCOMUtils.defineLazyPreferenceGetter(this, "gChangesPath", PREF_SETTINGS_BRANCH + PREF_SETTINGS_CHANGES_PATH);
 
 /**
- * cacheProxy returns an object Proxy that will memoize properties of the target.
- */
-function cacheProxy(target) {
-  const cache = new Map();
-  return new Proxy(target, {
-    get(target, prop, receiver) {
-      if (!cache.has(prop)) {
-        cache.set(prop, target[prop]);
-      }
-      return cache.get(prop);
-    },
-  });
-}
-
-class ClientEnvironment extends ClientEnvironmentBase {
-  static get appID() {
-    // eg. Firefox is "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}".
-    Services.appinfo.QueryInterface(Ci.nsIXULAppInfo);
-    return Services.appinfo.ID;
-  }
-
-  static get toolkitVersion() {
-    Services.appinfo.QueryInterface(Ci.nsIPlatformInfo);
-    return Services.appinfo.platformVersion;
-  }
-}
-
-/**
  * Default entry filtering function, in charge of excluding remote settings entries
  * where the JEXL expression evaluates into a falsy value.
+ * @param {Object}            entry       The Remote Settings entry to be excluded or kept.
+ * @param {ClientEnvironment} environment Information about version, language, platform etc.
+ * @returns {?Object} the entry or null if excluded.
  */
 async function jexlFilterFunc(entry, environment) {
   const { filter_expression } = entry;
   if (!filter_expression) {
     return entry;
   }
   let result;
   try {
@@ -106,31 +68,16 @@ async function jexlFilterFunc(entry, env
     };
     result = await FilterExpressions.eval(filter_expression, context);
   } catch (e) {
     Cu.reportError(e);
   }
   return result ? entry : null;
 }
 
-async function fetchCollectionMetadata(remote, collection, expectedTimestamp) {
-  const client = new KintoHttpClient(remote);
-  const { signature } = await client.bucket(collection.bucket)
-                                    .collection(collection.name)
-                                    .getData({ query: { _expected: expectedTimestamp }});
-  return signature;
-}
-
-async function fetchRemoteCollection(collection, expectedTimestamp) {
-  const client = new KintoHttpClient(gServerURL);
-  return client.bucket(collection.bucket)
-           .collection(collection.name)
-           .listRecords({ sort: "id", filters: { _expected: expectedTimestamp } });
-}
-
 /**
  * Fetch the list of remote collections and their timestamp.
  * @param {String} url               The poll URL (eg. `http://${server}{pollingEndpoint}`)
  * @param {String} lastEtag          (optional) The Etag of the latest poll to be matched
  *                                    by the server (eg. `"123456789"`).
  * @param {int}    expectedTimestamp The timestamp that the server is supposed to return.
  *                                   We obtained it from the Megaphone notification payload,
  *                                   and we use it only for cache busting (Bug 1497159).
@@ -196,372 +143,17 @@ async function fetchLatestChanges(url, l
   let backoffSeconds;
   if (response.headers.has("Backoff")) {
     const value = parseInt(response.headers.get("Backoff"), 10);
     if (!isNaN(value)) {
       backoffSeconds = value;
     }
   }
 
-  return {changes, currentEtag, serverTimeMillis, backoffSeconds};
-}
-
-
-class RemoteSettingsClient {
-
-  constructor(collectionName, { bucketNamePref, signerName, filterFunc = jexlFilterFunc, localFields = [], lastCheckTimePref }) {
-    this.collectionName = collectionName;
-    this.signerName = signerName;
-    this.filterFunc = filterFunc;
-    this.localFields = localFields;
-    this._lastCheckTimePref = lastCheckTimePref;
-
-    // The bucket preference value can be changed (eg. `main` to `main-preview`) in order
-    // to preview the changes to be approved in a real client.
-    this.bucketNamePref = bucketNamePref;
-    XPCOMUtils.defineLazyPreferenceGetter(this, "bucketName", this.bucketNamePref);
-
-    this._listeners = new Map();
-    this._listeners.set("sync", []);
-  }
-
-  get identifier() {
-    return `${this.bucketName}/${this.collectionName}`;
-  }
-
-  get lastCheckTimePref() {
-    return this._lastCheckTimePref || `services.settings.${this.bucketName}.${this.collectionName}.last_check`;
-  }
-
-  /**
-   * Event emitter: will execute the registered listeners in the order and
-   * sequentially.
-   *
-   * Note: we don't use `toolkit/modules/EventEmitter` because we want to throw
-   * an error when a listener fails to execute.
-   *
-   * @param {string} event    the event name
-   * @param {Object} payload  the event payload to call the listeners with
-   */
-  async emit(event, payload) {
-    const callbacks = this._listeners.get("sync");
-    let firstError;
-    for (const cb of callbacks) {
-      try {
-        await cb(payload);
-      } catch (e) {
-        firstError = e;
-      }
-    }
-    if (firstError) {
-      throw firstError;
-    }
-  }
-
-  on(event, callback) {
-    if (!this._listeners.has(event)) {
-      throw new Error(`Unknown event type ${event}`);
-    }
-    this._listeners.get(event).push(callback);
-  }
-
-  off(event, callback) {
-    if (!this._listeners.has(event)) {
-      throw new Error(`Unknown event type ${event}`);
-    }
-    const callbacks = this._listeners.get(event);
-    const i = callbacks.indexOf(callback);
-    if (i < 0) {
-      throw new Error(`Unknown callback`);
-    } else {
-      callbacks.splice(i, 1);
-    }
-  }
-
-  /**
-   * Open the underlying Kinto collection, using the appropriate adapter and
-   * options.
-   */
-  async openCollection() {
-    if (!this._kinto) {
-      this._kinto = new Kinto({
-        bucket: this.bucketName,
-        adapter: Kinto.adapters.IDB,
-        adapterOptions: { dbName: DB_NAME, migrateOldData: false },
-      });
-    }
-    const options = {
-      localFields: this.localFields,
-      bucket: this.bucketName,
-    };
-    return this._kinto.collection(this.collectionName, options);
-  }
-
-  /**
-   * Lists settings.
-   *
-   * @param  {Object} options         The options object.
-   * @param  {Object} options.filters Filter the results (default: `{}`).
-   * @param  {Object} options.order   The order to apply   (default: `-last_modified`).
-   * @return {Promise}
-   */
-  async get(options = {}) {
-    // In Bug 1451031, we will do some jexl filtering to limit the list items
-    // whose target is matched.
-    const { filters = {}, order = "" } = options; // not sorted by default.
-    const c = await this.openCollection();
-
-    const timestamp = await c.db.getLastModified();
-    // If the local database was never synchronized, then we attempt to load
-    // a packaged JSON dump.
-    if (timestamp == null) {
-      try {
-        await RemoteSettingsWorker.importJSONDump(this.bucketName, this.collectionName);
-      } catch (e) {
-        // Report but return an empty list since there will be no data anyway.
-        Cu.reportError(e);
-        return [];
-      }
-    }
-
-    const { data } = await c.list({ filters, order });
-    return this._filterEntries(data);
-  }
-
-  /**
-   * Synchronize from Kinto server, if necessary.
-   *
-   * @param {int}  expectedTimestamp       the lastModified date (on the server) for
-                                      the remote collection.
-   * @param {Date}   serverTime       the current date return by the server.
-   * @param {Object} options          additional advanced options.
-   * @param {bool}   options.loadDump load initial dump from disk on first sync (default: true)
-   * @return {Promise}                which rejects on sync or process failure.
-   */
-  async maybeSync(expectedTimestamp, serverTime, options = { loadDump: true }) {
-    const {loadDump} = options;
-
-    let reportStatus = null;
-    try {
-      const collection = await this.openCollection();
-      // Synchronize remote data into a local Sqlite DB.
-      let collectionLastModified = await collection.db.getLastModified();
-
-      // If there is no data currently in the collection, attempt to import
-      // initial data from the application defaults.
-      // This allows to avoid synchronizing the whole collection content on
-      // cold start.
-      if (!collectionLastModified && loadDump) {
-        try {
-          await RemoteSettingsWorker.importJSONDump(this.bucketName, this.collectionName);
-          collectionLastModified = await collection.db.getLastModified();
-        } catch (e) {
-          // Report but go-on.
-          Cu.reportError(e);
-        }
-      }
-
-      // If the data is up to date, there's no need to sync. We still need
-      // to record the fact that a check happened.
-      if (expectedTimestamp <= collectionLastModified) {
-        this._updateLastCheck(serverTime);
-        reportStatus = UptakeTelemetry.STATUS.UP_TO_DATE;
-        return;
-      }
-
-      // If there is a `signerName` and collection signing is enforced, add a
-      // hook for incoming changes that validates the signature.
-      if (this.signerName && gVerifySignature) {
-        collection.hooks["incoming-changes"] = [async (payload, collection) => {
-          await this._validateCollectionSignature(payload.changes,
-                                                  payload.lastModified,
-                                                  collection,
-                                                  { expectedTimestamp });
-          // In case the signature is valid, apply the changes locally.
-          return payload;
-        }];
-      }
-
-      // Fetch changes from server.
-      let syncResult;
-      try {
-        // Server changes have priority during synchronization.
-        const strategy = Kinto.syncStrategy.SERVER_WINS;
-        //
-        // XXX: https://github.com/Kinto/kinto.js/issues/859
-        //
-        syncResult = await collection.sync({ remote: gServerURL, strategy, expectedTimestamp });
-        const { ok } = syncResult;
-        if (!ok) {
-          // Some synchronization conflicts occured.
-          reportStatus = UptakeTelemetry.STATUS.CONFLICT_ERROR;
-          throw new Error("Sync failed");
-        }
-      } catch (e) {
-        if (e.message.includes(INVALID_SIGNATURE)) {
-          // Signature verification failed during synchronzation.
-          reportStatus = UptakeTelemetry.STATUS.SIGNATURE_ERROR;
-          // if sync fails with a signature error, it's likely that our
-          // local data has been modified in some way.
-          // We will attempt to fix this by retrieving the whole
-          // remote collection.
-          const payload = await fetchRemoteCollection(collection, expectedTimestamp);
-          try {
-            await this._validateCollectionSignature(payload.data,
-                                                    payload.last_modified,
-                                                    collection,
-                                                    { expectedTimestamp, ignoreLocal: true });
-          } catch (e) {
-            reportStatus = UptakeTelemetry.STATUS.SIGNATURE_RETRY_ERROR;
-            throw e;
-          }
-
-          // The signature is good (we haven't thrown).
-          // Now we will Inspect what we had locally.
-          const { data: oldData } = await collection.list({ order: "" }); // no need to sort.
-
-          // We build a sync result as if a diff-based sync was performed.
-          syncResult = { created: [], updated: [], deleted: [] };
-
-          // If the remote last_modified is newer than the local last_modified,
-          // replace the local data
-          const localLastModified = await collection.db.getLastModified();
-          if (payload.last_modified >= localLastModified) {
-            const { data: newData } = payload;
-            await collection.clear();
-            await collection.loadDump(newData);
-
-            // Compare local and remote to populate the sync result
-            const oldById = new Map(oldData.map(e => [e.id, e]));
-            for (const r of newData) {
-              const old = oldById.get(r.id);
-              if (old) {
-                if (old.last_modified != r.last_modified) {
-                  syncResult.updated.push({ old, new: r });
-                }
-                oldById.delete(r.id);
-              } else {
-                syncResult.created.push(r);
-              }
-            }
-            // Records that remain in our map now are those missing from remote
-            syncResult.deleted = Array.from(oldById.values());
-          }
-
-        } else {
-          // The sync has thrown, it can be related to metadata, network or a general error.
-          if (e.message == MISSING_SIGNATURE) {
-            // Collection metadata has no signature info, no need to retry.
-            reportStatus = UptakeTelemetry.STATUS.SIGNATURE_ERROR;
-          } else if (/NetworkError/.test(e.message)) {
-            reportStatus = UptakeTelemetry.STATUS.NETWORK_ERROR;
-          } else if (/Backoff/.test(e.message)) {
-            reportStatus = UptakeTelemetry.STATUS.BACKOFF;
-          } else {
-            reportStatus = UptakeTelemetry.STATUS.SYNC_ERROR;
-          }
-          throw e;
-        }
-      }
-
-      // Handle the obtained records (ie. apply locally through events).
-      // Build the event data list. It should be filtered (ie. by application target)
-      const { created: allCreated, updated: allUpdated, deleted: allDeleted } = syncResult;
-      const [created, deleted, updatedFiltered] = await Promise.all(
-          [allCreated, allDeleted, allUpdated.map(e => e.new)].map(this._filterEntries.bind(this))
-        );
-      // For updates, keep entries whose updated form is matches the target.
-      const updatedFilteredIds = new Set(updatedFiltered.map(e => e.id));
-      const updated = allUpdated.filter(({ new: { id } }) => updatedFilteredIds.has(id));
-
-      // If every changed entry is filtered, we don't even fire the event.
-      if (created.length || updated.length || deleted.length) {
-        // Read local collection of records (also filtered).
-        const { data: allData } = await collection.list({ order: "" }); // no need to sort.
-        const current = await this._filterEntries(allData);
-        const payload = { data: { current, created, updated, deleted } };
-        try {
-          await this.emit("sync", payload);
-        } catch (e) {
-          reportStatus = UptakeTelemetry.STATUS.APPLY_ERROR;
-          throw e;
-        }
-      }
-
-      // Track last update.
-      this._updateLastCheck(serverTime);
-
-    } catch (e) {
-      // No specific error was tracked, mark it as unknown.
-      if (reportStatus === null) {
-        reportStatus = UptakeTelemetry.STATUS.UNKNOWN_ERROR;
-      }
-      throw e;
-    } finally {
-      // No error was reported, this is a success!
-      if (reportStatus === null) {
-        reportStatus = UptakeTelemetry.STATUS.SUCCESS;
-      }
-      // Report success/error status to Telemetry.
-      UptakeTelemetry.report(this.identifier, reportStatus);
-    }
-  }
-
-  async _validateCollectionSignature(remoteRecords, timestamp, collection, options = {}) {
-    const { expectedTimestamp, ignoreLocal = false } = options;
-    // this is a content-signature field from an autograph response.
-    const signaturePayload = await fetchCollectionMetadata(gServerURL, collection, expectedTimestamp);
-    if (!signaturePayload) {
-      throw new Error(MISSING_SIGNATURE);
-    }
-    const {x5u, signature} = signaturePayload;
-    const certChainResponse = await fetch(x5u);
-    const certChain = await certChainResponse.text();
-
-    const verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
-                       .createInstance(Ci.nsIContentSignatureVerifier);
-
-    let localRecords = [];
-    if (!ignoreLocal) {
-      const { data } = await collection.list({ order: "" }); // no need to sort.
-      // Local fields are stripped to compute the collection signature (server does not have them).
-      localRecords = data.map(r => collection.cleanLocalFields(r));
-    }
-
-    const serialized = await RemoteSettingsWorker.canonicalStringify(localRecords,
-                                                                     remoteRecords,
-                                                                     timestamp);
-    if (!verifier.verifyContentSignature(serialized,
-                                         "p384ecdsa=" + signature,
-                                         certChain,
-                                         this.signerName)) {
-      throw new Error(INVALID_SIGNATURE + ` (${collection.bucket}/${collection.name})`);
-    }
-  }
-
-  /**
-   * Save last time server was checked in users prefs.
-   *
-   * @param {Date} serverTime   the current date return by server.
-   */
-  _updateLastCheck(serverTime) {
-    const checkedServerTimeInSeconds = Math.round(serverTime / 1000);
-    Services.prefs.setIntPref(this.lastCheckTimePref, checkedServerTimeInSeconds);
-  }
-
-  async _filterEntries(data) {
-    // Filter entries for which calls to `this.filterFunc` returns null.
-    if (!this.filterFunc) {
-      return data;
-    }
-    const environment = cacheProxy(ClientEnvironment);
-    const dataPromises = data.map(e => this.filterFunc(e, environment));
-    const results = await Promise.all(dataPromises);
-    return results.filter(v => !!v);
-  }
+  return { changes, currentEtag, serverTimeMillis, backoffSeconds };
 }
 
 /**
  * Check if local data exist for the specified client.
  *
  * @param {RemoteSettingsClient} client
  * @return {bool} Whether it exists or not.
  */
@@ -591,16 +183,17 @@ async function hasLocalDump(bucket, coll
 function remoteSettingsFunction() {
   const _clients = new Map();
 
   // If not explicitly specified, use the default signer.
   const defaultSigner = gPrefs.getCharPref(PREF_SETTINGS_DEFAULT_SIGNER);
   const defaultOptions = {
     bucketNamePref: PREF_SETTINGS_DEFAULT_BUCKET,
     signerName: defaultSigner,
+    filterFunc: jexlFilterFunc,
   };
 
   /**
    * RemoteSettings constructor.
    *
    * @param {String} collectionName The remote settings identifier
    * @param {Object} options Advanced options
    * @returns {RemoteSettingsClient} An instance of a Remote Settings client.
@@ -795,28 +388,27 @@ function remoteSettingsFunction() {
     };
   };
 
   /**
    * Startup function called from nsBrowserGlue.
    */
   remoteSettings.init = () => {
     // Hook the Push broadcast and RemoteSettings polling.
-    const broadcastID = "remote-settings/monitor_changes";
     // When we start on a new profile there will be no ETag stored.
     // Use an arbitrary ETag that is guaranteed not to occur.
     // This will trigger a broadcast message but that's fine because we
     // will check the changes on each collection and retrieve only the