Merge mozilla-central to inbound. a=merge CLOSED TREE
author shindli <shindli@mozilla.com>
Fri, 08 Mar 2019 11:42:48 +0200
changeset 521001 15d3e8135d577b3ef62ca25050b4bdd6c0af8142
parent 521000 9cd8b6db0567d942d21ed34c7e2cad9f61405832 (current diff)
parent 520975 54ed5eac2abca2519704c74bc5c421b846031504 (diff)
child 521002 bf190c7c3de34db32f46a258c396e5d885e491cf
push id 10862
push user ffxbld-merge
push date Mon, 11 Mar 2019 13:01:11 +0000
treeherder mozilla-beta@a2e7f5c935da
reviewers merge
milestone 67.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge mozilla-central to inbound. a=merge CLOSED TREE
modules/libpref/init/all.js
testing/web-platform/meta/html/semantics/embedded-content/media-elements/track/track-element/track-cues-enter-exit.html.ini
--- a/.cron.yml
+++ b/.cron.yml
@@ -106,16 +106,27 @@ jobs:
           target-tasks-method: customv8_update
           optimize-target-tasks: false
       run-on-projects:
           - mozilla-central
       when:
           by-project:
               mozilla-central: [{hour: 10, minute: 30}]
 
+    - name: chromium-update
+      job:
+          type: decision-task
+          treeherder-symbol: Chromium
+          target-tasks-method: chromium_update
+      run-on-projects:
+          - mozilla-central
+      when:
+          by-project:
+              mozilla-central: [{hour: 10, minute: 30}]
+
     - name: bouncer-check
       job:
           type: decision-task
           treeherder-symbol: Rel
           target-tasks-method: cron_bouncer_check
       run-on-projects:
           - mozilla-beta
           - mozilla-release
--- a/accessible/atk/nsMaiInterfaceText.cpp
+++ b/accessible/atk/nsMaiInterfaceText.cpp
@@ -282,27 +282,27 @@ static gchar* getTextBeforeOffsetCB(AtkT
   return DOMtoATK::Convert(autoStr);
 }
 
 static gint getCaretOffsetCB(AtkText* aText) {
   AccessibleWrap* accWrap = GetAccessibleWrap(ATK_OBJECT(aText));
   if (accWrap) {
     HyperTextAccessible* text = accWrap->AsHyperText();
     if (!text || !text->IsTextRole()) {
-      return 0;
+      return -1;
     }
 
     return static_cast<gint>(text->CaretOffset());
   }
 
   if (ProxyAccessible* proxy = GetProxy(ATK_OBJECT(aText))) {
     return static_cast<gint>(proxy->CaretOffset());
   }
 
-  return 0;
+  return -1;
 }
 
 static AtkAttributeSet* getRunAttributesCB(AtkText* aText, gint aOffset,
                                            gint* aStartOffset,
                                            gint* aEndOffset) {
   *aStartOffset = -1;
   *aEndOffset = -1;
   int32_t startOffset = 0, endOffset = 0;
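
The 0 to -1 change above follows the AtkText convention: atk_text_get_caret_offset() reports -1 when the caret is not located inside the element or the query fails, so returning 0 would be indistinguishable from a caret at the first character. A minimal sketch of a hypothetical ATK consumer honoring that convention (illustrative only, not part of this patch):

// Hypothetical ATK caller: treat -1 as "no caret", not as offset 0.
#include <atk/atk.h>

static void DescribeCaret(AtkText* aText) {
  gint offset = atk_text_get_caret_offset(aText);
  if (offset < 0) {
    g_message("no caret inside this element (or the query failed)");
    return;
  }
  g_message("caret at character offset %d", offset);
}
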
--- a/browser/app/profile/firefox.js
+++ b/browser/app/profile/firefox.js
@@ -1804,17 +1804,20 @@ pref("intl.multilingual.enabled", false)
 pref("intl.multilingual.downloadEnabled", false);
 #endif
 
 // Simulate conditions that will happen when the browser
 // is running with Fission enabled. This is meant to assist
 // development and testing of Fission.
 // The current simulated conditions are:
 // - Don't propagate events from subframes to JS child actors
-pref("browser.fission.simulate", false);
+pref("fission.frontend.simulate-events", false);
+// - Only deliver subframe messages that specify
+//   their destination (using the BrowsingContext id).
+pref("fission.frontend.simulate-messages", false);
 
 // Prio preferences
 // Only enable by default on Nightly.
 // On platforms that do not build libprio, do not set these prefs at all, which gives us a way to detect support.
 
 // Curve25519 public keys for Prio servers
 pref("prio.publicKeyA", "35AC1C7576C7C6EDD7FED6BCFC337B34D48CB4EE45C86BEEFB40BD8875707733");
 pref("prio.publicKeyB", "26E6674E65425B823F1F1D5F96E3BB3EF9E406EC7FBA7DEF8B08A35DD135AF50");
--- a/browser/base/content/test/keyboard/browser_toolbarKeyNav.js
+++ b/browser/base/content/test/keyboard/browser_toolbarKeyNav.js
@@ -47,28 +47,41 @@ function startFromUrlBar() {
 
 // The Reload button is disabled for a short time even after the page finishes
 // loading. Wait for it to be enabled.
 async function waitUntilReloadEnabled() {
   let button = document.getElementById("reload-button");
   await TestUtils.waitForCondition(() => !button.disabled);
 }
 
+// Opens a new, blank tab, executes a task and closes the tab.
+function withNewBlankTab(taskFn) {
+  return BrowserTestUtils.withNewTab("about:blank", async function() {
+    // For a blank tab, the Reload button should be disabled. However, when we
+    // open about:blank with BrowserTestUtils.withNewTab, this is unreliable.
+    // Therefore, explicitly disable the reload command.
+    // We disable the command (rather than disabling the button directly) so the
+    // button will be updated correctly for future page loads.
+    document.getElementById("Browser:Reload").setAttribute("disabled", "true");
+    await taskFn();
+  });
+}
+
 add_task(async function setPref() {
   await SpecialPowers.pushPrefEnv({
     set: [
       ["browser.toolbars.keyboard_navigation", true],
       ["accessibility.tabfocus", 7],
     ],
   });
 });
 
 // Test tab stops with no page loaded.
 add_task(async function testTabStopsNoPage() {
-  await BrowserTestUtils.withNewTab("about:blank", async function() {
+  await withNewBlankTab(async function() {
     startFromUrlBar();
     await expectFocusAfterKey("Shift+Tab", "home-button");
     await expectFocusAfterKey("Shift+Tab", "tabbrowser-tabs", true);
     await expectFocusAfterKey("Tab", "home-button");
     await expectFocusAfterKey("Tab", gURLBar.inputField);
     await expectFocusAfterKey("Tab", "library-button");
     await expectFocusAfterKey("Tab", gBrowser.selectedBrowser);
   });
@@ -120,17 +133,17 @@ add_task(async function testTabStopsWith
     startFromUrlBar();
     await expectFocusAfterKey("Tab", "library-button");
     await expectFocusAfterKey("Tab", gBrowser.selectedBrowser);
   });
 });
 
 // Test a focusable toolbartabstop which has no navigable buttons.
 add_task(async function testTabStopNoButtons() {
-  await BrowserTestUtils.withNewTab("about:blank", async function() {
+  await withNewBlankTab(async function() {
     // The Back, Forward and Reload buttons are all currently disabled.
     // The Home button is the only other button at that tab stop.
     CustomizableUI.removeWidgetFromArea("home-button");
     startFromUrlBar();
     await expectFocusAfterKey("Shift+Tab", "tabbrowser-tabs", true);
     await expectFocusAfterKey("Tab", gURLBar.inputField);
     CustomizableUI.reset();
     // Make sure the button is reachable now that it has been re-added.
--- a/browser/components/enterprisepolicies/EnterprisePolicies.js
+++ b/browser/components/enterprisepolicies/EnterprisePolicies.js
@@ -281,20 +281,33 @@ EnterprisePoliciesManager.prototype = {
 
   setSupportMenu(supportMenu) {
     SupportMenu = supportMenu;
   },
 
   getSupportMenu() {
     return SupportMenu;
   },
+
+  setExtensionPolicies(extensionPolicies) {
+    ExtensionPolicies = extensionPolicies;
+  },
+
+  getExtensionPolicy(extensionID) {
+    if (ExtensionPolicies &&
+        extensionID in ExtensionPolicies) {
+      return ExtensionPolicies[extensionID];
+    }
+    return null;
+  },
 };
 
 let DisallowedFeatures = {};
 let SupportMenu = null;
+let ExtensionPolicies = null;
 
 /**
  * areEnterpriseOnlyPoliciesAllowed
  *
  * Checks whether the policies marked as enterprise_only in the
  * schema are allowed to run on this browser.
  *
  * This is meant to only allow policies to run on ESR, but in practice
--- a/browser/components/enterprisepolicies/Policies.jsm
+++ b/browser/components/enterprisepolicies/Policies.jsm
@@ -61,16 +61,22 @@ var EXPORTED_SYMBOLS = ["Policies"];
  *   It will be different for each policy. It could be a boolean,
  *   a string, an array or a complex object. All parameters have
  *   been validated according to the schema, and no unknown
  *   properties will be present on them.
  *
  * The callbacks will be bound to their parent policy object.
  */
 var Policies = {
+  "3rdparty": {
+    onBeforeAddons(manager, param) {
+      manager.setExtensionPolicies(param.Extensions);
+    },
+  },
+
   "AppUpdateURL": {
     onBeforeAddons(manager, param) {
       setDefaultPref("app.update.url", param.href);
     },
   },
 
   "Authentication": {
     onBeforeAddons(manager, param) {
--- a/browser/components/enterprisepolicies/schemas/policies-schema.json
+++ b/browser/components/enterprisepolicies/schemas/policies-schema.json
@@ -1,12 +1,27 @@
 {
   "$schema": "http://json-schema.org/draft-04/schema#",
   "type": "object",
   "properties": {
+
+    "3rdparty": {
+      "type": "object",
+      "properties": {
+        "Extensions" : {
+          "type": "object",
+          "patternProperties": {
+            "^.*$": {
+              "type": "JSON"
+            }
+          }
+        }
+      }
+    },
+
     "AppUpdateURL": {
       "type": "URL"
     },
 
     "Authentication": {
       "type": "object",
       "properties": {
         "SPNEGO" : {
--- a/browser/components/enterprisepolicies/tests/browser/browser.ini
+++ b/browser/components/enterprisepolicies/tests/browser/browser.ini
@@ -20,16 +20,17 @@ support-files =
 skip-if = os != 'mac'
 [browser_policies_mistyped_json.js]
 [browser_policies_notice_in_aboutpreferences.js]
 [browser_policies_popups_cookies_addons_flash.js]
 [browser_policies_runOnce_helper.js]
 [browser_policies_setAndLockPref_API.js]
 [browser_policies_simple_pref_policies.js]
 [browser_policies_sorted_alphabetically.js]
+[browser_policy_3rdparty.js]
 [browser_policy_app_update.js]
 [browser_policy_app_update_URL.js]
 [browser_policy_block_about_addons.js]
 [browser_policy_block_about_config.js]
 [browser_policy_block_about_profiles.js]
 [browser_policy_block_about_support.js]
 [browser_policy_block_set_desktop_background.js]
 [browser_policy_bookmarks.js]
new file mode 100644
--- /dev/null
+++ b/browser/components/enterprisepolicies/tests/browser/browser_policy_3rdparty.js
@@ -0,0 +1,20 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+"use strict";
+
+add_task(async function setup() {
+  await setupPolicyEngineWithJson({
+                                    "policies": {
+                                      "3rdparty": {
+                                        "Extensions": {
+                                          "3rdparty-policy@mozilla.com": {
+                                            "string": "value",
+                                          },
+                                        },
+                                      },
+                                    },
+                                  });
+
+  let extensionPolicy = Services.policies.getExtensionPolicy("3rdparty-policy@mozilla.com");
+  Assert.deepEqual(extensionPolicy, {"string": "value"});
+});
--- a/browser/components/urlbar/UrlbarInput.jsm
+++ b/browser/components/urlbar/UrlbarInput.jsm
@@ -1004,22 +1004,30 @@ class UrlbarInput {
 
   _on_blur(event) {
     this.formatValue();
     // Respect the autohide preference for easier inspecting/debugging via
     // the browser toolbox.
     if (!UrlbarPrefs.get("ui.popup.disable_autohide")) {
       this.view.close(UrlbarUtils.CANCEL_REASON.BLUR);
     }
+    // We may have hidden popup notifications, show them again if necessary.
+    if (this.getAttribute("pageproxystate") != "valid") {
+      this.window.UpdatePopupNotificationsVisibility();
+    }
   }
 
   _on_focus(event) {
     this._updateUrlTooltip();
+    this.formatValue();
 
-    this.formatValue();
+    // Hide popup notifications, to reduce visual noise.
+    if (this.getAttribute("pageproxystate") != "valid") {
+      this.window.UpdatePopupNotificationsVisibility();
+    }
   }
 
   _on_mouseover(event) {
     this._updateUrlTooltip();
   }
 
   _on_mousedown(event) {
     if (event.originalTarget == this.inputField &&
--- a/browser/config/mozconfigs/linux64/plain-opt
+++ b/browser/config/mozconfigs/linux64/plain-opt
@@ -3,14 +3,15 @@ export LLVM_CONFIG="${TOOLTOOL_DIR}/clan
 
 CARGO="${TOOLTOOL_DIR}/rustc/bin/cargo"
 RUSTC="${TOOLTOOL_DIR}/rustc/bin/rustc"
 RUSTDOC="${TOOLTOOL_DIR}/rustc/bin/rustdoc"
 RUSTFMT="${TOOLTOOL_DIR}/rustc/bin/rustfmt"
 CBINDGEN="${TOOLTOOL_DIR}/cbindgen/cbindgen"
 
 export NODEJS="${TOOLTOOL_DIR}/node/bin/node"
+NASM="${TOOLTOOL_DIR}/nasm/nasm"
 
 CC="${TOOLTOOL_DIR}/clang/bin/clang"
 CXX="${TOOLTOOL_DIR}/clang/bin/clang++"
 
 mk_add_options "export PATH=${TOOLTOOL_DIR}/gcc/bin:${PATH}"
 mk_add_options "export LD_LIBRARY_PATH=${TOOLTOOL_DIR}/gcc/lib64:${TOOLTOOL_DIR}/gcc/lib32:${TOOLTOOL_DIR}/gcc/lib"
--- a/browser/locales/en-US/browser/policies/policies-descriptions.ftl
+++ b/browser/locales/en-US/browser/policies/policies-descriptions.ftl
@@ -4,16 +4,18 @@
 
 ## The Enterprise Policies feature is aimed at system administrators
 ## who want to deploy these settings across several Firefox installations
 ## all at once. This is traditionally done through the Windows Group Policy
 ## feature, but the system also supports other forms of deployment.
 ## These are short descriptions for individual policies, to be displayed
 ## in the documentation section in about:policies.
 
+policy-3rdparty = Set policies that WebExtensions can access via chrome.storage.managed.
+
 policy-AppUpdateURL = Set custom app update URL.
 
 policy-Authentication = Configure integrated authentication for websites that support it.
 
 policy-BlockAboutAddons = Block access to the Add-ons Manager (about:addons).
 
 policy-BlockAboutConfig = Block access to the about:config page.
 
--- a/devtools/client/accessibility/test/browser/browser.ini
+++ b/devtools/client/accessibility/test/browser/browser.ini
@@ -1,23 +1,27 @@
 [DEFAULT]
 tags = devtools
 subsuite = devtools
+skip-if = (os == 'win' && processor == 'aarch64')
 support-files =
   head.js
   !/devtools/client/shared/test/shared-head.js
   !/devtools/client/shared/test/test-actor.js
   !/devtools/client/shared/test/test-actor-registry.js
   !/devtools/client/inspector/test/shared-head.js
   !/devtools/client/shared/test/shared-redux-head.js
   !/devtools/client/shared/test/telemetry-test-helpers.js
 
 [browser_accessibility_context_menu_browser.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533184
 [browser_accessibility_context_menu_inspector.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533484
 [browser_accessibility_mutations.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533534
 [browser_accessibility_panel_highlighter.js]
 [browser_accessibility_panel_highlighter_multi_tab.js]
 skip-if = (os == 'linux' && debug && bits == 64) # Bug 1511247
 [browser_accessibility_relation_navigation.js]
 [browser_accessibility_reload.js]
 [browser_accessibility_sidebar.js]
 [browser_accessibility_sidebar_checks.js]
 [browser_accessibility_tree.js]
--- a/devtools/client/inspector/markup/test/browser.ini
+++ b/devtools/client/inspector/markup/test/browser.ini
@@ -92,18 +92,21 @@ skip-if = os == "mac" # Full keyboard na
 [browser_markup_accessibility_navigation.js]
 skip-if = os == "mac" # Full keyboard navigation on OSX only works if Full Keyboard Access setting is set to All Control in System Keyboard Preferences
 [browser_markup_accessibility_new_selection.js]
 [browser_markup_accessibility_navigation_after_edit.js]
 skip-if = os == "mac" # Full keyboard navigation on OSX only works if Full Keyboard Access setting is set to All Control in System Keyboard Preferences
 [browser_markup_accessibility_semantics.js]
 [browser_markup_anonymous_01.js]
 [browser_markup_anonymous_02.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1531584
 [browser_markup_anonymous_03.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1531584
 [browser_markup_anonymous_04.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1531584
 [browser_markup_copy_html.js]
 subsuite = clipboard
 [browser_markup_copy_image_data.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_markup_css_completion_style_attribute_01.js]
 [browser_markup_css_completion_style_attribute_02.js]
 [browser_markup_css_completion_style_attribute_03.js]
@@ -198,16 +201,17 @@ subsuite = clipboard
 [browser_markup_shadowdom_dynamic.js]
 [browser_markup_shadowdom_hover.js]
 [browser_markup_shadowdom_maxchildren.js]
 [browser_markup_shadowdom_mutations_shadow.js]
 [browser_markup_shadowdom_navigation.js]
 [browser_markup_shadowdom_nested_pick_inspect.js]
 [browser_markup_shadowdom_noslot.js]
 [browser_markup_shadowdom_open_debugger.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533507
 [browser_markup_shadowdom_shadowroot_mode.js]
 [browser_markup_shadowdom_show_nodes_button.js]
 [browser_markup_shadowdom_slotted_keyboard_focus.js]
 [browser_markup_shadowdom_slotupdate.js]
 [browser_markup_shadowdom_ua_widgets.js]
 [browser_markup_shadowdom_ua_widgets_with_nac.js]
 [browser_markup_tag_delete_whitespace_node.js]
 [browser_markup_tag_edit_01.js]
--- a/devtools/client/inspector/test/browser.ini
+++ b/devtools/client/inspector/test/browser.ini
@@ -44,30 +44,33 @@ support-files =
   img_browser_inspector_highlighter-eyedropper-image.png
   shared-head.js
   !/devtools/client/shared/test/shared-head.js
   !/devtools/client/shared/test/telemetry-test-helpers.js
   !/devtools/client/shared/test/test-actor.js
   !/devtools/client/shared/test/test-actor-registry.js
 
 [browser_inspector_addNode_01.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533529
 [browser_inspector_addNode_02.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533529
 [browser_inspector_addNode_03.js]
 [browser_inspector_addSidebarTab.js]
 [browser_inspector_breadcrumbs.js]
 [browser_inspector_breadcrumbs_highlight_hover.js]
 [browser_inspector_breadcrumbs_keybinding.js]
 [browser_inspector_breadcrumbs_keyboard_trap.js]
 skip-if = os == "mac" # Full keyboard navigation on OSX only works if Full Keyboard Access setting is set to All Control in System Keyboard Preferences
 [browser_inspector_breadcrumbs_mutations.js]
 [browser_inspector_breadcrumbs_namespaced.js]
 [browser_inspector_breadcrumbs_shadowdom.js]
 [browser_inspector_breadcrumbs_visibility.js]
 [browser_inspector_delete-selected-node-01.js]
 [browser_inspector_delete-selected-node-02.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533490
 [browser_inspector_delete-selected-node-03.js]
 [browser_inspector_destroy-after-navigation.js]
 [browser_inspector_destroy-before-ready.js]
 [browser_inspector_expand-collapse.js]
 [browser_inspector_highlighter-01.js]
 [browser_inspector_highlighter-02.js]
 [browser_inspector_highlighter-03.js]
 [browser_inspector_highlighter-04.js]
@@ -152,16 +155,17 @@ skip-if = (os == 'linux' && bits == 32 &
 [browser_inspector_menu-03-paste-items.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_inspector_menu-03-paste-items-svg.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_inspector_menu-04-use-in-console.js]
 [browser_inspector_menu-05-attribute-items.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533492
 [browser_inspector_menu-06-other.js]
 [browser_inspector_navigation.js]
 [browser_inspector_navigate_to_errors.js]
 [browser_inspector_open_on_neterror.js]
 [browser_inspector_pane_state_restore.js]
 [browser_inspector_pane-toggle-01.js]
 [browser_inspector_pane-toggle-02.js]
 [browser_inspector_pane-toggle-03.js]
--- a/devtools/server/tests/browser/browser.ini
+++ b/devtools/server/tests/browser/browser.ini
@@ -33,21 +33,26 @@ support-files =
   test-spawn-actor-in-parent.js
   inspector-helpers.js
   storage-helpers.js
   !/devtools/client/shared/test/shared-head.js
   !/devtools/client/shared/test/telemetry-test-helpers.js
   !/devtools/server/tests/mochitest/hello-actor.js
 
 [browser_accessibility_highlighter_infobar.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533184
 [browser_accessibility_infobar_show.js]
 [browser_accessibility_node.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533184
 [browser_accessibility_node_events.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533184
 [browser_accessibility_simple.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533184
 [browser_accessibility_walker.js]
+skip-if = (os == 'win' && processor == 'aarch64') # bug 1533487
 [browser_actor_error.js]
 [browser_animation_actor-lifetime.js]
 [browser_animation_emitMutations.js]
 [browser_animation_getProperties.js]
 [browser_animation_getMultipleStates.js]
 [browser_animation_getPlayers.js]
 [browser_animation_getStateAfterFinished.js]
 [browser_animation_getSubTreeAnimations.js]
--- a/dom/html/HTMLVideoElement.cpp
+++ b/dom/html/HTMLVideoElement.cpp
@@ -28,16 +28,17 @@
 #include "MediaDecoderStateMachine.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/dom/WakeLock.h"
 #include "mozilla/dom/power/PowerManagerService.h"
 #include "mozilla/dom/Performance.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "mozilla/dom/VideoPlaybackQuality.h"
 #include "mozilla/dom/VideoStreamTrack.h"
+#include "mozilla/Unused.h"
 
 #include <algorithm>
 #include <limits>
 
 nsGenericHTMLElement* NS_NewHTMLVideoElement(
     already_AddRefed<mozilla::dom::NodeInfo>&& aNodeInfo,
     mozilla::dom::FromParser aFromParser) {
   mozilla::dom::HTMLVideoElement* element =
@@ -175,19 +176,17 @@ HTMLVideoElement::IsAttributeMapped(cons
 nsMapRuleToAttributesFunc HTMLVideoElement::GetAttributeMappingFunction()
     const {
   return &MapAttributesIntoRule;
 }
 
 void HTMLVideoElement::UnbindFromTree(bool aDeep, bool aNullParent) {
   if (mVisualCloneSource) {
     mVisualCloneSource->EndCloningVisually();
-    SetVisualCloneSource(nullptr);
   } else if (mVisualCloneTarget) {
-    mVisualCloneTarget->SetVisualCloneSource(nullptr);
     EndCloningVisually();
   }
 
   HTMLMediaElement::UnbindFromTree(aDeep, aNullParent);
 }
 
 nsresult HTMLVideoElement::SetAcceptHeader(nsIHttpChannel* aChannel) {
   nsAutoCString value(
@@ -452,16 +451,21 @@ void HTMLVideoElement::CloneElementVisua
              "Can't clone a video that's not bound to a DOM tree.");
   MOZ_ASSERT(!aTargetVideo.mUnboundFromTree,
              "Can't clone to a video that's not bound to a DOM tree.");
   if (mUnboundFromTree || aTargetVideo.mUnboundFromTree) {
     rv.Throw(NS_ERROR_UNEXPECTED);
     return;
   }
 
+  // Do we already have a visual clone target? If so, shut it down.
+  if (mVisualCloneTarget) {
+    EndCloningVisually();
+  }
+
   if (!SetVisualCloneTarget(&aTargetVideo)) {
     rv.Throw(NS_ERROR_FAILURE);
     return;
   }
 
   if (!aTargetVideo.SetVisualCloneSource(this)) {
     mVisualCloneTarget = nullptr;
     rv.Throw(NS_ERROR_FAILURE);
@@ -508,13 +512,14 @@ void HTMLVideoElement::EndCloningVisuall
     VideoFrameContainer* container =
         mVisualCloneTarget->GetVideoFrameContainer();
     if (container && mVisualCloneTarget->mSelectedVideoStreamTrack) {
       mVisualCloneTarget->mSelectedVideoStreamTrack->RemoveVideoOutput(
           container);
     }
   }
 
-  mVisualCloneTarget = nullptr;
+  Unused << mVisualCloneTarget->SetVisualCloneSource(nullptr);
+  Unused << SetVisualCloneTarget(nullptr);
 }
 
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/html/TextTrackManager.cpp
+++ b/dom/html/TextTrackManager.cpp
@@ -194,17 +194,17 @@ void TextTrackManager::AddCues(TextTrack
 
   TextTrackCueList* cueList = aTextTrack->GetCues();
   if (cueList) {
     bool dummy;
     WEBVTT_LOGV("AddCues cueList->Length() %d", cueList->Length());
     for (uint32_t i = 0; i < cueList->Length(); ++i) {
       mNewCues->AddCue(*cueList->IndexedGetter(i, dummy));
     }
-    DispatchTimeMarchesOn();
+    TimeMarchesOn();
   }
 }
 
 void TextTrackManager::RemoveTextTrack(TextTrack* aTextTrack,
                                        bool aPendingListOnly) {
   if (!mPendingTextTracks || !mTextTracks) {
     return;
   }
@@ -219,25 +219,22 @@ void TextTrackManager::RemoveTextTrack(T
   // Remove the cues in mNewCues belong to aTextTrack.
   TextTrackCueList* removeCueList = aTextTrack->GetCues();
   if (removeCueList) {
     WEBVTT_LOGV("RemoveTextTrack removeCueList->Length() %d",
                 removeCueList->Length());
     for (uint32_t i = 0; i < removeCueList->Length(); ++i) {
       mNewCues->RemoveCue(*((*removeCueList)[i]));
     }
-    DispatchTimeMarchesOn();
+    TimeMarchesOn();
   }
 }
 
 void TextTrackManager::DidSeek() {
   WEBVTT_LOG("%p DidSeek", this);
-  if (mTextTracks) {
-    mTextTracks->DidSeek();
-  }
   if (mMediaElement) {
     mLastTimeMarchesOnCalled = mMediaElement->CurrentTime();
     WEBVTT_LOGV("DidSeek set mLastTimeMarchesOnCalled %lf",
                 mLastTimeMarchesOnCalled);
   }
   mHasSeeked = true;
 }
 
@@ -283,30 +280,27 @@ void TextTrackManager::UpdateCueDisplay(
   }
 }
 
 void TextTrackManager::NotifyCueAdded(TextTrackCue& aCue) {
   WEBVTT_LOG("NotifyCueAdded");
   if (mNewCues) {
     mNewCues->AddCue(aCue);
   }
-  DispatchTimeMarchesOn();
+  TimeMarchesOn();
   ReportTelemetryForCue();
 }
 
 void TextTrackManager::NotifyCueRemoved(TextTrackCue& aCue) {
   WEBVTT_LOG("NotifyCueRemoved");
   if (mNewCues) {
     mNewCues->RemoveCue(aCue);
   }
-  DispatchTimeMarchesOn();
-  if (aCue.GetActive()) {
-    // We remove an active cue, need to update the display.
-    DispatchUpdateCueDisplay();
-  }
+  TimeMarchesOn();
+  DispatchUpdateCueDisplay();
 }
 
 void TextTrackManager::PopulatePendingList() {
   if (!mTextTracks || !mPendingTextTracks || !mMediaElement) {
     return;
   }
   uint32_t len = mTextTracks->Length();
   bool dummy;
@@ -608,29 +602,30 @@ void TextTrackManager::DispatchTimeMarch
       mTimeMarchesOnDispatched = true;
     }
   }
 }
 
 // https://html.spec.whatwg.org/multipage/embedded-content.html#time-marches-on
 void TextTrackManager::TimeMarchesOn() {
   NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
+  mTimeMarchesOnDispatched = false;
+
   CycleCollectedJSContext* context = CycleCollectedJSContext::Get();
   if (context && context->IsInStableOrMetaStableState()) {
     // FireTimeUpdate can be called while at stable state following a
     // current position change which triggered a state watcher in MediaDecoder
     // (see bug 1443429).
     // TimeMarchesOn() will modify JS attributes which is forbidden while in
     // stable state. So we dispatch a task to perform such operation later
     // instead.
     DispatchTimeMarchesOn();
     return;
   }
   WEBVTT_LOG("TimeMarchesOn");
-  mTimeMarchesOnDispatched = false;
 
   // Early return if we don't have any TextTracks or shutting down.
   if (!mTextTracks || mTextTracks->Length() == 0 || IsShutdown()) {
     return;
   }
 
   nsISupports* parentObject = mMediaElement->OwnerDoc()->GetParentObject();
   if (NS_WARN_IF(!parentObject)) {
@@ -656,23 +651,17 @@ void TextTrackManager::TimeMarchesOn() {
   // Step 1, 2.
   RefPtr<TextTrackCueList> currentCues = new TextTrackCueList(window);
   RefPtr<TextTrackCueList> otherCues = new TextTrackCueList(window);
   bool dummy;
   for (uint32_t index = 0; index < mTextTracks->Length(); ++index) {
     TextTrack* ttrack = mTextTracks->IndexedGetter(index, dummy);
     if (ttrack && dummy) {
       // TODO: call GetCueListByTimeInterval on mNewCues?
-      ttrack->UpdateActiveCueList();
-      TextTrackCueList* activeCueList = ttrack->GetActiveCues();
-      if (activeCueList) {
-        for (uint32_t i = 0; i < activeCueList->Length(); ++i) {
-          currentCues->AddCue(*((*activeCueList)[i]));
-        }
-      }
+      ttrack->GetCurrentCueList(currentCues);
     }
   }
   WEBVTT_LOGV("TimeMarchesOn currentCues %d", currentCues->Length());
   // Populate otherCues with 'non-active" cues.
   if (hasNormalPlayback) {
     if (currentPlaybackTime < mLastTimeMarchesOnCalled) {
       // TODO: Add log and find the root cause why the
       // playback position goes backward.
@@ -826,17 +815,17 @@ void TextTrackManager::TimeMarchesOn() {
 
   // Step 18.
   UpdateCueDisplay();
 }
 
 void TextTrackManager::NotifyCueUpdated(TextTrackCue* aCue) {
   // TODO: Add/Reorder the cue to mNewCues if we have some optimization?
   WEBVTT_LOG("NotifyCueUpdated");
-  DispatchTimeMarchesOn();
+  TimeMarchesOn();
   // For the case "Texttrack.mode = hidden/showing", if the mode
   // changing between showing and hidden, TimeMarchesOn
   // doesn't render the cue. Call DispatchUpdateCueDisplay() explicitly.
   DispatchUpdateCueDisplay();
 }
 
 void TextTrackManager::NotifyReset() {
   WEBVTT_LOG("NotifyReset");
--- a/dom/media/BitReader.cpp
+++ b/dom/media/BitReader.cpp
@@ -62,17 +62,18 @@ uint32_t BitReader::ReadUE() {
   if (i == 32) {
     // This can happen if the data is invalid, or if it's
     // short, since ReadBit() will return 0 when it runs
     // off the end of the buffer.
     NS_WARNING("Invalid H.264 data");
     return 0;
   }
   uint32_t r = ReadBits(i);
-  r += (1 << i) - 1;
+  r += (uint32_t(1) << i) - 1;
+
   return r;
 }
 
 // Read signed integer Exp-Golomb-coded.
 int32_t BitReader::ReadSE() {
   int32_t r = ReadUE();
   if (r & 1) {
     return (r + 1) / 2;
--- a/dom/media/TextTrack.cpp
+++ b/dom/media/TextTrack.cpp
@@ -123,82 +123,40 @@ void TextTrack::AddCue(TextTrackCue& aCu
   mCueList->AddCue(aCue);
   aCue.SetTrack(this);
   if (mTextTrackList) {
     HTMLMediaElement* mediaElement = mTextTrackList->GetMediaElement();
     if (mediaElement && (mMode != TextTrackMode::Disabled)) {
       mediaElement->NotifyCueAdded(aCue);
     }
   }
-  SetDirty();
 }
 
 void TextTrack::RemoveCue(TextTrackCue& aCue, ErrorResult& aRv) {
   // Bug1304948, check the aCue belongs to the TextTrack.
   mCueList->RemoveCue(aCue, aRv);
   if (aRv.Failed()) {
     return;
   }
   aCue.SetActive(false);
   aCue.SetTrack(nullptr);
   if (mTextTrackList) {
     HTMLMediaElement* mediaElement = mTextTrackList->GetMediaElement();
     if (mediaElement) {
       mediaElement->NotifyCueRemoved(aCue);
     }
   }
-  SetDirty();
 }
 
 void TextTrack::SetCuesDirty() {
   for (uint32_t i = 0; i < mCueList->Length(); i++) {
     ((*mCueList)[i])->Reset();
   }
 }
 
-void TextTrack::UpdateActiveCueList() {
-  if (!mTextTrackList) {
-    return;
-  }
-
-  HTMLMediaElement* mediaElement = mTextTrackList->GetMediaElement();
-  if (!mediaElement) {
-    return;
-  }
-
-  // If we are dirty, i.e. an event happened that may cause the sorted mCueList
-  // to have changed like a seek or an insert for a cue, than we need to rebuild
-  // the active cue list from scratch.
-  if (mDirty) {
-    mCuePos = 0;
-    mDirty = false;
-    mActiveCueList->RemoveAll();
-  }
-
-  double playbackTime = mediaElement->CurrentTime();
-  // Remove all the cues from the active cue list whose end times now occur
-  // earlier then the current playback time.
-  for (uint32_t i = mActiveCueList->Length(); i > 0; i--) {
-    if ((*mActiveCueList)[i - 1]->EndTime() <= playbackTime) {
-      mActiveCueList->RemoveCueAt(i - 1);
-    }
-  }
-  // Add all the cues, starting from the position of the last cue that was
-  // added, that have valid start and end times for the current playback time.
-  // We can stop iterating safely once we encounter a cue that does not have
-  // a valid start time as the cue list is sorted.
-  for (; mCuePos < mCueList->Length() &&
-         (*mCueList)[mCuePos]->StartTime() <= playbackTime;
-       mCuePos++) {
-    if ((*mCueList)[mCuePos]->EndTime() > playbackTime) {
-      mActiveCueList->AddCue(*(*mCueList)[mCuePos]);
-    }
-  }
-}
-
 TextTrackCueList* TextTrack::GetActiveCues() {
   if (mMode != TextTrackMode::Disabled) {
     return mActiveCueList;
   }
   return nullptr;
 }
 
 void TextTrack::GetActiveCueArray(nsTArray<RefPtr<TextTrackCue> >& aCues) {
@@ -247,17 +205,16 @@ void TextTrack::SetCuesInactive() { mCue
 void TextTrack::NotifyCueUpdated(TextTrackCue* aCue) {
   mCueList->NotifyCueUpdated(aCue);
   if (mTextTrackList) {
     HTMLMediaElement* mediaElement = mTextTrackList->GetMediaElement();
     if (mediaElement) {
       mediaElement->NotifyCueUpdated(aCue);
     }
   }
-  SetDirty();
 }
 
 void TextTrack::GetLabel(nsAString& aLabel) const {
   if (mTrackElement) {
     mTrackElement->GetLabel(aLabel);
   } else {
     aLabel = mLabel;
   }
@@ -293,10 +250,45 @@ bool TextTrack::IsLoaded() {
     nsAutoString src;
     if (!(mTrackElement->GetAttr(kNameSpaceID_None, nsGkAtoms::src, src))) {
       return true;
     }
   }
   return (mReadyState >= Loaded);
 }
 
+void TextTrack::NotifyCueActiveStateChanged(TextTrackCue* aCue) {
+  MOZ_ASSERT(aCue);
+  if (aCue->GetActive()) {
+    MOZ_ASSERT(!mActiveCueList->IsCueExist(aCue));
+    mActiveCueList->AddCue(*aCue);
+  } else {
+    MOZ_ASSERT(mActiveCueList->IsCueExist(aCue));
+    mActiveCueList->RemoveCue(*aCue);
+  }
+}
+
+void TextTrack::GetCurrentCueList(RefPtr<TextTrackCueList>& aCueList) const {
+  if (!mTextTrackList) {
+    return;
+  }
+
+  const HTMLMediaElement* mediaElement = mTextTrackList->GetMediaElement();
+  if (!mediaElement) {
+    return;
+  }
+
+  // According to `time marches on` step 1, the current cue list contains the cues
+  // whose start times are less than or equal to the current playback position
+  // and whose end times are greater than the current playback position.
+  // https://html.spec.whatwg.org/multipage/media.html#time-marches-on
+  MOZ_ASSERT(aCueList);
+  const double playbackTime = mediaElement->CurrentTime();
+  for (uint32_t idx = 0; idx < mCueList->Length(); idx++) {
+    TextTrackCue* cue = (*mCueList)[idx];
+    if (cue->StartTime() <= playbackTime && cue->EndTime() > playbackTime) {
+      aCueList->AddCue(*cue);
+    }
+  }
+}
+
 }  // namespace dom
 }  // namespace mozilla
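
GetCurrentCueList() above implements step 1 of "time marches on": at playback position t, the current cues are exactly those with startTime <= t < endTime. A minimal stand-alone sketch of that partition, using a hypothetical Cue struct in place of TextTrackCue:

// Partition cues into "current" and "other" per the spec's step 1
// (https://html.spec.whatwg.org/multipage/media.html#time-marches-on).
#include <vector>

struct Cue {
  double startTime;
  double endTime;
};

static void PartitionCues(const std::vector<Cue>& aCues, double aPlaybackTime,
                          std::vector<Cue>& aCurrentCues,
                          std::vector<Cue>& aOtherCues) {
  for (const Cue& cue : aCues) {
    if (cue.startTime <= aPlaybackTime && cue.endTime > aPlaybackTime) {
      aCurrentCues.push_back(cue);  // What GetCurrentCueList() collects.
    } else {
      aOtherCues.push_back(cue);
    }
  }
}
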
--- a/dom/media/TextTrack.h
+++ b/dom/media/TextTrack.h
@@ -64,17 +64,16 @@ class TextTrack final : public DOMEventT
   TextTrackCueList* GetCues() const {
     if (mMode == TextTrackMode::Disabled) {
       return nullptr;
     }
     return mCueList;
   }
 
   TextTrackCueList* GetActiveCues();
-  void UpdateActiveCueList();
   void GetActiveCueArray(nsTArray<RefPtr<TextTrackCue> >& aCues);
 
   TextTrackReadyState ReadyState() const;
   void SetReadyState(TextTrackReadyState aState);
   void SetReadyState(uint32_t aReadyState);
 
   void AddCue(TextTrackCue& aCue);
   void RemoveCue(TextTrackCue& aCue, ErrorResult& aRv);
@@ -94,16 +93,25 @@ class TextTrack final : public DOMEventT
   void SetCuesInactive();
 
   void NotifyCueUpdated(TextTrackCue* aCue);
 
   void DispatchAsyncTrustedEvent(const nsString& aEventName);
 
   bool IsLoaded();
 
+  // Called when the associated cue's active flag has changed, so that we can
+  // add the cue to, or remove it from, the active cue list.
+  void NotifyCueActiveStateChanged(TextTrackCue* aCue);
+
+  // Use this function to request the current cues, whose start times are less
+  // than or equal to the current playback position and whose end times are
+  // greater than the current playback position.
+  void GetCurrentCueList(RefPtr<TextTrackCueList>& aCueList) const;
+
  private:
   ~TextTrack();
 
   RefPtr<TextTrackList> mTextTrackList;
 
   TextTrackKind mKind;
   nsString mLabel;
   nsString mLanguage;
--- a/dom/media/TextTrackCue.cpp
+++ b/dom/media/TextTrackCue.cpp
@@ -211,10 +211,22 @@ void TextTrackCue::NotifyDisplayStatesCh
     return;
   }
 
   mTrack->GetTextTrackList()
       ->GetMediaElement()
       ->NotifyCueDisplayStatesChanged();
 }
 
+void TextTrackCue::SetActive(bool aActive) {
+  if (mActive == aActive) {
+    return;
+  }
+
+  mActive = aActive;
+  mDisplayState = mActive ? mDisplayState : nullptr;
+  if (mTrack) {
+    mTrack->NotifyCueActiveStateChanged(this);
+  }
+}
+
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/media/TextTrackCue.h
+++ b/dom/media/TextTrackCue.h
@@ -275,24 +275,17 @@ class TextTrackCue final : public DOMEve
    *
    * Returns a DocumentFragment that is the head of the tree of anonymous
    * content.
    */
   already_AddRefed<DocumentFragment> GetCueAsHTML();
 
   void SetTrackElement(HTMLTrackElement* aTrackElement);
 
-  void SetActive(bool aActive) {
-    if (mActive == aActive) {
-      return;
-    }
-
-    mActive = aActive;
-    mDisplayState = mActive ? mDisplayState : nullptr;
-  }
+  void SetActive(bool aActive);
 
   bool GetActive() { return mActive; }
 
  private:
   ~TextTrackCue();
 
   void NotifyCueUpdated(TextTrackCue* aCue) {
     if (mTrack) {
--- a/dom/media/TextTrackList.cpp
+++ b/dom/media/TextTrackList.cpp
@@ -100,22 +100,16 @@ TextTrack* TextTrackList::GetTrackById(c
 }
 
 void TextTrackList::RemoveTextTrack(TextTrack* aTrack) {
   if (mTextTracks.RemoveElement(aTrack)) {
     CreateAndDispatchTrackEventRunner(aTrack, NS_LITERAL_STRING("removetrack"));
   }
 }
 
-void TextTrackList::DidSeek() {
-  for (uint32_t i = 0; i < mTextTracks.Length(); i++) {
-    mTextTracks[i]->SetDirty();
-  }
-}
-
 class TrackEventRunner : public Runnable {
  public:
   TrackEventRunner(TextTrackList* aList, Event* aEvent)
       : Runnable("dom::TrackEventRunner"), mList(aList), mEvent(aEvent) {}
 
   NS_IMETHOD Run() override { return mList->DispatchTrackEvent(mEvent); }
 
   RefPtr<TextTrackList> mList;
--- a/dom/media/TextTrackList.h
+++ b/dom/media/TextTrackList.h
@@ -45,17 +45,16 @@ class TextTrackList final : public DOMEv
       TextTrackKind aKind, const nsAString& aLabel, const nsAString& aLanguage,
       TextTrackMode aMode, TextTrackReadyState aReadyState,
       TextTrackSource aTextTrackSource, const CompareTextTracks& aCompareTT);
   TextTrack* GetTrackById(const nsAString& aId);
 
   void AddTextTrack(TextTrack* aTextTrack, const CompareTextTracks& aCompareTT);
 
   void RemoveTextTrack(TextTrack* aTrack);
-  void DidSeek();
 
   HTMLMediaElement* GetMediaElement();
   void SetTextTrackManager(TextTrackManager* aTextTrackManager);
 
   nsresult DispatchTrackEvent(Event* aEvent);
   void CreateAndDispatchChangeEvent();
   void SetCuesInactive();
 
new file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..24eb3be139090f822e6d664dc8b58c2498d8f4ac
GIT binary patch
literal 15036
zc%1E8Yfuwc6h1e*dCJ&C0wSUW1Pu&z04btHtDqEZY1OKISf>ryAkl<`WO=Anqf$|)
z9ZOrqI{ks~$FwtI)oP!yQf-}SM-&uo9idi`PHpjRI~})oHyc2OJJy+wKX%XT$)4<Y
z_MH3exqHr^1%RjmcD~!`PD=#<gwH9wZTGsI00|47E>|hO?r@eCGWZNl8#0e#JqA#c
z45%>!5b#Hr-1X=F|Kort3IO^h>#@wk_N(kXAt&m}DMsV--YXA~m-6ETQNrIi8^e0>
zIm+hnvYmGX;|llEMNY<U!G_7n^vr!mo#35P$T&Q~Jf(KV);({w%VRU;*>EJ&ObogB
zz+p~%Q2{oLrOp7sZvJsh0E=Qgwt{X{LP|UiQ^2Mgy==Y%+x1@7%XITOWGS_#^zt|`
zc}zVQ8@auC+$RwnhkwW7sM&~tXs!i@4a4V;Lc{$XhQ%G>H71i8P=*i2Xw?)be+E>h
z{PT@yO+^s`+Wbd!623ds>T-CBtd4U2@dk403hXwAVCmf!uUlLSuMu8Ws7i2o<#^fc
zd<8Jk;B{KWqOp#_Ity!smtpxhVA7N1|3h(VGhU-rT%$r<;~?>@>{>Uvj{O#Nd&rn0
zoX0x&5vbJiHb0`gabDKy66=lkLc(@{$A83);|4%F?nk31U>8FP!{Tl;WG7qb%;z2F
zx@~i~CaxLulXE8J07Cq$i(g*+8u})d+i`Qq%Y#Jhr~w*93VV#-Yh%&CPCEygff8)I
zBP;El@6L1LT5`qzfiqQuL~nepMiKO;{54e24s=iMGGt6Cbg?dPq04O=F+3SEvL@xx
zbRei;#|x&uVbEJ=*1#UR-KSmu`|Yg7oF5DtMHJ<yP%J16h%z9qj(?Sr5(y$M43L7N
zl`6GHtLuXu_US=^C`v(7N+q8LEy6g^24%mPl<}(2sTOr?u`zXN)ke*btlC3i)9O3o
zM&x^zX?5ZKBL+mq4;^MsNKAVoePqU{(b*Fw<~*G{Y4QuxXUu%@rI%l^GPZ&_h4#5#
zwxqPIeEx#v6>qMnT)ArXyYIcfc3t)Q4>oMtyk+aQ?K^hv+VjcYefvK>aPYHl4%dBq
z<mh+bA3Jffq4Ct|rZZ<-e!1}L#n!e<?Y~{QdhPm+o40QN^*7H8KTi~t9(frsF9l6g
zw3_FI6lJ`FfmX((sQQhcs<sq|#-=XS7_+K2)*jLh88NLREZ<YF3y(`{iNC^A6PP_1
zv1Jd%tdm$5ub&_r6oOFrP*N17R4K8jG%8*+I*lMYQ9=ai%gbY(!UsV(0!`CueCYdV
z`w;tUkHVe9+cJ|o3n3KV;0DS7Pr;ofmo}{ivxJm8E<w)q66AbBf}BrEkaL3sIX6m>
z^C<~(J}p7c@_d$r%JW$oD$i$Ws63ygq4Io|hRX9<8Y<6cX{bD(rJ?eCmWImnSsE(O
zXKAQBpQWLaoX=+fAd&G}g^$i=)U|%R<>HsMZL2EVt3PaA_eJZ<s<sUkfh)YjK-SLk
z-|Iox(Ow}O<Q_;<CVWb0802Iu{3nV=l~RB3^1<ZUG0zk)FHX{LHh!Mq!`BbTAAoT{
zE&1X0;I_8Bh_y}Jd7EmNZNRx}m3}R^booDD`GNngM?4fyp4>KjMJtZK8x=u;FUe%=
z&>dG_qWyFl<)@5PMR)K2a71pZzPM<%xu&*gmesgF;kESZ{e5dLWFA>KYH?`Vtwq~U
zROjz~d+W1)4R^I=dP8AE=JR{SJF9pakul8*(2xK6&b~z7U2%0g0w@h80;+;!0;+*=
z1XKr82xthrPC)yDn}F)UM?m|*I|S4SH3T#qb`#JD@DtETI8H#LpqYRUh7JO1LO?)c
z5vjg$h*aO9h*V!QBGoq$k?Na-NcA0#NcBxdr23{JQhn19slMrmRNo9ls_$sjYkf&g
z!~}3&ZX$9Mk()?QsN6(4QBo60mYay&L_lsLaua#TCQ^r+h#p`hCwCFKi^yH1Csght
G0rX!AEnrIk
--- a/dom/media/test/reftest/reftest.list
+++ b/dom/media/test/reftest/reftest.list
@@ -1,4 +1,5 @@
 skip-if(Android) fuzzy-if(OSX,0-22,0-49977) fuzzy-if(webrender&&cocoaWidget,23-23,76795-76795) skip-if(winWidget) fuzzy-if(gtkWidget&&layersGPUAccelerated,0-70,0-600) HTTP(..) == short.mp4.firstframe.html short.mp4.firstframe-ref.html
 skip-if(Android) fuzzy-if(OSX,0-23,0-51392) fuzzy-if(webrender&&cocoaWidget,23-23,76798-76798) fuzzy-if(winWidget,0-59,0-76797) fuzzy-if(gtkWidget&&layersGPUAccelerated,0-60,0-1800) HTTP(..) == short.mp4.lastframe.html short.mp4.lastframe-ref.html
 skip-if(Android) skip-if(winWidget) fuzzy-if(gtkWidget&&layersGPUAccelerated,0-55,0-4281) fuzzy-if(OSX,0-3,0-111852) HTTP(..) == bipbop_300_215kbps.mp4.lastframe.html bipbop_300_215kbps.mp4.lastframe-ref.html
 skip-if(Android) fuzzy-if(OSX,0-25,0-175921) fuzzy-if(winWidget,0-71,0-179198) HTTP(..) == gizmo.mp4.seek.html gizmo.mp4.55thframe-ref.html
+skip-if(Android) == vtt_update_display_after_removed_cue.html vtt_update_display_after_removed_cue_ref.html
new file mode 100644
--- /dev/null
+++ b/dom/media/test/reftest/vtt_update_display_after_removed_cue.html
@@ -0,0 +1,36 @@
+<!DOCTYPE HTML>
+<html class="reftest-wait">
+<head>
+</head>
+<body>
+<video id="v1" autoplay></video>
+<script type="text/javascript">
+
+/**
+ * This test ensures that we update the cue display after removing a cue from
+ * the text track; the removed cue should not be displayed in the video's
+ * rendering area.
+ */
+function testUpdateDisplayAfterRemovedCue() {
+  let video = document.getElementById("v1");
+  video.src = "../black.mp4";
+  let cue = new VTTCue(0, 4, "hello testing");
+  let track = video.addTextTrack("captions");
+  track.mode = "showing";
+  track.addCue(cue);
+  cue.onenter = () => {
+    cue.onenter = null;
+    track.removeCue(cue);
+    video.pause();
+    video.onpause = () => {
+      video.onpause = null;
+      document.documentElement.removeAttribute('class');
+    }
+  }
+};
+
+window.addEventListener("MozReftestInvalidate",
+                        testUpdateDisplayAfterRemovedCue);
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/dom/media/test/reftest/vtt_update_display_after_removed_cue_ref.html
@@ -0,0 +1,6 @@
+<!DOCTYPE HTML>
+<html>
+<body>
+<video id="v1" src="../black.mp4"></video>
+</body>
+</html>
--- a/gfx/layers/wr/AsyncImagePipelineManager.cpp
+++ b/gfx/layers/wr/AsyncImagePipelineManager.cpp
@@ -222,16 +222,20 @@ Maybe<TextureHost::ResourceUpdateOp> Asy
       HoldExternalImage(aPipelineId, aEpoch, aPipeline->mWrTextureWrapper);
     }
     return Nothing();
   }
 
   aPipeline->mCurrentTexture = texture;
 
   WebRenderTextureHost* wrTexture = texture->AsWebRenderTextureHost();
+  MOZ_ASSERT(wrTexture);
+  if (!wrTexture) {
+    gfxCriticalNote << "WebRenderTextureHost is not used";
+  }
 
   bool useExternalImage = !gfxEnv::EnableWebRenderRecording() && wrTexture;
   aPipeline->mUseExternalImage = useExternalImage;
 
   // Use WebRenderTextureHostWrapper only for video.
   // And WebRenderTextureHostWrapper could be used only with
   // WebRenderTextureHost that supports NativeTexture
   bool useWrTextureWrapper = useExternalImage && wrTexture &&
@@ -381,17 +385,17 @@ void AsyncImagePipelineManager::ApplyAsy
     // because the previous one is still up to date. We may, however, have
     // updated some resources.
 
     // Use transaction of scene builder thread to notify epoch.
     // It is for making epoch update consistent.
     aSceneBuilderTxn.UpdateEpoch(aPipelineId, aEpoch);
     if (aPipeline->mCurrentTexture) {
       HoldExternalImage(aPipelineId, aEpoch,
-                        aPipeline->mCurrentTexture->AsWebRenderTextureHost());
+                        aPipeline->mCurrentTexture);
     }
     return;
   }
 
   aPipeline->mIsChanged = false;
 
   wr::LayoutSize contentSize{aPipeline->mScBounds.Width(),
                              aPipeline->mScBounds.Height()};
@@ -424,17 +428,17 @@ void AsyncImagePipelineManager::ApplyAsy
 
     if (aPipeline->mUseExternalImage) {
       MOZ_ASSERT(aPipeline->mCurrentTexture->AsWebRenderTextureHost());
       Range<wr::ImageKey> range_keys(&keys[0], keys.Length());
       aPipeline->mCurrentTexture->PushDisplayItems(
           builder, wr::ToRoundedLayoutRect(rect), wr::ToRoundedLayoutRect(rect),
           aPipeline->mFilter, range_keys);
       HoldExternalImage(aPipelineId, aEpoch,
-                        aPipeline->mCurrentTexture->AsWebRenderTextureHost());
+                        aPipeline->mCurrentTexture);
     } else {
       MOZ_ASSERT(keys.Length() == 1);
       builder.PushImage(wr::ToRoundedLayoutRect(rect),
                         wr::ToRoundedLayoutRect(rect), true, aPipeline->mFilter,
                         keys[0]);
     }
   }
 
@@ -505,17 +509,17 @@ void AsyncImagePipelineManager::SetEmpty
   txn.SetDisplayList(
       gfx::Color(0.f, 0.f, 0.f, 0.f), epoch,
       LayerSize(pipeline->mScBounds.Width(), pipeline->mScBounds.Height()),
       aPipelineId, builderContentSize, dl.dl_desc, dl.dl);
 }
 
 void AsyncImagePipelineManager::HoldExternalImage(
     const wr::PipelineId& aPipelineId, const wr::Epoch& aEpoch,
-    WebRenderTextureHost* aTexture) {
+    TextureHost* aTexture) {
   if (mDestroyed) {
     return;
   }
   MOZ_ASSERT(aTexture);
 
   PipelineTexturesHolder* holder =
       mPipelineTexturesHolders.Get(wr::AsUint64(aPipelineId));
   MOZ_ASSERT(holder);
--- a/gfx/layers/wr/AsyncImagePipelineManager.h
+++ b/gfx/layers/wr/AsyncImagePipelineManager.h
@@ -49,17 +49,17 @@ class AsyncImagePipelineManager final {
   void AddPipeline(const wr::PipelineId& aPipelineId,
                    WebRenderBridgeParent* aWrBridge);
   void RemovePipeline(const wr::PipelineId& aPipelineId,
                       const wr::Epoch& aEpoch);
   WebRenderBridgeParent* GetWrBridge(const wr::PipelineId& aPipelineId);
 
   void HoldExternalImage(const wr::PipelineId& aPipelineId,
                          const wr::Epoch& aEpoch,
-                         WebRenderTextureHost* aTexture);
+                         TextureHost* aTexture);
   void HoldExternalImage(const wr::PipelineId& aPipelineId,
                          const wr::Epoch& aEpoch,
                          WebRenderTextureHostWrapper* aWrTextureWrapper);
   void HoldExternalImage(const wr::PipelineId& aPipelineId,
                          const wr::Epoch& aEpoch,
                          const wr::ExternalImageId& aImageId);
 
   // This is called from the Renderer thread to notify this class about the
new file mode 100644
--- /dev/null
+++ b/gfx/wr/webrender/res/cs_gradient.glsl
@@ -0,0 +1,56 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include shared
+
+varying float vPos;
+flat varying vec4 vStops;
+flat varying vec4 vColor0;
+flat varying vec4 vColor1;
+flat varying vec4 vColor2;
+flat varying vec4 vColor3;
+
+#ifdef WR_VERTEX_SHADER
+
+in vec4 aTaskRect;
+in float aAxisSelect;
+in vec4 aStops;
+in vec4 aColor0;
+in vec4 aColor1;
+in vec4 aColor2;
+in vec4 aColor3;
+in vec2 aStartStop;
+
+void main(void) {
+    vPos = mix(aStartStop.x, aStartStop.y, mix(aPosition.x, aPosition.y, aAxisSelect));
+
+    vStops = aStops;
+    vColor0 = aColor0;
+    vColor1 = aColor1;
+    vColor2 = aColor2;
+    vColor3 = aColor3;
+
+    gl_Position = uTransform * vec4(aTaskRect.xy + aTaskRect.zw * aPosition.xy, 0.0, 1.0);
+}
+#endif
+
+#ifdef WR_FRAGMENT_SHADER
+float linear_step(float edge0, float edge1, float x) {
+    if (edge0 >= edge1) {
+        return 0.0;
+    }
+
+    return clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);
+}
+
+void main(void) {
+    vec4 color = vColor0;
+
+    color = mix(color, vColor1, linear_step(vStops.x, vStops.y, vPos));
+    color = mix(color, vColor2, linear_step(vStops.y, vStops.z, vPos));
+    color = mix(color, vColor3, linear_step(vStops.z, vStops.w, vPos));
+
+    oFragColor = color;
+}
+#endif
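
The fragment shader above evaluates a clamped four-stop gradient as three successive linear_step mixes. A scalar C++ sketch of the same math, one color channel at a time, assuming stop offsets are sorted and normalized to [0, 1] (illustrative only; the shader operates on vec4 colors):

// Scalar model of cs_gradient.glsl's fast-path blend.
#include <algorithm>

static float LinearStep(float edge0, float edge1, float x) {
  if (edge0 >= edge1) {
    return 0.0f;  // Degenerate segment, as in the shader.
  }
  return std::clamp((x - edge0) / (edge1 - edge0), 0.0f, 1.0f);
}

static float Mix(float a, float b, float t) { return a + (b - a) * t; }

// aStops[0..3]: offsets in [0, 1]; aColors[0..3]: one channel's values.
static float EvaluateGradientChannel(const float aStops[4],
                                     const float aColors[4], float aPos) {
  float c = aColors[0];
  c = Mix(c, aColors[1], LinearStep(aStops[0], aStops[1], aPos));
  c = Mix(c, aColors[2], LinearStep(aStops[1], aStops[2], aPos));
  c = Mix(c, aColors[3], LinearStep(aStops[2], aStops[3], aPos));
  return c;
}
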
--- a/gfx/wr/webrender/src/batch.rs
+++ b/gfx/wr/webrender/src/batch.rs
@@ -2072,39 +2072,87 @@ impl AlphaBatchBuilder {
                                     z_id,
                                     base_instance.into(),
                                 );
                             }
                         }
                     }
                 }
             }
-            PrimitiveInstanceKind::LinearGradient { data_handle, ref visible_tiles_range, .. } => {
+            PrimitiveInstanceKind::LinearGradient { data_handle, gradient_index, .. } => {
+                let gradient = &ctx.prim_store.linear_gradients[gradient_index];
                 let prim_data = &ctx.data_stores.linear_grad[data_handle];
                 let specified_blend_mode = BlendMode::PremultipliedAlpha;
 
                 let mut prim_header = PrimitiveHeader {
                     local_rect: prim_rect,
                     local_clip_rect: prim_info.combined_local_clip_rect,
                     task_address,
                     specific_prim_address: GpuCacheAddress::invalid(),
                     clip_task_address,
                     transform_id,
                 };
 
-                if visible_tiles_range.is_empty() {
-                    let non_segmented_blend_mode = if !prim_data.opacity.is_opaque ||
-                        prim_info.clip_task_index != ClipTaskIndex::INVALID ||
-                        transform_kind == TransformedRectKind::Complex
-                    {
-                        specified_blend_mode
-                    } else {
-                        BlendMode::None
+                let non_segmented_blend_mode = if !prim_data.opacity.is_opaque ||
+                    prim_info.clip_task_index != ClipTaskIndex::INVALID ||
+                    transform_kind == TransformedRectKind::Complex
+                {
+                    specified_blend_mode
+                } else {
+                    BlendMode::None
+                };
+
+                if let Some(ref cache_handle) = gradient.cache_handle {
+                    let rt_cache_entry = ctx.resource_cache
+                        .get_cached_render_task(cache_handle);
+                    let cache_item = ctx.resource_cache
+                        .get_texture_cache_item(&rt_cache_entry.handle);
+
+                    if cache_item.texture_id == TextureSource::Invalid {
+                        return;
+                    }
+
+                    let textures = BatchTextures::color(cache_item.texture_id);
+                    let batch_kind = BrushBatchKind::Image(get_buffer_kind(cache_item.texture_id));
+                    let prim_user_data = [
+                        ShaderColorMode::Image as i32 | ((AlphaType::PremultipliedAlpha as i32) << 16),
+                        RasterizationSpace::Local as i32,
+                        get_shader_opacity(1.0),
+                    ];
+                    let segment_user_data = cache_item.uv_rect_handle.as_int(gpu_cache);
+                    prim_header.specific_prim_address = gpu_cache.get_address(&ctx.globals.default_image_handle);
+
+                    let prim_header_index = prim_headers.push(
+                        &prim_header,
+                        z_id,
+                        prim_user_data,
+                    );
+
+                    let batch_key = BatchKey {
+                        blend_mode: non_segmented_blend_mode,
+                        kind: BatchKind::Brush(batch_kind),
+                        textures: textures,
                     };
 
+                    let instance = PrimitiveInstanceData::from(BrushInstance {
+                        segment_index: INVALID_SEGMENT_INDEX,
+                        edge_flags: EdgeAaSegmentMask::all(),
+                        clip_task_address,
+                        brush_flags: BrushFlags::PERSPECTIVE_INTERPOLATION,
+                        prim_header_index,
+                        user_data: segment_user_data,
+                    });
+
+                    self.current_batch_list().push_single_instance(
+                        batch_key,
+                        bounding_rect,
+                        z_id,
+                        PrimitiveInstanceData::from(instance),
+                    );
+                } else if gradient.visible_tiles_range.is_empty() {
                     let batch_params = BrushBatchParameters::shared(
                         BrushBatchKind::LinearGradient,
                         BatchTextures::no_texture(),
                         [
                             prim_data.stops_handle.as_int(gpu_cache),
                             0,
                             0,
                         ],
@@ -2136,17 +2184,17 @@ impl AlphaBatchBuilder {
                         bounding_rect,
                         transform_kind,
                         render_tasks,
                         z_id,
                         prim_info.clip_task_index,
                         ctx,
                     );
                 } else {
-                    let visible_tiles = &ctx.scratch.gradient_tiles[*visible_tiles_range];
+                    let visible_tiles = &ctx.scratch.gradient_tiles[gradient.visible_tiles_range];
 
                     add_gradient_tiles(
                         visible_tiles,
                         &prim_data.stops_handle,
                         BrushBatchKind::LinearGradient,
                         specified_blend_mode,
                         bounding_rect,
                         clip_task_address,
--- a/gfx/wr/webrender/src/prim_store/gradient.rs
+++ b/gfx/wr/webrender/src/prim_store/gradient.rs
@@ -1,36 +1,59 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{
     ColorF, ColorU,ExtendMode, GradientStop, LayoutPoint, LayoutSize,
-    LayoutPrimitiveInfo, PremultipliedColorF, LayoutVector2D,
+    LayoutPrimitiveInfo, PremultipliedColorF, LayoutVector2D, LineOrientation,
 };
 use display_list_flattener::IsVisible;
+use euclid::approxeq::ApproxEq;
 use frame_builder::FrameBuildingState;
 use gpu_cache::{GpuCacheHandle, GpuDataRequest};
 use intern::{Internable, InternDebug, Handle as InternHandle};
-use prim_store::{BrushSegment, GradientTileRange};
+use prim_store::{BrushSegment, GradientTileRange, VectorKey};
 use prim_store::{PrimitiveInstanceKind, PrimitiveOpacity, PrimitiveSceneData};
 use prim_store::{PrimKeyCommonData, PrimTemplateCommonData, PrimitiveStore};
 use prim_store::{NinePatchDescriptor, PointKey, SizeKey, InternablePrimitive};
+use render_task::RenderTaskCacheEntryHandle;
 use std::{hash, ops::{Deref, DerefMut}, mem};
 use util::pack_as_float;
 
+/// The maximum number of stops a gradient may have in order to use the fast path.
+pub const GRADIENT_FP_STOPS: usize = 4;
+
 /// A hashable gradient stop that can be used in primitive keys.
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
-#[derive(Debug, Clone, MallocSizeOf, PartialEq)]
+#[derive(Debug, Copy, Clone, MallocSizeOf, PartialEq)]
 pub struct GradientStopKey {
     pub offset: f32,
     pub color: ColorU,
 }
 
+impl GradientStopKey {
+    pub fn empty() -> Self {
+        GradientStopKey {
+            offset: 0.0,
+            color: ColorU::new(0, 0, 0, 0),
+        }
+    }
+}
+
+impl Into<GradientStopKey> for GradientStop {
+    fn into(self) -> GradientStopKey {
+        GradientStopKey {
+            offset: self.offset,
+            color: self.color.into(),
+        }
+    }
+}
+
 impl Eq for GradientStopKey {}
 
 impl hash::Hash for GradientStopKey {
     fn hash<H: hash::Hasher>(&self, state: &mut H) {
         self.offset.to_bits().hash(state);
         self.color.hash(state);
     }
 }
@@ -71,31 +94,43 @@ impl LinearGradientKey {
             reverse_stops: linear_grad.reverse_stops,
             nine_patch: linear_grad.nine_patch,
         }
     }
 }
 
 impl InternDebug for LinearGradientKey {}
 
+#[derive(Clone, Debug, Hash, MallocSizeOf, PartialEq, Eq)]
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
+pub struct GradientCacheKey {
+    pub orientation: LineOrientation,
+    pub start_stop_point: VectorKey,
+    pub stops: [GradientStopKey; GRADIENT_FP_STOPS],
+}
+
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 #[derive(MallocSizeOf)]
 pub struct LinearGradientTemplate {
     pub common: PrimTemplateCommonData,
     pub extend_mode: ExtendMode,
     pub start_point: LayoutPoint,
     pub end_point: LayoutPoint,
     pub stretch_size: LayoutSize,
     pub tile_spacing: LayoutSize,
     pub stops_opacity: PrimitiveOpacity,
     pub stops: Vec<GradientStop>,
     pub brush_segments: Vec<BrushSegment>,
     pub reverse_stops: bool,
     pub stops_handle: GpuCacheHandle,
+    /// If true, this gradient can be drawn via the fast path
+    /// (cache gradient, and draw as image).
+    pub supports_caching: bool,
 }
 
 impl Deref for LinearGradientTemplate {
     type Target = PrimTemplateCommonData;
     fn deref(&self) -> &Self::Target {
         &self.common
     }
 }
@@ -106,22 +141,54 @@ impl DerefMut for LinearGradientTemplate
     }
 }
 
 impl From<LinearGradientKey> for LinearGradientTemplate {
     fn from(item: LinearGradientKey) -> Self {
         let common = PrimTemplateCommonData::with_key_common(item.common);
         let mut min_alpha: f32 = 1.0;
 
+        // Check if we can draw this gradient via a fast path by caching the
+        // gradient in a smaller task, and drawing as an image.
+        // TODO(gw): Aim to reduce the constraints on fast path gradients in future,
+        //           although this catches the vast majority of gradients on real pages.
+        let mut supports_caching =
+            // No repeating support in fast path
+            item.extend_mode == ExtendMode::Clamp &&
+            // Gradient must cover entire primitive
+            item.tile_spacing.w + item.stretch_size.w >= common.prim_size.width &&
+            item.tile_spacing.h + item.stretch_size.h >= common.prim_size.height &&
+            // Must be a vertical or horizontal gradient
+            (item.start_point.x.approx_eq(&item.end_point.x) ||
+             item.start_point.y.approx_eq(&item.end_point.y)) &&
+            // Fast path supports a limited number of stops
+            item.stops.len() <= GRADIENT_FP_STOPS &&
+            // Fast path not supported on segmented (border-image) gradients.
+            item.nine_patch.is_none();
+
         // Convert the stops to more convenient representation
         // for the current gradient builder.
-        let stops = item.stops.iter().map(|stop| {
+        let mut prev_color = None;
+
+        let stops: Vec<GradientStop> = item.stops.iter().map(|stop| {
             let color: ColorF = stop.color.into();
             min_alpha = min_alpha.min(color.a);
 
+            if let Some(prev_color) = prev_color {
+                // The fast path doesn't support hard color stops, yet.
+                // Since the length of the gradient is a fixed size (512 device pixels), if there
+                // is a hard stop you will see bilinear interpolation with this method, instead
+                // of an abrupt color change.
+                if prev_color == color {
+                    supports_caching = false;
+                }
+            }
+
+            prev_color = Some(color);
+
             GradientStop {
                 offset: stop.offset,
                 color,
             }
         }).collect();
 
         let mut brush_segments = Vec::new();
 
@@ -141,16 +208,17 @@ impl From<LinearGradientKey> for LinearG
             end_point: item.end_point.into(),
             stretch_size: item.stretch_size.into(),
             tile_spacing: item.tile_spacing.into(),
             stops_opacity,
             stops,
             brush_segments,
             reverse_stops: item.reverse_stops,
             stops_handle: GpuCacheHandle::new(),
+            supports_caching,
         }
     }
 }
 
 impl LinearGradientTemplate {
     /// Update the GPU cache for a given primitive template. This may be called multiple
     /// times per frame, by each primitive reference that refers to this interned
     /// template. The initial request call to the GPU cache ensures that work is only
@@ -242,32 +310,44 @@ impl InternablePrimitive for LinearGradi
             info.rect.size,
             self
         )
     }
 
     fn make_instance_kind(
         _key: LinearGradientKey,
         data_handle: LinearGradientDataHandle,
-        _prim_store: &mut PrimitiveStore,
+        prim_store: &mut PrimitiveStore,
         _reference_frame_relative_offset: LayoutVector2D,
     ) -> PrimitiveInstanceKind {
+        let gradient_index = prim_store.linear_gradients.push(LinearGradientPrimitive {
+            cache_handle: None,
+            visible_tiles_range: GradientTileRange::empty(),
+        });
+
         PrimitiveInstanceKind::LinearGradient {
             data_handle,
-            visible_tiles_range: GradientTileRange::empty(),
+            gradient_index,
         }
     }
 }
 
 impl IsVisible for LinearGradient {
     fn is_visible(&self) -> bool {
         true
     }
 }
 
+#[derive(Debug)]
+#[cfg_attr(feature = "capture", derive(Serialize))]
+pub struct LinearGradientPrimitive {
+    pub cache_handle: Option<RenderTaskCacheEntryHandle>,
+    pub visible_tiles_range: GradientTileRange,
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 /// Hashable radial gradient parameters, for use during prim interning.
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 #[derive(Debug, Clone, MallocSizeOf, PartialEq)]
 pub struct RadialGradientParams {
     pub start_radius: f32,
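
A note on the stop-conversion loop above: it conservatively turns off caching whenever two consecutive stops share a color, which is the usual way display lists encode a hard edge, since resampling the fixed 512-pixel cached strip would smear that edge into a blend. A minimal standalone sketch of the idea (the helper name and tuple colors are illustrative; the shipped code compares ColorF values inline while converting stops):

    // Hypothetical helper: true if any two consecutive stops share a color.
    fn has_equal_adjacent_colors(colors: &[(u8, u8, u8, u8)]) -> bool {
        colors.windows(2).any(|pair| pair[0] == pair[1])
    }

    fn main() {
        let red = (255, 0, 0, 255);
        let blue = (0, 0, 255, 255);
        // "red until 50%, then blue" is commonly encoded by repeating the
        // colors around the boundary offset: red@0, red@0.5, blue@0.5, blue@1,
        // so this gradient falls back to the slow path.
        assert!(has_equal_adjacent_colors(&[red, red, blue, blue]));
        // A plain two-stop ramp keeps the fast path.
        assert!(!has_equal_adjacent_colors(&[red, blue]));
    }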
--- a/gfx/wr/webrender/src/prim_store/mod.rs
+++ b/gfx/wr/webrender/src/prim_store/mod.rs
@@ -1,39 +1,41 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{BorderRadius, ClipMode, ColorF};
 use api::{FilterOp, ImageRendering, TileOffset, RepeatMode, WorldPoint, WorldSize};
-use api::{PremultipliedColorF, PropertyBinding, Shadow};
+use api::{PremultipliedColorF, PropertyBinding, Shadow, GradientStop};
 use api::{BoxShadowClipMode, LineStyle, LineOrientation, AuHelpers};
 use api::{LayoutPrimitiveInfo, PrimitiveKeyKind};
 use api::units::*;
 use border::{get_max_scale_for_border, build_border_instances};
 use border::BorderSegmentCacheKey;
 use clip::{ClipStore};
 use clip_scroll_tree::{ROOT_SPATIAL_NODE_INDEX, ClipScrollTree, SpatialNodeIndex, VisibleFace};
 use clip::{ClipDataStore, ClipNodeFlags, ClipChainId, ClipChainInstance, ClipItem};
 use debug_colors;
 use debug_render::DebugItem;
 use display_list_flattener::{CreateShadow, IsVisible};
 use euclid::{SideOffsets2D, TypedTransform3D, TypedRect, TypedScale, TypedSize2D};
+use euclid::approxeq::ApproxEq;
 use frame_builder::{FrameBuildingContext, FrameBuildingState, PictureContext, PictureState};
 use frame_builder::{PrimitiveContext, FrameVisibilityContext, FrameVisibilityState};
 use glyph_rasterizer::GlyphKey;
 use gpu_cache::{GpuCache, GpuCacheAddress, GpuCacheHandle, GpuDataRequest, ToGpuBlocks};
 use gpu_types::{BrushFlags, SnapOffsets};
 use image::{Repetition};
 use intern;
 use malloc_size_of::MallocSizeOf;
 use picture::{PictureCompositeMode, PicturePrimitive};
 use picture::{ClusterIndex, PrimitiveList, RecordedDirtyRegion, SurfaceIndex, RetainedTiles, RasterConfig};
 use prim_store::borders::{ImageBorderDataHandle, NormalBorderDataHandle};
-use prim_store::gradient::{LinearGradientDataHandle, RadialGradientDataHandle};
+use prim_store::gradient::{GRADIENT_FP_STOPS, GradientCacheKey, GradientStopKey};
+use prim_store::gradient::{LinearGradientPrimitive, LinearGradientDataHandle, RadialGradientDataHandle};
 use prim_store::image::{ImageDataHandle, ImageInstance, VisibleImageTile, YuvImageDataHandle};
 use prim_store::line_dec::LineDecorationDataHandle;
 use prim_store::picture::PictureDataHandle;
 use prim_store::text_run::{TextRunDataHandle, TextRunPrimitive};
 #[cfg(debug_assertions)]
 use render_backend::{FrameId};
 use render_backend::DataStores;
 use render_task::{RenderTask, RenderTaskCacheKey, to_cache_size};
@@ -41,16 +43,17 @@ use render_task::{RenderTaskCacheKeyKind
 use renderer::{MAX_VERTEX_TEXTURE_WIDTH};
 use resource_cache::{ImageProperties, ImageRequest};
 use scene::SceneProperties;
 use segment::SegmentBuilder;
 use std::{cmp, fmt, hash, ops, u32, usize, mem};
 #[cfg(debug_assertions)]
 use std::sync::atomic::{AtomicUsize, Ordering};
 use storage;
+use texture_cache::TEXTURE_REGION_DIMENSIONS;
 use util::{ScaleOffset, MatrixHelpers, MaxRect, Recycler, TransformedRectKind};
 use util::{pack_as_float, project_rect, raster_rect_to_device_pixels};
 use util::{scale_factors, clamp_to_scale_factor};
 use smallvec::SmallVec;
 
 pub mod borders;
 pub mod gradient;
 pub mod image;
@@ -1316,17 +1319,17 @@ pub enum PrimitiveInstanceKind {
     Image {
         /// Handle to the common interned data for this primitive.
         data_handle: ImageDataHandle,
         image_instance_index: ImageInstanceIndex,
     },
     LinearGradient {
         /// Handle to the common interned data for this primitive.
         data_handle: LinearGradientDataHandle,
-        visible_tiles_range: GradientTileRange,
+        gradient_index: LinearGradientIndex,
     },
     RadialGradient {
         /// Handle to the common interned data for this primitive.
         data_handle: RadialGradientDataHandle,
         visible_tiles_range: GradientTileRange,
     },
     /// Clear out a rect, used for special effects.
     Clear {
@@ -1497,16 +1500,18 @@ pub type BorderHandleStorage = storage::
 pub type SegmentStorage = storage::Storage<BrushSegment>;
 pub type SegmentsRange = storage::Range<BrushSegment>;
 pub type SegmentInstanceStorage = storage::Storage<SegmentedInstance>;
 pub type SegmentInstanceIndex = storage::Index<SegmentedInstance>;
 pub type ImageInstanceStorage = storage::Storage<ImageInstance>;
 pub type ImageInstanceIndex = storage::Index<ImageInstance>;
 pub type GradientTileStorage = storage::Storage<VisibleGradientTile>;
 pub type GradientTileRange = storage::Range<VisibleGradientTile>;
+pub type LinearGradientIndex = storage::Index<LinearGradientPrimitive>;
+pub type LinearGradientStorage = storage::Storage<LinearGradientPrimitive>;
 
 /// Contains various vecs of data that is used only during frame building,
 /// where we want to recycle the memory each new display list, to avoid constantly
 /// re-allocating and moving memory around. Written during primitive preparation,
 /// and read during batching.
 pub struct PrimitiveScratchBuffer {
     /// Contains a list of clip mask instance parameters
     /// per segment generated.
@@ -1621,33 +1626,36 @@ impl PrimitiveScratchBuffer {
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 #[derive(Clone, Debug)]
 pub struct PrimitiveStoreStats {
     picture_count: usize,
     text_run_count: usize,
     opacity_binding_count: usize,
     image_count: usize,
+    linear_gradient_count: usize,
 }
 
 impl PrimitiveStoreStats {
     pub fn empty() -> Self {
         PrimitiveStoreStats {
             picture_count: 0,
             text_run_count: 0,
             opacity_binding_count: 0,
             image_count: 0,
+            linear_gradient_count: 0,
         }
     }
 }
 
 #[cfg_attr(feature = "capture", derive(Serialize))]
 pub struct PrimitiveStore {
     pub pictures: Vec<PicturePrimitive>,
     pub text_runs: TextRunStorage,
+    pub linear_gradients: LinearGradientStorage,
 
     /// A list of image instances. These are stored separately as
     /// storing them inline in the instance makes the structure bigger
     /// for other types.
     pub images: ImageInstanceStorage,
 
     /// List of animated opacity bindings for a primitive.
     pub opacity_bindings: OpacityBindingStorage,
@@ -1655,25 +1663,27 @@ pub struct PrimitiveStore {
 
 impl PrimitiveStore {
     pub fn new(stats: &PrimitiveStoreStats) -> PrimitiveStore {
         PrimitiveStore {
             pictures: Vec::with_capacity(stats.picture_count),
             text_runs: TextRunStorage::new(stats.text_run_count),
             images: ImageInstanceStorage::new(stats.image_count),
             opacity_bindings: OpacityBindingStorage::new(stats.opacity_binding_count),
+            linear_gradients: LinearGradientStorage::new(stats.linear_gradient_count),
         }
     }
 
     pub fn get_stats(&self) -> PrimitiveStoreStats {
         PrimitiveStoreStats {
             picture_count: self.pictures.len(),
             text_run_count: self.text_runs.len(),
             image_count: self.images.len(),
             opacity_binding_count: self.opacity_bindings.len(),
+            linear_gradient_count: self.linear_gradients.len(),
         }
     }
 
     #[allow(unused)]
     pub fn print_picture_tree(&self, root: PictureIndex) {
         use print_tree::PrintTree;
         let mut pt = PrintTree::new("picture tree");
         self.pictures[root.0].print(&self.pictures, root, &mut pt);
@@ -2725,31 +2735,104 @@ impl PrimitiveStore {
                         prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
                     }
                 }
 
                 write_segment(image_instance.segment_instance_index, frame_state, scratch, |request| {
                     image_data.write_prim_gpu_blocks(request);
                 });
             }
-            PrimitiveInstanceKind::LinearGradient { data_handle, ref mut visible_tiles_range, .. } => {
+            PrimitiveInstanceKind::LinearGradient { data_handle, gradient_index, .. } => {
                 let prim_data = &mut data_stores.linear_grad[*data_handle];
+                let gradient = &mut self.linear_gradients[*gradient_index];
 
                // Update the template this instance references, which may refresh the GPU
                 // cache with any shared template data.
                 prim_data.update(frame_state);
 
+                if prim_data.supports_caching {
+                    let gradient_size = (prim_data.end_point - prim_data.start_point).to_size();
+
+                    // Calculate the range of the gradient that covers this
+                    // primitive. These values are included in the cache key.
+                    // For maximum accuracy, the gradient task is one texture
+                    // cache region long along the gradient axis, and minimal
+                    // on the axis that doesn't matter.
+                    let (size, orientation, start_point, end_point) = if prim_data.start_point.x.approx_eq(&prim_data.end_point.x) {
+                        let start_point = -prim_data.start_point.y / gradient_size.height;
+                        let end_point = (prim_data.common.prim_size.height - prim_data.start_point.y) / gradient_size.height;
+                        let size = DeviceIntSize::new(16, TEXTURE_REGION_DIMENSIONS);
+                        (size, LineOrientation::Vertical, start_point, end_point)
+                    } else {
+                        let start_point = -prim_data.start_point.x / gradient_size.width;
+                        let end_point = (prim_data.common.prim_size.width - prim_data.start_point.x) / gradient_size.width;
+                        let size = DeviceIntSize::new(TEXTURE_REGION_DIMENSIONS, 16);
+                        (size, LineOrientation::Horizontal, start_point, end_point)
+                    };
+
+                    // Build the cache key, including information about the stops.
+                    let mut stops = [GradientStopKey::empty(); GRADIENT_FP_STOPS];
+
+                    // Reverse the stops as required, same as the gradient builder does
+                    // for the slow path.
+                    if prim_data.reverse_stops {
+                        for (src, dest) in prim_data.stops.iter().rev().zip(stops.iter_mut()) {
+                            let stop = GradientStop {
+                                offset: 1.0 - src.offset,
+                                color: src.color,
+                            };
+                            *dest = stop.into();
+                        }
+                    } else {
+                        for (src, dest) in prim_data.stops.iter().zip(stops.iter_mut()) {
+                            *dest = (*src).into();
+                        }
+                    }
+
+                    let cache_key = GradientCacheKey {
+                        orientation,
+                        start_stop_point: VectorKey {
+                            x: start_point,
+                            y: end_point,
+                        },
+                        stops,
+                    };
+
+                    // Request the render task each frame.
+                    gradient.cache_handle = Some(frame_state.resource_cache.request_render_task(
+                        RenderTaskCacheKey {
+                            size: size,
+                            kind: RenderTaskCacheKeyKind::Gradient(cache_key),
+                        },
+                        frame_state.gpu_cache,
+                        frame_state.render_tasks,
+                        None,
+                        prim_data.stops_opacity.is_opaque,
+                        |render_tasks| {
+                            let task = RenderTask::new_gradient(
+                                size,
+                                stops,
+                                orientation,
+                                start_point,
+                                end_point,
+                            );
+
+                            render_tasks.add(task)
+                        }
+                    ));
+                }
+
                 if prim_data.tile_spacing != LayoutSize::zero() {
                     let prim_info = &scratch.prim_info[prim_instance.visibility_info.0 as usize];
                     let prim_rect = LayoutRect::new(
                         prim_instance.prim_origin,
                         prim_data.common.prim_size,
                     );
 
-                    *visible_tiles_range = decompose_repeated_primitive(
+                    gradient.visible_tiles_range = decompose_repeated_primitive(
                         &prim_info.combined_local_clip_rect,
                         &prim_rect,
                         &prim_data.stretch_size,
                         &prim_data.tile_spacing,
                         prim_context,
                         frame_state,
                         &mut scratch.gradient_tiles,
                         &mut |_, mut request| {
@@ -2763,17 +2846,17 @@ impl PrimitiveStore {
                                 pack_as_float(prim_data.extend_mode as u32),
                                 prim_data.stretch_size.width,
                                 prim_data.stretch_size.height,
                                 0.0,
                             ]);
                         }
                     );
 
-                    if visible_tiles_range.is_empty() {
+                    if gradient.visible_tiles_range.is_empty() {
                         prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
                     }
                 }
 
                 // TODO(gw): Consider whether it's worth doing segment building
                 //           for gradient primitives.
             }
             PrimitiveInstanceKind::RadialGradient { data_handle, ref mut visible_tiles_range, .. } => {
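
The cache key built above normalizes the primitive's extent into gradient-space fractions, so any primitive that crops an identical gradient the same way hashes to the same RenderTaskCacheKey and reuses one cached strip. A worked example with hypothetical numbers (none of these values appear in the patch) for the vertical branch:

    fn main() {
        // Hypothetical vertical gradient: it starts 20px below the primitive's
        // top edge and ends 120px below it; the primitive is 200px tall.
        let prim_height: f32 = 200.0;
        let (grad_start_y, grad_end_y): (f32, f32) = (20.0, 120.0);

        let gradient_height = grad_end_y - grad_start_y;                // 100.0
        let start_point = -grad_start_y / gradient_height;              // -0.2
        let end_point = (prim_height - grad_start_y) / gradient_height; //  1.8

        assert!((start_point + 0.2).abs() < 1e-6);
        assert!((end_point - 1.8).abs() < 1e-6);

        // The task itself is 16 x TEXTURE_REGION_DIMENSIONS (16 x 512) pixels:
        // the gradient is evaluated over [-0.2, 1.8] along the long axis, and
        // the primitive is then drawn by sampling that strip like an image.
    }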
--- a/gfx/wr/webrender/src/render_task.rs
+++ b/gfx/wr/webrender/src/render_task.rs
@@ -19,16 +19,17 @@ use freelist::{FreeList, FreeListHandle,
 use glyph_rasterizer::GpuGlyphCacheKey;
 use gpu_cache::{GpuCache, GpuCacheAddress, GpuCacheHandle};
 use gpu_types::{BorderInstance, ImageSource, UvRectKind, SnapOffsets};
 use internal_types::{CacheTextureId, FastHashMap, LayerIndex, SavedTargetIndex};
 #[cfg(feature = "pathfinder")]
 use pathfinder_partitioner::mesh::Mesh;
 use prim_store::PictureIndex;
 use prim_store::image::ImageCacheKey;
+use prim_store::gradient::{GRADIENT_FP_STOPS, GradientCacheKey, GradientStopKey};
 use prim_store::line_dec::LineDecorationCacheKey;
 #[cfg(feature = "debugger")]
 use print_tree::{PrintTreePrinter};
 use render_backend::FrameId;
 use resource_cache::{CacheItem, ResourceCache};
 use std::{ops, mem, usize, f32, i32, u32};
 use texture_cache::{TextureCache, TextureCacheHandle, Eviction};
 use tiling::{RenderPass, RenderTargetIndex};
@@ -377,16 +378,26 @@ pub struct BorderTask {
 pub struct BlitTask {
     pub source: BlitSource,
     pub padding: DeviceIntSideOffsets,
 }
 
 #[derive(Debug)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
+pub struct GradientTask {
+    pub stops: [GradientStopKey; GRADIENT_FP_STOPS],
+    pub orientation: LineOrientation,
+    pub start_point: f32,
+    pub end_point: f32,
+}
+
+#[derive(Debug)]
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct LineDecorationTask {
     pub wavy_line_thickness: f32,
     pub style: LineStyle,
     pub orientation: LineOrientation,
     pub local_size: LayoutSize,
 }
 
 #[derive(Debug)]
@@ -406,16 +417,17 @@ pub enum RenderTaskKind {
     VerticalBlur(BlurTask),
     HorizontalBlur(BlurTask),
     #[allow(dead_code)]
     Glyph(GlyphTask),
     Scaling(ScalingTask),
     Blit(BlitTask),
     Border(BorderTask),
     LineDecoration(LineDecorationTask),
+    Gradient(GradientTask),
 }
 
 #[derive(Debug, Copy, Clone, PartialEq)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum ClearMode {
     // Applicable to color and alpha targets.
     Zero,
@@ -490,16 +502,36 @@ impl RenderTask {
                 root_spatial_node_index,
                 device_pixel_scale,
             }),
             clear_mode: ClearMode::Transparent,
             saved_index: None,
         }
     }
 
+    pub fn new_gradient(
+        size: DeviceIntSize,
+        stops: [GradientStopKey; GRADIENT_FP_STOPS],
+        orientation: LineOrientation,
+        start_point: f32,
+        end_point: f32,
+    ) -> Self {
+        RenderTask::with_dynamic_location(
+            size,
+            Vec::new(),
+            RenderTaskKind::Gradient(GradientTask {
+                stops,
+                orientation,
+                start_point,
+                end_point,
+            }),
+            ClearMode::DontCare,
+        )
+    }
+
     pub fn new_blit(
         size: DeviceIntSize,
         source: BlitSource,
     ) -> Self {
         RenderTask::new_blit_with_padding(size, &DeviceIntSideOffsets::zero(), source)
     }
 
     pub fn new_blit_with_padding(
@@ -860,16 +892,17 @@ impl RenderTask {
 
             RenderTaskKind::Scaling(ref task) => {
                 task.uv_rect_kind
             }
 
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Glyph(_) |
             RenderTaskKind::Border(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Blit(..) => {
                 UvRectKind::Rect
             }
         }
     }
 
     // Write (up to) 8 floats of data specific to the type
@@ -916,16 +949,17 @@ impl RenderTask {
                 ]
             }
             RenderTaskKind::Glyph(_) => {
                 [0.0, 1.0, 0.0]
             }
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::LineDecoration(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::Blit(..) => {
                 [0.0; 3]
             }
         };
 
         let (mut target_rect, target_index) = self.get_target_rect();
         // The primitives inside a fixed-location render task
         // are already placed to their corresponding positions,
@@ -957,16 +991,17 @@ impl RenderTask {
             RenderTaskKind::HorizontalBlur(ref info) => {
                 gpu_cache.get_address(&info.uv_rect_handle)
             }
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Glyph(..) => {
                 panic!("texture handle not supported for this task kind");
             }
         }
     }
 
     pub fn get_dynamic_size(&self) -> DeviceIntSize {
@@ -1026,16 +1061,17 @@ impl RenderTask {
                 RenderTargetKind::Color
             }
 
             RenderTaskKind::Scaling(ref task_info) => {
                 task_info.target_kind
             }
 
             RenderTaskKind::Border(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::Picture(..) => {
                 RenderTargetKind::Color
             }
 
             RenderTaskKind::Blit(..) => {
                 RenderTargetKind::Color
             }
         }
@@ -1062,16 +1098,17 @@ impl RenderTask {
             RenderTaskKind::Picture(ref mut info) => {
                 (&mut info.uv_rect_handle, info.uv_rect_kind)
             }
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Glyph(..) => {
                 return;
             }
         };
 
         if let Some(mut request) = gpu_cache.request(cache_handle) {
             let p0 = target_rect.origin.to_f32();
@@ -1121,16 +1158,19 @@ impl RenderTask {
             }
             RenderTaskKind::Blit(ref task) => {
                 pt.new_level("Blit".to_owned());
                 pt.add_item(format!("source: {:?}", task.source));
             }
             RenderTaskKind::Glyph(..) => {
                 pt.new_level("Glyph".to_owned());
             }
+            RenderTaskKind::Gradient(..) => {
+                pt.new_level("Gradient".to_owned());
+            }
         }
 
         pt.add_item(format!("clear to: {:?}", self.clear_mode));
 
         for &child_id in &self.children {
             if tree[child_id].print_with(pt, tree) {
                 pt.add_item(format!("self: {:?}", child_id))
             }
@@ -1159,16 +1199,17 @@ impl RenderTask {
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum RenderTaskCacheKeyKind {
     BoxShadow(BoxShadowCacheKey),
     Image(ImageCacheKey),
     #[allow(dead_code)]
     Glyph(GpuGlyphCacheKey),
     BorderSegment(BorderSegmentCacheKey),
     LineDecoration(LineDecorationCacheKey),
+    Gradient(GradientCacheKey),
 }
 
 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct RenderTaskCacheKey {
     pub size: DeviceIntSize,
     pub kind: RenderTaskCacheKeyKind,
--- a/gfx/wr/webrender/src/renderer.rs
+++ b/gfx/wr/webrender/src/renderer.rs
@@ -171,16 +171,20 @@ const GPU_TAG_CACHE_CLIP: GpuProfileTag 
 const GPU_TAG_CACHE_BORDER: GpuProfileTag = GpuProfileTag {
     label: "C_Border",
     color: debug_colors::CORNSILK,
 };
 const GPU_TAG_CACHE_LINE_DECORATION: GpuProfileTag = GpuProfileTag {
     label: "C_LineDecoration",
     color: debug_colors::YELLOWGREEN,
 };
+const GPU_TAG_CACHE_GRADIENT: GpuProfileTag = GpuProfileTag {
+    label: "C_Gradient",
+    color: debug_colors::BROWN,
+};
 const GPU_TAG_SETUP_TARGET: GpuProfileTag = GpuProfileTag {
     label: "target init",
     color: debug_colors::SLATEGREY,
 };
 const GPU_TAG_SETUP_DATA: GpuProfileTag = GpuProfileTag {
     label: "data init",
     color: debug_colors::LIGHTGREY,
 };
@@ -434,16 +438,72 @@ pub(crate) mod desc {
             VertexAttribute {
                 name: "aOrientation",
                 count: 1,
                 kind: VertexAttributeKind::I32,
             },
         ],
     };
 
+    pub const GRADIENT: VertexDescriptor = VertexDescriptor {
+        vertex_attributes: &[
+            VertexAttribute {
+                name: "aPosition",
+                count: 2,
+                kind: VertexAttributeKind::F32,
+            },
+        ],
+        instance_attributes: &[
+            VertexAttribute {
+                name: "aTaskRect",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aStops",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            // TODO(gw): We should probably pack these as u32 colors instead
+            //           of passing them as full float vec4s here. It won't make
+            //           much difference in practice, since these are only
+            //           invoked rarely, when creating the cache.
+            VertexAttribute {
+                name: "aColor0",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aColor1",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aColor2",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aColor3",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aAxisSelect",
+                count: 1,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aStartStop",
+                count: 2,
+                kind: VertexAttributeKind::F32,
+            },
+        ],
+    };
+
     pub const BORDER: VertexDescriptor = VertexDescriptor {
         vertex_attributes: &[
             VertexAttribute {
                 name: "aPosition",
                 count: 2,
                 kind: VertexAttributeKind::F32,
             },
         ],
@@ -676,16 +736,17 @@ pub(crate) enum VertexArrayKind {
     Primitive,
     Blur,
     Clip,
     VectorStencil,
     VectorCover,
     Border,
     Scale,
     LineDecoration,
+    Gradient,
 }
 
 #[derive(Clone, Debug, PartialEq)]
 pub enum GraphicsApi {
     OpenGL,
 }
 
 #[derive(Clone, Debug)]
@@ -1501,16 +1562,17 @@ impl LazyInitializedDebugRenderer {
 // `Renderer::deinit()` below.
 pub struct RendererVAOs {
     prim_vao: VAO,
     blur_vao: VAO,
     clip_vao: VAO,
     border_vao: VAO,
     line_vao: VAO,
     scale_vao: VAO,
+    gradient_vao: VAO,
 }
 
 /// The renderer is responsible for submitting to the GPU the work prepared by the
 /// RenderBackend.
 ///
 /// We have a separate `Renderer` instance for each instance of WebRender (generally
 /// one per OS window), and all instances share the same thread.
 pub struct Renderer {
@@ -1817,16 +1879,17 @@ impl Renderer {
                                                             &prim_vao,
                                                             options.precache_flags));
 
         let blur_vao = device.create_vao_with_new_instances(&desc::BLUR, &prim_vao);
         let clip_vao = device.create_vao_with_new_instances(&desc::CLIP, &prim_vao);
         let border_vao = device.create_vao_with_new_instances(&desc::BORDER, &prim_vao);
         let scale_vao = device.create_vao_with_new_instances(&desc::SCALE, &prim_vao);
         let line_vao = device.create_vao_with_new_instances(&desc::LINE, &prim_vao);
+        let gradient_vao = device.create_vao_with_new_instances(&desc::GRADIENT, &prim_vao);
         let texture_cache_upload_pbo = device.create_pbo();
 
         let texture_resolver = TextureResolver::new(&mut device);
 
         let prim_header_f_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
         let prim_header_i_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAI32);
         let transforms_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
         let render_task_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
@@ -2020,16 +2083,17 @@ impl Renderer {
             gpu_profile,
             gpu_glyph_renderer,
             vaos: RendererVAOs {
                 prim_vao,
                 blur_vao,
                 clip_vao,
                 border_vao,
                 scale_vao,
+                gradient_vao,
                 line_vao,
             },
             transforms_texture,
             prim_header_i_texture,
             prim_header_f_texture,
             render_task_texture,
             pipeline_info: PipelineInfo::default(),
             dither_matrix_texture,
@@ -3827,34 +3891,52 @@ impl Renderer {
 
         // Draw any line decorations for this target.
         if !target.line_decorations.is_empty() {
             let _timer = self.gpu_profile.start_timer(GPU_TAG_CACHE_LINE_DECORATION);
 
             self.set_blend(true, FramebufferKind::Other);
             self.set_blend_mode_premultiplied_alpha(FramebufferKind::Other);
 
-            if !target.line_decorations.is_empty() {
-                self.shaders.borrow_mut().cs_line_decoration.bind(
-                    &mut self.device,
-                    &projection,
-                    &mut self.renderer_errors,
-                );
-
-                self.draw_instanced_batch(
-                    &target.line_decorations,
-                    VertexArrayKind::LineDecoration,
-                    &BatchTextures::no_texture(),
-                    stats,
-                );
-            }
+            self.shaders.borrow_mut().cs_line_decoration.bind(
+                &mut self.device,
+                &projection,
+                &mut self.renderer_errors,
+            );
+
+            self.draw_instanced_batch(
+                &target.line_decorations,
+                VertexArrayKind::LineDecoration,
+                &BatchTextures::no_texture(),
+                stats,
+            );
 
             self.set_blend(false, FramebufferKind::Other);
         }
 
+        // Draw any gradients for this target.
+        if !target.gradients.is_empty() {
+            let _timer = self.gpu_profile.start_timer(GPU_TAG_CACHE_GRADIENT);
+
+            self.set_blend(false, FramebufferKind::Other);
+
+            self.shaders.borrow_mut().cs_gradient.bind(
+                &mut self.device,
+                &projection,
+                &mut self.renderer_errors,
+            );
+
+            self.draw_instanced_batch(
+                &target.gradients,
+                VertexArrayKind::Gradient,
+                &BatchTextures::no_texture(),
+                stats,
+            );
+        }
+
         // Draw any blurs for this target.
         if !target.horizontal_blurs.is_empty() {
             let _timer = self.gpu_profile.start_timer(GPU_TAG_BLUR);
 
             {
                 let mut shaders = self.shaders.borrow_mut();
                 match target.target_kind {
                     RenderTargetKind::Alpha => &mut shaders.cs_blur_a8,
@@ -4708,16 +4790,17 @@ impl Renderer {
         self.transforms_texture.deinit(&mut self.device);
         self.prim_header_f_texture.deinit(&mut self.device);
         self.prim_header_i_texture.deinit(&mut self.device);
         self.render_task_texture.deinit(&mut self.device);
         self.device.delete_pbo(self.texture_cache_upload_pbo);
         self.texture_resolver.deinit(&mut self.device);
         self.device.delete_vao(self.vaos.prim_vao);
         self.device.delete_vao(self.vaos.clip_vao);
+        self.device.delete_vao(self.vaos.gradient_vao);
         self.device.delete_vao(self.vaos.blur_vao);
         self.device.delete_vao(self.vaos.line_vao);
         self.device.delete_vao(self.vaos.border_vao);
         self.device.delete_vao(self.vaos.scale_vao);
 
         self.debug.deinit(&mut self.device);
 
         for (_, target) in self.output_targets {
@@ -5485,32 +5568,34 @@ fn get_vao<'a>(vertex_array_kind: Vertex
         VertexArrayKind::Primitive => &vaos.prim_vao,
         VertexArrayKind::Clip => &vaos.clip_vao,
         VertexArrayKind::Blur => &vaos.blur_vao,
         VertexArrayKind::VectorStencil => &gpu_glyph_renderer.vector_stencil_vao,
         VertexArrayKind::VectorCover => &gpu_glyph_renderer.vector_cover_vao,
         VertexArrayKind::Border => &vaos.border_vao,
         VertexArrayKind::Scale => &vaos.scale_vao,
         VertexArrayKind::LineDecoration => &vaos.line_vao,
+        VertexArrayKind::Gradient => &vaos.gradient_vao,
     }
 }
 
 #[cfg(not(feature = "pathfinder"))]
 fn get_vao<'a>(vertex_array_kind: VertexArrayKind,
                vaos: &'a RendererVAOs,
                _: &'a GpuGlyphRenderer)
                -> &'a VAO {
     match vertex_array_kind {
         VertexArrayKind::Primitive => &vaos.prim_vao,
         VertexArrayKind::Clip => &vaos.clip_vao,
         VertexArrayKind::Blur => &vaos.blur_vao,
         VertexArrayKind::VectorStencil | VertexArrayKind::VectorCover => unreachable!(),
         VertexArrayKind::Border => &vaos.border_vao,
         VertexArrayKind::Scale => &vaos.scale_vao,
         VertexArrayKind::LineDecoration => &vaos.line_vao,
+        VertexArrayKind::Gradient => &vaos.gradient_vao,
     }
 }
 
 #[derive(Clone, Copy, PartialEq)]
 enum FramebufferKind {
     Main,
     Other,
 }
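
One constraint worth spelling out for the new desc::GRADIENT descriptor: its instance attributes are expected to line up, in order, with the fields of the #[repr(C)] GradientJob that tiling.rs pushes for each cached gradient, for a total of 27 floats per instance. A simplified stand-in (hypothetical name, plain f32 arrays in place of DeviceRect and PremultipliedColorF) makes that layout explicit:

    // aTaskRect (4) + aStops (4) + aColor0..aColor3 (4 x 4)
    //   + aAxisSelect (1) + aStartStop (2) = 27 f32s per instance.
    #[repr(C)]
    struct GradientJobLayout {
        task_rect: [f32; 4],
        stops: [f32; 4],
        colors: [[f32; 4]; 4],
        axis_select: f32,
        start_stop: [f32; 2],
    }

    fn main() {
        // Every field is f32-aligned, so repr(C) adds no padding.
        assert_eq!(std::mem::size_of::<GradientJobLayout>(), 27 * 4);
    }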
--- a/gfx/wr/webrender/src/shade.rs
+++ b/gfx/wr/webrender/src/shade.rs
@@ -162,16 +162,17 @@ impl LazilyCompiledShader {
                 ShaderKind::VectorStencil => VertexArrayKind::VectorStencil,
                 ShaderKind::VectorCover => VertexArrayKind::VectorCover,
                 ShaderKind::ClipCache => VertexArrayKind::Clip,
             };
 
             let vertex_descriptor = match vertex_format {
                 VertexArrayKind::Primitive => &desc::PRIM_INSTANCES,
                 VertexArrayKind::LineDecoration => &desc::LINE,
+                VertexArrayKind::Gradient => &desc::GRADIENT,
                 VertexArrayKind::Blur => &desc::BLUR,
                 VertexArrayKind::Clip => &desc::CLIP,
                 VertexArrayKind::VectorStencil => &desc::VECTOR_STENCIL,
                 VertexArrayKind::VectorCover => &desc::VECTOR_COVER,
                 VertexArrayKind::Border => &desc::BORDER,
                 VertexArrayKind::Scale => &desc::SCALE,
             };
 
@@ -442,16 +443,17 @@ pub struct Shaders {
     // of these shaders are then used by the primitive shaders.
     pub cs_blur_a8: LazilyCompiledShader,
     pub cs_blur_rgba8: LazilyCompiledShader,
     pub cs_border_segment: LazilyCompiledShader,
     pub cs_border_solid: LazilyCompiledShader,
     pub cs_scale_a8: LazilyCompiledShader,
     pub cs_scale_rgba8: LazilyCompiledShader,
     pub cs_line_decoration: LazilyCompiledShader,
+    pub cs_gradient: LazilyCompiledShader,
 
     // Brush shaders
     brush_solid: BrushShader,
     brush_image: Vec<Option<BrushShader>>,
     brush_blend: BrushShader,
     brush_mix_blend: BrushShader,
     brush_yuv_image: Vec<Option<BrushShader>>,
     brush_radial_gradient: BrushShader,
@@ -662,16 +664,24 @@ impl Shaders {
         let cs_line_decoration = LazilyCompiledShader::new(
             ShaderKind::Cache(VertexArrayKind::LineDecoration),
             "cs_line_decoration",
             &[],
             device,
             options.precache_flags,
         )?;
 
+        let cs_gradient = LazilyCompiledShader::new(
+            ShaderKind::Cache(VertexArrayKind::Gradient),
+            "cs_gradient",
+            &[],
+            device,
+            options.precache_flags,
+        )?;
+
         let cs_border_segment = LazilyCompiledShader::new(
             ShaderKind::Cache(VertexArrayKind::Border),
             "cs_border_segment",
              &[],
              device,
              options.precache_flags,
         )?;
 
@@ -691,16 +701,17 @@ impl Shaders {
             options.precache_flags,
         )?;
 
         Ok(Shaders {
             cs_blur_a8,
             cs_blur_rgba8,
             cs_border_segment,
             cs_line_decoration,
+            cs_gradient,
             cs_border_solid,
             cs_scale_a8,
             cs_scale_rgba8,
             brush_solid,
             brush_image,
             brush_blend,
             brush_mix_blend,
             brush_yuv_image,
@@ -787,16 +798,17 @@ impl Shaders {
             }
         }
         for shader in self.brush_yuv_image {
             if let Some(shader) = shader {
                 shader.deinit(device);
             }
         }
         self.cs_border_solid.deinit(device);
+        self.cs_gradient.deinit(device);
         self.cs_line_decoration.deinit(device);
         self.cs_border_segment.deinit(device);
         self.ps_split_composite.deinit(device);
     }
 }
 
 // A wrapper around a strong reference to a Shaders
 // object. We have this so that external (ffi)
--- a/gfx/wr/webrender/src/texture_cache.rs
+++ b/gfx/wr/webrender/src/texture_cache.rs
@@ -18,17 +18,17 @@ use render_backend::{FrameId, FrameStamp
 use resource_cache::{CacheItem, CachedImageData};
 use std::cell::Cell;
 use std::cmp;
 use std::mem;
 use std::time::{Duration, SystemTime};
 use std::rc::Rc;
 
 /// The size of each region/layer in shared cache texture arrays.
-const TEXTURE_REGION_DIMENSIONS: i32 = 512;
+pub const TEXTURE_REGION_DIMENSIONS: i32 = 512;
 
 /// The number of slices for picture caching to allocate at start.
 const BASE_PICTURE_TEXTURE_SLICES: usize = 16;
 /// The number of slices to add when we grow out of the current range.
 const ADD_PICTURE_TEXTURE_SLICES: usize = 8;
 /// The chosen image format for picture tiles.
 const PICTURE_TILE_FORMAT: ImageFormat = ImageFormat::BGRA8;
 
--- a/gfx/wr/webrender/src/tiling.rs
+++ b/gfx/wr/webrender/src/tiling.rs
@@ -1,30 +1,31 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use api::{ColorF, BorderStyle, MixBlendMode, PipelineId};
-use api::{DocumentLayer, FilterData, FilterOp, ImageFormat};
+use api::{ColorF, BorderStyle, MixBlendMode, PipelineId, PremultipliedColorF};
+use api::{DocumentLayer, FilterData, FilterOp, ImageFormat, LineOrientation};
 use api::units::*;
 use batch::{AlphaBatchBuilder, AlphaBatchContainer, ClipBatcher, resolve_image};
 use clip::ClipStore;
 use clip_scroll_tree::{ClipScrollTree};
 use debug_render::DebugItem;
 use device::{Texture};
 #[cfg(feature = "pathfinder")]
 use euclid::{TypedPoint2D, TypedVector2D};
 use frame_builder::FrameGlobalResources;
 use gpu_cache::{GpuCache};
 use gpu_types::{BorderInstance, BlurDirection, BlurInstance, PrimitiveHeaders, ScalingInstance};
 use gpu_types::{TransformData, TransformPalette, ZBufferIdGenerator};
 use internal_types::{CacheTextureId, FastHashMap, SavedTargetIndex, TextureSource};
 #[cfg(feature = "pathfinder")]
 use pathfinder_partitioner::mesh::Mesh;
 use picture::{RecordedDirtyRegion, SurfaceInfo};
+use prim_store::gradient::GRADIENT_FP_STOPS;
 use prim_store::{PictureIndex, PrimitiveStore, DeferredResolve, PrimitiveScratchBuffer};
 use profiler::FrameProfileCounters;
 use render_backend::{DataStores, FrameId};
 use render_task::{BlitSource, RenderTaskAddress, RenderTaskId, RenderTaskKind};
 use render_task::{BlurTask, ClearMode, GlyphTask, RenderTaskLocation, RenderTaskTree, ScalingTask};
 use resource_cache::ResourceCache;
 use std::{cmp, usize, f32, i32, mem};
 use texture_allocator::{ArrayAllocationTracker, FreeRectSlice};
@@ -327,16 +328,27 @@ pub struct BlitJob {
 pub struct LineDecorationJob {
     pub task_rect: DeviceRect,
     pub local_size: LayoutSize,
     pub wavy_line_thickness: f32,
     pub style: i32,
     pub orientation: i32,
 }
 
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
+#[repr(C)]
+pub struct GradientJob {
+    pub task_rect: DeviceRect,
+    pub stops: [f32; GRADIENT_FP_STOPS],
+    pub colors: [PremultipliedColorF; GRADIENT_FP_STOPS],
+    pub axis_select: f32,
+    pub start_stop: [f32; 2],
+}
+
 #[cfg(feature = "pathfinder")]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct GlyphJob {
     pub mesh: Mesh,
     pub target_rect: DeviceIntRect,
     pub origin: DeviceIntPoint,
     pub subpixel_offset: TypedPoint2D<f32, DevicePixel>,
@@ -500,16 +512,17 @@ impl RenderTarget for ColorRenderTarget 
                         pipeline_id,
                         task_id,
                     });
                 }
             }
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::LineDecoration(..) => {
                 panic!("Should not be added to color target!");
             }
             RenderTaskKind::Glyph(..) => {
                 // FIXME(pcwalton): Support color glyphs.
                 panic!("Glyphs should not be added to color target!");
             }
             RenderTaskKind::Scaling(..) => {
@@ -643,16 +656,17 @@ impl RenderTarget for AlphaRenderTarget 
             }
         }
 
         match task.kind {
             RenderTaskKind::Picture(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::LineDecoration(..) |
+            RenderTaskKind::Gradient(..) |
             RenderTaskKind::Glyph(..) => {
                 panic!("BUG: should not be added to alpha target!");
             }
             RenderTaskKind::VerticalBlur(ref info) => {
                 info.add_instances(
                     &mut self.vertical_blurs,
                     BlurDirection::Vertical,
                     render_tasks.get_task_address(task_id),
@@ -731,29 +745,31 @@ pub struct TextureCacheRenderTarget {
     pub target_kind: RenderTargetKind,
     pub horizontal_blurs: Vec<BlurInstance>,
     pub blits: Vec<BlitJob>,
     pub glyphs: Vec<GlyphJob>,
     pub border_segments_complex: Vec<BorderInstance>,
     pub border_segments_solid: Vec<BorderInstance>,
     pub clears: Vec<DeviceIntRect>,
     pub line_decorations: Vec<LineDecorationJob>,
+    pub gradients: Vec<GradientJob>,
 }
 
 impl TextureCacheRenderTarget {
     fn new(target_kind: RenderTargetKind) -> Self {
         TextureCacheRenderTarget {
             target_kind,
             horizontal_blurs: vec![],
             blits: vec![],
             glyphs: vec![],
             border_segments_complex: vec![],
             border_segments_solid: vec![],
             clears: vec![],
             line_decorations: vec![],
+            gradients: vec![],
         }
     }
 
     fn add_task(
         &mut self,
         task_id: RenderTaskId,
         render_tasks: &mut RenderTaskTree,
     ) {
@@ -816,16 +832,38 @@ impl TextureCacheRenderTarget {
                     } else {
                         self.border_segments_complex.push(instance);
                     }
                 }
             }
             RenderTaskKind::Glyph(ref mut task_info) => {
                 self.add_glyph_task(task_info, target_rect.0)
             }
+            RenderTaskKind::Gradient(ref task_info) => {
+                let mut stops = [0.0; 4];
+                let mut colors = [PremultipliedColorF::BLACK; 4];
+
+                let axis_select = match task_info.orientation {
+                    LineOrientation::Horizontal => 0.0,
+                    LineOrientation::Vertical => 1.0,
+                };
+
+                for (stop, (offset, color)) in task_info.stops.iter().zip(stops.iter_mut().zip(colors.iter_mut())) {
+                    *offset = stop.offset;
+                    *color = ColorF::from(stop.color).premultiplied();
+                }
+
+                self.gradients.push(GradientJob {
+                    task_rect: target_rect.0.to_f32(),
+                    axis_select,
+                    stops,
+                    colors,
+                    start_stop: [task_info.start_point, task_info.end_point],
+                });
+            }
             RenderTaskKind::VerticalBlur(..) |
             RenderTaskKind::Picture(..) |
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::CacheMask(..) |
             RenderTaskKind::Scaling(..) => {
                 panic!("BUG: unexpected task kind for texture cache target");
             }
         }
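
The Gradient arm above is where the cached-gradient stops become GPU instance data: each ColorU is converted to a premultiplied float color, and slots beyond the gradient's real stop count arrive pre-filled with GradientStopKey::empty() (offset 0.0, fully transparent). A small self-contained sketch of the color conversion, using a hypothetical helper rather than WebRender's ColorF::premultiplied:

    // Hypothetical helper mirroring ColorU -> ColorF -> premultiplied.
    fn premultiply(r: u8, g: u8, b: u8, a: u8) -> [f32; 4] {
        let alpha = a as f32 / 255.0;
        [
            (r as f32 / 255.0) * alpha,
            (g as f32 / 255.0) * alpha,
            (b as f32 / 255.0) * alpha,
            alpha,
        ]
    }

    fn main() {
        // A half-transparent green stop: the RGB channels are scaled by alpha.
        let c = premultiply(0, 255, 0, 128);
        assert_eq!(c[0], 0.0);
        assert!((c[1] - 128.0 / 255.0).abs() < 1e-6);
        assert!((c[3] - 128.0 / 255.0).abs() < 1e-6);
    }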
--- a/gfx/wr/webrender/tests/angle_shader_validation.rs
+++ b/gfx/wr/webrender/tests/angle_shader_validation.rs
@@ -47,16 +47,20 @@ const SHADERS: &[Shader] = &[
         name: "cs_border_segment",
         features: CACHE_FEATURES,
     },
     Shader {
         name: "cs_line_decoration",
         features: CACHE_FEATURES,
     },
     Shader {
+        name: "cs_gradient",
+        features: CACHE_FEATURES,
+    },
+    Shader {
         name: "cs_border_solid",
         features: CACHE_FEATURES,
     },
     // Prim shaders
     Shader {
         name: "ps_split_composite",
         features: PRIM_FEATURES,
     },
--- a/gfx/wr/wrench/reftests/border/reftest.list
+++ b/gfx/wr/wrench/reftests/border/reftest.list
@@ -1,10 +1,10 @@
 platform(linux,mac) == border-clamp-corner-radius.yaml border-clamp-corner-radius.png
-== border-gradient-simple.yaml border-gradient-simple-ref.yaml
+fuzzy(1,790) == border-gradient-simple.yaml border-gradient-simple-ref.yaml
 platform(linux,mac) == border-gradient-nine-patch.yaml border-gradient-nine-patch.png
 == border-radial-gradient-simple.yaml border-radial-gradient-simple-ref.yaml
 platform(linux,mac) == border-radial-gradient-nine-patch.yaml border-radial-gradient-nine-patch.png
 == border-radii.yaml border-radii.png
 == border-none.yaml border-none-ref.yaml
 fuzzy(1,68) == border-overlapping.yaml border-overlapping-ref.yaml
 == border-invisible.yaml border-invisible-ref.yaml
 platform(linux,mac) == border-suite.yaml border-suite.png
index 55518031cddf5a0bf2e8f56c475372bc7a84bdd4..95ee7491e82570e65c12d0411492c5055c60b34a
GIT binary patch (literal 6652; binary PNG data omitted)
index 1d06f241bc72ad97c13159e86f26e292b1ff5745..ed80ec9988bc2a403208590727337e5c2ca23215
GIT binary patch (literal 912; binary PNG data omitted)
index 71436ebf36a6b13f1911e900bac0bb1e967b8c16..b27b9df586035770229313f74b28222318a890d6
GIT binary patch (literal 10340; binary PNG data omitted)
index c6525703f690609bbe95b3334305515bb2147a4c..6fc18cd8c0eb2931605546f016a87a0b0363dd2a
GIT binary patch (literal 12560; binary PNG data omitted)
--- a/gfx/wr/wrench/reftests/gradient/reftest.list
+++ b/gfx/wr/wrench/reftests/gradient/reftest.list
@@ -13,17 +13,17 @@ platform(linux,mac) fuzzy(1,35000) == li
 == linear-clamp-1a.yaml linear-clamp-1-ref.yaml
 == linear-clamp-1b.yaml linear-clamp-1-ref.yaml
 == linear-clamp-2.yaml linear-clamp-2-ref.yaml
 
 # dithering requires us to fuzz here
 fuzzy(1,20000) == linear.yaml linear-ref.yaml
 fuzzy(1,20000) == linear-reverse.yaml linear-ref.yaml
 
-== linear-aligned-clip.yaml linear-aligned-clip-ref.yaml
+fuzzy(1,15200) == linear-aligned-clip.yaml linear-aligned-clip-ref.yaml
 
 platform(linux,mac) fuzzy(1,80000) == radial-circle.yaml radial-circle-ref.png
 platform(linux,mac) fuzzy(1,80000) == radial-ellipse.yaml radial-ellipse-ref.png
 
 != radial-circle.yaml radial-ellipse.yaml
 
 == norm-linear-1.yaml norm-linear-1-ref.yaml
 == norm-linear-2.yaml norm-linear-2-ref.yaml
@@ -39,19 +39,19 @@ platform(linux,mac) fuzzy(1,80000) == ra
 # fuzzy because of differences from normalization
 # this might be able to be improved
 fuzzy(255,1200) == repeat-linear.yaml repeat-linear-ref.yaml
 fuzzy(255,1200) == repeat-linear-reverse.yaml repeat-linear-ref.yaml
 fuzzy(255,2664) == repeat-radial.yaml repeat-radial-ref.yaml
 fuzzy(255,2664) == repeat-radial-negative.yaml repeat-radial-ref.yaml
 
 # fuzzy because of thin spaced out column of pixels that are 1 off
-fuzzy(1,50) == tiling-linear-1.yaml tiling-linear-1-ref.yaml
-fuzzy(1,38) == tiling-linear-2.yaml tiling-linear-2-ref.yaml
-== tiling-linear-3.yaml tiling-linear-3-ref.yaml
+fuzzy(1,83164) == tiling-linear-1.yaml tiling-linear-1-ref.yaml
+fuzzy(1,46279) == tiling-linear-2.yaml tiling-linear-2-ref.yaml
+fuzzy(1,62154) == tiling-linear-3.yaml tiling-linear-3-ref.yaml
 
 fuzzy(1,17) == tiling-radial-1.yaml tiling-radial-1-ref.yaml
 fuzzy(1,1) == tiling-radial-2.yaml tiling-radial-2-ref.yaml
 == tiling-radial-3.yaml tiling-radial-3-ref.yaml
 fuzzy(1,17) == tiling-radial-4.yaml tiling-radial-4-ref.yaml
 
 == radial-zero-size-1.yaml radial-zero-size-ref.yaml
 == radial-zero-size-2.yaml radial-zero-size-ref.yaml
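Roughly, a fuzzy(D,N) annotation in these wrench manifests loosens the comparison so that up to N pixels may differ from the reference image, each by at most D per channel; for example, the updated line

  fuzzy(1,83164) == tiling-linear-1.yaml tiling-linear-1-ref.yaml

allows up to 83164 pixels to be off by 1. The Gecko reftest lists further below use a ranged fuzzy-if(condition,a-b,c-d) form that bounds the same two quantities with min-max ranges; this reading of the annotations is a hedged gloss, not part of the change itself.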
index 10dd14b48af77b2ad3127c3da4f04da27a65156b..49fb1195df215bd5cddf2247d756dea62f87d7f3
GIT binary patch
literal 19136
zc$~$&XIPWlwgxI9A|fS#pmak~G=eC-iiRd2ptMMn7L+PYq$3~_dXXaCMz4u9sY2*Y
zM4BLA=uJ9=e!nlkT6?dv&vVYbKko5)Oe7(5jycLZ-ZAEgD7E|Y6lWRFo;Y!WLQw(!
z;KT`{AmA^{nbW{4dm~CcCr;dZtq7OVc#*hJNf91B<22&(XIeL-u-@2ACrI5^v4E0T
zSFsJQEMwiH6?t#|C06;I0-Ea@^`(KwnawvtZ{I8~IA#C%ZtH0!xs$pkx-^gP3VA0!
zTI|)zeSNa$vH7)g*mu!4PuJgyA)}?ZGZNpi_3L-4_Tt*YX2qt9w9SZAHO{NkVR&b4
z$Em~N{>b3kLdQp_4D~6Q8x&nIANDgocitmT0YATjW6l6So<$)cz>jn^?NH#yE1dDN
zz^|$)2m}GYnHv6t9{l8`zdxui#^Q0tpMLS`A{=^7kY_?FUiKT81@_sP{zMndKjDo%
zTRU``M$ou)IdA0&eVxj^vM?mX5yi+q^g`Q3Am}&ury+UFnJ@5=3pN$m67luVq8|MD
z04)1n?ws(jCTqKKsUpuNupO^Dhd<Ne)i5Og(8o7BfEckEkh&1(XnD*9n+Gi(0zs)t
zKLUO?l(&39oD=>JVo0yX*M~4c8Thi2>B<}x$A1b&BnZp?8G@8~=$?*@fJBHoz$ZHy
zF0!0Q6f2u<-OZYSk~6fhf18=EJ5iu!(_~+Eg{nCUt<2k;p%=Af|8TO2<zmFw;6-am
zjJT`6@VavT%tgcRx4a(YkG?l(rl@=`Q&ZkT`U&x@&(UdG|Hs68#-8r@d@b#W;RVL7
zu``<%VuQr!)zI0rj&knzF6{B7kFtLBWeHLPhkM%vt9m9gj^~e%mbiA4{~Z%uM|{mh
zu94?3<K)!8Jyk!AXbO{tqCI_^611oNpFhFK<%E-?WkQ=d?6v5df8i;~XkBH^*IPno
z&KrEYW=+0$)fR)u`F7V<P4fMB&3BhS;3^V6v&NaPCOyZwbV*Q)rfr-Yd<6Y8L}i01
zby6DdmY0=s(isnWbbHO;*|vf|c%R7$wY%C5mBy%>4om#{#;np8;9{>N)S^BBdtG$n
ziMu(GKxmz7z5IHM?F`Ek8eqdOh-VB!!<W%pTtC_4B?KB<hG|zZy-hnP<>`G&FYWQ^
z)kl+#L!3Mlo=frdn4g>tn?dfosZH9pb3HS~R;fr|X>G$N)#lkv{m&!%(229@nT#?~
z{k%J7*~~vmXa%Y;)WgMM8P1QZhf!(q{?5_DJ+R3cqvC1}(`=zY2#fPq`(zazjzx1L
z@lK{mZByH9NxXH$-pJFur+}`%TF}3yLb<<)9Tv5|;L1_<4)gQ-6{%9)>V=>0?^g<_
za}+mS8n|BmS;E4dxRQKz&1Sy0{DGHyWp)J$Sty%;{S#|h_sveYGr@1njxP;EarZ<2
z`t^vrKSgE%$EK3Q;4#;ri@$QD6GYPQFG00(=(hTeBG?e`?@RtbaKto4{~6@F#)T6<
zKeg7e^nB=i<wK|6`V&q<L0p?No4)yUh|G4JfsxHm*T>22`GZK2bXyhc{rINnsh&*K
z8~YJY4F1;#sC2brC%o~rIo<x|jf1|>fZa7Y77bN>L7x;QW=M#wS`&BW!(E9Floi<(
zdC0=$38=h5F7c?|tZ3Wj;{)c#8`72`&}zzl=|=uG?edw>eTlIIvAmbS@-`U!s8EDz
z@Z2b`>xVDGZD$VZ_b2%y!kE9w6#Y==pllMl7-h#oV$iT2YTYfaEjX-cAdeAARWVjJ
zRCipBUCpRtl=Y-9Uac_lWa*9TXvAF`Gh`S@Q~6*4M66poPJd2ze}Rm}bX2+2s4`&6
z@P%i)ScED|pNx}+K1XP)E8=n){xt9X=_K8J=Yo5H__Ihm_q?Vr+S92#A|828VuRew
zBOM=ND_c>v0XYYOlvg=F4V84mh70A!!#bwBoZ7nTVoJQcikimKozy5adjq=Ba#}$R
z!qW-2TuJ+;lXA{3Ud=}%5<PR9EIf8H6Y`m>$>v_V2vt7?9-XIBy0kA?CimFM0**47
zQ17;OzWyyf6hosmcP&^|W0mEJrXbWnT|+)P{{|pj7Sc|Sss4-hOe(KP`@G=Wlp?ud
z+moS<b`%767rpPUVrVq<U4{f$4Vh5?$9j7MRrR_(<adTq_d)}IgrQifuRvA?WcgO$
zZ04-_1wfm!7?CC4?OTC;)86`aLt;4*o0x*6DT>vE0=ePN+ULo_Z7H>((&^H5FSHs_
zppfzP%O9WzH}|!bukX%6_If!Myt+R22H*`<*VXF}B3JTOejp3g6R;5nGNevlXY8(b
zBRCd?i5f9u8)y&oOj=XdpqK*try=2PSk(*JaW<(8`EGuBa^1>3=UCa{&v4SchzA^a
z$-vh4%M=!4GL>~a)vp1E<!LuY?YrYvsOG%&pH+zEkWLQle#wp4O!ozJ`O#zPOH;J%
zOsOhh^;_+@>)G&q0nba>TAq%dn#@za$#`-8IOB3hOL4bWzj7(aEqm?$W{`7qx<Jrn
z13q-*0aTYHQEnqKeTAY}b*hhOXG+9>3t>sO#U+w1y=35Ym((Nj!Jj!>`f6u|@jBgg
zm{S|YS>2RhPHN-Ux{hkeo|dQBet(@}CRwkWdb+;T#XhV#Y>VlCFP$!e7j~?%>zB<E
zT=~mOudEzun0BGJnnv`;R3{%GbY(m4+BU~7o9>zq#q1R6Bx?6Jmb{pA@5dRbF_h|q
z`qS%m=8DT?mHlWSr%AUP4=cgBhw(lt{&fy{-2+0u@y}T+t=fmS^O~Ugll^GGP_7B}
z`3rPg;|7v}p`!Qtw-?e_po@baeC^lNIZVUm->?AsWi-hjUQTkIYQ^bK><I)(ZU-7S
zy0a{OV!m7t!h!7}C^Y}sLpODpJL*Ngu{SitjP_u|!UBgBcb7fXLPa^CDb-rrJ+cFg
zU$aq`H?LyWO&SJ$!i3p+#0@ukUO7j{TGLm=mu_%CIV_aE8lk65J$o5ls>wuH8wJ}s
z9|5jWvoIwq$EP%-&A>BJRfSv@-;r%#>&A5R3*WSIcs?i<QDWQoG9^;*4nBcy2R_Cm
z&|%^_wWWK(Y$<OA-EO=y2jAE$0I<byw&>VpZEG1_q{#<ehHq3;TAJ=QR&k;Yy*$<Z
z3G(C9gJknmr`ew*lCDj`y`E{3dfw|jQ}z=v<Z&SJgw^x@=ND)_5Pq#NrC&5gq`h8I
zRus5(d<wRv2{mtAFdOt68<7kH-~hHyCB`druPi?uA>kx5BJQQLnP;-WTI{luxe8U%
zp01I9%UJl{L8hZxV~pu@L*0ld9|QN^@_OFN&35B@OL)hD3d<7L{D_I<jm}A7Yw)qp
zn*0TxCxYCt=VcTitSScIFBCC0=86;(DqUKtmjAWy&%Y<LNyLJ`qRug1qg2w|uhi&7
z_qiVL++aHBynDDfu7(NTKtKKXYJdC3mbez-eyJow()pCJ3zO!#>f^`$ogIAescLR_
zO6(cv+)wFk^<gWY%5BvPHp>}$xptAeeRI0%DeKjm>hI;6dbDSB)i!^H*nT@p+X=jG
zxY^iO-qI=%lr8PLdHqV2-=2ByF?7ro$VH^QT9B}fGkh(Wwpxhy-=;O}E_u&8+ph>{
zs|w-1<=^y$)Ui_x*=aP0Y0M9*uz5!rxZE4?YV%k4Tz(h+bna@6c7AKflm~Dids~c7
zh6-+bq(_FeD$zv695BG3%Np9cc6mX<b%2>Fk)do^g+)YxXR}8*=<<EkysHMO%-p;b
z585J^GSIH~3}OfU(0)Y_G9okDm7o0P;mAfkAbe%J@q&9ql#)}8=N1hHWGD&6e?Rs}
zxhjtTFr4d`A&AWUi)ZC+<ri}nCgw}Ki(PLHdRd@Mo71Tm%tOK@9iF#@YFoPd_73b6
zu>g#VuWDHHk2Ss9@D=nJh1d-|T2ZqXNK-5YUZEy&|HSN#{j#PGzS)|?edZr=th~A<
zrdC=rKU`<qYgqVb{4S7Kf-!PUGktjxJ^7_fBdMAl?hZvcF1-wfi&r;=g1WtwOxLMq
zy!SS?X$TlY3#^Oi?^yNU`kR$Z&0qA;ESywzng_5~dj#hB{B4t=#3$cWr{MyP@v&G(
z!XBUcu19_13&K;LRP}Jngzcb@juNXC2=fS`jO(aM$EegU&zDvLi6X&!Y{1TKAX36f
z*K~kw?&t&+9xt1`+~hj%HR`?S-<Y>tWY|hNIze~7A^T=E5W=53ZjDqL(z`bwogn=n
zxoI!|+gFVO^3Xp2^%{u;!xF)jeMcZ7#OH5~&>)t)W{wO|VV2j_SJXqYu$l#l=#)(N
zHBJ<~lgyNKjbl^Ro%H2kXIRsaWfk64yQ~Buv1#HgO%*sZ&ty5iCk7Ic{}K!g&vwM2
z4xPv9&*{nj;kC&tVpYDuNm!YiLG<$JkZ*67jyz^1Wt#XQW=F7^3|JCoHRV#bE0&`^
zeyHP3Bk{{UVuya1^5YiO>DMEhulf)irYrJ^w7zoFyKf2Vw4o1MNnKINr#yxe(3=In
zZFzV+3xDt@+m?QA-DbMlO_&>y;=-X6Y>xNOF}`yc0QKbzE>4<;9^uXflVBR*G0<u%
z?hz>fpxel0Ey>l#&+s{X6&;M1q7H%_jK7^pIv5SgcaAPks6XwIL3^LWtz#05SHOaH
zdUd-wKz5>Vl**=}1TxWjJ1%`K58VF>u)p&~y0IL{&8?Co!U5b20bm^Z1sl$yQZ}o0
z`pdh7jY2ApLf&^_H}6;yN>Gv)LktE$lr&Ftpkz24dAD(M`V)VU%iY^*q|C_<*G&Lk
z3%U{gv_#&q07<STYa^9R48{68y63<MeyjI8@q)meDIdP7sH&&&q*eI)Rd?CqT0FBq
z{~fY(#m+OkF;YiBpz`+GnV(Yuhg!Hs5I=DhwBoOJT*}Pdvn-k$iD~~#_)azZq8<8T
z@@rp@yiF<p&Vdsjr`68JjYCTuXis5NAz}w9N2W|zdzm?3!uB7$q0cCU)pM%gSQo4W
ztN0yRCE?I2eGDUtBLeNjhVGYz=k7`aX~yiBaaz-c+D=6jCaGpP9eRbXh})~mn{B#v
zyA?p@{GXqsu>^E#_8KLqF3cUoQfJJIFsU2SdohLo3V4#!%->0m<Xm;mKNI!K0l$E+
z?zqcO5VlE_|A}8m^^x|BgQS#6%bQud0lx{{6+7nl6(gsZ!^Ri^zk266)pr~Qn=_h#
zD-~=4xvP6$go$8FcPwE&#E~y~8i0mONB@W9eH>(;ZvEvNUVYadhgsBS{VYC+e|>0?
zLV_;6=8il(PD+`?Fj+EYt9yX^_lxI41^^nW)PR^ZPv^K#bsS`2>!tCjEE`+3bAW%B
z^%6wlITC6A6{sWRzr@!u)jY^s*u7SCB+H)b<h**jWUX!I!Ik?2Rm~CNJaOsB&lp~)
zU34GNATS<6_48jG?%vdg?a@O-evZuZF?zoETX@lnv;3VfxSXvW81}28$5EG0RpaUA
zLH2LD`B#@yH4<l2pF0A0_09?g=BM%LV=kIKod;)`f<WEx@(|p&dg@47q%YL?C2a}D
z>kJ)iEhcL$S)Df9NnO2mES9HYzShC7@}+`>pN2>RjE{>GPAT!{XbryEyL|m5jh1zP
zb4KT%9ehDS0E6+s^yiwho}(;x|Lp{BZ%zOK&(^>F-V0Ghy3^r_7|ti&=(IaWNQ!{D
zMDmOXYNHY>KMqEBo%~0DBt%AoMQo1`C6RU}DCpR3&*%OL&fI?$G&==JRL7y@w%+FZ
z)#&wMEG|ALtdT%S8s!LgKhkJhSEpwZ;(2{SWQ4Ls@Y}$<WB=3s8=Dy}5U{bVp5Qx|
zj%3)FC4M+y8`Vp^YUSBYc$6)!^D=vWxbqHS=klYSod`Qy9_=jRMV4bL5;8BUI>BU>
z((#u*uB=>3#pts!1EhX7p}xAeJoVtuuW-N~)`!?QG)V|>?;}LoSqBk`@;@QcWAVWs
zM}ZkO^On@NZs*V}Lw~>(%&U)9AYFS$DIzf?mPEjyuGZq)Qyzc0gZ^U7e!25lZLwv^
z%_|LHa@!-=RBHCf9U2LUc<-+wSE%~Z;6PUIp$&KalU;6z>?R&6fbAJU0gxlrQO@2y
z%gpU||KDgTa1QR#7tEWyNh#gO7)4fjcCW7zP{E+{ErJa8u>=hGb)?B%e=vSGOpcS%
zh~HoJ7j19pobzz*kvQ%TIwdYJ4^NQigWmM|FiS`Non-dYq}GVbJClR{3z%FSbcsEJ
zb~|{d!GRr-k*4)v$e~4c3kc#+9p^elkH=ScuKyd)2SodTQ9m}S?MNk5`FZE>lCkQj
zLsXNVB^Y(@=tRstuV)?$514GMfbjvQsZjoC+X4@<qpnKlqq}<@;`OqGQ%U4$zdQKz
zb-3<;Rb|K4{1C-=4uf{`$V#?B_q7T~GXF!ppMTs^`R-DFoJ>2X&lkuGf+tMDy#up~
zqUX%b3m3!=YL6JnNZ0Gk(N@u3$G^bVD&d*Cc$mTSvtqV?*{zIKoAz^^?xE8K8-8KF
zBGzH}WerSAcu7!4?T@J1Fk33Re?K}~oBJN?`A%n|WLB9xeqn3w89~ny)1Z<Xx0GTD
ze%lDb?M2W3mTN!!hDbk-W2&3l^*72lml8JF+HIyoK=U^qa-5u_D7d^v)Nh;5_YZ#a
zw=3<_6|ZI-XLs8*Ag+I3K0KMkJRzJgM+<qSQq?$aNG1{L$Mv6wepE~)CjA$vtUAb6
zx#=EL=2khl=e#Hj2rer_wr$Z7+7>`<QMMNXdAsBA7qvx1L}<?dCEh=Pw|{Vi`fa&E
z<I&0o2J9>C=8n{#JFfj<*xFH@BXELrFYA{>2gG1O^frHpk9?SrJKYlMyTgKp4t&KP
zi|sW0w<(K6Vl}79C#CnLF9F~2V`fn2<nNg8{0z*+lHzC$(;hB@?DjJsT1{u>1gm7`
zB-THDLeK9V$*#9e5%KB6dvUFc?lT&mEPoX%PC6f&4qcG%Y05TvgxO+O;n5zOmhk=U
zU`n6ft#>_8ubt0?vQBi5+|qP?1O-$p=KGFCIa}IC8zk<BH_cXO`-WyIim>+GJcco?
zwzqr%Ia{*Fr6S)Sx~-r}S1qy020!c0eN}PzaH%|Ly@;jXyvyMIfkG{?P#JzGZaQV|
zeprY0W7JI>b-{u$GA-wCa&UHjxHE8G;=Rd{GxVjeIdy#yJM+kVM$yBmP>Zl;j>jsG
z=oTMGm2b_5YS#z%J-!|lKSkN_`<@D<PF?C|odf^iREGRve`rM48^w-D%Sgqgl~<%|
zIgV`<X9tt@?ex8xD%A+f!xFvj=CK`?&S`V@V`)c~8@b_S`@K@iem=toMgIbV$2vbh
zNZN}U!lOf32tY)#+iV<>4>m_aw6D`_b!Ce!{my~t=dl{Vg7zB&Y!j4sC!mOm0XBl@
z<fYpeBV_aYWTKzp=gZ=yla4whfZW8ihBSy>dKM8yp3QZEAj?7czP;X2d!+oavgw*P
zrTd!<i2dlVLDTum4TKg(^~EUb>X|UW6Jwv!lMY|IS!_V+PPUU@`!Btyfl#merh|=+
z^PHcC`nq9Nt}^2*b%3VFzOF*^5$}lExGel~@Cgj#yN%E)>y@RQj@DNP(?@GwiwR6T
z_(-|^vd*+KGZq8~LXHj2boX882D$&b2$d~O+0vWPB(Q-NPonoIN4h%h8>V~t7dd&h
zMePdxj|T_3bZYPA9Lu&#tIn^xhS+`+p*8$ok~*x;3n<(m4N=oG(pT~x524?gQmXt9
zg*Z?a1WVi99r1aT?Mnk}vmIY=xWKT*7dFHGB#~|S@``ZrT#afo<E6n7wBg2{6}E(e
zbq6msa8PCsc6Y#z*P!f1r7EdGzvR@-B*3P$09Uc$1ImVjKQ0o_Z<`*B9k!+N5$>BY
zO<v6Bw>qpP1E&#L1^!zumT@U1|HG5pNI+AhC6w>&WwR?lZXF)dwO`njHyiI^97*N3
z@v?@?d9B0`x#k@;$>KJNw>erc?_E+U4O{He{;iYZT;?Hfy4DD{@sLeAJS@p>&;1C#
z#h#b7uaIGMo|5}>&!@8kCB?4DH#2sbb$0%F!?FnWI3OERRd=6zR=6K-;w`y&exOj1
z6w$A;5%-_?LEnvoyk9#!5lL*_c8Y|Cs*g4U?e_lGss%-80}0b_?Wt7@oXWPtDJ#Y8
z49G9w&C3i<)@srf@j7eM@V=P(@|K;a^c7#?@Vn$2J1da?8Y`d;uYtcFW>8=T`n3@G
zi;eKA+Wsv0@BH#-byGZk$wc(obji2?8|WA3NOO{o<S$cLn9s>mwY(eK1B(J+i-HMj
zD5-RYuf%+)iK;oF`mHQh6iH?&dHw5)?)bOGnNlj?0DJ8xt3KZwpPO=|{?TwvI}ql6
zhjg#YJY;$Wo37p5Qhk0oH_GAQ;2vWA)*jf4+p(r!!kWu!?X$4ZeZ3@-B6J8))<jeL
zaMK=L$8VPbaC(be&b{{Y2J4Tn`Xa^~4Sp$q_$pv4D_mmJVSbF4l!jl(@*bqT#h;E|
z9hEMFY41B6fIjyISval&jXh8!t+5e7{&&TU=`QKPyQXL|mNmtDWwt;dXom&vYVYyr
zY&;qA2I{}+{ap@d{y>KBz9OI=q5bX;_TzPcOW6x~ECn#{`F;k#+vOg{J7f+Xx|>Dm
zt#1}K*b0!5zdN~fTc9DF2iC};@?mrA;Qf&v#@oE)X?NVD2~>pEMvV`*#UL$xlY7ON
z)G=JnIAq#>lo$Y*q%pCxS;o61ji!V0-^Tz>mf3!ia-Qbm*v{$Dw^il##{mS^-AO*n
zswk_Sa7r4$#O(ma6&Z{(U0%y^8d86?Fj94|IwT<$pWC2+6*xpFn;zesr`{M7rvq+v
z&UCNW0wjEz0^}n5b`egNjx}}zW)J_a1gd`0n!|gmbsNtEXr5q8zm$XQ5?-e~wYM7B
z1I?Gr^|i>zkaHCEa16=WcY}=3b?i!Ko3%I05*8IC7ukCn(Q|N{)ajeLS$^|BGDXVo
zHa1j_OF89LDB*%?^xF2>G2LyJEAmrvuhs{(XRig8^|&`a)zIPi!hGOYRjZS`OmWWv
z7hLuJg=!IK<*NxK%m*K<YVnO?qxktRSN}r|wfK{EyRz1GxFb6p(7gf1TYURgur1bY
z=+lddxTeIl&4N`8Jz)L*T<N+OeWjr%`NmRyq^QqlNrBktgz+Y3>rFAzSQ837|J^O_
zU-CtY(Jt-wtw5^jRAa#P%K_JS(ipe=DJ4p7qt`$DyaN&uo4%JHXYfOf<BOI1pt|VI
zyk&~B0|L+Cp`K~UpyRN#pA_1E(-dLYM>O^I(st3`D3o51;At>Ey*OKX8d!U${bX`A
zbY;CAIwA?)ml~iSc*S|>j8}2Z{<y0m_`WsrU@MSWssN!?B??}eRuw!DvK0qH;UU<I
zUilJF0^5Q_)UnaF6FUWt3ZSQkTESZkLKHM;RDIo;*)2~jXGd>ULbdmmli~rY;oAiZ
zwq^US%~<aEOey~!g|MU0;Laqf!m7j~NNoLqNl0m9j5AB*FLCGQiarZ>;<EL5iGd>X
z66S4F!fk<A!mC2rYQCX8q6nAx;a?I{;V%EdHVzxT7-+et?0C4Rw2m;(gl9oKiL%9|
ztqxC*XgjsW5~n&VQ~3BNUClwkJ2@HKPgCU_->F-9jd%`h|M9(f^QQQi+s;M+^KN@C
z`oLtcNKJqi$nm!C5j7QkfmUp^1KX_a8-3HKeZdQ-zi;&J+gNV1?fNzC6Y~iKbAyNN
zgZ+7@GFC3H`FW76@}A%+<G2NnAdPM7J|&^7Ze<8RV-{pH^{LhF+Ye}$ZX?7RcgP~_
zF)k=*d?}*ceyy9SNJM}M+&K$zK_Db#@kfcF5Nh%zembKx`4G~0=CNMAC1X;V(y{o@
zTB6q-R3abzAuUCi##N+>rHN(7f1|hymuG@cL3&R2oc;BUlA<qQ(WE|sugTx3aXf-e
zI-A6#6sP1Xva=;W-jAZN|4qICLUV!*JwmGjAwUIRz0LHRH^}YTpMma{!&-~bi|{(j
zEs)y@_)@=Jm&7-U>k$wBn9E`CQZ^a9vzpSG8<Df|0Ye(U)PMCZSezNUjquP_oXnWa
zmXkGoYvZP{VBd2`bLX=TM^)G>a@(dAw#qA1l4~Dfy3ZBmMxLomc2@H^Ob%V_FUftO
zJJXRQYQH+^UbeyT!G7lZO_!DC*W<VpSS*w9X$~NihLM;iw3_Xpy*?{dX3UKJMGxPm
zUAFI<YZe}Z7J2vMH0-jR5Ql_gWMV_R=xnCVzIN_CFJAi@9zb*pC-rQ4Gs_czxo#C8
zPC4CR7ldYzEna1NgM|FJ`+qe)^S_$n`F}q>^#6J6=zsCSRHfadeeg))5ohlHpUoZp
z&x-4;LE7DQQeXc#`<IVuQ9wDeM6<TC)FFs8SA`<IBDEeV-v=D@ycL&PV-4TDW_5iK
zbG@AN!-qc8{oRC{<{c~-FJ5fJ4_8S-o&U5&BnVHn7Fqs_ch*nXk2O5T_7O&sU_S<n
z^<o?Eeo+R!ljVFnvA*n=U^zKc_Ojd=m#I)Kj_dM$c|criWnM0Nr2ivJc7XB3gO<H>
zPw5#n8a2!{X|s%sBW4K^tSRm&w*8IfnwDNT1yg7<M_6LCY>@x8Dq7m0@%aHDgbRTX
zzT*ZWrBFis*-A+!$BPp0Ta6&C#sP8PK1*d!%*DkNlE1;k0RB!q1hdHu&7#a+yWe?M
z=Td?uljf>MdzWKKobY0d)zAwKAzP`OS4PjVu?=?kC8VgU<A<RyJyswIok8-e+SbXb
zY_`X5)b+Bv%8ZHRm@Q<)l<2-GI}KQpxF1&Pwxda^S)z%77?|5?T5Hc(Sa_#doKi8~
z+1Nun&B=I(8$EA`Ey<;19lE-+Y1?Pqr3|i|iIj=z&zq^#>~>ch(aK^PE*5*Cmi@~!
z*Y!>1X;A217-i=3OvWz`#9?_?26BJC&x+-?z{XsEZMykK|Irm_)$I@cMV2MZVj6$V
z4e>dbqzO`Ruv2}WeL5fx-^zab#$KAe9T#}d@O6u*=8~~i)|yi`OugrBlAv*O<h5<I
z8zx8Z;&M`;!<5$S;EmT4EzyMb6*wv6>Qb?Xj&!6nz>4a#R<Jm~ve%&nGR%qidi5S|
zO|>>ikDTWE4^0ky88(6gVn~%B#L~4{IEMntuD%`^@G9iH@D<Z2UIx!(b+AUBKo+V9
z=AGRQ&ZNx9`V`7&H(Dcfk%_5Kebmg|UAfpgo4^;qL7&)LLTSD=?f6nH`4*FUAUx<3
z>Ro%tG}V%wvc|GzGrq_%OII26<_6(6>QJ-*1>3d&`JJY4(3tDRIfcGdfy<6tIqAw;
z3IHd~MC7O^sgvws9yvYIB+Tlh_chJ-fJ`e9aow_41FXowVV&M?LzVc`9u9J`U5qVe
z;A{?kJW1{YU8EY3du!Ur7tP!M7|Vg=U&;E=qJ054+i&F8glF4GJ+(_FS3MqzL|nCi
z?*_kj&Dy@`?;Ne&1;d%s_1u!m>xo)<3(8c0SQ_)#<M&b{-W84t$=X-Ug)CPm&wdCz
z;F<t=Ds;c^p@+tD^+x~+?gHkUmC;mVi`7)pI3AOU<*0I68TF0jn%D$nk@$pIa-)k?
ztV<PQ{b3ff;s%QJ4q155>Hhg2q}4NPep%8q7Jd@T*sCbR(rgA#JKLVDeNbgQ?Km>Y
zO8l#G*WV1ATI%<AfC^jq8{l}wSw7pM89(1&Rjn5E30oR<2=&?iz5023?+Nwd)x{FT
z5`@*gLtRbUJz1-ojRMHUmNxVy8bKQUsh;?PB(n3x%>UgKi%;hZT$Dzk%-#~>f%_f4
zVi+LnB(m^fk@k6kM#>&mO;MnbLYsHqA1LI&4RX3|{s~VKZ*4p*qukaUpNBt}F`4f%
zD~Vcp1kR?IU{V)C0^5FuU9=5?^ayq>u4;I;^qw(LS<?QEoNEj&Az`!aFr<R*%+{8-
zv<i_MsOXh*$CJ_soME$-61y5t`XG?7N`Jnq@_B(ysh;Q2Dfi{s?$wNZ>@VTN4W15@
zELO!IPoDTxD;9=1k~2>6>wE5A555U_31dRNadcJ0#n0YV?|8H|33DZ`PK8yrjLX)<
z;9!Ye8OT)!kan7?zMGxClfS3nK|B~PAjSm+$@iP)>#|ZMJF>Mh+}~tEcH8Gyr#QeS
zM%~n>wvn|BaJ0jx`mIN@0QsXaBYl6f{WNEw%+CZYJ~IY<ut9@VL+aj-GpMljCn;z)
zVx0bjS-h-!j{Ht=m}SwYAu&b5HU>;HmLF4PU)bIr;du7oz1*}e)iS-t;2V!?Erf}V
z7vxzUEmh~E0Yg5a7uKN4@YyKKdz5jtL6?vm`ts%7Nnet(mhuYQ;6Z>GMi#13xh1Wg
zeKbX+A1ZvM9Be&OwBz2)s`s4p5nrk}+vby-u%i*&6^#36k(&iJAd4(jo13Eu0~zwF
zEtwCG8h1H8b;+ESNSYzyplU>cQgWe7s%9)2ywK|V278{hk##z85edgVV`xKOvZF@%
z8N9Ep(hyvlS6fg&4_?KXctXNT<6(J7%(GtsaqlnEjohqGodQMV2j9Yb$;-~G43WQi
z)hdT&jUY`UN^j$v{sh=9zMiq?V$X@%Xir@;GCx^PYMjd{ok&uJ3k^sf&S1l|+DJX(
zW{aMunqVuPcu#vNU;q|q`?fmmaPe<mB!Gq`YG~XVtb~J3v~qFaf{WrzAY0!3Lf&t-
zEL{P|_D2>fufMVI9zCnb2#4@Thy<A4X}d*)GJ&d7H~T8E(#n$X+otMdeffB`0l_i;
z{gWBm@hR4s`o(PKC$!bu<3WtI0wu+K8J`QZn}}G_M@~FP+BSYUd$Gs+HZIjRQ-QgG
zlQITt#6)j*xqy?{1Qsq3T*yg-s*A%KS#6xZI$!gwdiL-ns?wFW|5Quk#Lq~qw(X%G
zf9)2#+5pZu;GRm(?|tk`y<MSxkCoaDK3|B9FRW(^GRUiCqfYLcDD^(J%4C*J_9Opn
zsGTxr2ns918HE~`sp(_nq-gfz0$wABx+rr-pz@z4L3$jr2(ub)HhJf;Q!)?^oCP$G
z+c+p78`TdTwWPqc6q%sFe-mD>N;AVJW|TP`O#r>PPbE&BLR#+x@uFQ8jv(eXUTi0Z
z&zmc=KI!>158i1B3Iq|Ax^dPCYjRQnrw^6|p2m&Z+_6bDjL&&Esh}=^nz;~dk??%A
zx3cakbI;VlzA1NtgK-iy2kxG7AvThq9`bM=7GRrRCwqPjwCwL+hPl*AI91^MiNKp~
zFc<H*u!*6I#3n%;^biMAm)mU+ml`4Cu!DQ&V??iyol9s4A^#Kh+mfF1tFQ(q1(|8g
zM^F?RV0XjUrA$q{#7ZYGMf#Y2jpPK35`my;h8f#rKjW{*N41L<@&>45H(4>o2<q0f
z)e{g03~@9m;o^--8^cbfz#e&8>H7`tr)}tv@vo5*!CPVtLu3s#aY{0r)X=92XJJr8
z0cWcQ8P=Iqr>a=6G~9T%c`{Ll6zg9~qXgqORn}`qhr}79cucB7`AdBGZ%jU*UY5by
z``0^b36UEs?4n$>u*_zw-u(2-PQ!pZJ2YU2V9-e<#Kl|n&u70|R*b^DrPBWCh-c@4
z9qp)1BK`S$P09SYTrNaO1njYoKA=Y3g~q|`+Ts<Mv2mx2gJtK_&(9mDHKtf~pfu~u
zs8!(-5EESU1xsiimQH+pImxNrXN<<#IJpR$L{M-cVA41Vm)r9y0u;u{=lL)1bGu_b
zAJtqG3Q5p+bOH7bzrxwxrzymZK*xOiElLw$%}F^}5Fq|iu$0bJ5srJT$lAb3VG{X@
zzu7p(8fo~cw)iB2X0Y@?B5k&RDWoo7ag%>8p2Uv30ePK`n%`WU-`pz3{Wg}h3l{#B
zi^DjKYa^iF1SM)>+;$-dwvW_pCnk&xy#9a)r{hF%g(8ZLFvh$fN<h$@I2(4~J3b1W
zG>!1@%F(w2#$|#P*&713LZT|;h6y&k7eHJ1xisF4N$A>S$9sH77E(q6RmPJ-s$<-F
zq1ByM)O)sr7-raaqod&I5Sl?h^z8`jjD+GHTpSm0LhNr**q{O{tho-*W?L2YGi?P{
z-q)v$l|eta5XGqz>1VA|9B~7&V-bzm=)0n)z`t+ma6jiPPG&yf1Hw4A2pc_BV8xN9
zW8Cs0U`*T+I5|fHb`Zwj@d(%Hpi@jxxt<g2V|5{_Hghiu+>r|dQYf6GsfVNE+5<oG
zFAD~oy2A+|t^)K=fd1)bU%U-bgh0SY8BP=m-VFSLCC-xZRS-`D!Ok*?0qH26`1Zik
zcswKAs*AeOxmSQ3r#Pg79kSBGj$r%Znuy?(>>Vk3CwSj7r^=+FUAzN4wA@vEMe5S*
z#%gy=kiz60yOviYxvxcXTPiX)AOWk=(KK*Uq2MbwPH(83v=aq9(1jZ1t-uUxu8rg5
zZxyQQu4zQKV?CLC$&Uo7$&`0bH`5Xz4>fb~=4uEo#A6vpVEj{nPMe6~l#iQ<Pt7MN
zaU;i1KuM_!8)+;XbZ{!c?Pv<i+7kE2DHOktmGKLva5~c0?<-OkJ$@aG<sdF#Z}_E4
zsQJ)mJf{**p%;Y|7-7vm3Y3@tPX2^wE^1@85F>_Lk_2m<G=|SZt0do^Z}-um8Jk%-
z!7l`H!}b|B6O98X#*C5#W6Vw{LDSMD6}%JNYqO?L6jZd6e^mspjkk?41x|+nK;&fs
zU;}Iwgv?KY9loRp1;P(FJNB_vj9;^Ea_i5A(K~_5mjOh(nkd8{h`-;O^MoGzD}hwv
zb=Xp&09k|1n|9*SNOO1AX<uKU9B@b=`N}P$%t_V~rbv$oSO7AfI-Ht>q|h)D)q2~#
z<fQ^D4QYf3Cuz(_V2e}yt;Gg0AFWOqzSsj`CGM;pyA3zaf5pW3DSaV}k`<&-7E50^
zaw%}}n=m;})*Vt<4{hLfH}2?%&ky)th$0R^%CLqLKc~>UDeB!aPGl0bg$U|5Ck9Q9
z3F7Sp&dhm2QRe;Qtqx*KsTjDCc&~eM9dzR**GnM2qTskt_LJ?-%$(FgDIYJP6izqj
z&@{rDzm~=(eK2N?*%LU)@PH8I4=)K}151ri9KG(aLX+b~Sa5hs4-C6M_6^<`CEV+d
zAxKt59|U@0IJq7%l(gxU=gNtc7YVk#SWiX}aF`f|*lv{|!cTGfgV#lf2sLgR9$htj
z9<c&6k~Bi#RPy+Zhzp1aW~BP_V6@bj+|sK)(C;#Ve1e>vFt^si_6T5?S+<cfFjPKa
z35ZwJ1krmVjU>{3tFha`OyD-U2QL%K+I<#pD1_!HP!^l#T6aDIjy9+Oh0|R6a|TyV
z0YAB{Px{hanI;^LXm!w`tpZWVyr(G_cWiHL*ab7$oc`pZVo6I>Hli4!3%7!loC2Pd
z&7Zix{(^XR4j0G3UxZI6T5t)faJe1DzK>;AI(O0hx*hj0UT3vXLOR;LM1ah^U%*E1
znO9pI@~iXfQz`IEJQBuI^!#$c>GwD{s5q%*{Wzhb)QT?^d~&Yei>rw1bQUNYBhM!&
zesrHUD!}&A-CwW(T7DRZaZ*u$-GBtUKN4ft8JbSJTv9e)jZ8G^E?s9UeLD!t34g5U
zGXq-<-$E*~W2`t(7cgF<W>2MF!iWk6{NnCy>}teJ4R|f!Yv)vjrm%FS2iR9}MyPhj
z7?&n@4E8&0(Emo&-4R#-TVXv-T_&tkmCKDa2*}mrM8*fV6VqCW=QW@mz`5F`I`=Ib
zjx_xi(Dd0et_GZ^h4KvsG0=C^Wc6-?HO@?@2H_{yBDqe}OXp`6V-9{U9~zSf?*8kX
z6XwZPsvLD{Lmcw?Rfu_VBW@~sB$TcFJwpfP{!30M{~6cb$aDP$RRc4{95kr%I}fOz
z(Y`d-?VT){^1y5@;3?>$7<fNk@Is4TPNQCSy0zBsQ;OJaqq%FvH!|-y7%@Rq2({6H
z?=BVMxp(KE=WHQY2bY<e4^xl14Tin{3X}UoPbN<(b+ut(eugt5UpoR%Jr<`Q(HKua
z4>Bo0Rj4!x(64+6H;0@~AX$cnV$TKHw(Ml|6=zl@Ti>ogX=@FLASZq$um80$5;Iy|
zi5M+BC22WeQ(Llt90+Y|j1N<IFglUBmw$#P@Kg<sV>KOJnQUI3`I7Vdn%em>f-Rs`
zHvV4_rmHo6s^<z_Mqq>Kk&>O)&Nj@a%r#9B0Xi~F?tE07u!I>b#D|Q(B+QWjp+vn~
zp>Qfko>Qqe@^-)`|A6$80SC%-(cD<^q+i(eNUn4A($_W&#jBRKfz-)Q@<*D#`5JBq
z>R*MR7N=dloUG#>7MD^qej=WiN*POS)+lRnor*9XmI9d7Fzu3&f_DzrMKpQ=VL4dH
zNfC2!@v9I`z@pfKm*SG)Fh0+FQH)yg?wyDeIyAo4fY-O9C9e(I#O(Ej@>Tq$yo>}{
zDXM@84fGVIx7g%5RY8En?RmBUT9Tx=%@$&egn7&qa9Ug7Q^5cuD%mF%zLh{?uDp~U
z#TD4>o`b@EeV|MYkT9kg_^z}|AQ==z{q?e4LBM!i%cJ+Bn-7RfW?v;7GLna*Z%{ps
zfaRmMbh=>yi)zB;p)$(pG<NMmr=zy|a{{Edyt%0G{zNj>F5c)dgOy+3Il%z3dkCaR
zA#!%2aIRp45*^0svSOP&9tml_OQWtp?R;vPg$RXJ(x5?&8!7wT5K_8xqeJ)X*-n%8
z#-FPpHLvIhM$`zPf_fq>?F3q%8$Xe%6%O(jsRClxTLg+rETUh`QOzmDH@O>kmTCjA
zJ$|yVL?YGbF{H4q1(#67?cNHImXQ-6X(=7uj|1?lCh81#<cC<0E-2yHH(n{QR}GGe
ztMU>@z$@Tf5!8x)3Mb(Yl)03cVJ)GM9gDy6$TGVpw{dSPQdX~>Mi*Q4jk>`Z@LxNQ
z4sPSY)=61PcKaIaV7SvHQOovxHqqbsaYls92z4`>OT7)<4pXd)gPSU!<mY}#I6f(^
zJj4nbA)yoL<5vsN<)%kF9Z<`+t*e~o9M3Kh*fxK3Kz`6WTYAk2q8JZ;Zc^$#?<S%j
zs;mVR3G2FlN8Sk<zmGS9td#9<G{mQzSG;@Eh=-e#1_0%94!yIAaDdGN6%QvVnV(<;
zJ5rIZ0f5o~JCpu;<uC%}^u|NPF-;BFaQ@!ioPasN#z1yt@4$*!qWJ0AekTY*ydJ8v
zs{H`yS3osN$6PkJ5X<!21Y&|My#yY0KsMGKAVWT#ZZVvvQ6@1TWdJiPn$Rz=b3#B;
zh+Y)&zEe99ew&j9P;j!<O@Nk-*IZU#N=LW-wq&Z~MY<A-01_-GPW(FNJ%ai!?!Hwa
zmicX1{pCsIfWIs@*+U03u#Y}*tTHbhfJA~y6&QMmmpe?#aF80b@cjGMTbKkAqRXOb
zG|fIh5rqyRlr>b)m()9-#cMx>!w<<}WDVRt{5DdNrU8+_uw!+uJZ^@NpUzX08w*DV
zaK-2VM3~<!7+?wzU98kX%rhRW@##iqfT9@#DTao>k;VbY1RKGH0nE3@PZ+0wY=OUZ
ziU>M+3wjjk1MMdj&P4hy-ncMzz0*jM6CgqK5fTB5BGhDl-sN`+%9tb1M8E^aFGVQO
zU^v4OG_UN2S6E052mgf}%bvTQ?9%Gck6;`@biuq!X)J+)9iLz(4sb(vzc2zjwLd}B
z$4_@m$ZY8oB~d;KbQtc)1{hdNnxId$cs3fv%0MqgzJ@~qGJY=LBUsi!Orp4`=?4HB
zdd!Ev&d@%DGq=YFWFmm9Y$r%z=Weco8GV6eh1kAfp>@LmhsClfQKni^vik!69A2q`
z@1n{P@SH*{>u>YxI~LSnS{?}EZ3N`FO``)8IrK0^u)M*L0cF-5PRPwtI`|5;yAr64
z<~uiy2IL}vu*b!jF*z_bG8lk)M~lo38VVU<;0I~|>dyIVs2ncg0MVE@3<W@)L-`BM
zgJo-+sNHxI`fqV>1iUat@te%oOddja9iXIfc6H@R6NsWO7jRaA;6geY_(?lq>SsIw
zr=SsF3xhH8q$SiynNvl}%97;R@;%hzxTiPSf&zGA;%;*10p3kdVdQSEi8*1cJfs4w
zSJ)Yd?+ldq2F?h2zKAE$mO-y=->(Dc4>Jr^ClvXbpWcpao+=1nLo4A5oKz+>z$+0~
zbd=(A%%pRVu`KKN&49U7EJLevq*a}{^z|D+iPy-r|8RmxDBHlLMyf#XstF7v`J0nO
zW55Tmj~<umPw8*p3>V;3*WwID0rgBI3j5m<;!HeX_E?cUl2mC0eE5E&0Sc5NGy(aR
z6ufDKEZ$4BjKX`eVe$|s>^T1n=&^4gA)?*^(ooU@_v^S_$g}1E5vcvk{sf?e1kRYX
zessdpcoLwDBuDuHtQII3YK35fajkLVMr}!t&jnw{>%ehwvYa6Z8WiA5B35-bt2K|y
zcm^!$f}DGKTMLp18>OQfO{xQ*FKjbYp1<huC-^0QZ158rU?-s<z@`$9t)8>5+$Kzy
z{<fsQ4Lv*{CD^5*!da^A+k~Ieqlv-F&{5NZhRflij>&(Tv`eBA0)p0@pNK55(NAC_
zOqRqb8g&^;s*IILAw^IqAC?Y=cQJ9<Uve!j<^^J9I4Kon2^u18)dNU*bHR!5a{=RD
zBLN?I$VEdYKpmWs{P6skO7T>3!Zi*Ku<N(60C_9kcQgPuwN}9KQG4*2{<wF23cslL
z5(Fs4fb!@uMY*YQSb$_qKZkLrka1@}%;mA+Ahg6uwBxUq4@s7}2JuP7sPl@x?cCHR
zFz4D+oIW5RQ$P`IlY%#f@?9Tk625z+$``(~7>1`j*(u>r4bvGto_KWa-%-q$d8sqS
zL=ppd{(f-b8T6)Kzr&0U9N@?S{C1s|w_nJ`j-71DJ3P2fn(IBm+s|GBfE%UCjtOwl
z0TD=n^|18-81S)+&HuWOXPI~v;1~TIlb33Ka?v_8=onk`mjFOCcw+etZ==~8ebU3$
zp`4SPpm+Ecu#Kf`B|k()t4OQ^_hF}LhU3_asW`JW(>1|vR-#m;oK}7)I1$;A6C-GO
zG`A>{`&wVcl>&;52he2%`y5+&V_?D<QYeQV0f4TUirw5%H1FN9PLr}KvxoplvrD@N
zt$AesSdo*4)Dl(qB6Z;1&kx|COgod4&?fT0q58|6U(XNC9O;r(a-Q<L<u}E4;s)Xl
z1N~5Oo`3X$y<rjPaDgV8r*d`=inIuJ`B{`cKHeO#$i6iF;qt_f!%6-@SnsTnYNWv}
zvOh9Hq-`f-Ly)Hb+UR4-BGr)D(bK-J0k(b7&wf*sc}jv^%%vKqGP}AXzk21`Klf4l
zn;6x!eTB>bIQnn-guXyW@ZH&;CK0n)Ew3?hj$nHp_K0rTItqo7MZYMNJ~YoWO{(eu
z0rE#CcVd4Vx8#D~#e9hAPh-?3S>R6-OI-oK68VyN(A3IGPhLhhI4rEW*m@sq?Y~9Z
z-KLTjO7l$>Y-lX{`M?I8B#e)*C+YdSwVwe&8`qPFdP(61xv8Yx4S4mIj=oHJ-W9+4
z5%G<YYVKym?J_6uF2#br#*)J$gzoc+MT%*+Q(RZnIlf?Z)fdz^u3uRKZv;-_ax*rr
z?9kI4-amNum}I58Y-AQ^@4f+4<FVs8@YmJF#v&i68oohT@!ZpSj$b|vF3PbZo-I~n
zfR#epaWNJerqoiGEwuRGf8mC%YJm3_Z-p0Q^vZ><5-*FYW6nrXSvao64}H-j3_BJh
z+Pb?-pQ{K^aXz62-_j7E`bY;jYy*-=T+>Zc#sTciM}O$<dL7voXZVc?+Oi8ZpeIQ1
zxMA04m|A_3VHy0ga(n`bb(pX~q0HWNiOa7PTBFu3`H*jmS8Zo@(t^GSWw05jEIB}}
z5Ea=KDafzc<ipa9r`PMU$vp4-U01qSmS2XbxzwYjsU}UTsjD$D^FhvW06h`TEOBAk
z+9y|+s^#6?zozeGRp;1e7=CnU>D#=%lI;O_1ry*Eg!{3^dp)#S`_q71xZqHlXnQkk
z$^DKbU7l>Mb#UNyu>?^=>*{(9yj<&;xv48Fx&S!Fh#IE!y{j7VX|o<0P4J862=sWM
zUKR6lZWJk71x!0{4g9d`Q+lPfHQp$H8Ec7**7>s&pH&GrdM87(gfe|*Mc@C-m`(9x
z>@pi`_+US-fGwdEvKE8NhoymXKa3}_`q3?MN9=SoMlrTez2(VCK+9i|M7svtrB^dT
z`Fn^o8Bv1omE6xMtiRdQ2{U-1?P7uP#$O1Zy~x%#tGJ~M-sTO#9#_u#T~#nonk~JW
zbk+hMJJ#6O`o#${a#h66I4Q@_PzAg`4nz_)u7B=}NS3*I*-<n%-Ae2By)xT(h#JP8
zi<;l0L9FRfQxBQG`OQnk)?aNhf7Au=#H@Idb8Cj(376h%U3<X_oK30y<df*1H+`i>
zb%pkNbMIWuf#~DT%N=NJ1=cPR#?7$Tg7-65&cBLy@JH8*zUJ`S)Q3O1ibs$Cd|v?X
z@~z+%jrCiB5{JWM_=|EbKv0Q2V7<ouX{clf=FPS^7q&%AO1L?l&3q^4h&te!NDn+%
z|3SWZHU9(R_AVcfwB~Mpx&h#0L{I3S$rd31Lswkz?4BvGKxMI7WI^=tlEHPag_U-3
z!#|eDofCxFV&05=xKdl*G6%%oq7ieiK&atR0;A_|8%)es@GbEFE>XbT0I%e?@!z7}
z`B4oX-pMmsVkPlU!he-JE4s)0AYZzuTK;??##6fX!#Ust=9nAcoAJHNhzEQ%<`N6v
z(S3{ZFWy`NmeawUsC4*M{mM?wNmr2RqPn%FwYDJUJ!1w${|Vjh^>b2FPl`P&MA>e2
zOg&u|=6P~KWmJDcMSwh2eW~OIKJmlA_VT2An@CD}{m92uBLRRWv(+Gto^6I^tvOk4
zMroK62Wf1Jeco{((^sMRt`<^Cs^XhszO+G}@==Uwl-AWnJTyf~pojC^(cl$@XY$uU
zKKIC3D{s~5i3Jf)-Y$)(8vqYJAshtTjTkV=!d$KFzDsg#{Aff&K#!w}YCmn$pQ3R*
zLcYV|>xDf0clM5C4|picAiLQ<@_7&7lHKi4a<6VTUtV8%N6~Y-W2rKUJu6PitvYY3
z-|7nj?8sjr2qvt-psAY0o0*4dQt4sovF>57otyvXq{{W)fJKwvwM=ODEm@c8cbClu
z&`Pz2uM(sW(%RlS$dBY*Xv41*e@_y37@ut4sx_plaD4h!Vr%@a{W8wJ%+BNncynnj
z6a*t*FTUP4u^i4DW%fl+PF{BPwsr*X2N{pSz>?@cLtPdhC#HW2c1e7@y8clsJLpl-
z6<uN=8d6@iLo<Bu%d`2st0xS+oLUe6Kb(sCpJ$Bze;9@$eDHv>m`=fxdp1Pn1n@^u
M_C7rC&SRhd2e>LLL;wH)
--- a/intl/locale/PluralForm.jsm
+++ b/intl/locale/PluralForm.jsm
@@ -70,16 +70,18 @@ var gFunctions = [
   // 15: Icelandic, Macedonian
   [2, (n) => n%10==1&&n%100!=11?0:1],
   // 16: Breton
   [5, (n) => n%10==1&&n%100!=11&&n%100!=71&&n%100!=91?0:n%10==2&&n%100!=12&&n%100!=72&&n%100!=92?1:(n%10==3||n%10==4||n%10==9)&&n%100!=13&&n%100!=14&&n%100!=19&&n%100!=73&&n%100!=74&&n%100!=79&&n%100!=93&&n%100!=94&&n%100!=99?2:n%1000000==0&&n!=0?3:4],
   // 17: Shuar
   [2, (n) => n!=0?1:0],
   // 18: Welsh
   [6, (n) => n==0?0:n==1?1:n==2?2:n==3?3:n==6?4:5],
+  // 19: Slavic languages (bs, hr, sr). Same as rule 7, but resulting in different CLDR categories
+  [3, (n) => n%10==1&&n%100!=11?0:n%10>=2&&n%10<=4&&(n%100<10||n%100>=20)?1:2],
 ];
 
 var PluralForm = {
   /**
    * Get the correct plural form of a word based on the number
    *
    * @param aNum
    *        The number to decide which plural form to use
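A minimal sketch of how the new rule 19 behaves when exercised through the module's makeGetter API (the same entry point the test below uses); the module URL and the placeholder word list are illustrative assumptions, not part of this patch:

const { PluralForm } = ChromeUtils.import("resource://gre/modules/PluralForm.jsm");
const [get, numForms] = PluralForm.makeGetter(19);  // new Slavic rule
// numForms === 3
get(1, "form1;form2;form3");   // -> "form1"  (n % 10 == 1 && n % 100 != 11)
get(3, "form1;form2;form3");   // -> "form2"  (n % 10 in 2..4, n % 100 outside 10..19)
get(11, "form1;form2;form3");  // -> "form3"  (everything else)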
--- a/intl/locale/tests/unit/test_pluralForm.js
+++ b/intl/locale/tests/unit/test_pluralForm.js
@@ -652,16 +652,50 @@ function run_test()
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
     6,6,6,6,6,6,6,6,6,6,
+  ], [
+    // 19: Slavic languages (bs, hr, sr) 0-9, 10-19, ..., 90-99
+    3,1,2,2,2,3,3,3,3,3,
+    3,3,3,3,3,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    // 100-109, 110-119, ..., 190-199
+    3,1,2,2,2,3,3,3,3,3,
+    3,3,3,3,3,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    // 200-209, 210-219, ..., 290-299
+    3,1,2,2,2,3,3,3,3,3,
+    3,3,3,3,3,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
+    3,1,2,2,2,3,3,3,3,3,
   ]];
 
   for (let [rule, expect] of allExpect.entries()) {
     print("\nTesting rule #" + rule);
 
     let [get, numForms] = PluralForm.makeGetter(rule);
 
     // Make sure the largest value expected matches the number of plural forms
--- a/js/src/devtools/rootAnalysis/mozconfig.haz
+++ b/js/src/devtools/rootAnalysis/mozconfig.haz
@@ -39,8 +39,9 @@ ac_add_options --disable-replace-malloc
 # -Wattributes is very verbose due to attributes being ignored on template
 # instantiations. -Wignored-attributes is very verbose due to attributes being
 # ignored on template parameters.
 CFLAGS="$CFLAGS -Wno-attributes -Wno-ignored-attributes"
 CPPFLAGS="$CPPFLAGS -Wno-attributes -Wno-ignored-attributes"
 CXXFLAGS="$CXXFLAGS -Wno-attributes -Wno-ignored-attributes"
 
 NODEJS="$TOOLTOOL_DIR/node/bin/node"
+NASM="$TOOLTOOL_DIR/nasm/nasm"
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -189,17 +189,17 @@ void js::Nursery::enable() {
     return;
   }
 
   {
     AutoLockGCBgAlloc lock(runtime());
     if (!allocateNextChunk(0, lock)) {
       return;
     }
-    capacity_ = NurseryChunkUsableSize;
+    capacity_ = SubChunkLimit;
   }
 
   setCurrentChunk(0, true);
   setStartPosition();
 #ifdef JS_GC_ZEAL
   if (runtime()->hasZealMode(ZealMode::GenerationalGC)) {
     enterZealMode();
   }
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/gc/bug1532289.js
@@ -0,0 +1,11 @@
+// |jit-test| --ion-warmup-threshold=0; --ion-offthread-compile=off; skip-if: !getJitCompilerOptions()['baseline.enable']
+
+// gczeal mode causes test to run too slowly with --no-baseline
+gczeal(4,40);
+
+var x;
+var y = false;
+
+function f(v) { x = v; while (y) {} }
+
+for (var z=1; z < 1e5; z++) { f(BigInt(z)); }
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -1847,17 +1847,17 @@ JSObject* HeapTypeSetKey::singleton(Comp
 }
 
 bool HeapTypeSetKey::needsBarrier(CompilerConstraintList* constraints) {
   TypeSet* types = maybeTypes();
   if (!types) {
     return false;
   }
   bool result = types->unknownObject() || types->getObjectCount() > 0 ||
-                types->hasAnyFlag(TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL);
+                types->hasAnyFlag(TYPE_FLAG_PRIMITIVE_GCTHING);
   if (!result) {
     freeze(constraints);
   }
   return result;
 }
 
 namespace {
 
--- a/js/src/vm/TypeSet.h
+++ b/js/src/vm/TypeSet.h
@@ -82,20 +82,26 @@ enum : uint32_t {
   TYPE_FLAG_INT32 = 0x8,
   TYPE_FLAG_DOUBLE = 0x10,
   TYPE_FLAG_STRING = 0x20,
   TYPE_FLAG_SYMBOL = 0x40,
   TYPE_FLAG_BIGINT = 0x80,
   TYPE_FLAG_LAZYARGS = 0x100,
   TYPE_FLAG_ANYOBJECT = 0x200,
 
+  /* Mask containing all "immediate" primitives (not heap-allocated) */
+  TYPE_FLAG_PRIMITIVE_IMMEDIATE = TYPE_FLAG_UNDEFINED | TYPE_FLAG_NULL |
+      TYPE_FLAG_BOOLEAN | TYPE_FLAG_INT32 | TYPE_FLAG_DOUBLE,
+  /* Mask containing all GCThing primitives (heap-allocated) */
+  TYPE_FLAG_PRIMITIVE_GCTHING =
+      TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL | TYPE_FLAG_BIGINT,
+
   /* Mask containing all primitives */
-  TYPE_FLAG_PRIMITIVE = TYPE_FLAG_UNDEFINED | TYPE_FLAG_NULL |
-                        TYPE_FLAG_BOOLEAN | TYPE_FLAG_INT32 | TYPE_FLAG_DOUBLE |
-                        TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL | TYPE_FLAG_BIGINT,
+  TYPE_FLAG_PRIMITIVE =
+      TYPE_FLAG_PRIMITIVE_IMMEDIATE | TYPE_FLAG_PRIMITIVE_GCTHING,
 
   /* Mask/shift for the number of objects in objectSet */
   TYPE_FLAG_OBJECT_COUNT_MASK = 0x3c00,
   TYPE_FLAG_OBJECT_COUNT_SHIFT = 10,
   TYPE_FLAG_OBJECT_COUNT_LIMIT = 7,
   TYPE_FLAG_DOMOBJECT_COUNT_LIMIT =
       TYPE_FLAG_OBJECT_COUNT_MASK >> TYPE_FLAG_OBJECT_COUNT_SHIFT,
 
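A quick sanity check of the new masks, using the hex values from the enum above (plain JavaScript, only to show the arithmetic; not part of the patch):

const TYPE_FLAG_STRING = 0x20, TYPE_FLAG_SYMBOL = 0x40, TYPE_FLAG_BIGINT = 0x80;
const TYPE_FLAG_PRIMITIVE_GCTHING =
    TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL | TYPE_FLAG_BIGINT;
console.log(TYPE_FLAG_PRIMITIVE_GCTHING.toString(16));  // "e0"
// The old needsBarrier() check used TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL (0x60),
// which misses BigInt; the new mask (0xe0) covers all heap-allocated primitives.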
--- a/layout/base/PresShell.cpp
+++ b/layout/base/PresShell.cpp
@@ -7624,312 +7624,317 @@ nsresult PresShell::EventHandler::Handle
   nsresult rv =
       HandleEventInternal(aEvent, aEventStatus, false, aOverrideClickTarget);
   return rv;
 }
 
 nsresult PresShell::EventHandler::HandleEventInternal(
     WidgetEvent* aEvent, nsEventStatus* aEventStatus,
     bool aIsHandlingNativeEvent, nsIContent* aOverrideClickTarget) {
+  MOZ_ASSERT(aEvent);
+  MOZ_ASSERT(aEventStatus);
+
   RefPtr<EventStateManager> manager = GetPresContext()->EventStateManager();
-  nsresult rv = NS_OK;
-
-  if (!NS_EVENT_NEEDS_FRAME(aEvent) || mPresShell->GetCurrentEventFrame() ||
-      mPresShell->GetCurrentEventContent()) {
-    bool touchIsNew = false;
-    bool isHandlingUserInput = false;
-
-    if (mPresShell->mCurrentEventContent &&
-        aEvent->IsTargetedAtFocusedWindow()) {
-      nsFocusManager* fm = nsFocusManager::GetFocusManager();
-      if (fm) {
-        fm->FlushBeforeEventHandlingIfNeeded(mPresShell->mCurrentEventContent);
-      }
-    }
-
-    // XXX How about IME events and input events for plugins?
-    if (aEvent->IsTrusted()) {
-      if (aEvent->IsUserAction()) {
-        mPresShell->mHasHandledUserInput = true;
-      }
-
-      switch (aEvent->mMessage) {
-        case eKeyPress:
-        case eKeyDown:
-        case eKeyUp: {
-          Document* doc = mPresShell->GetCurrentEventContent()
-                              ? mPresShell->mCurrentEventContent->OwnerDoc()
-                              : nullptr;
-          auto keyCode = aEvent->AsKeyboardEvent()->mKeyCode;
-          if (keyCode == NS_VK_ESCAPE) {
-            Document* root = nsContentUtils::GetRootDocument(doc);
-            if (root && root->GetFullscreenElement()) {
-              // Prevent default action on ESC key press when exiting
-              // DOM fullscreen mode. This prevents the browser ESC key
-              // handler from stopping all loads in the document, which
-              // would cause <video> loads to stop.
-              // XXX We need to claim the Escape key event which will be
-              //     dispatched only into chrome is already consumed by
-              //     content because we need to prevent its default here
-              //     for some reasons (not sure) but we need to detect
-              //     if a chrome event handler will call PreventDefault()
-              //     again and check it later.
-              aEvent->PreventDefaultBeforeDispatch(
-                  CrossProcessForwarding::eStop);
-              aEvent->mFlags.mOnlyChromeDispatch = true;
-
-              // The event listeners in chrome can prevent this ESC behavior by
-              // calling prevent default on the preceding keydown/press events.
-              if (!mPresShell->mIsLastChromeOnlyEscapeKeyConsumed &&
-                  aEvent->mMessage == eKeyUp) {
-                // ESC key released while in DOM fullscreen mode.
-                // Fully exit all browser windows and documents from
-                // fullscreen mode.
-                Document::AsyncExitFullscreen(nullptr);
-              }
-            }
-            nsCOMPtr<Document> pointerLockedDoc =
-                do_QueryReferent(EventStateManager::sPointerLockedDoc);
-            if (!mPresShell->mIsLastChromeOnlyEscapeKeyConsumed &&
-                pointerLockedDoc) {
-              // XXX See above comment to understand the reason why this needs
-              //     to claim that the Escape key event is consumed by content
-              //     even though it will be dispatched only into chrome.
-              aEvent->PreventDefaultBeforeDispatch(
-                  CrossProcessForwarding::eStop);
-              aEvent->mFlags.mOnlyChromeDispatch = true;
-              if (aEvent->mMessage == eKeyUp) {
-                Document::UnlockPointer();
-              }
-            }
-          }
-          if (keyCode != NS_VK_ESCAPE && keyCode != NS_VK_SHIFT &&
-              keyCode != NS_VK_CONTROL && keyCode != NS_VK_ALT &&
-              keyCode != NS_VK_WIN && keyCode != NS_VK_META) {
-            // Allow keys other than ESC and modifiers be marked as a
-            // valid user input for triggering popup, fullscreen, and
-            // pointer lock.
-            isHandlingUserInput = true;
-            GetPresContext()->RecordInteractionTime(
-                nsPresContext::InteractionType::eKeyInteraction,
-                aEvent->mTimeStamp);
-          }
-
-          Telemetry::AccumulateTimeDelta(
-              Telemetry::INPUT_EVENT_QUEUED_KEYBOARD_MS, aEvent->mTimeStamp);
-          break;
-        }
-        case eMouseDown:
-        case eMouseUp:
-          Telemetry::AccumulateTimeDelta(Telemetry::INPUT_EVENT_QUEUED_CLICK_MS,
-                                         aEvent->mTimeStamp);
-          MOZ_FALLTHROUGH;
-        case ePointerDown:
-        case ePointerUp:
-          isHandlingUserInput = true;
-          GetPresContext()->RecordInteractionTime(
-              nsPresContext::InteractionType::eClickInteraction,
-              aEvent->mTimeStamp);
-          break;
-
-        case eMouseMove:
-          if (aEvent->mFlags.mHandledByAPZ) {
-            Telemetry::AccumulateTimeDelta(
-                Telemetry::INPUT_EVENT_QUEUED_APZ_MOUSE_MOVE_MS,
-                aEvent->mTimeStamp);
-          }
-          break;
-
-        case eDrop: {
-          nsCOMPtr<nsIDragSession> session = nsContentUtils::GetDragSession();
-          if (session) {
-            bool onlyChromeDrop = false;
-            session->GetOnlyChromeDrop(&onlyChromeDrop);
-            if (onlyChromeDrop) {
-              aEvent->mFlags.mOnlyChromeDispatch = true;
-            }
-          }
-          break;
-        }
-
-        case eWheel:
-          if (aEvent->mFlags.mHandledByAPZ) {
-            Telemetry::AccumulateTimeDelta(
-                Telemetry::INPUT_EVENT_QUEUED_APZ_WHEEL_MS, aEvent->mTimeStamp);
-          }
-          break;
-
-        case eTouchMove:
-          if (aEvent->mFlags.mHandledByAPZ) {
-            Telemetry::AccumulateTimeDelta(
-                Telemetry::INPUT_EVENT_QUEUED_APZ_TOUCH_MOVE_MS,
-                aEvent->mTimeStamp);
-          }
-          break;
-
-        default:
-          break;
-      }
-
-      if (!mPresShell->mTouchManager.PreHandleEvent(
-              aEvent, aEventStatus, touchIsNew, isHandlingUserInput,
-              mPresShell->mCurrentEventContent)) {
-        return NS_OK;
-      }
-    }
-
-    if (aEvent->mMessage == eContextMenu) {
-      WidgetMouseEvent* mouseEvent = aEvent->AsMouseEvent();
-      if (mouseEvent->IsContextMenuKeyEvent() &&
-          !AdjustContextMenuKeyEvent(mouseEvent)) {
-        return NS_OK;
-      }
-      if (mouseEvent->IsShift()) {
-        aEvent->mFlags.mOnlyChromeDispatch = true;
-        aEvent->mFlags.mRetargetToNonNativeAnonymous = true;
-      }
-    }
-
-    AutoHandlingUserInputStatePusher userInpStatePusher(isHandlingUserInput,
-                                                        aEvent, GetDocument());
-
-    if (aEvent->IsTrusted() && aEvent->mMessage == eMouseMove) {
-      nsIPresShell::AllowMouseCapture(
-          EventStateManager::GetActiveEventStateManager() == manager);
-
-      GetPresContext()->RecordInteractionTime(
-          nsPresContext::InteractionType::eMouseMoveInteraction,
-          aEvent->mTimeStamp);
-    }
-
-    nsAutoPopupStatePusher popupStatePusher(
-        PopupBlocker::GetEventPopupControlState(aEvent));
-
-    // FIXME. If the event was reused, we need to clear the old target,
-    // bug 329430
-    aEvent->mTarget = nullptr;
-
-    HandlingTimeAccumulator handlingTimeAccumulator(*this, aEvent);
-
-    // 1. Give event to event manager for pre event state changes and
-    //    generation of synthetic events.
-    rv = manager->PreHandleEvent(
-        GetPresContext(), aEvent, mPresShell->mCurrentEventFrame,
-        mPresShell->mCurrentEventContent, aEventStatus, aOverrideClickTarget);
-
-    // 2. Give event to the DOM for third party and JS use.
-    if (NS_SUCCEEDED(rv)) {
-      bool wasHandlingKeyBoardEvent = nsContentUtils::IsHandlingKeyBoardEvent();
-      if (aEvent->mClass == eKeyboardEventClass) {
-        nsContentUtils::SetIsHandlingKeyBoardEvent(true);
-      }
-      // If EventStateManager or something wants reply from remote process and
-      // needs to win any other event listeners in chrome, the event is both
-      // stopped its propagation and marked as "waiting reply from remote
-      // process".  In this case, PresShell shouldn't dispatch the event into
-      // the DOM tree because they don't have a chance to stop propagation in
-      // the system event group.  On the other hand, if its propagation is not
-      // stopped, that means that the event may be reserved by chrome.  If it's
-      // reserved by chrome, the event shouldn't be sent to any remote
-      // processes.  In this case, PresShell needs to dispatch the event to
-      // the DOM tree for checking if it's reserved.
-      if (aEvent->IsAllowedToDispatchDOMEvent() &&
-          !(aEvent->PropagationStopped() &&
-            aEvent->IsWaitingReplyFromRemoteProcess())) {
-        MOZ_ASSERT(nsContentUtils::IsSafeToRunScript(),
-                   "Somebody changed aEvent to cause a DOM event!");
-        nsPresShellEventCB eventCB(mPresShell);
-        if (nsIFrame* target = mPresShell->GetCurrentEventFrame()) {
-          if (target->OnlySystemGroupDispatch(aEvent->mMessage)) {
-            aEvent->StopPropagation();
-          }
-        }
-        if (aEvent->mClass == eTouchEventClass) {
-          DispatchTouchEventToDOM(aEvent, aEventStatus, &eventCB, touchIsNew);
-        } else {
-          DispatchEventToDOM(aEvent, aEventStatus, &eventCB);
-        }
-      }
-
-      nsContentUtils::SetIsHandlingKeyBoardEvent(wasHandlingKeyBoardEvent);
-
-      if (aEvent->mMessage == ePointerUp ||
-          aEvent->mMessage == ePointerCancel) {
-        // Implicitly releasing capture for given pointer.
-        // ePointerLostCapture should be send after ePointerUp or
-        // ePointerCancel.
-        WidgetPointerEvent* pointerEvent = aEvent->AsPointerEvent();
-        MOZ_ASSERT(pointerEvent);
-        PointerEventHandler::ReleasePointerCaptureById(pointerEvent->pointerId);
-        PointerEventHandler::CheckPointerCaptureState(pointerEvent);
-      }
-
-      // 3. Give event to event manager for post event state changes and
-      //    generation of synthetic events.
-      if (!mPresShell->IsDestroying() && NS_SUCCEEDED(rv)) {
-        rv = manager->PostHandleEvent(GetPresContext(), aEvent,
-                                      mPresShell->GetCurrentEventFrame(),
-                                      aEventStatus, aOverrideClickTarget);
-      }
-    }
-
-    if (!mPresShell->IsDestroying() && aIsHandlingNativeEvent) {
-      // Ensure that notifications to IME should be sent before getting next
-      // native event from the event queue.
-      // XXX Should we check the event message or event class instead of
-      //     using aIsHandlingNativeEvent?
-      manager->TryToFlushPendingNotificationsToIME();
+
+  // If we cannot handle the event with mPresShell because of no target,
+  // just record the response time.
+  // XXX Is this intentional?  In that case the recorded time is very short
+  //     because there is nothing to do, so it may skew the average and median
+  //     toward better values.
+  if (NS_EVENT_NEEDS_FRAME(aEvent) && !mPresShell->GetCurrentEventFrame() &&
+      !mPresShell->GetCurrentEventContent()) {
+    RecordEventHandlingResponsePerformance(aEvent);
+    return NS_OK;
+  }
+
+  bool touchIsNew = false;
+  bool isHandlingUserInput = false;
+
+  if (mPresShell->mCurrentEventContent && aEvent->IsTargetedAtFocusedWindow()) {
+    nsFocusManager* fm = nsFocusManager::GetFocusManager();
+    if (fm) {
+      fm->FlushBeforeEventHandlingIfNeeded(mPresShell->mCurrentEventContent);
+    }
+  }
+
+  // XXX How about IME events and input events for plugins?
+  if (aEvent->IsTrusted()) {
+    if (aEvent->IsUserAction()) {
+      mPresShell->mHasHandledUserInput = true;
     }
 
     switch (aEvent->mMessage) {
       case eKeyPress:
       case eKeyDown:
       case eKeyUp: {
-        if (aEvent->AsKeyboardEvent()->mKeyCode == NS_VK_ESCAPE) {
-          if (aEvent->mMessage == eKeyUp) {
-            // Reset this flag after key up is handled.
-            mPresShell->mIsLastChromeOnlyEscapeKeyConsumed = false;
-          } else {
-            if (aEvent->mFlags.mOnlyChromeDispatch &&
-                aEvent->mFlags.mDefaultPreventedByChrome) {
-              mPresShell->mIsLastChromeOnlyEscapeKeyConsumed = true;
+        Document* doc = mPresShell->GetCurrentEventContent()
+                            ? mPresShell->mCurrentEventContent->OwnerDoc()
+                            : nullptr;
+        auto keyCode = aEvent->AsKeyboardEvent()->mKeyCode;
+        if (keyCode == NS_VK_ESCAPE) {
+          Document* root = nsContentUtils::GetRootDocument(doc);
+          if (root && root->GetFullscreenElement()) {
+            // Prevent default action on ESC key press when exiting
+            // DOM fullscreen mode. This prevents the browser ESC key
+            // handler from stopping all loads in the document, which
+            // would cause <video> loads to stop.
+            // XXX We need to claim that the Escape key event, which will be
+            //     dispatched only into chrome, is already consumed by
+            //     content, because we need to prevent its default action
+            //     here for some reason (not entirely clear), but we still
+            //     need to detect whether a chrome event handler will call
+            //     PreventDefault() again and check that later.
+            aEvent->PreventDefaultBeforeDispatch(CrossProcessForwarding::eStop);
+            aEvent->mFlags.mOnlyChromeDispatch = true;
+
+            // The event listeners in chrome can prevent this ESC behavior by
+            // calling preventDefault() on the preceding keydown/press events.
+            if (!mPresShell->mIsLastChromeOnlyEscapeKeyConsumed &&
+                aEvent->mMessage == eKeyUp) {
+              // ESC key released while in DOM fullscreen mode.
+              // Fully exit all browser windows and documents from
+              // fullscreen mode.
+              Document::AsyncExitFullscreen(nullptr);
+            }
+          }
+          nsCOMPtr<Document> pointerLockedDoc =
+              do_QueryReferent(EventStateManager::sPointerLockedDoc);
+          if (!mPresShell->mIsLastChromeOnlyEscapeKeyConsumed &&
+              pointerLockedDoc) {
+            // XXX See above comment to understand the reason why this needs
+            //     to claim that the Escape key event is consumed by content
+            //     even though it will be dispatched only into chrome.
+            aEvent->PreventDefaultBeforeDispatch(CrossProcessForwarding::eStop);
+            aEvent->mFlags.mOnlyChromeDispatch = true;
+            if (aEvent->mMessage == eKeyUp) {
+              Document::UnlockPointer();
             }
           }
         }
-        if (aEvent->mMessage == eKeyDown) {
-          mPresShell->mIsLastKeyDownCanceled = aEvent->mFlags.mDefaultPrevented;
+        if (keyCode != NS_VK_ESCAPE && keyCode != NS_VK_SHIFT &&
+            keyCode != NS_VK_CONTROL && keyCode != NS_VK_ALT &&
+            keyCode != NS_VK_WIN && keyCode != NS_VK_META) {
+          // Allow keys other than ESC and modifiers to be marked as
+          // valid user input for triggering popups, fullscreen, and
+          // pointer lock.
+          isHandlingUserInput = true;
+          GetPresContext()->RecordInteractionTime(
+              nsPresContext::InteractionType::eKeyInteraction,
+              aEvent->mTimeStamp);
+        }
+
+        Telemetry::AccumulateTimeDelta(
+            Telemetry::INPUT_EVENT_QUEUED_KEYBOARD_MS, aEvent->mTimeStamp);
+        break;
+      }
+      case eMouseDown:
+      case eMouseUp:
+        Telemetry::AccumulateTimeDelta(Telemetry::INPUT_EVENT_QUEUED_CLICK_MS,
+                                       aEvent->mTimeStamp);
+        MOZ_FALLTHROUGH;
+      case ePointerDown:
+      case ePointerUp:
+        isHandlingUserInput = true;
+        GetPresContext()->RecordInteractionTime(
+            nsPresContext::InteractionType::eClickInteraction,
+            aEvent->mTimeStamp);
+        break;
+
+      case eMouseMove:
+        if (aEvent->mFlags.mHandledByAPZ) {
+          Telemetry::AccumulateTimeDelta(
+              Telemetry::INPUT_EVENT_QUEUED_APZ_MOUSE_MOVE_MS,
+              aEvent->mTimeStamp);
+        }
+        break;
+
+      case eDrop: {
+        nsCOMPtr<nsIDragSession> session = nsContentUtils::GetDragSession();
+        if (session) {
+          bool onlyChromeDrop = false;
+          session->GetOnlyChromeDrop(&onlyChromeDrop);
+          if (onlyChromeDrop) {
+            aEvent->mFlags.mOnlyChromeDispatch = true;
+          }
+        }
+        break;
+      }
+
+      case eWheel:
+        if (aEvent->mFlags.mHandledByAPZ) {
+          Telemetry::AccumulateTimeDelta(
+              Telemetry::INPUT_EVENT_QUEUED_APZ_WHEEL_MS, aEvent->mTimeStamp);
+        }
+        break;
+
+      case eTouchMove:
+        if (aEvent->mFlags.mHandledByAPZ) {
+          Telemetry::AccumulateTimeDelta(
+              Telemetry::INPUT_EVENT_QUEUED_APZ_TOUCH_MOVE_MS,
+              aEvent->mTimeStamp);
         }
         break;
-      }
-      case eMouseUp:
-        // reset the capturing content now that the mouse button is up
-        nsIPresShell::SetCapturingContent(nullptr, 0);
-        break;
-      case eMouseMove:
-        nsIPresShell::AllowMouseCapture(false);
-        break;
-      case eDrag:
-      case eDragEnd:
-      case eDragEnter:
-      case eDragExit:
-      case eDragLeave:
-      case eDragOver:
-      case eDrop: {
-        // After any drag event other than dragstart (which is handled
-        // separately, as we need to collect the data first), the DataTransfer
-        // needs to be made protected, and then disconnected.
-        DataTransfer* dataTransfer = aEvent->AsDragEvent()->mDataTransfer;
-        if (dataTransfer) {
-          dataTransfer->Disconnect();
-        }
-        break;
-      }
+
       default:
         break;
     }
+
+    if (!mPresShell->mTouchManager.PreHandleEvent(
+            aEvent, aEventStatus, touchIsNew, isHandlingUserInput,
+            mPresShell->mCurrentEventContent)) {
+      return NS_OK;
+    }
+  }
+
+  if (aEvent->mMessage == eContextMenu) {
+    WidgetMouseEvent* mouseEvent = aEvent->AsMouseEvent();
+    if (mouseEvent->IsContextMenuKeyEvent() &&
+        !AdjustContextMenuKeyEvent(mouseEvent)) {
+      return NS_OK;
+    }
+    if (mouseEvent->IsShift()) {
+      aEvent->mFlags.mOnlyChromeDispatch = true;
+      aEvent->mFlags.mRetargetToNonNativeAnonymous = true;
+    }
+  }
+
+  AutoHandlingUserInputStatePusher userInpStatePusher(isHandlingUserInput,
+                                                      aEvent, GetDocument());
+
+  if (aEvent->IsTrusted() && aEvent->mMessage == eMouseMove) {
+    nsIPresShell::AllowMouseCapture(
+        EventStateManager::GetActiveEventStateManager() == manager);
+
+    GetPresContext()->RecordInteractionTime(
+        nsPresContext::InteractionType::eMouseMoveInteraction,
+        aEvent->mTimeStamp);
+  }
+
+  nsAutoPopupStatePusher popupStatePusher(
+      PopupBlocker::GetEventPopupControlState(aEvent));
+
+  // FIXME: If the event was reused, we need to clear the old target
+  // (bug 329430).
+  aEvent->mTarget = nullptr;
+
+  HandlingTimeAccumulator handlingTimeAccumulator(*this, aEvent);
+
+  // 1. Give event to event manager for pre event state changes and
+  //    generation of synthetic events.
+  nsresult rv = manager->PreHandleEvent(
+      GetPresContext(), aEvent, mPresShell->mCurrentEventFrame,
+      mPresShell->mCurrentEventContent, aEventStatus, aOverrideClickTarget);
+
+  // 2. Give event to the DOM for third party and JS use.
+  if (NS_SUCCEEDED(rv)) {
+    bool wasHandlingKeyBoardEvent = nsContentUtils::IsHandlingKeyBoardEvent();
+    if (aEvent->mClass == eKeyboardEventClass) {
+      nsContentUtils::SetIsHandlingKeyBoardEvent(true);
+    }
+    // If EventStateManager or something else wants a reply from a remote
+    // process and needs to win over any other event listeners in chrome, the
+    // event both has its propagation stopped and is marked as "waiting reply
+    // from remote process".  In this case, PresShell shouldn't dispatch the
+    // event into the DOM tree because listeners there wouldn't get a chance
+    // to stop propagation in the system event group.  On the other hand, if
+    // its propagation is not stopped, the event may be reserved by chrome.
+    // If it's reserved by chrome, the event shouldn't be sent to any remote
+    // processes, so PresShell needs to dispatch it to the DOM tree to check
+    // whether it's reserved.
+    if (aEvent->IsAllowedToDispatchDOMEvent() &&
+        !(aEvent->PropagationStopped() &&
+          aEvent->IsWaitingReplyFromRemoteProcess())) {
+      MOZ_ASSERT(nsContentUtils::IsSafeToRunScript(),
+                 "Somebody changed aEvent to cause a DOM event!");
+      nsPresShellEventCB eventCB(mPresShell);
+      if (nsIFrame* target = mPresShell->GetCurrentEventFrame()) {
+        if (target->OnlySystemGroupDispatch(aEvent->mMessage)) {
+          aEvent->StopPropagation();
+        }
+      }
+      if (aEvent->mClass == eTouchEventClass) {
+        DispatchTouchEventToDOM(aEvent, aEventStatus, &eventCB, touchIsNew);
+      } else {
+        DispatchEventToDOM(aEvent, aEventStatus, &eventCB);
+      }
+    }
+
+    nsContentUtils::SetIsHandlingKeyBoardEvent(wasHandlingKeyBoardEvent);
+
+    if (aEvent->mMessage == ePointerUp || aEvent->mMessage == ePointerCancel) {
+      // Implicitly release capture for the given pointer.
+      // ePointerLostCapture should be sent after ePointerUp or
+      // ePointerCancel.
+      WidgetPointerEvent* pointerEvent = aEvent->AsPointerEvent();
+      MOZ_ASSERT(pointerEvent);
+      PointerEventHandler::ReleasePointerCaptureById(pointerEvent->pointerId);
+      PointerEventHandler::CheckPointerCaptureState(pointerEvent);
+    }
+
+    // 3. Give event to event manager for post event state changes and
+    //    generation of synthetic events.
+    if (!mPresShell->IsDestroying() && NS_SUCCEEDED(rv)) {
+      rv = manager->PostHandleEvent(GetPresContext(), aEvent,
+                                    mPresShell->GetCurrentEventFrame(),
+                                    aEventStatus, aOverrideClickTarget);
+    }
+  }
+
+  if (!mPresShell->IsDestroying() && aIsHandlingNativeEvent) {
+    // Ensure that notifications to IME are sent before getting the next
+    // native event from the event queue.
+    // XXX Should we check the event message or event class instead of
+    //     using aIsHandlingNativeEvent?
+    manager->TryToFlushPendingNotificationsToIME();
+  }
+
+  switch (aEvent->mMessage) {
+    case eKeyPress:
+    case eKeyDown:
+    case eKeyUp: {
+      if (aEvent->AsKeyboardEvent()->mKeyCode == NS_VK_ESCAPE) {
+        if (aEvent->mMessage == eKeyUp) {
+          // Reset this flag after key up is handled.
+          mPresShell->mIsLastChromeOnlyEscapeKeyConsumed = false;
+        } else {
+          if (aEvent->mFlags.mOnlyChromeDispatch &&
+              aEvent->mFlags.mDefaultPreventedByChrome) {
+            mPresShell->mIsLastChromeOnlyEscapeKeyConsumed = true;
+          }
+        }
+      }
+      if (aEvent->mMessage == eKeyDown) {
+        mPresShell->mIsLastKeyDownCanceled = aEvent->mFlags.mDefaultPrevented;
+      }
+      break;
+    }
+    case eMouseUp:
+      // reset the capturing content now that the mouse button is up
+      nsIPresShell::SetCapturingContent(nullptr, 0);
+      break;
+    case eMouseMove:
+      nsIPresShell::AllowMouseCapture(false);
+      break;
+    case eDrag:
+    case eDragEnd:
+    case eDragEnter:
+    case eDragExit:
+    case eDragLeave:
+    case eDragOver:
+    case eDrop: {
+      // After any drag event other than dragstart (which is handled
+      // separately, as we need to collect the data first), the DataTransfer
+      // needs to be made protected, and then disconnected.
+      DataTransfer* dataTransfer = aEvent->AsDragEvent()->mDataTransfer;
+      if (dataTransfer) {
+        dataTransfer->Disconnect();
+      }
+      break;
+    }
+    default:
+      break;
   }
   RecordEventHandlingResponsePerformance(aEvent);
   return rv;
 }
 
 void PresShell::EventHandler::RecordEventHandlingResponsePerformance(
     const WidgetEvent* aEvent) {
   if (!Telemetry::CanRecordBase() || aEvent->mTimeStamp.IsNull() ||
--- a/layout/reftests/css-blending/reftest.list
+++ b/layout/reftests/css-blending/reftest.list
@@ -1,22 +1,22 @@
 == blend-canvas.html blend-canvas-ref.html
 == blend-constant-background-color.html blend-constant-background-color-ref.html
-fuzzy-if(webrender,1-3,1291-7888) == blend-gradient-background-color.html blend-gradient-background-color-ref.html
+== blend-gradient-background-color.html blend-gradient-background-color-ref.html
 == blend-image.html blend-image-ref.html
 == blend-difference-stacking.html blend-difference-stacking-ref.html
 
 fuzzy-if(/^Windows\x20NT\x2010\.0/.test(http.oscpu),0-1,0-10000) fuzzy-if(skiaContent,0-1,0-30000) == background-blending-alpha.html background-blending-alpha-ref.html
-fuzzy-if(webrender,1-3,1291-7888) == background-blending-gradient-color.html background-blending-gradient-color-ref.html
-fuzzy-if(azureSkiaGL,0-3,0-7597) fuzzy-if(cocoaWidget,0-3,0-7597) fuzzy-if(d2d,0-1,0-3800) fuzzy-if(d3d11,0-1,0-4200) fuzzy-if(skiaContent,0-2,0-9450) fuzzy-if(webrender,1-5,3938-23925) == background-blending-gradient-gradient.html background-blending-gradient-gradient-ref.html
-fuzzy-if(azureSkiaGL,0-2,0-7174) fuzzy-if(webrender,1-3,1288-7888) == background-blending-gradient-image.html background-blending-gradient-color-ref.html
+== background-blending-gradient-color.html background-blending-gradient-color-ref.html
+fuzzy-if(azureSkiaGL,0-3,0-7597) fuzzy-if(cocoaWidget,0-3,0-7597) fuzzy-if(d2d,0-1,0-3800) fuzzy-if(d3d11,0-1,0-4200) fuzzy-if(skiaContent,0-2,0-9450) fuzzy-if(webrender,1-1,2400-6200) == background-blending-gradient-gradient.html background-blending-gradient-gradient-ref.html
+fuzzy-if(azureSkiaGL,0-2,0-7174) == background-blending-gradient-image.html background-blending-gradient-color-ref.html
 fuzzy-if(azureSkia||d2d||gtkWidget,0-1,0-10000) == background-blending-image-color-jpg.html background-blending-image-color-ref.html
 == background-blending-image-color-png.html background-blending-image-color-ref.html
 == background-blending-image-color-svg.html background-blending-image-color-ref.html
-fuzzy-if(azureSkiaGL,0-2,0-7174) fuzzy-if(webrender,1-3,1288-7888) == background-blending-image-gradient.html background-blending-gradient-color-ref.html
+fuzzy-if(azureSkiaGL,0-2,0-7174) == background-blending-image-gradient.html background-blending-gradient-color-ref.html
 == background-blending-image-image.html background-blending-image-color-ref.html
 == background-blending-isolation.html background-blending-isolation-ref.html
 == background-blending-list-repeat.html background-blending-list-repeat-ref.html
 == background-blending-multiple-images.html background-blending-multiple-images-ref.html
 
 == background-blending-color-burn.html background-blending-color-burn-ref.svg
 == background-blending-color-dodge.html background-blending-color-dodge-ref.svg
 # need to investigate why these tests are fuzzy - first suspect is a possible color space conversion on some platforms; same for mix-blend-mode tests
--- a/layout/reftests/css-gradients/reftest.list
+++ b/layout/reftests/css-gradients/reftest.list
@@ -68,27 +68,27 @@ fuzzy-if(d2d,0-127,0-2612) == repeating-
 fuzzy-if(skiaContent,0-18,0-600) == twostops-1a.html twostops-1-ref.html
 fuzzy-if(skiaContent,0-18,0-600) == twostops-1b.html twostops-1-ref.html
 fuzzy-if(skiaContent,0-226,0-600) == twostops-1c.html twostops-1-ref.html
 fuzzy-if(skiaContent,0-141,0-300) == twostops-1d.html twostops-1-ref.html
 fuzzy-if(skiaContent,0-73,0-900) == twostops-1e.html twostops-1-ref.html
 
 # from http://www.xanthir.com/:4bhipd by way of http://a-ja.net/newgrad.html
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-3,0-20000) fuzzy-if(azureSkiaGL||skiaContent&&layersGPUAccelerated,0-8,0-20000) == aja-linear-1a.html aja-linear-1-ref.html
-fails-if(!d2d&&!skiaContent) fuzzy-if(skiaContent,0-1,0-20000) fuzzy-if(webrender&&winWidget,1-2,11550-11789) == aja-linear-1b.html aja-linear-1-ref.html # bug 526694
+fails-if(!d2d&&!skiaContent) fuzzy-if(skiaContent,0-1,0-20000) fuzzy-if(webrender&&winWidget,1-1,5300-5500) == aja-linear-1b.html aja-linear-1-ref.html # bug 526694
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-3,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-1c.html aja-linear-1-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-3,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-1d.html aja-linear-1-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-3,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-1e.html aja-linear-1-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-3,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-1f.html aja-linear-1-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-2,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-2.html aja-linear-2-ref.html
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-2,0-19999) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-3a.html aja-linear-3-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-2,0-19999) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-3b.html aja-linear-3-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-4,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-4a.html aja-linear-4-ref.html 
 fuzzy-if(!contentSameGfxBackendAsCanvas,0-4,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) == aja-linear-4b.html aja-linear-4-ref.html 
-fuzzy-if(!contentSameGfxBackendAsCanvas,0-4,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) fuzzy-if(webrender&&winWidget,4-9,15926-16125) == aja-linear-5a.html aja-linear-5-ref.html 
+fuzzy-if(!contentSameGfxBackendAsCanvas,0-4,0-20000) fuzzy-if(azureSkiaGL||skiaContent,0-8,0-20000) fuzzy-if(webrender&&winWidget,1-1,5900-6100) == aja-linear-5a.html aja-linear-5-ref.html
 fuzzy-if(Android,0-6,0-10576) == height-dependence-1.html height-dependence-1-ref.html
 fuzzy-if(cocoaWidget,0-1,0-40000) fuzzy-if(Android,0-6,0-10576) == height-dependence-2.html height-dependence-2-ref.html
 fuzzy-if(Android,0-6,0-10576) == height-dependence-3.html height-dependence-3-ref.html
 
 == linear-onestopposition-1.html linear-onestopposition-1-ref.html
 fuzzy-if(d2d,0-47,0-400) fuzzy-if(webrender&&winWidget,0-1,0-1375) == linear-onestopposition-1.html linear-onestopposition-1-ref2.html # d2d interpolates the hard stop
 == radial-onestopposition-1a.html radial-onestopposition-1-ref.html
 == radial-onestopposition-1b.html radial-onestopposition-1-ref.html
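
A note on the fuzzy-if annotations adjusted above: when the condition (e.g.
webrender&&winWidget) holds, the first range bounds the maximum per-channel
color difference between the test and reference snapshots and the second
bounds the number of differing pixels. As a rough illustration of that check
(a sketch only, not the reftest harness; the image representation is an
assumption), in Python:

    def fuzzy_match(img_a, img_b, max_diff_range, pixel_count_range):
        """Return True if img_b differs from img_a within the fuzzy bounds.

        img_a/img_b: same-sized 2D sequences of (r, g, b) tuples.
        max_diff_range: (lo, hi) bounds on the largest per-channel difference.
        pixel_count_range: (lo, hi) bounds on the number of differing pixels.
        """
        max_diff = 0
        differing = 0
        for row_a, row_b in zip(img_a, img_b):
            for pa, pb in zip(row_a, row_b):
                d = max(abs(ca - cb) for ca, cb in zip(pa, pb))
                if d:
                    differing += 1
                    max_diff = max(max_diff, d)
        return (max_diff_range[0] <= max_diff <= max_diff_range[1] and
                pixel_count_range[0] <= differing <= pixel_count_range[1])

    # fuzzy-if(webrender,1-1,2400-6200) then corresponds to
    # fuzzy_match(test_img, ref_img, (1, 1), (2400, 6200)).
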
--- a/layout/reftests/xul/reftest.list
+++ b/layout/reftests/xul/reftest.list
@@ -7,18 +7,18 @@ random-if(Android) == menulist-shrinkwra
 == textbox-overflow-1.xul textbox-overflow-1-ref.xul # for bug 749658
 # accesskeys are not normally displayed on Mac, so skip this test
 skip-if(cocoaWidget) == accesskey.xul accesskey-ref.xul
 pref(layout.css.xul-tree-pseudos.content.enabled,true) fuzzy-if(xulRuntime.widgetToolkit=="gtk3",0-1,0-11) == tree-row-outline-1.xul tree-row-outline-1-ref.xul # win8: bug 1254832
 skip-if(!cocoaWidget) fails-if(webrender&&cocoaWidget) == mac-tab-toolbar.xul mac-tab-toolbar-ref.xul
 pref(layout.css.xul-tree-pseudos.content.enabled,true) != tree-row-outline-1.xul tree-row-outline-1-notref.xul
 == text-crop.xul text-crop-ref.xul
 random-if(/^Windows\x20NT\x206\.1/.test(http.oscpu)) == text-small-caps-1.xul text-small-caps-1-ref.xul
-fuzzy-if(skiaContent,0-1,0-60) fuzzy-if(cocoaWidget&&browserIsRemote&&!skiaContent,0-1,0-31) fuzzy-if(winWidget&&browserIsRemote&&layersGPUAccelerated,0-1,0-50) == inactive-fixed-bg-bug1205630.xul inactive-fixed-bg-bug1205630-ref.html
-fuzzy-if(skiaContent,0-1,0-60) fuzzy-if(cocoaWidget&&browserIsRemote&&!skiaContent,0-1,0-31) fuzzy-if(winWidget&&browserIsRemote&&layersGPUAccelerated,0-1,0-50) == inactive-fixed-bg-bug1272525.xul inactive-fixed-bg-bug1272525-ref.html
+fuzzy-if(skiaContent,0-1,0-60) fuzzy-if(cocoaWidget&&browserIsRemote&&!skiaContent,0-1,0-31) fuzzy-if(winWidget&&browserIsRemote&&layersGPUAccelerated,0-1,0-50) fuzzy-if(webrender,0-1,350-1050) == inactive-fixed-bg-bug1205630.xul inactive-fixed-bg-bug1205630-ref.html
+fuzzy-if(skiaContent,0-1,0-60) fuzzy-if(cocoaWidget&&browserIsRemote&&!skiaContent,0-1,0-31) fuzzy-if(winWidget&&browserIsRemote&&layersGPUAccelerated,0-1,0-50) fuzzy-if(webrender,0-1,450-1100) == inactive-fixed-bg-bug1272525.xul inactive-fixed-bg-bug1272525-ref.html
 
 # Tests for XUL <image> with 'object-fit' & 'object-position':
 # These tests should be very similar to tests in our w3c-css/submitted/images3
 # reftest directory. They live here because they use XUL, and it
 # wouldn't be fair of us to make a W3C testsuite implicitly depend on XUL.
 == object-fit-contain-png-001.xul object-fit-contain-png-001-ref.html
 == object-fit-contain-png-002.xul object-fit-contain-png-002-ref.html
 == object-fit-contain-svg-001.xul object-fit-contain-svg-001-ref.html
--- a/layout/style/test/property_database.js
+++ b/layout/style/test/property_database.js
@@ -76,20 +76,24 @@ const CSS_TYPE_LEGACY_SHORTHAND = 3;
 // property's fields.
 
 // Helper functions used to construct gCSSProperties.
 
 function initial_font_family_is_sans_serif()
 {
   // The initial value of 'font-family' might be 'serif' or
   // 'sans-serif'.
-  var div = document.createElement("div");
-  div.setAttribute("style", "font: initial");
-  return getComputedStyle(div, "").fontFamily == "sans-serif";
+  const meta = document.createElement("meta");
+  meta.setAttribute("style", "font: initial;");
+  document.documentElement.appendChild(meta);
+  const family = getComputedStyle(meta).fontFamily;
+  meta.remove();
+  return family == "sans-serif";
 }
+
 var gInitialFontFamilyIsSansSerif = initial_font_family_is_sans_serif();
 
 // shared by background-image and border-image-source
 var validGradientAndElementValues = [
   "-moz-element(#a)",
   "-moz-element(  #a  )",
   "-moz-element(#a-1)",
   "-moz-element(#a\\:1)",
--- a/layout/style/test/test_revert.html
+++ b/layout/style/test/test_revert.html
@@ -52,19 +52,16 @@ function testResetProperty(property, inf
 
 function testInheritedProperty(property, info) {
   // Given how line-height works, and that it always returns the used value, we
   // cannot test it. The prerequisites for line-height makes getComputedStyle
   // and getDefaultComputedStyle return the same, even though the computed value
   // is different (normal vs. 19px).
   if (property == "line-height")
     return;
-  // Ongoing debugging in bug 1533392.
-  if (property == "font-family")
-    return;
 
   const div = kInheritedDiv;
   const initial = getInitialValue(div, property);
   const parentValue = computedValue(div.parentNode, property);
 
   isnot(parentValue, initial, `${property}: Should inherit something non-initial to begin with`);
 
   const inheritedValue = computedValue(div, property);
--- a/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoThread.java
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoThread.java
@@ -266,19 +266,16 @@ public class GeckoThread extends Thread 
         GeckoLoader.loadSQLiteLibs(context);
         GeckoLoader.loadNSSLibs(context);
         GeckoLoader.loadGeckoLibs(context);
         setState(State.LIBS_READY);
     }
 
     private static void initGeckoEnvironment() {
         final Context context = GeckoAppShell.getApplicationContext();
-        GeckoLoader.loadMozGlue(context);
-        setState(State.MOZGLUE_READY);
-
         final Locale locale = Locale.getDefault();
         final Resources res = context.getResources();
         if (locale.toString().equalsIgnoreCase("zh_hk")) {
             final Locale mappedLocale = Locale.TRADITIONAL_CHINESE;
             Locale.setDefault(mappedLocale);
             Configuration config = res.getConfiguration();
             config.locale = mappedLocale;
             res.updateConfiguration(config, null);
@@ -409,28 +406,50 @@ public class GeckoThread extends Thread 
                 idleMsg.obj = geckoHandler;
                 geckoHandler.sendMessageAtFrontOfQueue(idleMsg);
                 // Keep this IdleHandler
                 return true;
             }
         };
         Looper.myQueue().addIdleHandler(idleHandler);
 
-        initGeckoEnvironment();
-
-        // Wait until initialization before calling Gecko entry point.
+        // Wait until initialization before preparing environment.
         synchronized (this) {
-            while (!mInitialized || !isState(State.LIBS_READY)) {
+            while (!mInitialized) {
                 try {
                     wait();
                 } catch (final InterruptedException e) {
                 }
             }
         }
 
+        final Context context = GeckoAppShell.getApplicationContext();
+        final List<String> env = getEnvFromExtras(mInitInfo.extras);
+
+        // In Gecko, the native crash reporter is enabled by default in opt builds, and
+        // disabled by default in debug builds.
+        if ((mInitInfo.flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) == 0 && !BuildConfig.DEBUG_BUILD) {
+            env.add(0, "MOZ_CRASHREPORTER_DISABLE=1");
+        } else if ((mInitInfo.flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) != 0 && BuildConfig.DEBUG_BUILD) {
+            env.add(0, "MOZ_CRASHREPORTER=1");
+        }
+
+        if (!isChildProcess() && ((mInitInfo.flags & FLAG_ENABLE_MARIONETTE) != 0)) {
+            // The presence of this environment variable determines the initial
+            // value of `marionette.enabled`.
+            env.add(0, "MOZ_MARIONETTE=1");
+        }
+
+        GeckoLoader.loadMozGlue(context);
+        setState(State.MOZGLUE_READY);
+
+        GeckoLoader.setupGeckoEnvironment(context, context.getFilesDir().getPath(), env, mInitInfo.prefs);
+
+        initGeckoEnvironment();
+
         if ((mInitInfo.flags & FLAG_PRELOAD_CHILD) != 0) {
             ThreadUtils.postToBackgroundThread(new Runnable() {
                 @Override
                 public void run() {
                     // Preload the content ("tab") child process.
                     GeckoProcessManager.getInstance().preload("tab");
                 }
             });
@@ -440,41 +459,22 @@ public class GeckoThread extends Thread 
             try {
                 Thread.sleep(5 * 1000 /* 5 seconds */);
             } catch (final InterruptedException e) {
             }
         }
 
         Log.w(LOGTAG, "zerdatime " + SystemClock.elapsedRealtime() + " - runGecko");
 
-        final Context context = GeckoAppShell.getApplicationContext();
         final String[] args = isChildProcess() ? mInitInfo.args : getMainProcessArgs();
 
         if ((mInitInfo.flags & FLAG_DEBUGGING) != 0) {
             Log.i(LOGTAG, "RunGecko - args = " + TextUtils.join(" ", args));
         }
 
-        final List<String> env = getEnvFromExtras(mInitInfo.extras);
-
-        // In Gecko, the native crash reporter is enabled by default in opt builds, and
-        // disabled by default in debug builds.
-        if ((mInitInfo.flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) == 0 && !BuildConfig.DEBUG_BUILD) {
-            env.add(0, "MOZ_CRASHREPORTER_DISABLE=1");
-        } else if ((mInitInfo.flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) != 0 && BuildConfig.DEBUG_BUILD) {
-            env.add(0, "MOZ_CRASHREPORTER=1");
-        }
-
-        if (!isChildProcess() && ((mInitInfo.flags & FLAG_ENABLE_MARIONETTE) != 0)) {
-            // The presence of this environment variable determines the initial
-            // value of `marionette.enabled`.
-            env.add(0, "MOZ_MARIONETTE=1");
-        }
-
-        GeckoLoader.setupGeckoEnvironment(context, context.getFilesDir().getPath(), env, mInitInfo.prefs);
-
         // And go.
         GeckoLoader.nativeRun(args,
                               mInitInfo.extras.getInt(EXTRA_PREFS_FD, -1),
                               mInitInfo.extras.getInt(EXTRA_PREF_MAP_FD, -1),
                               mInitInfo.extras.getInt(EXTRA_IPC_FD, -1),
                               mInitInfo.extras.getInt(EXTRA_CRASH_FD, -1),
                               mInitInfo.extras.getInt(EXTRA_CRASH_ANNOTATION_FD, -1));
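
The environment handling moved earlier in run() above boils down to a small
flag-to-variable mapping, applied before mozglue is loaded. A Python sketch of
the same decisions, with placeholder flag values standing in for the real
GeckoThread constants:

    FLAG_ENABLE_NATIVE_CRASHREPORTER = 1 << 0  # placeholder value
    FLAG_ENABLE_MARIONETTE = 1 << 1            # placeholder value

    def build_gecko_env(flags, debug_build, is_child_process):
        """Mirror the flag-to-environment-variable mapping sketched above."""
        env = []
        # The native crash reporter defaults to on in opt builds and off in
        # debug builds, so only the mismatching combinations need an override.
        if not (flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) and not debug_build:
            env.insert(0, "MOZ_CRASHREPORTER_DISABLE=1")
        elif (flags & FLAG_ENABLE_NATIVE_CRASHREPORTER) and debug_build:
            env.insert(0, "MOZ_CRASHREPORTER=1")
        # MOZ_MARIONETTE is only set for the main process; its presence
        # determines the initial value of `marionette.enabled`.
        if not is_child_process and (flags & FLAG_ENABLE_MARIONETTE):
            env.insert(0, "MOZ_MARIONETTE=1")
        return env
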
 
--- a/modules/libjar/nsZipArchive.cpp
+++ b/modules/libjar/nsZipArchive.cpp
@@ -17,16 +17,17 @@
 #endif
 #include "nsISupportsUtils.h"
 #include "prio.h"
 #include "plstr.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/Logging.h"
 #include "mozilla/UniquePtrExtensions.h"
 #include "stdlib.h"
+#include "nsDirectoryService.h"
 #include "nsWildCard.h"
 #include "nsXULAppAPI.h"
 #include "nsZipArchive.h"
 #include "nsString.h"
 #include "prenv.h"
 #if defined(XP_WIN)
 #  include <windows.h>
 #endif
@@ -73,24 +74,18 @@ static const uint16_t kSyntheticTime = 0
 static const uint16_t kSyntheticDate = (1 + (1 << 5) + (0 << 9));
 
 static uint16_t xtoint(const uint8_t *ii);
 static uint32_t xtolong(const uint8_t *ll);
 static uint32_t HashName(const char *aName, uint16_t nameLen);
 
 class ZipArchiveLogger {
  public:
-  void Write(const nsACString &zip, const char *entry) const {
-    if (!XRE_IsParentProcess()) {
-      return;
-    }
+  void Init(const char *env) {
     if (!fd) {
-      char *env = PR_GetEnv("MOZ_JAR_LOG_FILE");
-      if (!env) return;
-
       nsCOMPtr<nsIFile> logFile;
       nsresult rv = NS_NewLocalFile(NS_ConvertUTF8toUTF16(env), false,
                                     getter_AddRefs(logFile));
       if (NS_FAILED(rv)) return;
 
       // Create the log file and its parent directory (in case it doesn't exist)
       rv = logFile->Create(nsIFile::NORMAL_FILE_TYPE, 0644);
       if (NS_FAILED(rv)) return;
@@ -111,21 +106,26 @@ class ZipArchiveLogger {
       if (!file) return;
 #else
       rv = logFile->OpenNSPRFileDesc(PR_WRONLY | PR_CREATE_FILE | PR_APPEND,
                                      0644, &file);
       if (NS_FAILED(rv)) return;
 #endif
       fd = file;
     }
-    nsCString buf(zip);
-    buf.Append(' ');
-    buf.Append(entry);
-    buf.Append('\n');
-    PR_Write(fd, buf.get(), buf.Length());
+  }
+
+  void Write(const nsACString &zip, const char *entry) const {
+    if (fd) {
+      nsCString buf(zip);
+      buf.Append(' ');
+      buf.Append(entry);
+      buf.Append('\n');
+      PR_Write(fd, buf.get(), buf.Length());
+    }
   }
 
   void AddRef() {
     MOZ_ASSERT(refCnt >= 0);
     ++refCnt;
   }
 
   void Release() {
@@ -133,17 +133,17 @@ class ZipArchiveLogger {
     if ((0 == --refCnt) && fd) {
       PR_Close(fd);
       fd = nullptr;
     }
   }
 
  private:
   int refCnt;
-  mutable PRFileDesc *fd;
+  PRFileDesc *fd;
 };
 
 static ZipArchiveLogger zipLog;
 
 //***********************************************************
 // For every inflation the following allocations are done:
 // malloc(1 * 9520)
 // malloc(32768 * 1)
@@ -331,17 +331,62 @@ nsZipHandle::~nsZipHandle() {
 //  nsZipArchive::OpenArchive
 //---------------------------------------------
 nsresult nsZipArchive::OpenArchive(nsZipHandle *aZipHandle, PRFileDesc *aFd) {
   mFd = aZipHandle;
 
   //-- get table of contents for archive
   nsresult rv = BuildFileList(aFd);
   if (NS_SUCCEEDED(rv)) {
-    if (aZipHandle->mFile) aZipHandle->mFile.GetURIString(mURI);
+    if (aZipHandle->mFile && XRE_IsParentProcess()) {
+      static char *env = PR_GetEnv("MOZ_JAR_LOG_FILE");
+      if (env) {
+        zipLog.Init(env);
+        // We only log accesses in jar/zip archives within the NS_GRE_DIR
+        // and/or the APK on Android. For the former, we log the archive path
+        // relative to NS_GRE_DIR, and for the latter, the nested-archive
+        // path within the APK. This makes the path match the path of the
+        // archives relative to the packaged dist/$APP_NAME directory in a
+        // build.
+        if (aZipHandle->mFile.IsZip()) {
+          // Nested archive, likely omni.ja in APK.
+          aZipHandle->mFile.GetPath(mURI);
+        } else if (nsDirectoryService::gService) {
+          // We can reach here through the initialization of Omnijar from
+          // XRE_InitCommandLine, which happens before the directory service
+          // is initialized. When that happens, it means the opened archive is
+          // the APK, and we don't care to log that one, so we just skip
+          // when the directory service is not initialized.
+          nsCOMPtr<nsIFile> dir = aZipHandle->mFile.GetBaseFile();
+          nsCOMPtr<nsIFile> gre_dir;
+          nsAutoCString path;
+          if (NS_SUCCEEDED(nsDirectoryService::gService->Get(
+                  NS_GRE_DIR, NS_GET_IID(nsIFile), getter_AddRefs(gre_dir)))) {
+            nsAutoCString leaf;
+            nsCOMPtr<nsIFile> parent;
+            while (NS_SUCCEEDED(dir->GetNativeLeafName(leaf)) &&
+                   NS_SUCCEEDED(dir->GetParent(getter_AddRefs(parent)))) {
+              if (!parent) {
+                break;
+              }
+              dir = parent;
+              if (path.Length()) {
+                path.Insert('/', 0);
+              }
+              path.Insert(leaf, 0);
+              bool equals;
+              if (NS_SUCCEEDED(dir->Equals(gre_dir, &equals)) && equals) {
+                mURI.Assign(path);
+                break;
+              }
+            }
+          }
+        }
+      }
+    }
   }
   return rv;
 }
 
 nsresult nsZipArchive::OpenArchive(nsIFile *aFile) {
   RefPtr<nsZipHandle> handle;
 #if defined(XP_WIN)
   mozilla::AutoFDClose fd;
@@ -422,17 +467,19 @@ nsZipItem *nsZipArchive::GetItem(const c
     }
     MOZ_WIN_MEM_TRY_BEGIN
     nsZipItem *item = mFiles[HashName(aEntryName, len)];
     while (item) {
       if ((len == item->nameLength) &&
           (!memcmp(aEntryName, item->Name(), len))) {
         // Successful GetItem() is a good indicator that the file is about to be
         // read
-        zipLog.Write(mURI, aEntryName);
+        if (mURI.Length()) {
+          zipLog.Write(mURI, aEntryName);
+        }
         return item;  //-- found it
       }
       item = item->next;
     }
     MOZ_WIN_MEM_TRY_CATCH(return nullptr)
   }
   return nullptr;
 }
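
The logging block added to OpenArchive() above walks from the archive file up
through its parent directories, prepending leaf names until it reaches
NS_GRE_DIR, so the logged path ends up relative to the GRE directory. A rough
Python equivalent of that walk, assuming plain filesystem paths rather than
nsIFile objects:

    import os

    def path_relative_to(base_dir, file_path):
        """Return file_path relative to base_dir by walking up its parents,
        or None if base_dir is never reached (mirrors the GetParent loop)."""
        base_dir = os.path.abspath(base_dir)
        current = os.path.abspath(file_path)
        parts = []
        while True:
            current, leaf = os.path.split(current)
            if not leaf:
                return None  # hit the filesystem root without finding base_dir
            parts.insert(0, leaf)
            if current == base_dir:
                return "/".join(parts)

    # path_relative_to("/opt/firefox", "/opt/firefox/browser/omni.ja")
    # -> "browser/omni.ja"
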
--- a/netwerk/base/nsNetUtil.cpp
+++ b/netwerk/base/nsNetUtil.cpp
@@ -1619,33 +1619,39 @@ nsresult NS_NewURI(
         *ioService /* = nullptr */)  // pass in nsIIOService to optimize callers
 {
   nsAutoCString charset;
   encoding->Name(charset);
   return NS_NewURI(result, spec, charset.get(), baseURI, ioService);
 }
 
 nsresult NS_NewURI(
-    nsIURI **result, const nsAString &spec, const char *charset /* = nullptr */,
+    nsIURI **result, const nsAString &aSpec,
+    const char *charset /* = nullptr */, nsIURI *baseURI /* = nullptr */,
+    nsIIOService
+        *ioService /* = nullptr */)  // pass in nsIIOService to optimize callers
+{
+  nsAutoCString spec;
+  if (!AppendUTF16toUTF8(aSpec, spec, mozilla::fallible)) {
+    return NS_ERROR_OUT_OF_MEMORY;
+  }
+  return NS_NewURI(result, spec, charset, baseURI, ioService);
+}
+
+nsresult NS_NewURI(
+    nsIURI **result, const nsAString &aSpec, NotNull<const Encoding *> encoding,
     nsIURI *baseURI /* = nullptr */,
     nsIIOService
         *ioService /* = nullptr */)  // pass in nsIIOService to optimize callers
 {
-  return NS_NewURI(result, NS_ConvertUTF16toUTF8(spec), charset, baseURI,
-                   ioService);
-}
-
-nsresult NS_NewURI(
-    nsIURI **result, const nsAString &spec, NotNull<const Encoding *> encoding,
-    nsIURI *baseURI /* = nullptr */,
-    nsIIOService
-        *ioService /* = nullptr */)  // pass in nsIIOService to optimize callers
-{
-  return NS_NewURI(result, NS_ConvertUTF16toUTF8(spec), encoding, baseURI,
-                   ioService);
+  nsAutoCString spec;
+  if (!AppendUTF16toUTF8(aSpec, spec, mozilla::fallible)) {
+    return NS_ERROR_OUT_OF_MEMORY;
+  }
+  return NS_NewURI(result, spec, encoding, baseURI, ioService);
 }
 
 nsresult NS_NewURI(
     nsIURI **result, const char *spec, nsIURI *baseURI /* = nullptr */,
     nsIIOService
         *ioService /* = nullptr */)  // pass in nsIIOService to optimize callers
 {
   return NS_NewURI(result, nsDependentCString(spec), nullptr, baseURI,
--- a/netwerk/protocol/http/Http2Session.cpp
+++ b/netwerk/protocol/http/Http2Session.cpp
@@ -163,17 +163,17 @@ void Http2Session::Shutdown() {
     // restarted.
     if (mCleanShutdown &&
         (stream->StreamID() > mGoAwayID || !stream->HasRegisteredID())) {
       CloseStream(stream, NS_ERROR_NET_RESET);  // can be restarted
     } else if (stream->RecvdData()) {
       CloseStream(stream, NS_ERROR_NET_PARTIAL_TRANSFER);
     } else if (mGoAwayReason == INADEQUATE_SECURITY) {
       CloseStream(stream, NS_ERROR_NET_INADEQUATE_SECURITY);
-    } else if (!mCleanShutdown) {
+    } else if (!mCleanShutdown && (mGoAwayReason != NO_HTTP_ERROR)) {
       CloseStream(stream, NS_ERROR_NET_HTTP2_SENT_GOAWAY);
     } else {
       CloseStream(stream, NS_ERROR_ABORT);
     }
   }
 }
 
 Http2Session::~Http2Session() {
@@ -3866,17 +3866,17 @@ void Http2Session::Close(nsresult aReaso
     mWaitingWebsocketCallbacks.Clear();
   }
 
   uint32_t goAwayReason;
   if (mGoAwayReason != NO_HTTP_ERROR) {
     goAwayReason = mGoAwayReason;
   } else if (NS_SUCCEEDED(aReason)) {
     goAwayReason = NO_HTTP_ERROR;
-  } else if (aReason == NS_ERROR_ILLEGAL_VALUE) {
+  } else if (aReason == NS_ERROR_NET_HTTP2_SENT_GOAWAY) {
     goAwayReason = PROTOCOL_ERROR;
   } else if (mCleanShutdown) {
     goAwayReason = NO_HTTP_ERROR;
   } else {
     goAwayReason = INTERNAL_ERROR;
   }
   if (!mAttemptingEarlyData) {
     GenerateGoAway(goAwayReason);
--- a/netwerk/protocol/http/HttpChannelChild.cpp
+++ b/netwerk/protocol/http/HttpChannelChild.cpp
@@ -175,16 +175,17 @@ HttpChannelChild::HttpChannelChild()
       mFlushedForDiversion(false),
       mIsFromCache(false),
       mCacheNeedToReportBytesReadInitialized(false),
       mNeedToReportBytesRead(true),
       mCacheEntryAvailable(false),
       mAltDataCacheEntryAvailable(false),
       mSendResumeAt(false),
       mKeptAlive(false),
+      mIPCActorDeleted(false),
       mSuspendSent(false),
       mSynthesizedResponse(false),
       mShouldInterceptSubsequentRedirect(false),
       mRedirectingForSubsequentSynthesizedResponse(false),
       mPostRedirectChannelShouldIntercept(false),
       mPostRedirectChannelShouldUpgrade(false),
       mShouldParentIntercept(false),
       mSuspendParentAfterSynthesizeResponse(false) {
@@ -528,17 +529,21 @@ void HttpChannelChild::OnStartRequest(
   // stage, as they are set in the listener's OnStartRequest.
   MOZ_RELEASE_ASSERT(
       !mFlushedForDiversion,
       "mFlushedForDiversion should be unset before OnStartRequest!");
   MOZ_RELEASE_ASSERT(
       !mDivertingToParent,
       "mDivertingToParent should be unset before OnStartRequest!");
 
-  if (mOnStartRequestCalled && !mIPCOpen) {
+  // If this channel was aborted by ActorDestroy, there may still be queued
+  // OnStartRequest/OnStopRequest/OnDataAvailable IPC messages waiting to be
+  // processed. In that case we just ignore them to avoid calling the listener
+  // twice.
+  if (mOnStartRequestCalled && mIPCActorDeleted) {
     return;
   }
 
   if (!mCanceled && NS_SUCCEEDED(mStatus)) {
     mStatus = channelStatus;
   }
 
   // Cookies headers should not be visible to the child process
@@ -658,21 +663,21 @@ void HttpChannelChild::DoOnStartRequest(
   }
 
   if (mSynthesizedResponsePump && mLoadFlags & LOAD_CALL_CONTENT_SNIFFERS) {
     mSynthesizedResponsePump->PeekStream(CallTypeSniffers,
                                          static_cast<nsIChannel*>(this));
   }
 
   nsresult rv = mListener->OnStartRequest(aRequest);
+  mOnStartRequestCalled = true;
   if (NS_FAILED(rv)) {
     Cancel(rv);
     return;
   }
-  mOnStartRequestCalled = true;
 
   if (mDivertingToParent) {
     mListener = nullptr;
     mCompressListener = nullptr;
     if (mLoadGroup) {
       mLoadGroup->RemoveRequest(this, nullptr, mStatus);
     }
 
@@ -1028,17 +1033,21 @@ void HttpChannelChild::MaybeDivertOnStop
 
 void HttpChannelChild::OnStopRequest(
     const nsresult& channelStatus, const ResourceTimingStruct& timing,
     const nsHttpHeaderArray& aResponseTrailers) {
   LOG(("HttpChannelChild::OnStopRequest [this=%p status=%" PRIx32 "]\n", this,
        static_cast<uint32_t>(channelStatus)));
   MOZ_ASSERT(NS_IsMainThread());
 
-  if (mOnStopRequestCalled && !mIPCOpen) {
+  // If this channel was aborted by ActorDestroy, there may still be queued
+  // OnStartRequest/OnStopRequest/OnDataAvailable IPC messages waiting to be
+  // processed. In that case we just ignore them to avoid calling the listener
+  // twice.
+  if (mOnStopRequestCalled && mIPCActorDeleted) {
     return;
   }
 
   if (mDivertingToParent) {
     MOZ_RELEASE_ASSERT(
         !mFlushedForDiversion,
         "Should not be processing any more callbacks from parent!");
 
@@ -3825,16 +3834,19 @@ void HttpChannelChild::ActorDestroy(Acto
     AutoEventEnqueuer ensureSerialDispatch(mEventQ);
 
     mStatus = NS_ERROR_DOCSHELL_DYING;
     HandleAsyncAbort();
 
     // Cleanup the background channel before we resume the eventQ so we don't
     // get any other events.
     CleanupBackgroundChannel();
+
+    mIPCActorDeleted = true;
+    mCanceled = true;
   }
 }
 
 mozilla::ipc::IPCResult HttpChannelChild::RecvLogBlockedCORSRequest(
     const nsString& aMessage, const nsCString& aCategory) {
   Unused << LogBlockedCORSRequest(aMessage, aCategory);
   return IPC_OK();
 }
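
The mOnStartRequestCalled/mIPCActorDeleted pair acts as a "deliver at most
once after the actor is gone" guard. A small Python sketch of the same idea,
with hypothetical names:

    class ChannelListenerGuard:
        """Drop late notifications once the IPC actor has been torn down."""

        def __init__(self, listener):
            self._listener = listener
            self.on_start_called = False
            self.actor_deleted = False

        def on_start_request(self, request):
            # After the actor is destroyed, queued duplicates may still arrive;
            # ignore them so the listener is never notified twice.
            if self.on_start_called and self.actor_deleted:
                return
            self.on_start_called = True
            self._listener.on_start_request(request)
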
--- a/netwerk/protocol/http/HttpChannelChild.h
+++ b/netwerk/protocol/http/HttpChannelChild.h
@@ -408,16 +408,20 @@ class HttpChannelChild final : public PH
   uint8_t mCacheEntryAvailable : 1;
   uint8_t mAltDataCacheEntryAvailable : 1;
 
   // If ResumeAt is called before AsyncOpen, we need to send extra data upstream
   uint8_t mSendResumeAt : 1;
 
   uint8_t mKeptAlive : 1;  // IPC kept open, but only for security info
 
+  // Set when ActorDestroy(ActorDestroyReason::Deletion) is called.
+  // The channel must ignore any subsequent OnStart/Stop/DataAvailable messages.
+  uint8_t mIPCActorDeleted : 1;
+
   // Set if SendSuspend is called. Determines if SendResume is needed when
   // diverting callbacks to parent.
   uint8_t mSuspendSent : 1;
 
   // Set if a response was synthesized, indicating that any forthcoming
   // redirects should be intercepted.
   uint8_t mSynthesizedResponse : 1;
 
--- a/netwerk/protocol/http/nsHttpHandler.cpp
+++ b/netwerk/protocol/http/nsHttpHandler.cpp
@@ -60,16 +60,17 @@
 
 #include "mozilla/net/NeckoChild.h"
 #include "mozilla/net/NeckoParent.h"
 #include "mozilla/net/RequestContextService.h"
 #include "mozilla/ipc/URIUtils.h"
 #include "mozilla/Telemetry.h"
 #include "mozilla/Unused.h"
 #include "mozilla/BasePrincipal.h"
+#include "mozilla/LazyIdleThread.h"
 
 #include "mozilla/dom/ContentParent.h"
 #include "mozilla/dom/Navigator.h"
 #include "mozilla/dom/network/Connection.h"
 
 #include "nsNSSComponent.h"
 
 #if defined(XP_UNIX)
@@ -272,31 +273,31 @@ nsHttpHandler::nsHttpHandler()
       ,
       mSpdyPullAllowance(ASpdySession::kInitialRwin),
       mDefaultSpdyConcurrent(ASpdySession::kDefaultMaxConcurrent),
       mSpdyPingThreshold(PR_SecondsToInterval(58)),
       mSpdyPingTimeout(PR_SecondsToInterval(8)),
       mConnectTimeout(90000),
       mTLSHandshakeTimeout(30000),
       mParallelSpeculativeConnectLimit(6),
-      mSpeculativeConnectEnabled(true),
       mRequestTokenBucketEnabled(true),
       mRequestTokenBucketMinParallelism(6),
       mRequestTokenBucketHz(100),
       mRequestTokenBucketBurst(32),
       mCriticalRequestPrioritization(true),
       mTCPKeepaliveShortLivedEnabled(false),
       mTCPKeepaliveShortLivedTimeS(60),
       mTCPKeepaliveShortLivedIdleTimeS(10),
       mTCPKeepaliveLongLivedEnabled(false),
       mTCPKeepaliveLongLivedIdleTimeS(600),
       mEnforceH1Framing(FRAMECHECK_BARELY),
       mDefaultHpackBuffer(4096),
       mMaxHttpResponseHeaderSize(393216),
       mFocusedWindowTransactionRatio(0.9f),
+      mSpeculativeConnectEnabled(false),
       mUseFastOpen(true),
       mFastOpenConsecutiveFailureLimit(5),
       mFastOpenConsecutiveFailureCounter(0),
       mFastOpenStallsLimit(3),
       mFastOpenStallsCounter(0),
       mFastOpenStallsIdleTime(10),
       mFastOpenStallsTimeout(20),
       mActiveTabPriority(true),
@@ -461,16 +462,19 @@ nsresult nsHttpHandler::Init() {
   nsCOMPtr<nsIIOService> service = do_GetService(NS_IOSERVICE_CONTRACTID, &rv);
   if (NS_FAILED(rv)) {
     NS_WARNING("unable to continue without io service");
     return rv;
   }
   mIOService = new nsMainThreadPtrHolder<nsIIOService>(
       "nsHttpHandler::mIOService", service);
 
+  mBackgroundThread = new mozilla::LazyIdleThread(
+      10000, NS_LITERAL_CSTRING("HTTP Handler Background"));
+
   if (IsNeckoChild()) NeckoChild::InitNeckoChild();
 
   InitUserAgentComponents();
 
   // This preference is only used in the parent process.
   if (!IsNeckoChild()) {
     mActiveTabPriority =
         Preferences::GetBool(HTTP_PREF("active_tab_priority"), true);
@@ -554,16 +558,17 @@ nsresult nsHttpHandler::Init() {
     obsService->AddObserver(this, "net:cancel-all-connections", true);
     obsService->AddObserver(this, "last-pb-context-exited", true);
     obsService->AddObserver(this, "browser:purge-session-history", true);
     obsService->AddObserver(this, NS_NETWORK_LINK_TOPIC, true);
     obsService->AddObserver(this, "application-background", true);
     obsService->AddObserver(this, "psm:user-certificate-added", true);
     obsService->AddObserver(this, "psm:user-certificate-deleted", true);
     obsService->AddObserver(this, "intl:app-locales-changed", true);
+    obsService->AddObserver(this, "browser-delayed-startup-finished", true);
 
     if (!IsNeckoChild()) {
       obsService->AddObserver(
           this, "net:current-toplevel-outer-content-windowid", true);
     }
 
     if (mFastOpenSupported) {
       obsService->AddObserver(this, "captive-portal-login", true);
@@ -2170,18 +2175,16 @@ nsHttpHandler::GetMisc(nsACString &value
   value = mMisc;
   return NS_OK;
 }
 
 //-----------------------------------------------------------------------------
 // nsHttpHandler::nsIObserver
 //-----------------------------------------------------------------------------
 
-static bool CanEnableSpeculativeConnect();  // forward declaration
-
 NS_IMETHODIMP
 nsHttpHandler::Observe(nsISupports *subject, const char *topic,
                        const char16_t *data) {
   MOZ_ASSERT(NS_IsMainThread());
   LOG(("nsHttpHandler::Observe [topic=\"%s\"]\n", topic));
 
   nsresult rv;
   if (!strcmp(topic, "profile-change-net-teardown") ||
@@ -2321,54 +2324,77 @@ nsHttpHandler::Observe(nsISupports *subj
     ResetFastOpenConsecutiveFailureCounter();
   } else if (!strcmp(topic, "psm:user-certificate-added")) {
     // A user certificate has just been added.
     // We should immediately disable speculative connect
     mSpeculativeConnectEnabled = false;
   } else if (!strcmp(topic, "psm:user-certificate-deleted")) {
     // If a user certificate has been removed, we need to check if there
     // are others installed
-    mSpeculativeConnectEnabled = CanEnableSpeculativeConnect();
+    MaybeEnableSpeculativeConnect();
   } else if (!strcmp(topic, "intl:app-locales-changed")) {
     // If the locale changed, there's a chance the accept language did too
     mAcceptLanguagesIsDirty = true;
+  } else if (!strcmp(topic, "browser-delayed-startup-finished")) {
+    MaybeEnableSpeculativeConnect();
   }
 
   return NS_OK;
 }
 
 // nsISpeculativeConnect
 
 static bool CanEnableSpeculativeConnect() {
-  MOZ_ASSERT(NS_IsMainThread(), "Main thread only");
+  nsCOMPtr<nsINSSComponent> component(do_GetService(PSM_COMPONENT_CONTRACTID));
 
-  nsCOMPtr<nsINSSComponent> component(do_GetService(PSM_COMPONENT_CONTRACTID));
-  if (!component) {
-    return false;
-  }
-
+  MOZ_ASSERT(!NS_IsMainThread(), "Must run on the background thread");
   // Check if any 3rd party PKCS#11 module are installed, as they may produce
   // client certificates
   bool activeSmartCards = false;
   nsresult rv = component->HasActiveSmartCards(&activeSmartCards);
   if (NS_FAILED(rv) || activeSmartCards) {
     return false;
   }
 
   // If there are any client certificates installed, we can't enable speculative
   // connect, as it may pop up the certificate chooser at any time.
   bool hasUserCerts = false;
   rv = component->HasUserCertsInstalled(&hasUserCerts);
   if (NS_FAILED(rv) || hasUserCerts) {
     return false;
   }
 
+  // With no smart cards and no client certificates installed, it is safe
+  // to enable speculative connect.
   return true;
 }
 
+void nsHttpHandler::MaybeEnableSpeculativeConnect() {
+  MOZ_ASSERT(NS_IsMainThread(), "Main thread only");
+
+  // We don't need to and can't check this in the child process.
+  if (IsNeckoChild()) {
+    return;
+  }
+
+  if (!mBackgroundThread) {
+    NS_WARNING(
+        "nsHttpHandler::MaybeEnableSpeculativeConnect() no background thread");
+    return;
+  }
+
+  net_EnsurePSMInit();
+
+  mBackgroundThread->Dispatch(
+      NS_NewRunnableFunction("CanEnableSpeculativeConnect", [] {
+        gHttpHandler->mSpeculativeConnectEnabled =
+            CanEnableSpeculativeConnect();
+      }));
+}
+
 nsresult nsHttpHandler::SpeculativeConnectInternal(
     nsIURI *aURI, nsIPrincipal *aPrincipal, nsIInterfaceRequestor *aCallbacks,
     bool anonymous) {
   if (IsNeckoChild()) {
     ipc::URIParams params;
     SerializeURI(aURI, params);
     gNeckoChild->SendSpeculativeConnect(params, IPC::Principal(aPrincipal),
                                         anonymous);
@@ -2442,22 +2468,16 @@ nsresult nsHttpHandler::SpeculativeConne
   else if (!scheme.EqualsLiteral("http"))
     return NS_ERROR_UNEXPECTED;
 
   // Construct connection info object
   bool usingSSL = false;
   rv = aURI->SchemeIs("https", &usingSSL);
   if (NS_FAILED(rv)) return rv;
 
-  static bool sCheckedIfSpeculativeEnabled = false;
-  if (!sCheckedIfSpeculativeEnabled) {
-    sCheckedIfSpeculativeEnabled = true;
-    mSpeculativeConnectEnabled = CanEnableSpeculativeConnect();
-  }
-
   if (usingSSL && !mSpeculativeConnectEnabled) {
     return NS_ERROR_UNEXPECTED;
   }
 
   nsAutoCString host;
   rv = aURI->GetAsciiHost(host);
   if (NS_FAILED(rv)) return rv;
 
--- a/netwerk/protocol/http/nsHttpHandler.h
+++ b/netwerk/protocol/http/nsHttpHandler.h
@@ -432,30 +432,38 @@ class nsHttpHandler final : public nsIHt
   MOZ_MUST_USE nsresult InitConnectionMgr();
 
   void NotifyObservers(nsIHttpChannel *chan, const char *event);
 
   void SetFastOpenOSSupport();
 
   void EnsureUAOverridesInit();
 
+  // Checks, on a background thread, whether any user certs or active smart
+  // cards are installed. Updates mSpeculativeConnectEnabled when done.
+  void MaybeEnableSpeculativeConnect();
+
  private:
   // cached services
   nsMainThreadPtrHandle<nsIIOService> mIOService;
   nsMainThreadPtrHandle<nsIStreamConverterService> mStreamConvSvc;
   nsMainThreadPtrHandle<nsICookieService> mCookieService;
   nsMainThreadPtrHandle<nsISiteSecurityService> mSSService;
 
   // the authentication credentials cache
   nsHttpAuthCache mAuthCache;
   nsHttpAuthCache mPrivateAuthCache;
 
   // the connection manager
   RefPtr<nsHttpConnectionMgr> mConnMgr;
 
+  // This thread is used for performing operations that should not block
+  // the main thread.
+  nsCOMPtr<nsIThread> mBackgroundThread;
+
   //
   // prefs
   //
 
   enum HttpVersion mHttpVersion;
   enum HttpVersion mProxyHttpVersion;
   uint32_t mCapabilities;
   uint8_t mReferrerLevel;
@@ -592,20 +600,16 @@ class nsHttpHandler final : public nsIHt
   // The maximum amount of time to wait for a tls handshake to be
   // established. In milliseconds.
   uint32_t mTLSHandshakeTimeout;
 
   // The maximum number of current global half open sockets allowable
   // when starting a new speculative connection.
   uint32_t mParallelSpeculativeConnectLimit;
 
-  // We may disable speculative connect if the browser has user certificates
-  // installed as that might randomly popup the certificate choosing window.
-  bool mSpeculativeConnectEnabled;
-
   // For Rate Pacing of HTTP/1 requests through a netwerk/base/EventTokenBucket
   // Active requests <= *MinParallelism are not subject to the rate pacing
   bool mRequestTokenBucketEnabled;
   uint16_t mRequestTokenBucketMinParallelism;
   uint32_t mRequestTokenBucketHz;     // EventTokenBucket HZ
   uint32_t mRequestTokenBucketBurst;  // EventTokenBucket Burst
 
   // Whether or not to block requests for non head js/css items (e.g. media)
@@ -636,16 +640,20 @@ class nsHttpHandler final : public nsIHt
   uint32_t mDefaultHpackBuffer;
 
   // The max size (in bytes) for received Http response header.
   uint32_t mMaxHttpResponseHeaderSize;
 
   // The ratio for dispatching transactions from the focused window.
   float mFocusedWindowTransactionRatio;
 
+  // We may disable speculative connect if the browser has user certificates
+  // installed as that might randomly popup the certificate choosing window.
+  Atomic<bool, Relaxed> mSpeculativeConnectEnabled;
+
   Atomic<bool, Relaxed> mUseFastOpen;
   Atomic<bool, Relaxed> mFastOpenSupported;
   uint32_t mFastOpenConsecutiveFailureLimit;
   uint32_t mFastOpenConsecutiveFailureCounter;
   uint32_t mFastOpenStallsLimit;
   uint32_t mFastOpenStallsCounter;
   uint32_t mFastOpenStallsIdleTime;
   uint32_t mFastOpenStallsTimeout;
--- a/python/mozbuild/mozpack/mozjar.py
+++ b/python/mozbuild/mozpack/mozjar.py
@@ -9,17 +9,16 @@ import struct
 import subprocess
 import zlib
 import os
 from zipfile import (
     ZIP_STORED,
     ZIP_DEFLATED,
 )
 from collections import OrderedDict
-from urlparse import urlparse, ParseResult
 import mozpack.path as mozpath
 from mozbuild.util import memoize
 
 
 JAR_STORED = ZIP_STORED
 JAR_DEFLATED = ZIP_DEFLATED
 JAR_BROTLI = 0x81
 MAX_WBITS = 15
@@ -828,61 +827,23 @@ class BrotliCompress(object):
 
     def flush(self):
         return Brotli.compress(self._buf.getvalue())
 
 
 class JarLog(dict):
     '''
     Helper to read the file Gecko generates when setting MOZ_JAR_LOG_FILE.
-    The jar log is then available as a dict with the jar path as key (see
-    canonicalize for more details on the key value), and the corresponding
-    access log as a list value. Only the first access to a given member of
-    a jar is stored.
+    The jar log is then available as a dict with the jar path as key, and
+    the corresponding access log as a list value. Only the first access to
+    a given member of a jar is stored.
     '''
 
     def __init__(self, file=None, fileobj=None):
         if not fileobj:
             fileobj = open(file, 'r')
-        urlmap = {}
         for line in fileobj:
-            url, path = line.strip().split(None, 1)
-            if not url or not path:
+            jar, path = line.strip().split(None, 1)
+            if not jar or not path:
                 continue
-            if url not in urlmap:
-                urlmap[url] = JarLog.canonicalize(url)
-            jar = urlmap[url]
             entry = self.setdefault(jar, [])
             if path not in entry:
                 entry.append(path)
-
-    @staticmethod
-    def canonicalize(url):
-        '''
-        The jar path is stored in a MOZ_JAR_LOG_FILE log as a url. This method
-        returns a unique value corresponding to such urls.
-        - file:///{path} becomes {path}
-        - jar:file:///{path}!/{subpath} becomes ({path}, {subpath})
-        - jar:jar:file:///{path}!/{subpath}!/{subpath2} becomes
-           ({path}, {subpath}, {subpath2})
-        '''
-        if not isinstance(url, ParseResult):
-            # Assume that if it doesn't start with jar: or file:, it's a path.
-            if not url.startswith(('jar:', 'file:')):
-                url = 'file:///' + os.path.abspath(url)
-            url = urlparse(url)
-        assert url.scheme
-        assert url.scheme in ('jar', 'file')
-        if url.scheme == 'jar':
-            path = JarLog.canonicalize(url.path)
-            if isinstance(path, tuple):
-                return path[:-1] + tuple(path[-1].split('!/', 1))
-            return tuple(path.split('!/', 1))
-        if url.scheme == 'file':
-            assert os.path.isabs(url.path)
-            path = url.path
-            # On Windows, url.path will be /drive:/path ; on Unix systems,
-            # /path. As we want drive:/path instead of /drive:/path on Windows,
-            # remove the leading /.
-            if os.path.isabs(path[1:]):
-                path = path[1:]
-            path = os.path.realpath(path)
-            return mozpath.normsep(os.path.normcase(path))
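
With canonicalize() removed, a jar log is now just whitespace-separated
"<archive> <entry>" lines, and JarLog exposes them as a dict of first-access
entry lists. A short usage sketch under that assumption (the log contents
below are made up):

    from cStringIO import StringIO
    from mozpack.mozjar import JarLog

    log = JarLog(fileobj=StringIO('\n'.join([
        'omni.ja chrome.manifest',
        'omni.ja components/nsBrowserGlue.js',
        'omni.ja chrome.manifest',   # repeated access, recorded only once
        'bar/baz.jar first',
    ])))

    assert log['omni.ja'] == ['chrome.manifest', 'components/nsBrowserGlue.js']
    assert log['bar/baz.jar'] == ['first']
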
--- a/python/mozbuild/mozpack/test/test_mozjar.py
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -12,17 +12,16 @@ from mozpack.mozjar import (
     Deflater,
     JarLog,
 )
 from collections import OrderedDict
 from mozpack.test.test_files import MockDest
 import unittest
 import mozunit
 from cStringIO import StringIO
-from urllib import pathname2url
 import mozpack.path as mozpath
 import os
 
 
 test_data_path = mozpath.abspath(mozpath.dirname(__file__))
 test_data_path = mozpath.join(test_data_path, 'data')
 
 
@@ -285,61 +284,38 @@ class TestPreload(unittest.TestCase):
 
         self.assertEqual(files[0].filename, 'baz/qux')
         self.assertEqual(files[1].filename, 'bar')
         self.assertEqual(files[2].filename, 'foo')
 
 
 class TestJarLog(unittest.TestCase):
     def test_jarlog(self):
-        base = 'file:' + pathname2url(os.path.abspath(os.curdir))
         s = StringIO('\n'.join([
-            base + '/bar/baz.jar first',
-            base + '/bar/baz.jar second',
-            base + '/bar/baz.jar third',
-            base + '/bar/baz.jar second',
-            base + '/bar/baz.jar second',
-            'jar:' + base + '/qux.zip!/omni.ja stuff',
-            base + '/bar/baz.jar first',
-            'jar:' + base + '/qux.zip!/omni.ja other/stuff',
-            'jar:' + base + '/qux.zip!/omni.ja stuff',
-            base + '/bar/baz.jar third',
-            'jar:jar:' + base + '/qux.zip!/baz/baz.jar!/omni.ja nested/stuff',
-            'jar:jar:jar:' + base + '/qux.zip!/baz/baz.jar!/foo.zip!/omni.ja' +
-            ' deeply/nested/stuff',
+            'bar/baz.jar first',
+            'bar/baz.jar second',
+            'bar/baz.jar third',
+            'bar/baz.jar second',
+            'bar/baz.jar second',
+            'omni.ja stuff',
+            'bar/baz.jar first',
+            'omni.ja other/stuff',
+            'omni.ja stuff',
+            'bar/baz.jar third',
         ]))
         log = JarLog(fileobj=s)
-
-        def canonicalize(p):
-            return mozpath.normsep(os.path.normcase(os.path.realpath(p)))
-
-        baz_jar = canonicalize('bar/baz.jar')
-        qux_zip = canonicalize('qux.zip')
         self.assertEqual(set(log.keys()), set([
-            baz_jar,
-            (qux_zip, 'omni.ja'),
-            (qux_zip, 'baz/baz.jar', 'omni.ja'),
-            (qux_zip, 'baz/baz.jar', 'foo.zip', 'omni.ja'),
+            'bar/baz.jar',
+            'omni.ja',
         ]))
-        self.assertEqual(log[baz_jar], [
+        self.assertEqual(log['bar/baz.jar'], [
             'first',
             'second',
             'third',
         ])
-        self.assertEqual(log[(qux_zip, 'omni.ja')], [
+        self.assertEqual(log['omni.ja'], [
             'stuff',
             'other/stuff',
         ])
-        self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'omni.ja')],
-                         ['nested/stuff'])
-        self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'foo.zip',
-                              'omni.ja')], ['deeply/nested/stuff'])
-
-        # The above tests also indirectly check the value returned by
-        # JarLog.canonicalize for various jar: and file: urls, but
-        # JarLog.canonicalize also supports plain paths.
-        self.assertEqual(JarLog.canonicalize(os.path.abspath('bar/baz.jar')),
-                         baz_jar)
-        self.assertEqual(JarLog.canonicalize('bar/baz.jar'), baz_jar)
 
 
 if __name__ == '__main__':
     mozunit.main()
--- a/security/manager/ssl/nsINSSComponent.idl
+++ b/security/manager/ssl/nsINSSComponent.idl
@@ -82,23 +82,21 @@ interface nsINSSComponent : nsISupports 
    * user's traffic (if they don't match, the server is likely misconfigured).
    * This function succeeds if the given DN matches the noted DN and fails
    * otherwise (e.g. if the update ping never failed).
    */
   [noscript] void issuerMatchesMitmCanary(in string certIssuer);
 
   /**
    * Returns true if the user has a PKCS#11 module with removable slots.
-   * Main thread only.
    */
   [noscript] bool hasActiveSmartCards();
 
   /**
    * Returns true if the user has any client authentication certificates.
-   * Main thread only.
    */
   [noscript] bool hasUserCertsInstalled();
 
   /**
    * Returns an already-addrefed handle to the currently configured shared
    * certificate verifier.
    */
   [noscript] SharedCertVerifierPtr getDefaultCertVerifier();
--- a/security/manager/ssl/nsNSSComponent.cpp
+++ b/security/manager/ssl/nsNSSComponent.cpp
@@ -690,24 +690,20 @@ LoadLoadableRootsTask::Run() {
 
   // Go back to the main thread to clean up this worker thread.
   return NS_DispatchToMainThread(this);
 }
 
 NS_IMETHODIMP
 nsNSSComponent::HasActiveSmartCards(bool* result) {
   NS_ENSURE_ARG_POINTER(result);
-  MOZ_ASSERT(NS_IsMainThread(), "Main thread only");
-  if (!NS_IsMainThread()) {
-    return NS_ERROR_NOT_SAME_THREAD;
-  }
+
+  BlockUntilLoadableRootsLoaded();
 
 #ifndef MOZ_NO_SMART_CARDS
-  MutexAutoLock nsNSSComponentLock(mMutex);
-
   AutoSECMODListReadLock secmodLock;
   SECMODModuleList* list = SECMOD_GetDefaultModuleList();
   while (list) {
     if (SECMOD_HasRemovableSlots(list->module)) {
       *result = true;
       return NS_OK;
     }
     list = list->next;
@@ -715,20 +711,18 @@ nsNSSComponent::HasActiveSmartCards(bool
 #endif
   *result = false;
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsNSSComponent::HasUserCertsInstalled(bool* result) {
   NS_ENSURE_ARG_POINTER(result);
-  MOZ_ASSERT(NS_IsMainThread(), "Main thread only");
-  if (!NS_IsMainThread()) {
-    return NS_ERROR_NOT_SAME_THREAD;
-  }
+
+  BlockUntilLoadableRootsLoaded();
 
   *result = false;
   UniqueCERTCertList certList(CERT_FindUserCertsByUsage(
       CERT_GetDefaultCertDB(), certUsageSSLClient, false, true, nullptr));
   if (!certList) {
     return NS_OK;
   }
 
--- a/taskcluster/ci/build/macosx.yml
+++ b/taskcluster/ci/build/macosx.yml
@@ -287,17 +287,17 @@ macosx64-ccov/debug:
         product: firefox
         job-name: macosx64-ccov-debug
     treeherder:
         platform: osx-cross-ccov/debug
         symbol: B
         tier: 1
     worker-type: aws-provisioner-v1/gecko-{level}-b-linux
     worker:
-        max-run-time: 5400
+        max-run-time: 7200
         env:
             TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
     run:
         using: mozharness
         actions: [get-secrets, build]
         config:
             - builds/releng_base_firefox.py
             - builds/releng_base_mac_64_cross_builds.py
--- a/taskcluster/ci/docker-image/kind.yml
+++ b/taskcluster/ci/docker-image/kind.yml
@@ -70,32 +70,30 @@ jobs:
   custom-v8:
     symbol: I(custom-v8)
     parent: debian9-base
   debian7-amd64-build-base:
     symbol: I(deb7-bb)
     parent: debian7-base
     definition: debian-build
     packages:
-      - deb7-nasm
       - deb7-valgrind
     args:
       ARCH: amd64
   debian7-amd64-build:
     symbol: I(deb7)
     parent: debian7-base
     definition: debian-build
     packages:
       - deb7-atk
       - deb7-glib
       - deb7-gdk-pixbuf
       - deb7-gtk3
       - deb7-harfbuzz
       - deb7-libxkbcommon
-      - deb7-nasm
       - deb7-pango
       - deb7-pcre3
       - deb7-valgrind
       - deb7-wayland
     args:
       ARCH: amd64
   debian7-i386-build:
     symbol: I(deb7-32)
@@ -111,17 +109,16 @@ jobs:
       - deb7-32-pango
       - deb7-32-pcre3
       - deb7-32-xkeyboard-config
       - deb7-32-wayland
       - deb7-atk
       - deb7-glib
       - deb7-gtk3
       - deb7-harfbuzz
-      - deb7-nasm
       - deb7-python-defaults
       - deb7-pcre3
       - deb7-valgrind
     args:
       ARCH: i386
   debian7-mozjs-rust-build:
     symbol: I(deb7jsrs)
     parent: debian7-amd64-build
@@ -133,19 +130,19 @@ jobs:
   lint:
     symbol: I(lnt)
   # Neither the debian9-raw nor the debian9-packages images can have
   # packages dependencies.
   debian9-raw:
     symbol: I(deb9-raw)
     definition: debian-raw
     args:
-      BASE_IMAGE: debian:stretch-20170620
+      BASE_IMAGE: debian:stretch-20190228
       DIST: stretch
-      SNAPSHOT: '20170830T000511Z'
+      SNAPSHOT: '20190306T040711Z'
   debian9-packages:
     symbol: I(deb9-pkg)
     definition: debian-packages
     parent: debian9-raw
   debian9-base:
     symbol: I(deb9-base)
     definition: debian-base
     parent: debian9-raw
@@ -162,31 +159,30 @@ jobs:
     symbol: I(agb)
     parent: debian9-base
   fetch:
     symbol: I(fetch)
     parent: debian9-base
   static-analysis-build:
     symbol: I(static-analysis-build)
     parent: android-build
-    packages:
-      - deb9-nasm
   mingw32-build:
     symbol: I(mingw)
     parent: debian9-base
   index-task:
     symbol: I(idx)
   funsize-update-generator:
     symbol: I(pg)
   google-play-strings:
     symbol: I(gps)
   update-verify:
     symbol: I(uv)
   diffoscope:
     symbol: I(diff)
+    parent: debian9-base
   partner-repack:
     symbol: I(PR)
     parent: debian9-base
     definition: partner-repack
   periodic-updates:
     symbol: I(file)
   pipfile-updates:
     symbol: I(pip)
new file mode 100644
--- /dev/null
+++ b/taskcluster/ci/fetch/chromium-fetch.yml
@@ -0,0 +1,28 @@
+job-defaults:
+    fetch:
+        type: chromium-fetch
+        script: /builds/worker/bin/fetch-chromium.py
+
+linux64-chromium:
+    description: 'Linux64 Chromium Fetch'
+    fetch:
+        platform: linux
+        artifact-name: chrome-linux.tar.bz2
+
+win32-chromium:
+    description: 'Windows32 Chromium Fetch'
+    fetch:
+        platform: win32
+        artifact-name: chrome-win32.tar.bz2
+
+win64-chromium:
+    description: 'Windows64 Chromium Fetch'
+    fetch:
+        platform: win64
+        artifact-name: chrome-win64.tar.bz2
+
+mac-chromium:
+    description: 'MacOSX Chromium Fetch'
+    fetch:
+        platform: mac
+        artifact-name: chrome-mac.tar.bz2
--- a/taskcluster/ci/fetch/kind.yml
+++ b/taskcluster/ci/fetch/kind.yml
@@ -8,8 +8,9 @@ transforms:
     - taskgraph.transforms.fetch:transforms
     - taskgraph.transforms.try_job:transforms
     - taskgraph.transforms.job:transforms
     - taskgraph.transforms.task:transforms
 
 jobs-from:
     - benchmarks.yml
     - toolchains.yml
+    - chromium-fetch.yml
--- a/taskcluster/ci/hazard/kind.yml
+++ b/taskcluster/ci/hazard/kind.yml
@@ -64,9 +64,10 @@ jobs:
                 cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
                 && ./build-haz-linux.sh --project browser $HOME/workspace
         toolchains:
             - linux64-clang
             - linux64-gcc-6
             - linux64-gcc-sixgill
             - linux64-rust
             - linux64-cbindgen
+            - linux64-nasm
             - linux64-node
--- a/taskcluster/ci/packages/kind.yml
+++ b/taskcluster/ci/packages/kind.yml
@@ -272,40 +272,16 @@ jobs:
       symbol: Deb9(python-zstandard)
     run:
       using: debian-package
       dist: stretch
       tarball:
         url: https://github.com/indygreg/python-zstandard/releases/download/0.9.1/python-zstandard-0.9.1.tar.gz
         sha256: 59c7d6f1f85cebb5124abb50d8ec281c5311e0812e18785e28b197cf1515dd3b
 
-  deb7-nasm:
-    description: "nasm for Debian wheezy"
-    treeherder:
-      symbol: Deb7(nasm)
-    run:
-      using: debian-package
-      dsc:
-        url: http://snapshot.debian.org/archive/debian/20170704T094954Z/pool/main/n/nasm/nasm_2.13.01-1.dsc
-        sha256: 76528365eddc646f3f53c9f501ae9c2ba1678a163303d297e9014e3da36643c8
-      patch: nasm-wheezy.diff
-
-  deb9-nasm:
-    description: "nasm for Debian stretch"
-    treeherder:
-      symbol: Deb9(nasm)
-    run:
-      using: debian-package
-      dist: stretch
-      dsc:
-        url: http://snapshot.debian.org/archive/debian/20170704T094954Z/pool/main/n/nasm/nasm_2.13.01-1.dsc
-        sha256: 76528365eddc646f3f53c9f501ae9c2ba1678a163303d297e9014e3da36643c8
-      # The package source is missing a build dependency on fontconfig.
-      pre-build-command: apt-get install -y fontconfig
-
   deb7-pcre3:
     description: "pcre3 8.31 for Debian Wheezy"
     treeherder:
       symbol: Deb7(pcre3)
     run:
       using: debian-package
       dsc:
         url: http://snapshot.debian.org/archive/debian/20140424T055217Z/pool/main/p/pcre3/pcre3_8.31-5.dsc
--- a/taskcluster/ci/test/kind.yml
+++ b/taskcluster/ci/test/kind.yml
@@ -24,16 +24,17 @@ transforms:
 jobs-from:
     - awsy.yml
     - compiled.yml
     - firefox-ui.yml
     - marionette.yml
     - misc.yml
     - mochitest.yml
     - raptor.yml
+    - raptor-chrome.yml
     - reftest.yml
     - talos.yml
     - web-platform.yml
     - xpcshell.yml
 
 
 job-defaults:
     require-signed-extensions:
new file mode 100644
--- /dev/null
+++ b/taskcluster/ci/test/raptor-chrome.yml
@@ -0,0 +1,333 @@
+job-defaults:
+    max-run-time:
+        by-test-platform:
+            .*-qr/.*: 2400
+            .*-ux/.*: 2400
+            default: 1800
+    suite: raptor
+    workdir:
+        by-test-platform:
+            android-hw.*: /builds/worker
+            default: /home/cltbld
+    run-on-projects:
+        by-test-platform:
+            windows10-64-ux: ['try', 'mozilla-central']
+            default: ['try', 'trunk', 'mozilla-beta']
+    tier:
+        by-test-platform:
+            windows10-64-ccov/.*: 3
+            linux64-ccov/.*: 3
+            android-hw-g5.*: 2
+            default: 1
+    virtualization:
+        by-test-platform:
+            windows10-64-ccov/.*: virtual
+            default: hardware
+    mozharness:
+        script: raptor_script.py
+        config:
+            by-test-platform:
+                macosx.*:
+                    - raptor/mac_config.py
+                windows.*:
+                    - raptor/windows_config.py
+                windows10-64-ccov/debug:
+                    - raptor/windows_vm_config.py
+                linux64-ccov/opt:
+                    - raptor/linux64_config_taskcluster.py
+                android-hw.*:
+                    - raptor/android_hw_config.py
+                default:
+                    - raptor/linux_config.py
+    fetches:
+        by-test-platform:
+            win.*64.*:
+                fetch:
+                    - win64-chromium
+            win.*32.*:
+                fetch:
+                    - win32-chromium
+            macosx.*:
+                fetch:
+                    - mac-chromium
+            default:
+                fetch:
+                    - linux64-chromium
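+    # The platform-appropriate chromium tarball fetched above is made
+    # available under MOZ_FETCHES_DIR on the test machine, where mozharness
+    # (raptor.py, install_chrome) expects to find the Chromium binary.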
+
+raptor-tp6-1-chrome:
+    description: "Raptor tp6-1 on Chrome"
+    try-name: raptor-tp6-1-chrome
+    treeherder-symbol: Rap-C(tp6-1)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    max-run-time: 1200
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-1
+            - --app=chrome
+
+raptor-tp6-2-chrome:
+    description: "Raptor tp6-2 on Chrome"
+    try-name: raptor-tp6-2-chrome
+    treeherder-symbol: Rap-C(tp6-2)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-2
+            - --app=chrome
+
+raptor-tp6-3-chrome:
+    description: "Raptor tp6-3 on Chrome"
+    try-name: raptor-tp6-3-chrome
+    treeherder-symbol: Rap-C(tp6-3)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    max-run-time: 2400
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-3
+            - --app=chrome
+
+raptor-tp6-4-chrome:
+    description: "Raptor tp6-4 on Chrome"
+    try-name: raptor-tp6-4-chrome
+    treeherder-symbol: Rap-C(tp6-4)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-4
+            - --app=chrome
+
+raptor-tp6-5-chrome:
+    description: "Raptor tp6-5 on Chrome"
+    try-name: raptor-tp6-5-chrome
+    treeherder-symbol: Rap-C(tp6-5)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-5
+            - --app=chrome
+
+raptor-tp6-6-chrome:
+    description: "Raptor tp6-6 on Chrome"
+    try-name: raptor-tp6-6-chrome
+    treeherder-symbol: Rap-C(tp6-6)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-6
+            - --app=chrome
+
+raptor-tp6-7-chrome:
+    description: "Raptor tp6-7 on Chrome"
+    try-name: raptor-tp6-7-chrome
+    treeherder-symbol: Rap-C(tp6-7)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-7
+            - --app=chrome
+
+raptor-tp6-8-chrome:
+    description: "Raptor tp6-8 on Chrome"
+    try-name: raptor-tp6-8-chrome
+    treeherder-symbol: Rap-C(tp6-8)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-8
+            - --app=chrome
+
+raptor-tp6-9-chrome:
+    description: "Raptor tp6-9 on Chrome"
+    try-name: raptor-tp6-9-chrome
+    treeherder-symbol: Rap-C(tp6-9)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-9
+            - --app=chrome
+
+raptor-tp6-10-chrome:
+    description: "Raptor tp6-10 on Chrome"
+    try-name: raptor-tp6-10-chrome
+    treeherder-symbol: Rap-C(tp6-10)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-tp6-10
+            - --app=chrome
+
+raptor-speedometer-chrome:
+    description: "Raptor Speedometer on Chrome"
+    try-name: raptor-speedometer-chrome
+    treeherder-symbol: Rap-C(sp)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    max-run-time: 1500
+    mozharness:
+        extra-options:
+            - --test=raptor-speedometer
+            - --app=chrome
+
+raptor-stylebench-chrome:
+    description: "Raptor StyleBench on Chrome"
+    try-name: raptor-stylebench-chrome
+    treeherder-symbol: Rap-C(sb)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-stylebench
+            - --app=chrome
+
+raptor-motionmark-htmlsuite-chrome:
+    description: "Raptor MotionMark HtmlSuite on Chrome"
+    try-name: raptor-motionmark-htmlsuite-chrome
+    treeherder-symbol: Rap-C(mm-h)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-motionmark-htmlsuite
+            - --app=chrome
+
+raptor-motionmark-animometer-chrome:
+    description: "Raptor MotionMark Animometer on Chrome"
+    try-name: raptor-motionmark-animometer-chrome
+    treeherder-symbol: Rap-C(mm-a)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-motionmark-animometer
+            - --app=chrome
+
+raptor-webaudio-chrome:
+    description: "Raptor WebAudio on Chrome"
+    try-name: raptor-webaudio-chrome
+    treeherder-symbol: Rap-C(wa)
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-webaudio
+            - --app=chrome
+
+raptor-sunspider-chrome:
+    description: "Raptor SunSpider on Chrome"
+    try-name: raptor-sunspider-chrome
+    treeherder-symbol: Rap-C(ss)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-sunspider
+            - --app=chrome
+
+raptor-unity-webgl-chrome:
+    description: "Raptor Unity WebGL on Chrome"
+    try-name: raptor-unity-webgl-chrome
+    treeherder-symbol: Rap-C(ugl)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-unity-webgl
+            - --app=chrome
+    fetches:
+        by-test-platform:
+            win.*64.*:
+                fetch:
+                    - win64-chromium
+                    - unity-webgl
+            win.*32.*:
+                fetch:
+                    - win32-chromium
+                    - unity-webgl
+            macosx.*:
+                fetch:
+                    - mac-chromium
+                    - unity-webgl
+            default:
+                fetch:
+                    - linux64-chromium
+                    - unity-webgl
+
+raptor-wasm-misc-chrome:
+    description: "Raptor WASM Misc on Chrome"
+    try-name: raptor-wasm-misc-chrome
+    treeherder-symbol: Rap-C(wm)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    mozharness:
+        extra-options:
+            - --test=raptor-wasm-misc
+            - --app=chrome
+    fetches:
+        by-test-platform:
+            win.*64.*:
+                fetch:
+                    - win64-chromium
+                    - wasm-misc
+            win.*32.*:
+                fetch:
+                    - win32-chromium
+                    - wasm-misc
+            macosx.*:
+                fetch:
+                    - mac-chromium
+                    - wasm-misc
+            default:
+                fetch:
+                    - linux64-chromium
+                    - wasm-misc
+
+raptor-assorted-dom-chrome:
+    description: "Raptor Assorted-Dom on Chrome"
+    try-name: raptor-assorted-dom-chrome
+    treeherder-symbol: Rap-C(dom)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    max-run-time: 1500
+    mozharness:
+        extra-options:
+            - --test=raptor-assorted-dom
+            - --app=chrome
+    fetches:
+        by-test-platform:
+            win.*64.*:
+                fetch:
+                    - win64-chromium
+                    - assorted-dom
+            win.*32.*:
+                fetch:
+                    - win32-chromium
+                    - assorted-dom
+            macosx.*:
+                fetch:
+                    - mac-chromium
+                    - assorted-dom
+            default:
+                fetch:
+                    - linux64-chromium
+                    - assorted-dom
+
+raptor-wasm-godot-chrome:
+    description: "Raptor Wasm Godot on Chrome"
+    try-name: raptor-wasm-godot-chrome
+    treeherder-symbol: Rap-C(godot)
+    run-on-projects: ['try', 'mozilla-central']
+    tier: 2
+    max-run-time: 1500
+    mozharness:
+        extra-options:
+            - --test=raptor-wasm-godot
+            - --app=chrome
--- a/taskcluster/ci/test/raptor.yml
+++ b/taskcluster/ci/test/raptor.yml
@@ -56,28 +56,16 @@ raptor-tp6-1-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-1
             - --gecko-profile
 
-raptor-tp6-1-chrome:
-    description: "Raptor tp6-1 on Chrome"
-    try-name: raptor-tp6-1-chrome
-    treeherder-symbol: Rap-C(tp6-1)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    max-run-time: 1200
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-1
-            - --app=chrome
-
 raptor-tp6-2-firefox:
     description: "Raptor tp6-2 on Firefox"
     try-name: raptor-tp6-2-firefox
     treeherder-symbol: Rap(tp6-2)
     mozharness:
         extra-options:
             - --test=raptor-tp6-2
 
@@ -88,27 +76,16 @@ raptor-tp6-2-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-2
             - --gecko-profile
 
-raptor-tp6-2-chrome:
-    description: "Raptor tp6-2 on Chrome"
-    try-name: raptor-tp6-2-chrome
-    treeherder-symbol: Rap-C(tp6-2)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-2
-            - --app=chrome
-
 raptor-tp6-3-firefox:
     description: "Raptor tp6-3 on Firefox"
     try-name: raptor-tp6-3-firefox
     treeherder-symbol: Rap(tp6-3)
     mozharness:
         extra-options:
             - --test=raptor-tp6-3
 
@@ -119,28 +96,16 @@ raptor-tp6-3-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-3
             - --gecko-profile
 
-raptor-tp6-3-chrome:
-    description: "Raptor tp6-3 on Chrome"
-    try-name: raptor-tp6-3-chrome
-    treeherder-symbol: Rap-C(tp6-3)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    max-run-time: 2400
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-3
-            - --app=chrome
-
 raptor-tp6-4-firefox:
     description: "Raptor tp6-4 on Firefox"
     try-name: raptor-tp6-4-firefox
     treeherder-symbol: Rap(tp6-4)
     mozharness:
         extra-options:
             - --test=raptor-tp6-4
 
@@ -151,27 +116,16 @@ raptor-tp6-4-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-4
             - --gecko-profile
 
-raptor-tp6-4-chrome:
-    description: "Raptor tp6-4 on Chrome"
-    try-name: raptor-tp6-4-chrome
-    treeherder-symbol: Rap-C(tp6-4)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-4
-            - --app=chrome
-
 raptor-tp6-5-firefox:
     description: "Raptor tp6-5 on Firefox"
     try-name: raptor-tp6-5-firefox
     treeherder-symbol: Rap(tp6-5)
     mozharness:
         extra-options:
             - --test=raptor-tp6-5
 
@@ -182,27 +136,16 @@ raptor-tp6-5-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-5
             - --gecko-profile
 
-raptor-tp6-5-chrome:
-    description: "Raptor tp6-5 on Chrome"
-    try-name: raptor-tp6-5-chrome
-    treeherder-symbol: Rap-C(tp6-5)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-5
-            - --app=chrome
-
 raptor-tp6-6-firefox:
     description: "Raptor tp6-6 on Firefox"
     try-name: raptor-tp6-6-firefox
     treeherder-symbol: Rap(tp6-6)
     mozharness:
         extra-options:
             - --test=raptor-tp6-6
 
@@ -213,27 +156,16 @@ raptor-tp6-6-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-6
             - --gecko-profile
 
-raptor-tp6-6-chrome:
-    description: "Raptor tp6-6 on Chrome"
-    try-name: raptor-tp6-6-chrome
-    treeherder-symbol: Rap-C(tp6-6)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-6
-            - --app=chrome
-
 raptor-tp6-7-firefox:
     description: "Raptor tp6-7 on Firefox"
     try-name: raptor-tp6-7-firefox
     treeherder-symbol: Rap(tp6-7)
     mozharness:
         extra-options:
             - --test=raptor-tp6-7
 
@@ -244,27 +176,16 @@ raptor-tp6-7-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-7
             - --gecko-profile
 
-raptor-tp6-7-chrome:
-    description: "Raptor tp6-7 on Chrome"
-    try-name: raptor-tp6-7-chrome
-    treeherder-symbol: Rap-C(tp6-7)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-7
-            - --app=chrome
-
 raptor-tp6-8-firefox:
     description: "Raptor tp6-8 on Firefox"
     try-name: raptor-tp6-8-firefox
     treeherder-symbol: Rap(tp6-8)
     run-on-projects: ['try', 'mozilla-central']
     tier: 2
     mozharness:
         extra-options:
@@ -277,27 +198,16 @@ raptor-tp6-8-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-8
             - --gecko-profile
 
-raptor-tp6-8-chrome:
-    description: "Raptor tp6-8 on Chrome"
-    try-name: raptor-tp6-8-chrome
-    treeherder-symbol: Rap-C(tp6-8)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-8
-            - --app=chrome
-
 raptor-tp6-9-firefox:
     description: "Raptor tp6-9 on Firefox"
     try-name: raptor-tp6-9-firefox
     treeherder-symbol: Rap(tp6-9)
     run-on-projects: ['try', 'mozilla-central']
     tier: 2
     mozharness:
         extra-options:
@@ -310,27 +220,16 @@ raptor-tp6-9-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-9
             - --gecko-profile
 
-raptor-tp6-9-chrome:
-    description: "Raptor tp6-9 on Chrome"
-    try-name: raptor-tp6-9-chrome
-    treeherder-symbol: Rap-C(tp6-9)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-9
-            - --app=chrome
-
 raptor-tp6-10-firefox:
     description: "Raptor tp6-10 on Firefox"
     try-name: raptor-tp6-10-firefox
     treeherder-symbol: Rap(tp6-10)
     run-on-projects: ['try', 'mozilla-central']
     tier: 2
     mozharness:
         extra-options:
@@ -343,27 +242,16 @@ raptor-tp6-10-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-tp6-10
             - --gecko-profile
 
-raptor-tp6-10-chrome:
-    description: "Raptor tp6-10 on Chrome"
-    try-name: raptor-tp6-10-chrome
-    treeherder-symbol: Rap-C(tp6-10)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-tp6-10
-            - --app=chrome
-
 raptor-tp6-binast-1-firefox:
     description: "Raptor tp6-binast-1 on Firefox"
     try-name: raptor-tp6-binast-1
     treeherder-symbol: Rap(tp6-b-1)
     run-on-projects: ['try', 'mozilla-central']
     tier: 2
     mozharness:
         extra-options:
@@ -508,28 +396,16 @@ raptor-speedometer-fennec:
     tier: 2
     max-run-time: 900
     mozharness:
         extra-options:
             - --test=raptor-speedometer-fennec
             - --app=fennec
             - --binary=org.mozilla.fennec_aurora
 
-raptor-speedometer-chrome:
-    description: "Raptor Speedometer on Chrome"
-    try-name: raptor-speedometer-chrome
-    treeherder-symbol: Rap-C(sp)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    max-run-time: 1500
-    mozharness:
-        extra-options:
-            - --test=raptor-speedometer
-            - --app=chrome
-
 raptor-stylebench-firefox:
     description: "Raptor StyleBench on Firefox"
     try-name: raptor-stylebench-firefox
     treeherder-symbol: Rap(sb)
     mozharness:
         extra-options:
             - --test=raptor-stylebench
 
@@ -540,27 +416,16 @@ raptor-stylebench-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-stylebench
             - --gecko-profile
 
-raptor-stylebench-chrome:
-    description: "Raptor StyleBench on Chrome"
-    try-name: raptor-stylebench-chrome
-    treeherder-symbol: Rap-C(sb)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-stylebench
-            - --app=chrome
-
 raptor-motionmark-htmlsuite-firefox:
     description: "Raptor MotionMark HtmlSuite on Firefox"
     try-name: raptor-motionmark-htmlsuite-firefox
     treeherder-symbol: Rap(mm-h)
     mozharness:
         extra-options:
             - --test=raptor-motionmark-htmlsuite
 
@@ -571,27 +436,16 @@ raptor-motionmark-htmlsuite-firefox-prof
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-motionmark-htmlsuite
             - --gecko-profile
 
-raptor-motionmark-htmlsuite-chrome:
-    description: "Raptor MotionMark HtmlSuite on Chrome"
-    try-name: raptor-motionmark-htmlsuite-chrome
-    treeherder-symbol: Rap-C(mm-h)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-motionmark-htmlsuite
-            - --app=chrome
-
 raptor-motionmark-animometer-firefox:
     description: "Raptor MotionMark Animometer on Firefox"
     try-name: raptor-motionmark-animometer-firefox
     treeherder-symbol: Rap(mm-a)
     mozharness:
         extra-options:
             - --test=raptor-motionmark-animometer
 
@@ -602,27 +456,16 @@ raptor-motionmark-animometer-firefox-pro
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-motionmark-animometer
             - --gecko-profile
 
-raptor-motionmark-animometer-chrome:
-    description: "Raptor MotionMark Animometer on Chrome"
-    try-name: raptor-motionmark-animometer-chrome
-    treeherder-symbol: Rap-C(mm-a)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-motionmark-animometer
-            - --app=chrome
-
 raptor-webaudio-firefox:
     description: "Raptor WebAudio on Firefox"
     try-name: raptor-webaudio-firefox
     treeherder-symbol: Rap(wa)
     mozharness:
         extra-options:
             - --test=raptor-webaudio
 
@@ -633,26 +476,16 @@ raptor-webaudio-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-webaudio
             - --gecko-profile
 
-raptor-webaudio-chrome:
-    description: "Raptor WebAudio on Chrome"
-    try-name: raptor-webaudio-chrome
-    treeherder-symbol: Rap-C(wa)
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-webaudio
-            - --app=chrome
-
 raptor-sunspider-firefox:
     description: "Raptor SunSpider on Firefox"
     try-name: raptor-sunspider-firefox
     treeherder-symbol: Rap(ss)
     mozharness:
         extra-options:
             - --test=raptor-sunspider
 
@@ -663,27 +496,16 @@ raptor-sunspider-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-sunspider
             - --gecko-profile
 
-raptor-sunspider-chrome:
-    description: "Raptor SunSpider on Chrome"
-    try-name: raptor-sunspider-chrome
-    treeherder-symbol: Rap-C(ss)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-sunspider
-            - --app=chrome
-
 raptor-unity-webgl-firefox:
     description: "Raptor Unity WebGL on Firefox"
     try-name: raptor-unity-webgl-firefox
     treeherder-symbol: Rap(ugl)
     mozharness:
         extra-options:
             - --test=raptor-unity-webgl
     fetches:
@@ -721,30 +543,16 @@ raptor-unity-webgl-geckoview:
             - --test=raptor-unity-webgl
             - --app=geckoview
             - --binary=org.mozilla.geckoview_example
             - --activity=GeckoViewActivity
     fetches:
         fetch:
             - unity-webgl
 
-raptor-unity-webgl-chrome:
-    description: "Raptor Unity WebGL on Chrome"
-    try-name: raptor-unity-webgl-chrome
-    treeherder-symbol: Rap-C(ugl)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-unity-webgl
-            - --app=chrome
-    fetches:
-        fetch:
-            - unity-webgl
-
 raptor-wasm-misc-firefox:
     description: "Raptor WASM Misc on Firefox"
     try-name: raptor-wasm-misc-firefox
     treeherder-symbol: Rap(wm)
     mozharness:
         extra-options:
             - --test=raptor-wasm-misc
     fetches:
@@ -813,30 +621,16 @@ raptor-wasm-misc-ion-firefox-profiling:
     mozharness:
         extra-options:
             - --test=raptor-wasm-misc-ion
             - --gecko-profile
     fetches:
         fetch:
             - wasm-misc
 
-raptor-wasm-misc-chrome:
-    description: "Raptor WASM Misc on Chrome"
-    try-name: raptor-wasm-misc-chrome
-    treeherder-symbol: Rap-C(wm)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    mozharness:
-        extra-options:
-            - --test=raptor-wasm-misc
-            - --app=chrome
-    fetches:
-        fetch:
-            - wasm-misc
-
 raptor-assorted-dom-firefox:
     description: "Raptor Assorted-Dom on Firefox"
     try-name: raptor-assorted-dom-firefox
     treeherder-symbol: Rap(dom)
     max-run-time:
         by-test-platform:
             .*-qr/.*: 2100
             .*-ux/.*: 2100
@@ -858,31 +652,16 @@ raptor-assorted-dom-firefox-profiling:
     mozharness:
         extra-options:
             - --test=raptor-assorted-dom
             - --gecko-profile
     fetches:
         fetch:
             - assorted-dom
 
-raptor-assorted-dom-chrome:
-    description: "Raptor Assorted-Dom on Chrome"
-    try-name: raptor-assorted-dom-chrome
-    treeherder-symbol: Rap-C(dom)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    max-run-time: 1500
-    mozharness:
-        extra-options:
-            - --test=raptor-assorted-dom
-            - --app=chrome
-    fetches:
-        fetch:
-            - assorted-dom
-
 raptor-wasm-godot-firefox:
     description: "Raptor Wasm Godot on Firefox"
     try-name: raptor-wasm-godot-firefox
     treeherder-symbol: Rap(godot)
     max-run-time:
         by-test-platform:
             .*-qr/.*: 2100
             .*-ux/.*: 2100
@@ -898,28 +677,16 @@ raptor-wasm-godot-firefox-profiling:
     run-on-projects: ['mozilla-central', 'try']
     max-run-time: 900
     tier: 2
     mozharness:
         extra-options:
             - --test=raptor-wasm-godot
             - --gecko-profile
 
-raptor-wasm-godot-chrome:
-    description: "Raptor Wasm Godot on Chrome"
-    try-name: raptor-wasm-godot-chrome
-    treeherder-symbol: Rap-C(godot)
-    run-on-projects: ['try', 'mozilla-central']
-    tier: 2
-    max-run-time: 1500
-    mozharness:
-        extra-options:
-            - --test=raptor-wasm-godot
-            - --app=chrome
-
 raptor-wasm-godot-baseline-firefox:
     description: "Raptor Wasm Godot on Firefox with baseline JIT"
     try-name: raptor-wasm-godot-baseline-firefox
     treeherder-symbol: Rap(godot-b)
     max-run-time:
         by-test-platform:
             .*-qr/.*: 2100
             .*-ux/.*: 2100
--- a/taskcluster/docker/debian-raw/Dockerfile
+++ b/taskcluster/docker/debian-raw/Dockerfile
@@ -20,11 +20,12 @@ RUN for s in debian_$DIST debian_$DIST-u
       echo 'APT::Get::Assume-Yes "true";'; \
       echo 'APT::Install-Recommends "false";'; \
       echo 'Acquire::Check-Valid-Until "false";'; \
       echo 'Acquire::Retries "5";'; \
       echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";'; \
     ) > /etc/apt/apt.conf.d/99taskcluster
 
 RUN apt-get update && \
+    apt-get dist-upgrade && \
     apt-get install \
       apt-transport-https \
       ca-certificates
--- a/taskcluster/docker/diffoscope/Dockerfile
+++ b/taskcluster/docker/diffoscope/Dockerfile
@@ -1,43 +1,30 @@
-FROM debian:stretch-20171210
+# %ARG DOCKER_IMAGE_PARENT
+FROM $DOCKER_IMAGE_PARENT
 MAINTAINER Mike Hommey <mhommey@mozilla.com>
 
-RUN mkdir /builds
-RUN useradd -d /builds/worker -s /bin/bash -m worker
-WORKDIR /builds/worker
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
 
-# Set variable normally configured at login, by the shells parent process, these
-# are taken from GNU su manual
-ENV HOME=/builds/worker \
-    SHELL=/bin/bash \
-    USER=worker \
-    LOGNAME=worker \
-    HOSTNAME=taskcluster-worker \
-    LANG=en_US.UTF-8 \
-    LC_ALL=en_US.UTF-8 \
-    DEBIAN_FRONTEND=noninteractive
-
-# Set a default command useful for debugging
-CMD ["/bin/bash", "--login"]
+ENV LANG=en_US.UTF-8
 
-# Set apt sources list to a snapshot.
-RUN for s in debian_stretch debian_stretch-updates debian-security_stretch/updates; do \
-      echo "deb [check-valid-until=no] http://snapshot.debian.org/archive/${s%_*}/20171222T153610Z/ ${s#*_} main"; \
-    done > /etc/apt/sources.list
-
-RUN apt-get update -q && \
-    apt-get install -yyq diffoscope libc++abi1 locales python3-setuptools python2.7 python-pip git && \
+RUN apt-get install \
+      binutils-multiarch \
+      bzip2 \
+      curl \
+      enjarify \
+      diffoscope/stretch-backports \
+      jsbeautifier \
+      libc++abi1 \
+      locales \
+      openjdk-8-jdk-headless \
+      python3-progressbar \
+      unzip \
+      zip \
+      && \
     sed -i '/en_US.UTF-8/s/^# *//' /etc/locale.gen && \
-    locale-gen && \
-    git clone https://salsa.debian.org/reproducible-builds/diffoscope.git /tmp/diffoscope && \
-    git -C /tmp/diffoscope checkout 202caf9d5d134e95f870d5f19f89511d635c27e4 && \
-    (cd /tmp/diffoscope && python3 setup.py install ) && \
-    rm -rf /tmp/diffoscope && \
-    apt-get clean
-
-# %include taskcluster/scripts/run-task
-COPY topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task
+    locale-gen
 
 COPY get_and_diffoscope /builds/worker/bin/get_and_diffoscope
 
 RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
-
--- a/taskcluster/docker/diffoscope/get_and_diffoscope
+++ b/taskcluster/docker/diffoscope/get_and_diffoscope
@@ -31,17 +31,17 @@ case "$ORIG_URL" in
 	;;
 */target.dmg)
 	# We don't have mach available to call mach artifact toolchain.
 	# This is the trivial equivalent for those toolchains we use here.
 	for t in $MOZ_TOOLCHAINS; do
 		curl -sL $queue_base/task/${t#*@}/artifacts/${t%@*} | tar -Jxf -
 	done
 	for tool in lipo otool; do
-		ln -s /builds/worker/cctools/bin/x86_64-apple-darwin*-$tool bin/$tool
+		ln -s /builds/worker/cctools/bin/x86_64-darwin*-$tool bin/$tool
 	done
 	export PATH=$PATH:/builds/worker/bin
 	curl -sL "$ORIG_URL" > a.dmg
 	curl -sL "$NEW_URL" > b.dmg
 	for i in a b; do
 		dmg/dmg extract $i.dmg $i.hfs
 		dmg/hfsplus $i.hfs extractall / $i
 	done
--- a/taskcluster/docker/fetch/Dockerfile
+++ b/taskcluster/docker/fetch/Dockerfile
@@ -1,9 +1,15 @@
 # %ARG DOCKER_IMAGE_PARENT
 FROM $DOCKER_IMAGE_PARENT
 
 RUN apt-get update && \
     apt-get install \
-      gnupg
+      gnupg \
+      bzip2 \
+      python3-requests \
+      unzip
 
 # %include taskcluster/scripts/misc/fetch-content
 ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content
+
+# %include taskcluster/scripts/misc/fetch-chromium.py
+ADD topsrcdir/taskcluster/scripts/misc/fetch-chromium.py /builds/worker/bin/fetch-chromium.py
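+
+# fetch-chromium.py needs python3-requests for the download, unzip to unpack
+# the snapshot archive, and bzip2 to produce the .tar.bz2 artifact.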
new file mode 100644
--- /dev/null
+++ b/taskcluster/scripts/misc/fetch-chromium.py
@@ -0,0 +1,206 @@
+#!/usr/bin/python3 -u
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''
+This script downloads the latest Chromium build (or a manually
+specified revision) for a given platform, repacks it as a tarball
+with the revision recorded in a .REVISION file, and stages it for upload.
+'''
+
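+# Invoked by the chromium-fetch tasks defined in
+# taskcluster/ci/fetch/chromium-fetch.yml. Takes --platform
+# (linux, win32, win64 or mac) and an optional --revision, and writes the
+# resulting tarball to $UPLOAD_DIR when that variable is set, e.g.:
+#
+#   fetch-chromium.py --platform win64 --revision 634634
+#
+# (the revision number above is illustrative only)
+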
+from __future__ import absolute_import, print_function
+
+import argparse
+import errno
+import json
+import os
+import shutil
+import subprocess
+import requests
+import tempfile
+
+NEWEST_BASE_URL = 'https://download-chromium.appspot.com/'
+NEWREV_DOWNLOAD_URL = NEWEST_BASE_URL + 'dl/%s?type=snapshots'
+NEWREV_REVISION_URL = NEWEST_BASE_URL + 'rev/%s?type=snapshots'
+
+OLDREV_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots/'
+OLDREV_DOWNLOAD_URL = OLDREV_BASE_URL + '%s/%s/%s'  # (platform/revision/archive)
+
+CHROMIUM_INFO = {
+    'linux': {
+        'platform': 'Linux_x64',
+        'archive': 'chrome-linux.zip',
+        'result': 'chrome-linux.tar.bz2'
+    },
+    'win32': {
+        'platform': 'Win_x64',
+        'archive': 'chrome-win.zip',
+        'result': 'chrome-win32.tar.bz2'
+    },
+    'win64': {
+        'platform': 'Win_x64',
+        'archive': 'chrome-win.zip',
+        'result': 'chrome-win64.tar.bz2'
+    },
+    'mac': {
+        'platform': 'Mac',
+        'archive': 'chrome-mac.zip',
+        'result': 'chrome-mac.tar.bz2'
+    }
+}
+
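+# For example, the latest Linux build is fetched from
+#   https://download-chromium.appspot.com/dl/Linux_x64?type=snapshots
+# while a pinned build (e.g. revision 634637) comes from
+#   http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/634637/chrome-linux.zip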
+
+def log(msg):
+    print('build-chromium: %s' % msg)
+
+
+def fetch_file(url, filepath):
+    '''Download a file from the given url to a given file.'''
+    size = 4096
+    r = requests.get(url, stream=True)
+    r.raise_for_status()
+
+    with open(filepath, 'wb') as fd:
+        for chunk in r.iter_content(size):
+            fd.write(chunk)
+
+
+def fetch_chromium_revision(platform):
+    '''Get the revision of the latest chromium build. '''
+    chromium_platform = CHROMIUM_INFO[platform]['platform']
+    revision_url = NEWREV_REVISION_URL % chromium_platform
+
+    log(
+        'Getting revision number for latest %s chromium build...' %
+        chromium_platform
+    )
+
+    # Expects a JSON of the form:
+    # {
+    #   'content': '<REVNUM>',
+    #   'last-modified': '<DATE>'
+    # }
+    r = requests.get(revision_url, timeout=30)
+    r.raise_for_status()
+
+    chromium_revision = json.loads(
+        r.content.decode('utf-8')
+    )['content']
+
+    return chromium_revision
+
+
+def fetch_chromium_build(platform, revision, zippath):
+    '''Download a chromium build for a given revision, or the latest. '''
+    use_oldrev = True
+    if not revision:
+        use_oldrev = False
+        revision = fetch_chromium_revision(platform)
+
+    download_platform = CHROMIUM_INFO[platform]['platform']
+    if use_oldrev:
+        chromium_archive = CHROMIUM_INFO[platform]['archive']
+        download_url = OLDREV_DOWNLOAD_URL % (
+            download_platform, revision, chromium_archive
+        )
+    else:
+        download_url = NEWREV_DOWNLOAD_URL % download_platform
+
+    log(
+        'Downloading %s chromium build revision %s...' %
+        (download_platform, revision)
+    )
+
+    fetch_file(download_url, zippath)
+    return revision
+
+
+def build_chromium_archive(platform, revision=None):
+    '''
+    Download and repack a Chromium build for a given platform.
+
+    Retrieves the latest build, or a specific one when the `--revision`
+    option is supplied.
+    '''
+    upload_dir = os.environ.get('UPLOAD_DIR')
+    if upload_dir:
+        # Create the upload directory if it doesn't exist.
+        try:
+            log('Creating upload directory in %s...' % os.path.abspath(upload_dir))
+            os.makedirs(upload_dir)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+    # Make a temporary location for the file
+    tmppath = tempfile.mkdtemp()
+    tmpzip = os.path.join(tmppath, 'tmp-chrome.zip')
+
+    revision = fetch_chromium_build(platform, revision, tmpzip)
+
+    # Unpack archive in `tmpzip` to store the revision number
+    log('Unpacking archive at: %s to: %s' % (tmpzip, tmppath))
+    unzip_command = ['unzip', '-q', '-o', tmpzip, '-d', tmppath]
+    subprocess.check_call(unzip_command)
+
+    dirs = [
+        d for d in os.listdir(tmppath)
+        if os.path.isdir(os.path.join(tmppath, d)) and d.startswith('chrome-')
+    ]
+
+    if len(dirs) > 1:
+        raise Exception(
+            "Too many directories starting with 'chrome-' after extracting."
+        )
+    elif len(dirs) == 0:
+        raise Exception(
+            "Could not find any directories after extraction of chromium zip."
+        )
+
+    chrome_dir = os.path.join(tmppath, dirs[0])
+    revision_file = os.path.join(chrome_dir, '.REVISION')
+    with open(revision_file, 'w+') as f:
+        f.write(str(revision))
+
+    tar_file = CHROMIUM_INFO[platform]['result']
+    tar_command = ['tar', 'cjf', tar_file, '-C', tmppath, dirs[0]]
+    log(
+        "Added revision to %s file." % revision_file
+    )
+
+    log('Tarring with the command: %s' % str(tar_command))
+    subprocess.check_call(tar_command)
+
+    upload_dir = os.environ.get('UPLOAD_DIR')
+    if upload_dir:
+        # Move the tarball to the output directory for upload.
+        log('Moving %s to the upload directory...' % tar_file)
+        shutil.copy(tar_file, os.path.join(upload_dir, tar_file))
+
+    shutil.rmtree(tmppath)
+
+
+def parse_args():
+    '''Read command line arguments and return options.'''
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--platform',
+        help='Platform of the Chromium build to fetch.',
+        required=True
+    )
+    parser.add_argument(
+        '--revision',
+        help='Revision of the Chromium build to fetch '
+             '(defaults to the newest build).',
+        default=None
+    )
+
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = vars(parse_args())
+    build_chromium_archive(**args)
--- a/taskcluster/taskgraph/target_tasks.py
+++ b/taskcluster/taskgraph/target_tasks.py
@@ -565,16 +565,25 @@ def target_tasks_searchfox(full_task_gra
 
 
 @_target_task('customv8_update')
 def target_tasks_customv8_update(full_task_graph, parameters, graph_config):
     """Select tasks required for building latest d8/v8 version."""
     return ['toolchain-linux64-custom-v8']
 
 
+@_target_task('chromium_update')
+def target_tasks_chromium_update(full_task_graph, parameters, graph_config):
+    """Select tasks required for building latest chromium versions."""
+    return ['fetch-linux64-chromium',
+            'fetch-win32-chromium',
+            'fetch-win64-chromium',
+            'fetch-mac-chromium']
+
+
 @_target_task('pipfile_update')
 def target_tasks_pipfile_update(full_task_graph, parameters, graph_config):
     """Select the set of tasks required to perform nightly in-tree pipfile updates
     """
     def filter(task):
         # For now any task in the repo-update kind is ok
         return task.kind in ['pipfile-update']
     return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--- a/taskcluster/taskgraph/transforms/fetch.py
+++ b/taskcluster/taskgraph/transforms/fetch.py
@@ -2,19 +2,22 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # Support for running tasks that download remote content and re-export
 # it as task artifacts.
 
 from __future__ import absolute_import, unicode_literals
 
+from mozbuild.shellutil import quote as shell_quote
+
 import os
 
 from voluptuous import (
+    Any,
     Optional,
     Required,
 )
 
 import taskgraph
 
 from .base import (
     TransformSequence,
@@ -38,45 +41,61 @@ FETCH_SCHEMA = Schema({
 
     # Relative path (from config.path) to the file the task was defined
     # in.
     Optional('job-from'): basestring,
 
     # Description of the task.
     Required('description'): basestring,
 
-    Required('fetch'): {
-        'type': 'static-url',
-
-        # The URL to download.
-        Required('url'): basestring,
+    Required('fetch'): Any(
+        {
+            'type': 'static-url',
 
-        # The SHA-256 of the downloaded content.
-        Required('sha256'): basestring,
+            # The URL to download.
+            Required('url'): basestring,
 
-        # Size of the downloaded entity, in bytes.
-        Required('size'): int,
+            # The SHA-256 of the downloaded content.
+            Required('sha256'): basestring,
+
+            # Size of the downloaded entity, in bytes.
+            Required('size'): int,
 
-        # GPG signature verification.
-        Optional('gpg-signature'): {
-            # URL where GPG signature document can be obtained. Can contain the
-            # value ``{url}``, which will be substituted with the value from
-            # ``url``.
-            Required('sig-url'): basestring,
-            # Path to file containing GPG public key(s) used to validate
-            # download.
-            Required('key-path'): basestring,
+            # GPG signature verification.
+            Optional('gpg-signature'): {
+                # URL where GPG signature document can be obtained. Can contain the
+                # value ``{url}``, which will be substituted with the value from
+                # ``url``.
+                Required('sig-url'): basestring,
+                # Path to file containing GPG public key(s) used to validate
+                # download.
+                Required('key-path'): basestring,
+            },
+
+            # The name to give to the generated artifact.
+            Optional('artifact-name'): basestring,
+
+            # IMPORTANT: when adding anything that changes the behavior of the task,
+            # it is important to update the digest data used to compute cache hits.
         },
+        {
+            'type': 'chromium-fetch',
 
-        # The name to give to the generated artifact.
-        Optional('artifact-name'): basestring,
+            Required('script'): basestring,
+
+            # Platform type for chromium build
+            Required('platform'): basestring,
 
-        # IMPORTANT: when adding anything that changes the behavior of the task,
-        # it is important to update the digest data used to compute cache hits.
-    },
+            # Chromium revision to obtain
+            Optional('revision'): basestring,
+
+            # The name to give to the generated artifact.
+            Required('artifact-name'): basestring
+        }
+    ),
 })
 
 transforms = TransformSequence()
 transforms.add_validate(FETCH_SCHEMA)
 
 
 @transforms.add
 def process_fetch_job(config, jobs):
@@ -84,16 +103,18 @@ def process_fetch_job(config, jobs):
     for job in jobs:
         if 'fetch' not in job:
             continue
 
         typ = job['fetch']['type']
 
         if typ == 'static-url':
             yield create_fetch_url_task(config, job)
+        elif typ == 'chromium-fetch':
+            yield create_chromium_fetch_task(config, job)
         else:
             # validate() should have caught this.
             assert False
 
 
 def make_base_task(config, name, description, command):
     # Fetch tasks are idempotent and immutable. Have them live for
     # essentially forever.
@@ -184,8 +205,60 @@ def create_fetch_url_task(config, job):
             cache_name=cache_name,
             # We don't include the GPG signature in the digest because it isn't
             # materially important for caching: GPG signatures are supplemental
             # trust checking beyond what the shasum already provides.
             digest_data=[fetch['sha256'], '%d' % fetch['size'], artifact_name],
         )
 
     return task
+
+
+def create_chromium_fetch_task(config, job):
+    name = job['name']
+    fetch = job['fetch']
+    artifact_name = fetch.get('artifact-name')
+
+    workdir = '/builds/worker'
+
+    platform = fetch.get('platform')
+    revision = fetch.get('revision')
+
+    args = '--platform ' + shell_quote(platform)
+    if revision:
+        args += ' --revision ' + shell_quote(revision)
+
+    cmd = [
+        'bash',
+        '-c',
+        'cd {} && '
+        '/usr/bin/python3 {} {}'.format(
+            workdir, fetch['script'], args
+        )
+    ]
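+    # For the linux64-chromium job this expands to:
+    #   bash -c 'cd /builds/worker && /usr/bin/python3 /builds/worker/bin/fetch-chromium.py --platform linux'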
+
+    env = {
+        'UPLOAD_DIR': '/builds/worker/artifacts'
+    }
+
+    task = make_base_task(config, name, job['description'], cmd)
+    task['treeherder']['symbol'] = join_symbol('Fetch-URL', name)
+    task['worker']['artifacts'] = [{
+        'type': 'directory',
+        'name': 'public',
+        'path': '/builds/worker/artifacts',
+    }]
+    task['worker']['env'] = env
+    task['attributes']['fetch-artifact'] = 'public/%s' % artifact_name
+
+    if not taskgraph.fast:
+        cache_name = task['label'].replace('{}-'.format(config.kind), '', 1)
+
+        # This adds the level to the index path automatically.
+        add_optimization(
+            config,
+            task,
+            cache_type=CACHE_TYPE,
+            cache_name=cache_name,
+            digest_data=["revision={}".format(revision), "platform={}".format(platform)],
+        )
+
+    return task
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -425,19 +425,22 @@ test_description_schema = Schema({
     # target.dmg (Mac), target.apk (Android), target.tar.bz2 (Linux),
     # or target.zip (Windows).
     Optional('target'): optionally_keyed_by(
         'test-platform',
         Any(basestring, None),
     ),
 
     # A list of artifacts to install from 'fetch' tasks.
-    Optional('fetches'): {
-        basestring: [basestring],
-    },
+    Optional('fetches'): optionally_keyed_by(
+        'test-platform',
+        {
+            basestring: [basestring]
+        }
+    ),
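+    # For example, raptor-chrome.yml keys its fetches by test-platform:
+    #     fetches:
+    #         by-test-platform:
+    #             macosx.*:
+    #                 fetch:
+    #                     - mac-chromium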
 }, required=True)
 
 
 @transforms.add
 def handle_keyed_by_mozharness(config, tests):
     """Resolve a mozharness field if it is keyed by something"""
     for test in tests:
         resolve_keyed_by(test, 'mozharness', item_name=test['test-name'])
@@ -742,16 +745,17 @@ def handle_keyed_by(config, tests):
         'mozharness.chunked',
         'mozharness.config',
         'mozharness.extra-options',
         'mozharness.requires-signed-builds',
         'mozharness.script',
         'workdir',
         'worker-type',
         'virtualization',
+        'fetches',
     ]
     for test in tests:
         for field in fields:
             resolve_keyed_by(test, field, item_name=test['test-name'],
                              project=config.params['project'])
         yield test
 
 
--- a/testing/firefox-ui/tests/functional/safebrowsing/test_initial_download.py
+++ b/testing/firefox-ui/tests/functional/safebrowsing/test_initial_download.py
@@ -12,17 +12,17 @@ from marionette_harness import Marionett
 class TestSafeBrowsingInitialDownload(PuppeteerMixin, MarionetteTestCase):
 
     v2_file_extensions = [
         'pset',
         'sbstore',
     ]
 
     v4_file_extensions = [
-        'pset',
+        'vlpset',
         'metadata',
     ]
 
     prefs_download_lists = [
         'urlclassifier.blockedTable',
         'urlclassifier.downloadAllowTable',
         'urlclassifier.downloadBlockTable',
         'urlclassifier.malwareTable',
--- a/testing/mochitest/runjunit.py
+++ b/testing/mochitest/runjunit.py
@@ -10,17 +10,17 @@ import shutil
 import sys
 import tempfile
 import traceback
 
 import mozcrash
 import mozinfo
 import mozlog
 import moznetwork
-from mozdevice import ADBDevice, ADBError
+from mozdevice import ADBDevice, ADBError, ADBTimeoutError
 from mozprofile import Profile, DEFAULT_PORTS
 from mozprofile.permissions import ServerLocations
 from runtests import MochitestDesktop, update_mozinfo
 
 here = os.path.abspath(os.path.dirname(__file__))
 
 try:
     from mozbuild.base import (
@@ -436,27 +436,31 @@ def run_test_harness(parser, options):
     if hasattr(options, 'log'):
         log = options.log
     else:
         log = mozlog.commandline.setup_logging("runjunit", options,
                                                {"tbpl": sys.stdout})
     runner = JUnitTestRunner(log, options)
     result = -1
     try:
+        device_exception = False
         result = runner.run_tests(options.test_filters)
     except KeyboardInterrupt:
         log.info("runjunit.py | Received keyboard interrupt")
         result = -1
-    except Exception:
+    except Exception as e:
         traceback.print_exc()
         log.error(
             "runjunit.py | Received unexpected exception while running tests")
         result = 1
+        if isinstance(e, ADBTimeoutError):
+            device_exception = True
     finally:
-        runner.cleanup()
+        if not device_exception:
+            runner.cleanup()
     return result
 
 
 def main(args=sys.argv[1:]):
     parser = JunitArgumentParser()
     options = parser.parse_args()
     return run_test_harness(parser, options)
 
--- a/testing/mozbase/mozproxy/setup.py
+++ b/testing/mozbase/mozproxy/setup.py
@@ -10,17 +10,17 @@ PACKAGE_NAME = "mozproxy"
 PACKAGE_VERSION = "1.0"
 
 # dependencies
 deps = []
 
 setup(
     name=PACKAGE_NAME,
     version=PACKAGE_VERSION,
-    description="Proxy for playback" "left behind by crashed processes",
+    description="Proxy for playback",
     long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html",
     classifiers=[
         "Programming Language :: Python :: 2.7",
         "Programming Language :: Python :: 3.5",
     ],
     # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
     keywords="mozilla",
     author="Mozilla Automation and Tools team",
--- a/testing/mozharness/mozharness/mozilla/testing/android.py
+++ b/testing/mozharness/mozharness/mozilla/testing/android.py
@@ -594,13 +594,16 @@ class AndroidMixin(object):
         """
         Stop logcat and kill the emulator, if necessary.
         """
         if not self.is_android:
             return
 
         for t in self.timers:
             t.cancel()
-        self.check_for_ANRs()
-        self.check_for_tombstones()
+        if self.worst_status != TBPL_RETRY:
+            self.check_for_ANRs()
+            self.check_for_tombstones()
+        else:
+            self.info("ANR and tombstone checks skipped due to TBPL_RETRY")
         self.logcat_stop()
         if self.is_emulator:
             self.kill_processes(self.config["emulator_process_name"])
--- a/testing/mozharness/mozharness/mozilla/testing/raptor.py
+++ b/testing/mozharness/mozharness/mozilla/testing/raptor.py
@@ -6,17 +6,16 @@ from __future__ import absolute_import, 
 
 import argparse
 import copy
 import json
 import os
 import re
 import sys
 import subprocess
-import time
 
 from shutil import copyfile
 
 import mozharness
 
 from mozharness.base.errors import PythonErrorList
 from mozharness.base.log import OutputParser, DEBUG, ERROR, CRITICAL, INFO
 from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
@@ -246,93 +245,41 @@ class Raptor(TestingMixin, MercurialScri
         abs_dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
                                                        'blobber_upload_dir')
         abs_dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
 
         self.abs_dirs = abs_dirs
         return self.abs_dirs
 
     def install_chrome(self):
-        # temporary hack to install google chrome in production; until chrome is in our CI
+        '''Install Google Chrome in production; the location of the
+        fetched Chromium binary depends on the platform.'''
         if self.app != "chrome":
             self.info("Google Chrome is not required")
             return
 
         if self.config.get("run_local"):
             self.info("expecting Google Chrome to be pre-installed locally")
             return
 
-        # in production we can put the chrome build in mozharness/mozilla/testing/chrome
-        self.chrome_dest = os.path.join(here, 'chrome')
-
-        # mozharness/base/script.py.self.platform_name will return one of:
-        # 'linux64', 'linux', 'macosx', 'win64', 'win32'
-
-        base_url = "http://commondatastorage.googleapis.com/chromium-browser-snapshots"
-
-        # note: temporarily use a specified chromium revision number to download; however
-        # in the future we will be using a fetch task to get a new chromium (Bug 1476372)
+        self.info("Getting fetched chromium build")
+        self.chrome_dest = os.path.normpath(os.path.abspath(os.environ['MOZ_FETCHES_DIR']))
 
         if 'mac' in self.platform_name():
-            # for now hardcoding a revision; but change this to update to newer version; from:
-            # http://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/LAST_CHANGE
-
-            # Note: Using an older version of Chromium on OSX b/c of an issue with a pop-up
-            # dialog appearing with newer Chromium on OSX; please see:
-            # Bug 1520523 - Update Chromium version running with Raptor in production
-            chromium_rev = "634618"
-            chrome_archive_file = "chrome-mac.zip"
-            chrome_url = "%s/Mac/%s/%s" % (base_url, chromium_rev, chrome_archive_file)
             self.chrome_path = os.path.join(self.chrome_dest, 'chrome-mac', 'Chromium.app',
                                             'Contents', 'MacOS', 'Chromium')
 
         elif 'linux' in self.platform_name():
-            # for now hardcoding a revision; but change this to update to newer version; from:
-            # http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/LAST_CHANGE
-            chromium_rev = "634637"
-            chrome_archive_file = "chrome-linux.zip"
-            chrome_url = "%s/Linux_x64/%s/%s" % (base_url, chromium_rev, chrome_archive_file)
             self.chrome_path = os.path.join(self.chrome_dest, 'chrome-linux', 'chrome')
 
         else:
-            # windows 7/10
-            # for now hardcoding a revision; but change this to update to newer version; from:
-            # http://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/LAST_CHANGE
-            chromium_rev = "634634"
-            chrome_archive_file = "chrome-win.zip"  # same zip name for win32/64
-
-            # one url for Win x64/32
-            chrome_url = "%s/Win_x64/%s/%s" % (base_url, chromium_rev, chrome_archive_file)
-
             self.chrome_path = os.path.join(self.chrome_dest, 'chrome-win', 'Chrome.exe')
 
-        chrome_archive = os.path.join(self.chrome_dest, chrome_archive_file)
-
-        self.info("installing google chrome - temporary install hack")
-        self.info("chrome archive is: %s" % chrome_archive)
         self.info("chrome dest is: %s" % self.chrome_dest)
-
-        if os.path.exists(self.chrome_path):
-            self.info("google chrome binary already exists at: %s" % self.chrome_path)
-            return
-
-        if not os.path.exists(chrome_archive):
-            # download the chrome installer
-            self.download_file(chrome_url, parent_dir=self.chrome_dest)
-
-        commands = []
-        commands.append(['unzip', '-q', '-o', chrome_archive_file, '-d', self.chrome_dest])
-
-        # now run the commands to unpack / install google chrome
-        for next_command in commands:
-            return_code = self.run_command(next_command, cwd=self.chrome_dest)
-            time.sleep(30)
-            if return_code not in [0]:
-                self.info("abort: failed to install %s to %s with command: %s"
-                          % (chrome_archive_file, self.chrome_dest, next_command))
+        self.info("chrome path is: %s" % self.chrome_path)
 
         # now ensure chrome binary exists
         if os.path.exists(self.chrome_path):
             self.info("successfully installed Google Chrome to: %s" % self.chrome_path)
         else:
             self.info("abort: failed to install Google Chrome")
 
     def raptor_options(self, args=None, **kw):
--- a/testing/talos/talos/xtalos/xperf_whitelist.json
+++ b/testing/talos/talos/xtalos/xperf_whitelist.json
@@ -198,16 +198,22 @@
     "maxbytes": 540672
   },
   "{firefox}\\browser\\extensions\\{uuid}.xpi": {
     "mincount": 0,
     "maxcount": 2,
     "minbytes": 0,
     "maxbytes": 8192
   },
+  "{firefox}\\browser\\features\\formautofill@mozilla.org.xpi": {
+    "mincount": 0,
+    "maxcount": 6,
+    "minbytes": 0,
+    "maxbytes": 393216
+  },
   "{firefox}\\browser\\features\\flyweb@mozilla.org.xpi": {
     "mincount": 0,
     "maxcount": 2,
     "minbytes": 0,
     "maxbytes": 32768
   },
   "{firefox}\\browser\\omni.ja": {
     "mincount": 0,
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-backgrounds/background-gradient-subpixel-fills-area.html.ini
@@ -0,0 +1,3 @@
+[background-gradient-subpixel-fills-area.html]
+  expected:
+    if webrender: FAIL
deleted file mode 100644
--- a/testing/web-platform/meta/html/semantics/embedded-content/media-elements/track/track-element/track-cues-enter-exit.html.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[track-cues-enter-exit.html]
-  [TextTrack's cues are indexed and updated in order during video playback]
-    expected: FAIL
-
--- a/testing/web-platform/meta/webrtc/RTCPeerConnection-transceivers.https.html.ini
+++ b/testing/web-platform/meta/webrtc/RTCPeerConnection-transceivers.https.html.ini
@@ -1,12 +1,8 @@
 [RTCPeerConnection-transceivers.https.html]
-  [setRemoteDescription(offer): ontrack's track.id is the same as track.id]
-    expected: FAIL
-    bug: https://bugzilla.mozilla.org/show_bug.cgi?id=1531439
-
   [addTransceiver(track, init): initialize sendEncodings\[0\].active to false]
     expected: FAIL
     bug: https://bugzilla.mozilla.org/show_bug.cgi?id=1396918
 
   [Closing the PC stops the transceivers]
     expected: FAIL
     bug: https://bugzilla.mozilla.org/show_bug.cgi?id=1531448
--- a/testing/web-platform/tests/html/semantics/embedded-content/media-elements/interfaces/TextTrack/activeCues.html
+++ b/testing/web-platform/tests/html/semantics/embedded-content/media-elements/interfaces/TextTrack/activeCues.html
@@ -72,21 +72,18 @@ test1.step(function(){
             } catch(ex) {
                 test2.step(function() { throw ex; });
                 test3.step(function() { assert_unreached(); });
                 return;
             }
             test3.step(function(){
                 var c3 = new VTTCue(0, 2, "text3");
                 t1.addCue(c3);
-                assert_equals(t1.activeCues.length, 1, "t1.activeCues.length after adding a cue in the same script");
-                test3.step_timeout(function(){
-                    assert_equals(t1.activeCues.length, 2, "t1.activeCues.length after the event loop has spun");
-                    test3.done();
-                }, 0);
+                assert_equals(t1.activeCues.length, 2, "t1.activeCues.length should be changed immediately");
+                test3.done();
             });
             test2.done();
         });
         try {
             assert_equals(t1.activeCues, t1_cues, "t1.activeCues should return the same object after loading a video");
             assert_equals(t2.activeCues, t2_cues, "t2.activeCues should return the same object after loading a video");
             assert_equals(t1.activeCues.length, 0, "t1.activeCues.length before the video has started playing");
             assert_equals(t2.activeCues.length, 0, "t1.activeCues.length before the video has started playing");
--- a/testing/web-platform/tests/html/semantics/embedded-content/media-elements/track/track-element/track-cues-enter-exit.html
+++ b/testing/web-platform/tests/html/semantics/embedded-content/media-elements/track/track-element/track-cues-enter-exit.html
@@ -20,17 +20,17 @@
             for (var i = 0; i < testTrack.track.cues.length; i++) {
                 testTrack.track.cues[i].onenter = t.step_func(cueEntered);
                 testTrack.track.cues[i].onexit = t.step_func(cueExited);
             }
             video.play();
         }
 
         var cueCount = 0;
-        function cueEntered() {
+        function cueEntered(event) {
             var currentCue = event.target;
 
             // This cue is the currently active cue.
             assert_equals(currentCue, testTrack.track.activeCues[0]);
             assert_equals(currentCue.id, (cueCount + 1).toString());
         }
 
         function cueExited() {
--- a/testing/web-platform/tests/webrtc/RTCPeerConnection-transceivers.https.html
+++ b/testing/web-platform/tests/webrtc/RTCPeerConnection-transceivers.https.html
@@ -134,19 +134,23 @@ promise_test(async t => {
 promise_test(async t => {
   const pc1 = createPeerConnectionWithCleanup(t);
   const [track, stream] = await createTrackAndStreamWithCleanup(t);
   pc1.addTrack(track, stream);
   const pc2 = createPeerConnectionWithCleanup(t);
   const trackEvent = await exchangeOfferAndListenToOntrack(t, pc1, pc2);
   assert_true(trackEvent.track instanceof MediaStreamTrack,
               'trackEvent.track instanceof MediaStreamTrack');
-  assert_equals(trackEvent.track.id, track.id,
-                'trackEvent.track.id == track.id');
-}, 'setRemoteDescription(offer): ontrack\'s track.id is the same as track.id');
+  assert_equals(trackEvent.streams.length, 1,
+                'trackEvent contains a single stream');
+  assert_true(trackEvent.streams[0] instanceof MediaStream,
+              'trackEvent has a MediaStream');
+  assert_equals(trackEvent.streams[0].id, stream.id,
+                'trackEvent.streams[0].id == stream.id');
+}, 'setRemoteDescription(offer): ontrack\'s stream.id is the same as stream.id');
 
 promise_test(async t => {
   const pc1 = createPeerConnectionWithCleanup(t);
   pc1.addTrack(... await createTrackAndStreamWithCleanup(t));
   const pc2 = createPeerConnectionWithCleanup(t);
   const trackEvent = await exchangeOfferAndListenToOntrack(t, pc1, pc2);
   assert_true(trackEvent.transceiver instanceof RTCRtpTransceiver,
               'trackEvent.transceiver instanceof RTCRtpTransceiver');
--- a/toolkit/components/enterprisepolicies/nsIEnterprisePolicies.idl
+++ b/toolkit/components/enterprisepolicies/nsIEnterprisePolicies.idl
@@ -25,9 +25,16 @@ interface nsIEnterprisePolicies : nsISup
   jsval getActivePolicies();
 
   /**
    * Get the contents of the support menu (if applicable)
    *
    * @returns A JS object that contains the url and label or null.
    */
   jsval getSupportMenu();
+
+  /**
+   * Get the policy for a given extensionID (if available)
+   *
+   * @returns A JS object that contains the storage or null if unavailable.
+   */
+  jsval getExtensionPolicy(in ACString extensionID);
 };
--- a/toolkit/components/extensions/parent/ext-storage.js
+++ b/toolkit/components/extensions/parent/ext-storage.js
@@ -21,16 +21,20 @@ const enforceNoTemporaryAddon = extensio
     throw new ExtensionError(EXCEPTION_MESSAGE);
   }
 };
 
 // WeakMap[extension -> Promise<SerializableMap?>]
 const managedStorage = new WeakMap();
 
 const lookupManagedStorage = async (extensionId, context) => {
+  let extensionPolicy = Services.policies.getExtensionPolicy(extensionId);
+  if (extensionPolicy) {
+    return ExtensionStorage._serializableMap(extensionPolicy);
+  }
   let info = await NativeManifests.lookupManifest("storage", extensionId, context);
   if (info) {
     return ExtensionStorage._serializableMap(info.manifest.data);
   }
   return null;
 };
 
 this.storage = class extends ExtensionAPI {
new file mode 100644
--- /dev/null
+++ b/toolkit/components/extensions/test/xpcshell/test_ext_storage_managed_policy.js
@@ -0,0 +1,48 @@
+"use strict";
+
+const PREF_DISABLE_SECURITY = ("security.turn_off_all_security_so_that_" +
+                               "viruses_can_take_over_this_computer");
+
+const {EnterprisePolicyTesting} = ChromeUtils.import("resource://testing-common/EnterprisePolicyTesting.jsm");
+
+// Setting PREF_DISABLE_SECURITY tells the policy engine that we are in testing
+// mode and enables restarting the policy engine without restarting the browser.
+Services.prefs.setBoolPref(PREF_DISABLE_SECURITY, true);
+registerCleanupFunction(() => {
+  Services.prefs.clearUserPref(PREF_DISABLE_SECURITY);
+});
+
+// Load policy engine
+Services.policies; // eslint-disable-line no-unused-expressions
+
+add_task(async function test_storage_managed_policy() {
+  await ExtensionTestUtils.startAddonManager();
+
+  await EnterprisePolicyTesting.setupPolicyEngineWithJson({
+    "policies": {
+      "3rdparty": {
+        "Extensions": {
+          "test-storage-managed-policy@mozilla.com": {
+            "string": "value",
+          },
+        },
+      },
+    },
+  });
+
+  let extension = ExtensionTestUtils.loadExtension({
+    manifest: {
+      applications: {gecko: {id: "test-storage-managed-policy@mozilla.com"}},
+      permissions: ["storage"],
+    },
+
+    async background() {
+      let str = await browser.storage.managed.get("string");
+      browser.test.sendMessage("results", str);
+    },
+  });
+
+  await extension.startup();
+  deepEqual(await extension.awaitMessage("results"), {"string": "value"});
+  await extension.unload();
+});
--- a/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini
+++ b/toolkit/components/extensions/test/xpcshell/xpcshell-common.ini
@@ -111,16 +111,18 @@ skip-if = os == "android"
 [test_ext_storage.js]
 skip-if = os == "android" && debug
 [test_ext_storage_idb_data_migration.js]
 skip-if = os == "android" && debug
 [test_ext_storage_content.js]
 skip-if = os == "android" && debug
 [test_ext_storage_managed.js]
 skip-if = os == "android"
+[test_ext_storage_managed_policy.js]
+skip-if = os == "android"
 [test_ext_storage_sync.js]
 head = head.js head_sync.js
 skip-if = appname == "thunderbird" || os == "android"
 [test_ext_storage_sync_crypto.js]
 skip-if = appname == "thunderbird" || os == "android"
 [test_ext_storage_tab.js]
 [test_ext_storage_telemetry.js]
 skip-if = os == "android" # checking for telemetry needs to be updated: 1384923
--- a/toolkit/components/url-classifier/Classifier.cpp
+++ b/toolkit/components/url-classifier/Classifier.cpp
@@ -1572,31 +1572,32 @@ nsresult Classifier::LoadMetadata(nsIFil
       if (lookupCache) {
         lookupCacheV4 = LookupCache::Cast<LookupCacheV4>(lookupCache);
       }
     }
     if (!lookupCacheV4) {
       continue;
     }
 
-    nsCString state;
-    nsCString checksum;
-    rv = lookupCacheV4->LoadMetadata(state, checksum);
+    nsCString state, sha256;
+    rv = lookupCacheV4->LoadMetadata(state, sha256);
+    Telemetry::Accumulate(Telemetry::URLCLASSIFIER_VLPS_METADATA_CORRUPT,
+                          rv == NS_ERROR_FILE_CORRUPTED);
     if (NS_FAILED(rv)) {
       LOG(("Failed to get metadata for table %s", tableName.get()));
       continue;
     }
 
     // The state might include '\n' so that we have to encode.
     nsAutoCString stateBase64;
     rv = Base64Encode(state, stateBase64);
     NS_ENSURE_SUCCESS(rv, rv);
 
     nsAutoCString checksumBase64;
-    rv = Base64Encode(checksum, checksumBase64);
+    rv = Base64Encode(sha256, checksumBase64);
     NS_ENSURE_SUCCESS(rv, rv);
 
     LOG(("Appending state '%s' and checksum '%s' for table %s",
          stateBase64.get(), checksumBase64.get(), tableName.get()));
 
     aResult.AppendPrintf("%s;%s:%s\n", tableName.get(), stateBase64.get(),
                          checksumBase64.get());
   }
--- a/toolkit/components/url-classifier/HashStore.cpp
+++ b/toolkit/components/url-classifier/HashStore.cpp
@@ -201,18 +201,18 @@ nsresult TableUpdateV4::NewRemovalIndice
   }
 
   for (size_t i = 0; i < aNumOfIndices; i++) {
     mRemovalIndiceArray.AppendElement(aIndices[i]);
   }
   return NS_OK;
 }
 
-void TableUpdateV4::NewChecksum(const std::string& aChecksum) {
-  mChecksum.Assign(aChecksum.data(), aChecksum.size());
+void TableUpdateV4::SetSHA256(const std::string& aSHA256) {
+  mSHA256.Assign(aSHA256.data(), aSHA256.size());
 }
 
 nsresult TableUpdateV4::NewFullHashResponse(
     const Prefix& aPrefix, const CachedFullHashResponse& aResponse) {
   CachedFullHashResponse* response =
       mFullHashResponseMap.LookupOrAdd(aPrefix.ToUint32());
   if (!response) {
     return NS_ERROR_OUT_OF_MEMORY;
--- a/toolkit/components/url-classifier/HashStore.h
+++ b/toolkit/components/url-classifier/HashStore.h
@@ -154,41 +154,41 @@ class TableUpdateV4 : public TableUpdate
   }
 
   bool IsFullUpdate() const { return mFullUpdate; }
   const PrefixStringMap& Prefixes() const { return mPrefixesMap; }
   const RemovalIndiceArray& RemovalIndices() const {
     return mRemovalIndiceArray;
   }
   const nsACString& ClientState() const { return mClientState; }
-  const nsACString& Checksum() const { return mChecksum; }
+  const nsACString& SHA256() const { return mSHA256; }
   const FullHashResponseMap& FullHashResponse() const {
     return mFullHashResponseMap;
   }
 
   // For downcasting.
   static const int TAG = 4;
 
   void SetFullUpdate(bool aIsFullUpdate) { mFullUpdate = aIsFullUpdate; }
   void NewPrefixes(int32_t aSize, const nsACString& aPrefixes);
   void SetNewClientState(const nsACString& aState) { mClientState = aState; }
-  void NewChecksum(const std::string& aChecksum);
+  void SetSHA256(const std::string& aSHA256);
 
   nsresult NewRemovalIndices(const uint32_t* aIndices, size_t aNumOfIndices);
   nsresult NewFullHashResponse(const Prefix& aPrefix,
                                const CachedFullHashResponse& aResponse);
 
  private:
   virtual int Tag() const override { return TAG; }
 
   bool mFullUpdate;
   PrefixStringMap mPrefixesMap;
   RemovalIndiceArray mRemovalIndiceArray;
   nsCString mClientState;
-  nsCString mChecksum;
+  nsCString mSHA256;
 
   // This is used to store response from fullHashes.find.
   FullHashResponseMap mFullHashResponseMap;
 };
 
 // There is one hash store per table.
 class HashStore {
  public:
--- a/toolkit/components/url-classifier/LookupCache.cpp
+++ b/toolkit/components/url-classifier/LookupCache.cpp
@@ -26,31 +26,30 @@
 // PrefixSet stores and provides lookups for 4-byte prefixes.
 // mUpdateCompletions contains 32-byte completions which were
 // contained in updates. They are retrieved from HashStore/.sbstore
 // on startup.
 // mGetHashCache contains 32-byte completions which were
 // returned from the gethash server. They are not serialized,
 // only cached until the next update.
 
-// Name of the persistent PrefixSet storage
-#define PREFIXSET_SUFFIX ".pset"
-
 #define V2_CACHE_DURATION_SEC (15 * 60)
 
 // MOZ_LOG=UrlClassifierDbService:5
 extern mozilla::LazyLogModule gUrlClassifierDbServiceLog;
 #define LOG(args) \
   MOZ_LOG(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug, args)
 #define LOG_ENABLED() \
   MOZ_LOG_TEST(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug)
 
 namespace mozilla {
 namespace safebrowsing {
 
+const uint32_t LookupCache::MAX_BUFFER_SIZE = 64 * 1024;
+
 const int CacheResultV2::VER = CacheResult::V2;
 const int CacheResultV4::VER = CacheResult::V4;
 
 const int LookupCacheV2::VER = 2;
 
 static void CStringToHexString(const nsACString& aIn, nsACString& aOut) {
   static const char* const lut = "0123456789ABCDEF";
 
@@ -115,17 +114,17 @@ nsresult LookupCache::WriteFile() {
   if (nsUrlClassifierDBService::ShutdownHasStarted()) {
     return NS_ERROR_ABORT;
   }
 
   nsCOMPtr<nsIFile> psFile;
   nsresult rv = mStoreDirectory->Clone(getter_AddRefs(psFile));
   NS_ENSURE_SUCCESS(rv, rv);
 
-  rv = psFile->AppendNative(mTableName + NS_LITERAL_CSTRING(PREFIXSET_SUFFIX));
+  rv = psFile->AppendNative(mTableName + GetPrefixSetSuffix());
   NS_ENSURE_SUCCESS(rv, rv);
 
   rv = StoreToFile(psFile);
   NS_WARNING_ASSERTION(NS_SUCCEEDED(rv), "failed to store the prefixset");
 
   return NS_OK;
 }
 
@@ -434,32 +433,39 @@ nsresult LookupCache::GetHostKeys(const 
   return NS_OK;
 }
 
 nsresult LookupCache::LoadPrefixSet() {
   nsCOMPtr<nsIFile> psFile;
   nsresult rv = mStoreDirectory->Clone(getter_AddRefs(psFile));
   NS_ENSURE_SUCCESS(rv, rv);
 
-  rv = psFile->AppendNative(mTableName + NS_LITERAL_CSTRING(PREFIXSET_SUFFIX));
+  rv = psFile->AppendNative(mTableName + GetPrefixSetSuffix());
   NS_ENSURE_SUCCESS(rv, rv);
 
   bool exists;
   rv = psFile->Exists(&exists);
   NS_ENSURE_SUCCESS(rv, rv);
 
   if (exists) {
     LOG(("stored PrefixSet exists, loading from disk"));
     rv = LoadFromFile(psFile);
     if (NS_FAILED(rv)) {
       return rv;
     }
     mPrimed = true;
   } else {
-    LOG(("no (usable) stored PrefixSet found"));
+    // The only scenario in which we load the old .pset file is when we haven't
+    // received a SafeBrowsing update yet. After receiving an update, the new
+    // .vlpset is stored and the old .pset is removed.
+    if (NS_SUCCEEDED(LoadLegacyFile())) {
+      mPrimed = true;
+    } else {
+      LOG(("no (usable) stored PrefixSet found"));
+    }
   }
 
 #ifdef DEBUG
   if (mPrimed) {
     uint32_t size = SizeOfPrefixSet();
     LOG(("SB tree done, size = %d bytes\n", size));
   }
 #endif
@@ -646,27 +652,95 @@ nsresult LookupCacheV2::ReadCompletions(
   return NS_OK;
 }
 
 nsresult LookupCacheV2::ClearPrefixes() {
   return mPrefixSet->SetPrefixes(nullptr, 0);
 }
 
 nsresult LookupCacheV2::StoreToFile(nsCOMPtr<nsIFile>& aFile) {
-  return mPrefixSet->StoreToFile(aFile);
+  nsCOMPtr<nsIOutputStream> localOutFile;
+  nsresult rv =
+      NS_NewLocalFileOutputStream(getter_AddRefs(localOutFile), aFile,
+                                  PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  uint32_t fileSize;
+
+  // Preallocate the file storage
+  {
+    nsCOMPtr<nsIFileOutputStream> fos(do_QueryInterface(localOutFile));
+    Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_PS_FALLOCATE_TIME> timer;
+
+    fileSize = mPrefixSet->CalculatePreallocateSize();
+
+    // Ignore failure; the preallocation is only a hint and we write out the
+    // entire file later on anyway.
+    Unused << fos->Preallocate(fileSize);
+  }
+
+  // Convert to buffered stream
+  nsCOMPtr<nsIOutputStream> out;
+  rv = NS_NewBufferedOutputStream(getter_AddRefs(out), localOutFile.forget(),
+                                  std::min(fileSize, MAX_BUFFER_SIZE));
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  rv = mPrefixSet->WritePrefixes(out);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  LOG(("[%s] Storing PrefixSet successful", mTableName.get()));
+  return NS_OK;
 }
 
+nsresult LookupCacheV2::LoadLegacyFile() { return NS_ERROR_NOT_IMPLEMENTED; }
+
 nsresult LookupCacheV2::LoadFromFile(nsCOMPtr<nsIFile>& aFile) {
-  return mPrefixSet->LoadFromFile(aFile);
+  Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_PS_FILELOAD_TIME> timer;
+
+  nsCOMPtr<nsIInputStream> localInFile;
+  nsresult rv = NS_NewLocalFileInputStream(getter_AddRefs(localInFile), aFile,
+                                           PR_RDONLY | nsIFile::OS_READAHEAD);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  // Calculate how big the file is and make sure our read buffer isn't bigger
+  // than the file itself, which would just waste memory.
+  int64_t fileSize;
+  rv = aFile->GetFileSize(&fileSize);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  if (fileSize < 0 || fileSize > UINT32_MAX) {
+    return NS_ERROR_FAILURE;
+  }
+
+  uint32_t bufferSize =
+      std::min<uint32_t>(static_cast<uint32_t>(fileSize), MAX_BUFFER_SIZE);
+
+  // Convert to buffered stream
+  nsCOMPtr<nsIInputStream> in;
+  rv = NS_NewBufferedInputStream(getter_AddRefs(in), localInFile.forget(),
+                                 bufferSize);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  rv = mPrefixSet->LoadPrefixes(in);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  mPrimed = true;
+  LOG(("[%s] Loading PrefixSet successful", mTableName.get()));
+
+  return NS_OK;
 }
 
 size_t LookupCacheV2::SizeOfPrefixSet() const {
   return mPrefixSet->SizeOfIncludingThis(moz_malloc_size_of);
 }
 
+nsCString LookupCacheV2::GetPrefixSetSuffix() const {
+  return NS_LITERAL_CSTRING(".pset");
+}
+
 #ifdef DEBUG
 template <class T>
 static void EnsureSorted(T* aArray) {
   typename T::elem_type* start = aArray->Elements();
   typename T::elem_type* end = aArray->Elements() + aArray->Length();
   typename T::elem_type* iter = start;
   typename T::elem_type* previous = start;
 
--- a/toolkit/components/url-classifier/LookupCache.h
+++ b/toolkit/components/url-classifier/LookupCache.h
@@ -226,16 +226,19 @@ class LookupCache {
   void GetCacheInfo(nsIUrlClassifierCacheInfo** aCache) const;
 
   virtual nsresult Open();
   virtual nsresult Init() = 0;
   virtual nsresult ClearPrefixes() = 0;
   virtual nsresult Has(const Completion& aCompletion, bool* aHas,
                        uint32_t* aMatchLength, bool* aConfirmed) = 0;
 
+  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) = 0;
+  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) = 0;
+
   virtual bool IsEmpty() const = 0;
 
   virtual void ClearAll();
 
   template <typename T>
   static T* Cast(LookupCache* aThat) {
     return ((aThat && T::VER == aThat->Ver()) ? reinterpret_cast<T*>(aThat)
                                               : nullptr);
@@ -245,25 +248,28 @@ class LookupCache {
     return ((aThat && T::VER == aThat->Ver())
                 ? reinterpret_cast<const T*>(aThat)
                 : nullptr);
   }
 
  private:
   nsresult LoadPrefixSet();
 
-  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) = 0;
-  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) = 0;
   virtual size_t SizeOfPrefixSet() const = 0;
+  virtual nsCString GetPrefixSetSuffix() const = 0;
 
   virtual int Ver() const = 0;
 
+  virtual nsresult LoadLegacyFile() = 0;
+
  protected:
   virtual ~LookupCache() {}
 
+  static const uint32_t MAX_BUFFER_SIZE;
+
   // Check completions in positive cache and prefix in negative cache.
   // 'aHas' and 'aConfirmed' are output parameters.
   nsresult CheckCache(const Completion& aCompletion, bool* aHas,
                       bool* aConfirmed);
 
   bool mPrimed;  // true when the PrefixSet has been loaded (or constructed)
   const nsCString mTableName;
   const nsCString mProvider;
@@ -287,16 +293,19 @@ class LookupCacheV2 final : public Looku
       : LookupCache(aTableName, aProvider, aStoreFile) {}
 
   virtual nsresult Init() override;
   virtual nsresult Open() override;
   virtual void ClearAll() override;
   virtual nsresult Has(const Completion& aCompletion, bool* aHas,
                        uint32_t* aMatchLength, bool* aConfirmed) override;
 
+  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) override;
+  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) override;
+
   virtual bool IsEmpty() const override;
 
   nsresult Build(AddPrefixArray& aAddPrefixes, AddCompleteArray& aAddCompletes);
 
   nsresult GetPrefixes(FallibleTArray<uint32_t>& aAddPrefixes);
 
   // This will Clear() the passed arrays when done.
   // 'aExpirySec' is used by testcase to config an expired time.
@@ -309,25 +318,26 @@ class LookupCacheV2 final : public Looku
 #endif
 
   static const int VER;
 
  protected:
   nsresult ReadCompletions();
 
   virtual nsresult ClearPrefixes() override;
-  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) override;
-  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) override;
   virtual size_t SizeOfPrefixSet() const override;
+  virtual nsCString GetPrefixSetSuffix() const override;
 
  private:
   ~LookupCacheV2() {}
 
   virtual int Ver() const override { return VER; }
 
+  virtual nsresult LoadLegacyFile() override;
+
   // Construct a Prefix Set with known prefixes.
   // This will Clear() aAddPrefixes when done.
   nsresult ConstructPrefixSet(AddPrefixArray& aAddPrefixes);
 
   // Full length hashes obtained in update request
   CompletionArray mUpdateCompletions;
 
   // Set of prefixes known to be in the database
--- a/toolkit/components/url-classifier/LookupCacheV4.cpp
+++ b/toolkit/components/url-classifier/LookupCacheV4.cpp
@@ -1,33 +1,126 @@
 //* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "LookupCacheV4.h"
 #include "HashStore.h"
 #include "mozilla/Unused.h"
+#include "nsCheckSummedOutputStream.h"
+#include "crc32c.h"
 #include <string>
 
 // MOZ_LOG=UrlClassifierDbService:5
 extern mozilla::LazyLogModule gUrlClassifierDbServiceLog;
 #define LOG(args) \
   MOZ_LOG(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug, args)
 #define LOG_ENABLED() \
   MOZ_LOG_TEST(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug)
 
 #define METADATA_SUFFIX NS_LITERAL_CSTRING(".metadata")
+namespace {
+
+static const uint64_t STREAM_BUFFER_SIZE = 4096;
+
+//////////////////////////////////////////////////////////////////////////
+// A set of lightweight functions for reading/writing value from/to file.
+template <typename T>
+struct ValueTraits {
+  static_assert(sizeof(T) <= LookupCacheV4::MAX_METADATA_VALUE_LENGTH,
+                "LookupCacheV4::MAX_METADATA_VALUE_LENGTH is too small.");
+  static uint32_t Length(const T& aValue) { return sizeof(T); }
+  static char* WritePtr(T& aValue, uint32_t aLength) { return (char*)&aValue; }
+  static const char* ReadPtr(const T& aValue) { return (char*)&aValue; }
+  static bool IsFixedLength() { return true; }
+};
+
+template <>
+struct ValueTraits<nsACString> {
+  static bool IsFixedLength() { return false; }
+
+  static uint32_t Length(const nsACString& aValue) { return aValue.Length(); }
+
+  static char* WritePtr(nsACString& aValue, uint32_t aLength) {
+    aValue.SetLength(aLength);
+    return aValue.BeginWriting();
+  }
+
+  static const char* ReadPtr(const nsACString& aValue) {
+    return aValue.BeginReading();
+  }
+};
+
+template <typename T>
+static nsresult WriteValue(nsIOutputStream* aOutputStream, const T& aValue) {
+  uint32_t writeLength = ValueTraits<T>::Length(aValue);
+  MOZ_ASSERT(writeLength <= LookupCacheV4::MAX_METADATA_VALUE_LENGTH,
+             "LookupCacheV4::MAX_METADATA_VALUE_LENGTH is too small.");
+  if (!ValueTraits<T>::IsFixedLength()) {
+    // We need to write out the variable value length.
+    nsresult rv = WriteValue(aOutputStream, writeLength);
+    NS_ENSURE_SUCCESS(rv, rv);
+  }
+
+  // Write out the value.
+  auto valueReadPtr = ValueTraits<T>::ReadPtr(aValue);
+  uint32_t written;
+  nsresult rv = aOutputStream->Write(valueReadPtr, writeLength, &written);
+  NS_ENSURE_SUCCESS(rv, rv);
+  if (NS_WARN_IF(written != writeLength)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  return rv;
+}
+
+template <typename T>
+static nsresult ReadValue(nsIInputStream* aInputStream, T& aValue) {
+  nsresult rv;
+
+  uint32_t readLength;
+  if (ValueTraits<T>::IsFixedLength()) {
+    readLength = ValueTraits<T>::Length(aValue);
+  } else {
+    // Read the variable value length from file.
+    nsresult rv = ReadValue(aInputStream, readLength);
+    NS_ENSURE_SUCCESS(rv, rv);
+  }
+
+  // Sanity-check the readLength in case of disk corruption
+  // (see bug 1433636).
+  if (readLength > LookupCacheV4::MAX_METADATA_VALUE_LENGTH) {
+    return NS_ERROR_FILE_CORRUPTED;
+  }
+
+  // Read the value.
+  uint32_t read;
+  auto valueWritePtr = ValueTraits<T>::WritePtr(aValue, readLength);
+  rv = aInputStream->Read(valueWritePtr, readLength, &read);
+  if (NS_FAILED(rv) || read != readLength) {
+    LOG(("Failed to read the value."));
+    return NS_FAILED(rv) ? rv : NS_ERROR_FAILURE;
+  }
+
+  return rv;
+}
+
+}  // end of unnamed namespace.
+////////////////////////////////////////////////////////////////////////
 
 namespace mozilla {
 namespace safebrowsing {
 
 const int LookupCacheV4::VER = 4;
 const uint32_t LookupCacheV4::MAX_METADATA_VALUE_LENGTH = 256;
 
+const uint32_t VLPSET_MAGIC = 0x36044a35;
+const uint32_t VLPSET_VERSION = 1;
+
 // Prefixes coming from updates and VLPrefixSet are both stored in the HashTable
 // where the (key, value) pair is a prefix size and a lexicographic-sorted
 // string. The difference is that prefixes from updates use std::string (to
 // avoid additional copies) and prefixes from VLPrefixSet use nsCString. This
 // class provides a common interface so the partial update algorithm can
 // operate on the two different kinds of prefix string maps more easily.
 class VLPrefixSet {
  public:
@@ -159,44 +252,250 @@ nsresult LookupCacheV4::GetFixedLengthPr
 
 nsresult LookupCacheV4::ClearPrefixes() {
   // Clear by setting an empty map
   PrefixStringMap map;
   return mVLPrefixSet->SetPrefixes(map);
 }
 
 nsresult LookupCacheV4::StoreToFile(nsCOMPtr<nsIFile>& aFile) {
-  return mVLPrefixSet->StoreToFile(aFile);
+  NS_ENSURE_ARG_POINTER(aFile);
+
+  uint32_t fileSize = sizeof(Header) +
+                      mVLPrefixSet->CalculatePreallocateSize() +
+                      nsCrc32CheckSumedOutputStream::CHECKSUM_SIZE;
+
+  nsCOMPtr<nsIOutputStream> localOutFile;
+  nsresult rv =
+      NS_NewSafeLocalFileOutputStream(getter_AddRefs(localOutFile), aFile,
+                                      PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Preallocate the file storage
+  {
+    nsCOMPtr<nsIFileOutputStream> fos(do_QueryInterface(localOutFile));
+    Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_VLPS_FALLOCATE_TIME> timer;
+
+    Unused << fos->Preallocate(fileSize);
+  }
+
+  nsCOMPtr<nsIOutputStream> out;
+  rv = NS_NewCrc32OutputStream(getter_AddRefs(out), localOutFile.forget(),
+                               std::min(fileSize, MAX_BUFFER_SIZE));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Write header
+  Header header = {.magic = VLPSET_MAGIC, .version = VLPSET_VERSION};
+  rv = WriteValue(out, header);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Write prefixes
+  rv = mVLPrefixSet->WritePrefixes(out);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Write checksum
+  nsCOMPtr<nsISafeOutputStream> safeOut = do_QueryInterface(out, &rv);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = safeOut->Finish();
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  LOG(("[%s] Storing PrefixSet successful", mTableName.get()));
+
+  // Remove the old ".pset" file if one exists.
+  Unused << CleanOldPrefixSet();
+  return NS_OK;
 }
 
-nsresult LookupCacheV4::LoadFromFile(nsCOMPtr<nsIFile>& aFile) {
-  nsresult rv = mVLPrefixSet->LoadFromFile(aFile);
+nsresult LookupCacheV4::CleanOldPrefixSet() {
+  nsCOMPtr<nsIFile> file;
+  nsresult rv = mStoreDirectory->Clone(getter_AddRefs(file));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = file->AppendNative(mTableName + NS_LITERAL_CSTRING(".pset"));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  bool exists;
+  rv = file->Exists(&exists);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (exists) {
+    rv = file->Remove(false);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
+    }
+
+    LOG(("[%s] Old PrefixSet is succuessfully removed!", mTableName.get()));
+  }
+
+  return NS_OK;
+}
+
+nsresult LookupCacheV4::LoadLegacyFile() {
+  nsCOMPtr<nsIFile> file;
+  nsresult rv = mStoreDirectory->Clone(getter_AddRefs(file));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = file->AppendNative(mTableName + NS_LITERAL_CSTRING(".pset"));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  bool exists;
+  rv = file->Exists(&exists);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  if (!exists) {
+    return NS_ERROR_FAILURE;
+  }
+
+  nsCOMPtr<nsIInputStream> localInFile;
+  rv = NS_NewLocalFileInputStream(getter_AddRefs(localInFile), file,
+                                  PR_RDONLY | nsIFile::OS_READAHEAD);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return rv;
   }
 
-  nsCString state, checksum;
-  rv = LoadMetadata(state, checksum);
-  Telemetry::Accumulate(Telemetry::URLCLASSIFIER_VLPS_METADATA_CORRUPT,
-                        rv == NS_ERROR_FILE_CORRUPTED);
+  // Calculate how big the file is and make sure our read buffer isn't bigger
+  // than the file itself, which would just waste memory.
+  int64_t fileSize;
+  rv = file->GetFileSize(&fileSize);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  if (fileSize < 0 || fileSize > UINT32_MAX) {
+    return NS_ERROR_FAILURE;
+  }
+
+  uint32_t bufferSize =
+      std::min<uint32_t>(static_cast<uint32_t>(fileSize), MAX_BUFFER_SIZE);
+
+  // Convert to buffered stream
+  nsCOMPtr<nsIInputStream> in;
+  rv = NS_NewBufferedInputStream(getter_AddRefs(in), localInFile.forget(),
+                                 bufferSize);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Load data
+  rv = mVLPrefixSet->LoadPrefixes(in);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  mPrimed = true;
+
+  LOG(("[%s] Loading Legacy PrefixSet successful", mTableName.get()));
+  return NS_OK;
+}
+
+nsresult LookupCacheV4::LoadFromFile(nsCOMPtr<nsIFile>& aFile) {
+  NS_ENSURE_ARG_POINTER(aFile);
+
+  Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_VLPS_FILELOAD_TIME> timer;
+
+  nsCOMPtr<nsIInputStream> localInFile;
+  nsresult rv = NS_NewLocalFileInputStream(getter_AddRefs(localInFile), aFile,
+                                           PR_RDONLY | nsIFile::OS_READAHEAD);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Calculate how big the file is and make sure our read buffer isn't bigger
+  // than the file itself, which would just waste memory.
+  int64_t fileSize;
+  rv = aFile->GetFileSize(&fileSize);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return rv;
   }
 
-  rv = VerifyChecksum(checksum);
-  Telemetry::Accumulate(Telemetry::URLCLASSIFIER_VLPS_LOAD_CORRUPT,
-                        rv == NS_ERROR_FILE_CORRUPTED);
-  Unused << NS_WARN_IF(NS_FAILED(rv));
-  return rv;
+  if (fileSize < 0 || fileSize > UINT32_MAX) {
+    return NS_ERROR_FAILURE;
+  }
+
+  uint32_t bufferSize =
+      std::min<uint32_t>(static_cast<uint32_t>(fileSize), MAX_BUFFER_SIZE);
+
+  // Convert to buffered stream
+  nsCOMPtr<nsIInputStream> in;
+  rv = NS_NewBufferedInputStream(getter_AddRefs(in), localInFile.forget(),
+                                 bufferSize);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Load header
+  Header header;
+  rv = ReadValue(in, header);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = SanityCheck(header);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Load data
+  rv = mVLPrefixSet->LoadPrefixes(in);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  // Load crc32 checksum and verify
+  rv = VerifyCRC32(in);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  mPrimed = true;
+
+  LOG(("[%s] Loading PrefixSet successful", mTableName.get()));
+  return NS_OK;
+}
+
+nsresult LookupCacheV4::SanityCheck(const Header& aHeader) {
+  if (aHeader.magic != VLPSET_MAGIC) {
+    return NS_ERROR_FILE_CORRUPTED;
+  }
+
+  if (aHeader.version != VLPSET_VERSION) {
+    return NS_ERROR_FAILURE;
+  }
+
+  return NS_OK;
 }
 
 size_t LookupCacheV4::SizeOfPrefixSet() const {
   return mVLPrefixSet->SizeOfIncludingThis(moz_malloc_size_of);
 }
 
+nsCString LookupCacheV4::GetPrefixSetSuffix() const {
+  return NS_LITERAL_CSTRING(".vlpset");
+}
+
 static nsresult AppendPrefixToMap(PrefixStringMap& prefixes,
                                   const nsACString& prefix) {
   uint32_t len = prefix.Length();
   MOZ_ASSERT(len >= PREFIX_SIZE && len <= COMPLETE_SIZE);
   if (!len) {
     return NS_OK;
   }
 
@@ -217,18 +516,18 @@ static nsresult InitCrypto(nsCOMPtr<nsIC
 
   rv = aCrypto->Init(nsICryptoHash::SHA256);
   NS_WARNING_ASSERTION(NS_SUCCEEDED(rv), "InitCrypto failed");
 
   return rv;
 }
 
 // Read prefix into a buffer and also update the hash which
-// keeps track of the checksum
-static void UpdateChecksum(nsICryptoHash* aCrypto, const nsACString& aPrefix) {
+// keeps track of the sha256 hash
+static void UpdateSHA256(nsICryptoHash* aCrypto, const nsACString& aPrefix) {
   MOZ_ASSERT(aCrypto);
   aCrypto->Update(
       reinterpret_cast<uint8_t*>(const_cast<char*>(aPrefix.BeginReading())),
       aPrefix.Length());
 }
 
 // Please see https://bug1287058.bmoattachments.org/attachment.cgi?id=8795366
 // for detail about partial update algorithm.
@@ -310,26 +609,26 @@ nsresult LookupCacheV4::ApplyUpdate(RefP
           numOldPrefixPicked == removalArray[removalIndex]) {
         removalIndex++;
       } else {
         rv = AppendPrefixToMap(aOutputMap, smallestOldPrefix);
         if (NS_WARN_IF(NS_FAILED(rv))) {
           return rv;
         }
 
-        UpdateChecksum(crypto, smallestOldPrefix);
+        UpdateSHA256(crypto, smallestOldPrefix);
       }
       smallestOldPrefix.SetLength(0);
     } else {
       rv = AppendPrefixToMap(aOutputMap, smallestAddPrefix);
       if (NS_WARN_IF(NS_FAILED(rv))) {
         return rv;
       }
 
-      UpdateChecksum(crypto, smallestAddPrefix);
+      UpdateSHA256(crypto, smallestAddPrefix);
       smallestAddPrefix.SetLength(0);
     }
   }
 
   // We expect index to be greater than 0 because the maximum number of runs
   // is the number of original prefixes plus added prefixes.
   if (index <= 0) {
     LOG(("There are still prefixes remaining after reaching maximum runs."));
@@ -338,162 +637,92 @@ nsresult LookupCacheV4::ApplyUpdate(RefP
 
   if (removalIndex < removalArray.Length()) {
     LOG(
         ("There are still prefixes to remove after exhausting the old "
          "PrefixSet."));
     return NS_ERROR_UC_UPDATE_WRONG_REMOVAL_INDICES;
   }
 
-  nsAutoCString checksum;
-  crypto->Finish(false, checksum);
-  if (aTableUpdate->Checksum().IsEmpty()) {
-    LOG(("Update checksum missing."));
+  nsAutoCString sha256;
+  crypto->Finish(false, sha256);
+  if (aTableUpdate->SHA256().IsEmpty()) {
+    LOG(("Update sha256 hash missing."));
     Telemetry::Accumulate(
         Telemetry::URLCLASSIFIER_UPDATE_ERROR, mProvider,
         NS_ERROR_GET_CODE(NS_ERROR_UC_UPDATE_MISSING_CHECKSUM));
 
-    // Generate our own checksum to tableUpdate to ensure there is always
+    // Generate our own sha256 for tableUpdate to ensure there is always a
     // checksum in .metadata
-    std::string stdChecksum(checksum.BeginReading(), checksum.Length());
-    aTableUpdate->NewChecksum(stdChecksum);
-  } else if (aTableUpdate->Checksum() != checksum) {
-    LOG(("Checksum mismatch after applying partial update"));
+    std::string stdSha256(sha256.BeginReading(), sha256.Length());
+    aTableUpdate->SetSHA256(stdSha256);
+  } else if (aTableUpdate->SHA256() != sha256) {
+    LOG(("SHA256 hash mismatch after applying partial update"));
     return NS_ERROR_UC_UPDATE_CHECKSUM_MISMATCH;
   }
 
   return NS_OK;
 }
 
 nsresult LookupCacheV4::AddFullHashResponseToCache(
     const FullHashResponseMap& aResponseMap) {
   CopyClassHashTable<FullHashResponseMap>(aResponseMap, mFullHashCache);
 
   return NS_OK;
 }
 
-nsresult LookupCacheV4::VerifyChecksum(const nsACString& aChecksum) {
-  nsCOMPtr<nsICryptoHash> crypto;
-  nsresult rv = InitCrypto(crypto);
-  if (NS_FAILED(rv)) {
+// This function assumes the CRC32 checksum is at the end of the input stream
+nsresult LookupCacheV4::VerifyCRC32(nsCOMPtr<nsIInputStream>& aIn) {
+  nsCOMPtr<nsISeekableStream> seekIn = do_QueryInterface(aIn);
+  nsresult rv = seekIn->Seek(nsISeekableStream::NS_SEEK_SET, 0);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  uint64_t len;
+  rv = aIn->Available(&len);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
     return rv;
   }
 
-  PrefixStringMap map;
-  mVLPrefixSet->GetPrefixes(map);
+  uint32_t calculateCrc32 = ~0;
+
+  // We don't want to include the checksum itself
+  len = len - nsCrc32CheckSumedOutputStream::CHECKSUM_SIZE;
+
+  char buffer[STREAM_BUFFER_SIZE];
+  while (len) {
+    uint32_t read;
+    uint64_t readLimit = std::min<uint64_t>(STREAM_BUFFER_SIZE, len);
 
-  VLPrefixSet loadPSet(map);
-  uint32_t index = loadPSet.Count() + 1;
-  for (; index > 0; index--) {
-    nsAutoCString prefix;
-    if (!loadPSet.GetSmallestPrefix(prefix)) {
-      break;
+    rv = aIn->Read(buffer, readLimit, &read);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
     }
-    UpdateChecksum(crypto, prefix);
+
+    calculateCrc32 = ComputeCrc32c(
+        calculateCrc32, reinterpret_cast<const uint8_t*>(buffer), read);
+
+    len -= read;
   }
 
-  nsAutoCString checksum;
-  crypto->Finish(false, checksum);
+  // Now read the CRC32
+  uint32_t crc32;
+  rv = ReadValue(aIn, crc32);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
 
-  if (checksum != aChecksum) {
-    LOG(("Checksum mismatch when loading prefixes from file."));
+  if (crc32 != calculateCrc32) {
     return NS_ERROR_FILE_CORRUPTED;
   }
 
   return NS_OK;
 }
 
-//////////////////////////////////////////////////////////////////////////
-// A set of lightweight functions for reading/writing value from/to file.
-
-namespace {
-
-template <typename T>
-struct ValueTraits {
-  static_assert(sizeof(T) <= LookupCacheV4::MAX_METADATA_VALUE_LENGTH,
-                "LookupCacheV4::MAX_METADATA_VALUE_LENGTH is too small.");
-  static uint32_t Length(const T& aValue) { return sizeof(T); }
-  static char* WritePtr(T& aValue, uint32_t aLength) { return (char*)&aValue; }
-  static const char* ReadPtr(const T& aValue) { return (char*)&aValue; }
-  static bool IsFixedLength() { return true; }
-};
-
-template <>
-struct ValueTraits<nsACString> {
-  static bool IsFixedLength() { return false; }
-
-  static uint32_t Length(const nsACString& aValue) { return aValue.Length(); }
-
-  static char* WritePtr(nsACString& aValue, uint32_t aLength) {
-    aValue.SetLength(aLength);
-    return aValue.BeginWriting();
-  }
-
-  static const char* ReadPtr(const nsACString& aValue) {
-    return aValue.BeginReading();
-  }
-};
-
-template <typename T>
-static nsresult WriteValue(nsIOutputStream* aOutputStream, const T& aValue) {
-  uint32_t writeLength = ValueTraits<T>::Length(aValue);
-  MOZ_ASSERT(writeLength <= LookupCacheV4::MAX_METADATA_VALUE_LENGTH,
-             "LookupCacheV4::MAX_METADATA_VALUE_LENGTH is too small.");
-  if (!ValueTraits<T>::IsFixedLength()) {
-    // We need to write out the variable value length.
-    nsresult rv = WriteValue(aOutputStream, writeLength);
-    NS_ENSURE_SUCCESS(rv, rv);
-  }
-
-  // Write out the value.
-  auto valueReadPtr = ValueTraits<T>::ReadPtr(aValue);
-  uint32_t written;
-  nsresult rv = aOutputStream->Write(valueReadPtr, writeLength, &written);
-  NS_ENSURE_SUCCESS(rv, rv);
-  if (NS_WARN_IF(written != writeLength)) {
-    return NS_ERROR_FAILURE;
-  }
-
-  return rv;
-}
-
-template <typename T>
-static nsresult ReadValue(nsIInputStream* aInputStream, T& aValue) {
-  nsresult rv;
-
-  uint32_t readLength;
-  if (ValueTraits<T>::IsFixedLength()) {
-    readLength = ValueTraits<T>::Length(aValue);
-  } else {
-    // Read the variable value length from file.
-    nsresult rv = ReadValue(aInputStream, readLength);
-    NS_ENSURE_SUCCESS(rv, rv);
-  }
-
-  // Sanity-check the readLength in case of disk corruption
-  // (see bug 1433636).
-  if (readLength > LookupCacheV4::MAX_METADATA_VALUE_LENGTH) {
-    return NS_ERROR_FILE_CORRUPTED;
-  }
-
-  // Read the value.
-  uint32_t read;
-  auto valueWritePtr = ValueTraits<T>::WritePtr(aValue, readLength);
-  rv = aInputStream->Read(valueWritePtr, readLength, &read);
-  if (NS_FAILED(rv) || read != readLength) {
-    LOG(("Failed to read the value."));
-    return NS_FAILED(rv) ? rv : NS_ERROR_FAILURE;
-  }
-
-  return rv;
-}
-
-}  // end of unnamed namespace.
-////////////////////////////////////////////////////////////////////////
-
 nsresult LookupCacheV4::WriteMetadata(
     RefPtr<const TableUpdateV4> aTableUpdate) {
   NS_ENSURE_ARG_POINTER(aTableUpdate);
   if (nsUrlClassifierDBService::ShutdownHasStarted()) {
     return NS_ERROR_ABORT;
   }
 
   nsCOMPtr<nsIFile> metaFile;
@@ -507,25 +736,24 @@ nsresult LookupCacheV4::WriteMetadata(
   rv = NS_NewLocalFileOutputStream(getter_AddRefs(outputStream), metaFile,
                                    PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
   NS_ENSURE_SUCCESS(rv, rv);
 
   // Write the state.
   rv = WriteValue(outputStream, aTableUpdate->ClientState());
   NS_ENSURE_SUCCESS(rv, rv);
 
-  // Write the checksum.
-  rv = WriteValue(outputStream, aTableUpdate->Checksum());
+  // Write the SHA256 hash.
+  rv = WriteValue(outputStream, aTableUpdate->SHA256());
   NS_ENSURE_SUCCESS(rv, rv);
 
   return rv;
 }
 
-nsresult LookupCacheV4::LoadMetadata(nsACString& aState,
-                                     nsACString& aChecksum) {
+nsresult LookupCacheV4::LoadMetadata(nsACString& aState, nsACString& aSHA256) {
   nsCOMPtr<nsIFile> metaFile;
   nsresult rv = mStoreDirectory->Clone(getter_AddRefs(metaFile));
   NS_ENSURE_SUCCESS(rv, rv);
 
   rv = metaFile->AppendNative(mTableName + METADATA_SUFFIX);
   NS_ENSURE_SUCCESS(rv, rv);
 
   nsCOMPtr<nsIInputStream> localInFile;
@@ -538,20 +766,20 @@ nsresult LookupCacheV4::LoadMetadata(nsA
 
   // Read the list state.
   rv = ReadValue(localInFile, aState);
   if (NS_FAILED(rv)) {
     LOG(("Failed to read state."));
     return rv;
   }
 
-  // Read the checksum.
-  rv = ReadValue(localInFile, aChecksum);
+  // Read the SHA256 hash.
+  rv = ReadValue(localInFile, aSHA256);
   if (NS_FAILED(rv)) {
-    LOG(("Failed to read checksum."));
+    LOG(("Failed to read SHA256 hash."));
     return rv;
   }
 
   return rv;
 }
 
 VLPrefixSet::VLPrefixSet(const PrefixStringMap& aMap) : mCount(0) {
   for (auto iter = aMap.ConstIter(); !iter.Done(); iter.Next()) {
--- a/toolkit/components/url-classifier/LookupCacheV4.h
+++ b/toolkit/components/url-classifier/LookupCacheV4.h
@@ -20,16 +20,19 @@ class LookupCacheV4 final : public Looku
                          const nsACString& aProvider,
                          nsCOMPtr<nsIFile>& aStoreFile)
       : LookupCache(aTableName, aProvider, aStoreFile) {}
 
   virtual nsresult Init() override;
   virtual nsresult Has(const Completion& aCompletion, bool* aHas,
                        uint32_t* aMatchLength, bool* aConfirmed) override;
 
+  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) override;
+  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) override;
+
   virtual bool IsEmpty() const override;
 
   nsresult Build(PrefixStringMap& aPrefixMap);
 
   nsresult GetPrefixes(PrefixStringMap& aPrefixMap);
   nsresult GetFixedLengthPrefixes(FallibleTArray<uint32_t>& aPrefixes);
 
   // ApplyUpdate will merge data stored in aTableUpdate with prefixes in
@@ -42,26 +45,35 @@ class LookupCacheV4 final : public Looku
   nsresult WriteMetadata(RefPtr<const TableUpdateV4> aTableUpdate);
   nsresult LoadMetadata(nsACString& aState, nsACString& aChecksum);
 
   static const int VER;
   static const uint32_t MAX_METADATA_VALUE_LENGTH;
 
  protected:
   virtual nsresult ClearPrefixes() override;
-  virtual nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) override;
-  virtual nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile) override;
   virtual size_t SizeOfPrefixSet() const override;
+  virtual nsCString GetPrefixSetSuffix() const override;
+  nsCString GetMetadataSuffix() const;
 
  private:
   ~LookupCacheV4() {}
 
   virtual int Ver() const override { return VER; }
 
-  nsresult VerifyChecksum(const nsACString& aChecksum);
+  virtual nsresult LoadLegacyFile() override;
+
+  struct Header {
+    uint32_t magic;
+    uint32_t version;
+  };
+
+  nsresult SanityCheck(const Header& aHeader);
+  nsresult VerifyCRC32(nsCOMPtr<nsIInputStream>& aIn);
+  nsresult CleanOldPrefixSet();
 
   RefPtr<VariableLengthPrefixSet> mVLPrefixSet;
 };
 
 }  // namespace safebrowsing
 }  // namespace mozilla
 
 #endif
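
The Header struct declared above gives every .vlpset file an 8-byte preamble, and SanityCheck() rejects the file before any prefixes are parsed if it does not match. A rough standalone illustration of that check; the magic and version constants are copied from LookupCacheV4.cpp in this patch, while the file name is purely hypothetical:

#include <cstdint>
#include <cstdio>

// Copied from LookupCacheV4.cpp above.
static const uint32_t kVlpsetMagic = 0x36044a35;
static const uint32_t kVlpsetVersion = 1;

struct Header {
  uint32_t magic;
  uint32_t version;
};

// A wrong magic means the file is corrupt; a different version means an
// incompatible (for example, future) format, so both are rejected.
static bool SanityCheckHeader(const Header& header) {
  return header.magic == kVlpsetMagic && header.version == kVlpsetVersion;
}

int main() {
  FILE* file = fopen("goog-phish-proto.vlpset", "rb");  // hypothetical name
  if (!file) {
    return 1;
  }
  Header header;
  bool ok = fread(&header, sizeof(header), 1, file) == 1 &&
            SanityCheckHeader(header);
  fclose(file);
  printf("header %s\n", ok ? "ok" : "rejected");
  return ok ? 0 : 1;
}
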
--- a/toolkit/components/url-classifier/ProtocolParser.cpp
+++ b/toolkit/components/url-classifier/ProtocolParser.cpp
@@ -824,17 +824,17 @@ nsresult ProtocolParserProtobuf::Process
   auto tuV4 = TableUpdate::Cast<TableUpdateV4>(tu);
   NS_ENSURE_TRUE(tuV4, NS_ERROR_FAILURE);
 
   nsCString state(aResponse.new_client_state().c_str(),
                   aResponse.new_client_state().size());
   tuV4->SetNewClientState(state);
 
   if (aResponse.has_checksum()) {
-    tuV4->NewChecksum(aResponse.checksum().sha256());
+    tuV4->SetSHA256(aResponse.checksum().sha256());
   }
 
   PARSER_LOG(
       ("==== Update for threat type '%d' ====", aResponse.threat_type()));
   PARSER_LOG(("* aListName: %s\n", PromiseFlatCString(aListName).get()));
   PARSER_LOG(("* newState: %s\n", aResponse.new_client_state().c_str()));
   PARSER_LOG(("* isFullUpdate: %s\n", (isFullUpdate ? "yes" : "no")));
   PARSER_LOG(
--- a/toolkit/components/url-classifier/VariableLengthPrefixSet.cpp
+++ b/toolkit/components/url-classifier/VariableLengthPrefixSet.cpp
@@ -23,19 +23,16 @@ static mozilla::LazyLogModule gUrlClassi
 
 namespace mozilla {
 namespace safebrowsing {
 
 #define PREFIX_SIZE_FIXED 4
 
 NS_IMPL_ISUPPORTS(VariableLengthPrefixSet, nsIMemoryReporter)
 
-// Definition required due to std::max<>()
-const uint32_t VariableLengthPrefixSet::MAX_BUFFER_SIZE;
-
 // This class will process prefix size between 4~32. But for 4 bytes prefixes,
 // they will be passed to nsUrlClassifierPrefixSet because of better
 // optimization.
 VariableLengthPrefixSet::VariableLengthPrefixSet()
     : mLock("VariableLengthPrefixSet.mLock"),
       mFixedPrefixSet(new nsUrlClassifierPrefixSet) {}
 
 nsresult VariableLengthPrefixSet::Init(const nsACString& aName) {
@@ -199,101 +196,28 @@ nsresult VariableLengthPrefixSet::IsEmpt
   NS_ENSURE_ARG_POINTER(aEmpty);
 
   mFixedPrefixSet->IsEmpty(aEmpty);
   *aEmpty = *aEmpty && mVLPrefixSet.IsEmpty();
 
   return NS_OK;
 }
 
-nsresult VariableLengthPrefixSet::LoadFromFile(nsCOMPtr<nsIFile>& aFile) {
+nsresult VariableLengthPrefixSet::LoadPrefixes(nsCOMPtr<nsIInputStream>& in) {
   MutexAutoLock lock(mLock);
 
-  NS_ENSURE_ARG_POINTER(aFile);
-
-  Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_VLPS_FILELOAD_TIME> timer;
-
-  nsCOMPtr<nsIInputStream> localInFile;
-  nsresult rv = NS_NewLocalFileInputStream(getter_AddRefs(localInFile), aFile,
-                                           PR_RDONLY | nsIFile::OS_READAHEAD);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  // Calculate how big the file is, make sure our read buffer isn't bigger
-  // than the file itself which is just wasting memory.
-  int64_t fileSize;
-  rv = aFile->GetFileSize(&fileSize);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  if (fileSize < 0 || fileSize > UINT32_MAX) {
-    return NS_ERROR_FAILURE;
-  }
-
-  uint32_t bufferSize =
-      std::min<uint32_t>(static_cast<uint32_t>(fileSize), MAX_BUFFER_SIZE);
-
-  // Convert to buffered stream
-  nsCOMPtr<nsIInputStream> in;
-  rv = NS_NewBufferedInputStream(getter_AddRefs(in), localInFile.forget(),
-                                 bufferSize);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = mFixedPrefixSet->LoadPrefixes(in);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = LoadPrefixes(in);
+  // First read prefixes from fixed-length prefix set
+  nsresult rv = mFixedPrefixSet->LoadPrefixes(in);
   NS_ENSURE_SUCCESS(rv, rv);
 
-  return NS_OK;
-  ;
-}
-
-nsresult VariableLengthPrefixSet::StoreToFile(nsCOMPtr<nsIFile>& aFile) const {
-  NS_ENSURE_ARG_POINTER(aFile);
-
-  MutexAutoLock lock(mLock);
-
-  nsCOMPtr<nsIOutputStream> localOutFile;
-  nsresult rv =
-      NS_NewLocalFileOutputStream(getter_AddRefs(localOutFile), aFile,
-                                  PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  uint32_t fileSize = 0;
-  // Preallocate the file storage
-  {
-    nsCOMPtr<nsIFileOutputStream> fos(do_QueryInterface(localOutFile));
-    Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_VLPS_FALLOCATE_TIME> timer;
-
-    fileSize += mFixedPrefixSet->CalculatePreallocateSize();
-    fileSize += CalculatePreallocateSize();
-
-    Unused << fos->Preallocate(fileSize);
-  }
-
-  // Convert to buffered stream
-  nsCOMPtr<nsIOutputStream> out;
-  rv = NS_NewBufferedOutputStream(getter_AddRefs(out), localOutFile.forget(),
-                                  std::min(fileSize, MAX_BUFFER_SIZE));
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = mFixedPrefixSet->WritePrefixes(out);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = WritePrefixes(out);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  return NS_OK;
-}
-
-nsresult VariableLengthPrefixSet::LoadPrefixes(nsCOMPtr<nsIInputStream>& in) {
+  // Then read prefixes from variable-length prefix set
   uint32_t magic;
   uint32_t read;
 
-  nsresult rv =
-      in->Read(reinterpret_cast<char*>(&magic), sizeof(uint32_t), &read);
+  rv = in->Read(reinterpret_cast<char*>(&magic), sizeof(uint32_t), &read);
   NS_ENSURE_SUCCESS(rv, rv);
   NS_ENSURE_TRUE(read == sizeof(uint32_t), NS_ERROR_FAILURE);
 
   if (magic != PREFIXSET_VERSION_MAGIC) {
     LOG(("[%s] Version magic mismatch, not loading", mName.get()));
     return NS_ERROR_FILE_CORRUPTED;
   }
 
@@ -344,34 +268,45 @@ nsresult VariableLengthPrefixSet::LoadPr
        totalPrefixes));
 
   return NS_OK;
 }
 
 uint32_t VariableLengthPrefixSet::CalculatePreallocateSize() const {
   uint32_t fileSize = 0;
 
+  // Size of fixed length prefix set.
+  fileSize += mFixedPrefixSet->CalculatePreallocateSize();
+
+  // Size of variable length prefix set.
   // Store how many prefix string.
   fileSize += sizeof(uint32_t);
 
   for (auto iter = mVLPrefixSet.ConstIter(); !iter.Done(); iter.Next()) {
     // Store prefix size, prefix string length, and prefix string.
     fileSize += sizeof(uint8_t);
     fileSize += sizeof(uint32_t);
     fileSize += iter.Data()->Length();
   }
   return fileSize;
 }
 
 nsresult VariableLengthPrefixSet::WritePrefixes(
     nsCOMPtr<nsIOutputStream>& out) const {
+  MutexAutoLock lock(mLock);
+
+  // First, write fixed length prefix set
+  nsresult rv = mFixedPrefixSet->WritePrefixes(out);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  // Then, write variable length prefix set
   uint32_t written;
   uint32_t writelen = sizeof(uint32_t);
   uint32_t magic = PREFIXSET_VERSION_MAGIC;
-  nsresult rv = out->Write(reinterpret_cast<char*>(&magic), writelen, &written);
+  rv = out->Write(reinterpret_cast<char*>(&magic), writelen, &written);
   NS_ENSURE_SUCCESS(rv, rv);
   NS_ENSURE_TRUE(written == writelen, NS_ERROR_FAILURE);
 
   uint32_t count = mVLPrefixSet.Count();
   rv = out->Write(reinterpret_cast<char*>(&count), writelen, &written);
   NS_ENSURE_SUCCESS(rv, rv);
   NS_ENSURE_TRUE(written == writelen, NS_ERROR_FAILURE);
 
--- a/toolkit/components/url-classifier/VariableLengthPrefixSet.h
+++ b/toolkit/components/url-classifier/VariableLengthPrefixSet.h
@@ -25,37 +25,34 @@ class VariableLengthPrefixSet final : pu
 
   nsresult Init(const nsACString& aName);
   nsresult SetPrefixes(
       const mozilla::safebrowsing::PrefixStringMap& aPrefixMap);
   nsresult GetPrefixes(mozilla::safebrowsing::PrefixStringMap& aPrefixMap);
   nsresult GetFixedLengthPrefixes(FallibleTArray<uint32_t>& aPrefixes);
   nsresult Matches(const nsACString& aFullHash, uint32_t* aLength) const;
   nsresult IsEmpty(bool* aEmpty) const;
-  nsresult LoadFromFile(nsCOMPtr<nsIFile>& aFile);
-  nsresult StoreToFile(nsCOMPtr<nsIFile>& aFile) const;
+
+  nsresult WritePrefixes(nsCOMPtr<nsIOutputStream>& out) const;
+  nsresult LoadPrefixes(nsCOMPtr<nsIInputStream>& in);
+  uint32_t CalculatePreallocateSize() const;
 
   size_t SizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEMORYREPORTER
 
  private:
   virtual ~VariableLengthPrefixSet();
 
-  static const uint32_t MAX_BUFFER_SIZE = 64 * 1024;
   static const uint32_t PREFIXSET_VERSION_MAGIC = 1;
 
   bool BinarySearch(const nsACString& aFullHash, const nsACString& aPrefixes,
                     uint32_t aPrefixSize) const;
 
-  uint32_t CalculatePreallocateSize() const;
-  nsresult WritePrefixes(nsCOMPtr<nsIOutputStream>& out) const;
-  nsresult LoadPrefixes(nsCOMPtr<nsIInputStream>& in);
-
   // Lock to prevent races between the url-classifier thread (which does most
   // of the operations) and the main thread (which does memory reporting).
   // It should be held for all operations between Init() and destruction that
   // touch this class's data members.
   mutable mozilla::Mutex mLock;
 
   const RefPtr<nsUrlClassifierPrefixSet> mFixedPrefixSet;
   mozilla::safebrowsing::PrefixStringMap mVLPrefixSet;
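
With LoadFromFile/StoreToFile removed from this class, serialization now goes through WritePrefixes()/LoadPrefixes(), and CalculatePreallocateSize() spells out what the variable-length section holds: a version magic, an entry count, and then one record per stored prefix size. A rough sketch of writing just that section, assuming the per-record field order matches the accounting above (a std::map stands in for the in-tree hashtable, and the magic value mirrors PREFIXSET_VERSION_MAGIC):

#include <cstdint>
#include <fstream>
#include <map>
#include <string>

// Mirrors PREFIXSET_VERSION_MAGIC above.
static const uint32_t kPrefixSetVersionMagic = 1;

// Each entry maps a prefix size in bytes to the lexicographically sorted,
// concatenated prefixes of that size, standing in for mVLPrefixSet.
static void WriteVariableLengthSection(
    std::ofstream& out, const std::map<uint8_t, std::string>& prefixes) {
  uint32_t count = static_cast<uint32_t>(prefixes.size());
  out.write(reinterpret_cast<const char*>(&kPrefixSetVersionMagic),
            sizeof(kPrefixSetVersionMagic));
  out.write(reinterpret_cast<const char*>(&count), sizeof(count));

  for (const auto& entry : prefixes) {
    uint8_t size = entry.first;
    uint32_t length = static_cast<uint32_t>(entry.second.size());
    out.write(reinterpret_cast<const char*>(&size), sizeof(size));
    out.write(reinterpret_cast<const char*>(&length), sizeof(length));
    out.write(entry.second.data(), length);
  }
}

int main() {
  std::ofstream out("example-section.bin", std::ios::binary);  // made up
  WriteVariableLengthSection(out, {{5, std::string("aaaaabbbbb")}});
  return 0;
}
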
--- a/toolkit/components/url-classifier/nsCheckSummedOutputStream.cpp
+++ b/toolkit/components/url-classifier/nsCheckSummedOutputStream.cpp
@@ -2,16 +2,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsCRT.h"
 #include "nsIFile.h"
 #include "nsISupportsImpl.h"
 #include "nsCheckSummedOutputStream.h"
+#include "crc32c.h"
 
 ////////////////////////////////////////////////////////////////////////////////
 // nsCheckSummedOutputStream
 
 NS_IMPL_ISUPPORTS_INHERITED(nsCheckSummedOutputStream, nsBufferedOutputStream,
                             nsISafeOutputStream)
 
 NS_IMETHODIMP
@@ -46,8 +47,41 @@ nsCheckSummedOutputStream::Write(const c
                                  uint32_t *result) {
   nsresult rv = mHash->Update(reinterpret_cast<const uint8_t *>(buf), count);
   NS_ENSURE_SUCCESS(rv, rv);
 
   return nsBufferedOutputStream::Write(buf, count, result);
 }
 
 ////////////////////////////////////////////////////////////////////////////////
+// nsCrc32CheckSumedOutputStream
+NS_IMPL_ISUPPORTS_INHERITED(nsCrc32CheckSumedOutputStream,
+                            nsBufferedOutputStream, nsISafeOutputStream)
+
+NS_IMETHODIMP
+nsCrc32CheckSumedOutputStream::Init(nsIOutputStream *stream,
+                                    uint32_t bufferSize) {
+  mCheckSum = ~0;
+
+  return nsBufferedOutputStream::Init(stream, bufferSize);
+}
+
+NS_IMETHODIMP
+nsCrc32CheckSumedOutputStream::Finish() {
+  uint32_t written;
+  nsresult rv = nsBufferedOutputStream::Write(
+      reinterpret_cast<const char *>(&mCheckSum), sizeof(mCheckSum), &written);
+  NS_ASSERTION(written == sizeof(mCheckSum), "Error writing stream checksum");
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  return nsBufferedOutputStream::Finish();
+}
+
+NS_IMETHODIMP
+nsCrc32CheckSumedOutputStream::Write(const char *buf, uint32_t count,
+                                     uint32_t *result) {
+  mCheckSum =
+      ComputeCrc32c(mCheckSum, reinterpret_cast<const uint8_t *>(buf), count);
+
+  return nsBufferedOutputStream::Write(buf, count, result);
+}
+
+////////////////////////////////////////////////////////////////////////////////
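Aside: the new nsCrc32CheckSumedOutputStream keeps a running CRC32C over every buffered Write() and appends the final 4-byte value (as stored, without the conventional final bit-inversion) in Finish(). A minimal standalone sketch of that incremental pattern, using a plain bitwise CRC32C rather than Mozilla's optimized ComputeCrc32c from crc32c.h:

#include <cstddef>
#include <cstdint>

// Bitwise CRC32C (Castagnoli, reflected polynomial 0x82F63B78).
static uint32_t Crc32cUpdate(uint32_t crc, const uint8_t* buf, size_t len) {
  for (size_t i = 0; i < len; ++i) {
    crc ^= buf[i];
    for (int bit = 0; bit < 8; ++bit) {
      crc = (crc & 1) ? (crc >> 1) ^ 0x82F63B78u : (crc >> 1);
    }
  }
  return crc;
}

// Mirrors the stream's lifecycle:
//   uint32_t checkSum = ~0u;                          // Init()
//   checkSum = Crc32cUpdate(checkSum, data, length);  // each Write()
//   /* append checkSum as a 4-byte trailer */         // Finish()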
--- a/toolkit/components/url-classifier/nsCheckSummedOutputStream.h
+++ b/toolkit/components/url-classifier/nsCheckSummedOutputStream.h
@@ -48,9 +48,41 @@ inline nsresult NS_NewCheckSummedOutputS
   nsCOMPtr<nsIBufferedOutputStream> out = new nsCheckSummedOutputStream();
   rv = out->Init(localOutFile, nsCheckSummedOutputStream::CHECKSUM_SIZE);
   if (NS_SUCCEEDED(rv)) {
     out.forget(result);
   }
   return rv;
 }
 
+class nsCrc32CheckSumedOutputStream : public nsBufferedOutputStream {
+ public:
+  NS_DECL_ISUPPORTS_INHERITED
+
+  static const uint32_t CHECKSUM_SIZE = 4;
+
+  nsCrc32CheckSumedOutputStream() = default;
+
+  NS_IMETHOD Finish() override;
+  NS_IMETHOD Write(const char *buf, uint32_t count, uint32_t *result) override;
+  NS_IMETHOD Init(nsIOutputStream *stream, uint32_t bufferSize) override;
+
+ protected:
+  virtual ~nsCrc32CheckSumedOutputStream() { nsBufferedOutputStream::Close(); }
+
+  uint32_t mCheckSum;
+};
+
+inline nsresult NS_NewCrc32OutputStream(
+    nsIOutputStream **aResult, already_AddRefed<nsIOutputStream> aOutput,
+    uint32_t aBufferSize) {
+  nsCOMPtr<nsIOutputStream> out = std::move(aOutput);
+
+  nsCOMPtr<nsIBufferedOutputStream> bufferOutput =
+      new nsCrc32CheckSumedOutputStream();
+  nsresult rv = bufferOutput->Init(out, aBufferSize);
+  if (NS_SUCCEEDED(rv)) {
+    bufferOutput.forget(aResult);
+  }
+  return rv;
+}
+
 #endif
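For orientation, a hedged sketch of how a caller might wire up the new NS_NewCrc32OutputStream helper; the nsIFile handle, open flags, and 64 KiB buffer size here are illustrative assumptions, not taken from this patch:

// Inside some function returning nsresult, with an nsIFile `file` in scope.
nsCOMPtr<nsIOutputStream> rawStream;
nsresult rv = NS_NewLocalFileOutputStream(getter_AddRefs(rawStream), file,
                                          PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
NS_ENSURE_SUCCESS(rv, rv);

// Wrap the raw stream so every Write() also feeds the running CRC32C.
nsCOMPtr<nsIOutputStream> out;
rv = NS_NewCrc32OutputStream(getter_AddRefs(out), rawStream.forget(), 64 * 1024);
NS_ENSURE_SUCCESS(rv, rv);

// ... write the payload through `out`, then QI to nsISafeOutputStream and call
// Finish() to flush the buffer and append the 4-byte checksum trailer.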
--- a/toolkit/components/url-classifier/nsIUrlClassifierPrefixSet.idl
+++ b/toolkit/components/url-classifier/nsIUrlClassifierPrefixSet.idl
@@ -19,11 +19,9 @@ interface nsIUrlClassifierPrefixSet : ns
   // Requires array to be sorted.
   void setPrefixes([const, array, size_is(aLength)] in unsigned long aPrefixes,
                    in unsigned long aLength);
   void getPrefixes(out unsigned long aCount,
                   [array, size_is(aCount), retval] out unsigned long aPrefixes);
   // Do a lookup in the PrefixSet, return whether the value is present.
   boolean contains(in unsigned long aPrefix);
   boolean isEmpty();
-  void loadFromFile(in nsIFile aFile);
-  void storeToFile(in nsIFile aFile);
 };
--- a/toolkit/components/url-classifier/nsUrlClassifierPrefixSet.cpp
+++ b/toolkit/components/url-classifier/nsUrlClassifierPrefixSet.cpp
@@ -7,45 +7,39 @@
 #include "nsUrlClassifierPrefixSet.h"
 #include "nsIUrlClassifierPrefixSet.h"
 #include "crc32c.h"
 #include "nsCOMPtr.h"
 #include "nsDebug.h"
 #include "nsPrintfCString.h"
 #include "nsTArray.h"
 #include "nsString.h"
-#include "nsIFile.h"
 #include "nsTArray.h"
 #include "nsThreadUtils.h"
 #include "nsNetUtil.h"
 #include "nsISeekableStream.h"
 #include "nsIBufferedStreams.h"
-#include "nsIFileStreams.h"
 #include "mozilla/MemoryReporting.h"
 #include "mozilla/Telemetry.h"
-#include "mozilla/FileUtils.h"
 #include "mozilla/Logging.h"
 #include "mozilla/Unused.h"
 #include <algorithm>
 
 using namespace mozilla;
 
 // MOZ_LOG=UrlClassifierPrefixSet:5
 static LazyLogModule gUrlClassifierPrefixSetLog("UrlClassifierPrefixSet");
 #define LOG(args) \
   MOZ_LOG(gUrlClassifierPrefixSetLog, mozilla::LogLevel::Debug, args)
 #define LOG_ENABLED() \
   MOZ_LOG_TEST(gUrlClassifierPrefixSetLog, mozilla::LogLevel::Debug)
 
 NS_IMPL_ISUPPORTS(nsUrlClassifierPrefixSet, nsIUrlClassifierPrefixSet,
                   nsIMemoryReporter)
 
-// Definition required due to std::max<>()
-const uint32_t nsUrlClassifierPrefixSet::MAX_BUFFER_SIZE;
-
 template <typename T>
 static void CalculateTArrayChecksum(const nsTArray<T>& aArray,
                                     uint32_t* outChecksum) {
   *outChecksum = ~0;
 
   for (size_t i = 0; i < aArray.Length(); i++) {
     const T& element = aArray[i];
     const void* pointer = &element;
@@ -340,93 +334,20 @@ bool nsUrlClassifierPrefixSet::IsEmptyIn
 NS_IMETHODIMP
 nsUrlClassifierPrefixSet::IsEmpty(bool* aEmpty) {
   MutexAutoLock lock(mLock);
 
   *aEmpty = IsEmptyInternal();
   return NS_OK;
 }
 
-NS_IMETHODIMP
-nsUrlClassifierPrefixSet::LoadFromFile(nsIFile* aFile) {
+nsresult nsUrlClassifierPrefixSet::LoadPrefixes(nsCOMPtr<nsIInputStream>& in) {
   MutexAutoLock lock(mLock);
 
-  Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_PS_FILELOAD_TIME> timer;
-
-  nsCOMPtr<nsIInputStream> localInFile;
-  nsresult rv = NS_NewLocalFileInputStream(getter_AddRefs(localInFile), aFile,
-                                           PR_RDONLY | nsIFile::OS_READAHEAD);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  // Calculate how big the file is, make sure our read buffer isn't bigger
-  // than the file itself which is just wasting memory.
-  int64_t fileSize;
-  rv = aFile->GetFileSize(&fileSize);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  if (fileSize < 0 || fileSize > UINT32_MAX) {
-    return NS_ERROR_FAILURE;
-  }
-
-  uint32_t bufferSize =
-      std::min<uint32_t>(static_cast<uint32_t>(fileSize), MAX_BUFFER_SIZE);
-
-  // Convert to buffered stream
-  nsCOMPtr<nsIInputStream> in;
-  rv = NS_NewBufferedInputStream(getter_AddRefs(in), localInFile.forget(),
-                                 bufferSize);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = LoadPrefixes(in);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-nsUrlClassifierPrefixSet::StoreToFile(nsIFile* aFile) {
-  MutexAutoLock lock(mLock);
-
-  nsCOMPtr<nsIOutputStream> localOutFile;
-  nsresult rv =
-      NS_NewLocalFileOutputStream(getter_AddRefs(localOutFile), aFile,
-                                  PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  uint32_t fileSize;
-
-  // Preallocate the file storage
-  {
-    nsCOMPtr<nsIFileOutputStream> fos(do_QueryInterface(localOutFile));
-    Telemetry::AutoTimer<Telemetry::URLCLASSIFIER_PS_FALLOCATE_TIME> timer;
-
-    fileSize = CalculatePreallocateSize();
-
-    // Ignore failure, the preallocation is a hint and we write out the entire
-    // file later on
-    Unused << fos->Preallocate(fileSize);
-  }
-
-  // Convert to buffered stream
-  nsCOMPtr<nsIOutputStream> out;
-  rv = NS_NewBufferedOutputStream(getter_AddRefs(out), localOutFile.forget(),
-                                  std::min(fileSize, MAX_BUFFER_SIZE));
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  rv = WritePrefixes(out);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  LOG(("[%s] Storing PrefixSet successful", mName.get()));
-
-  return NS_OK;
-}
-
-nsresult nsUrlClassifierPrefixSet::LoadPrefixes(nsCOMPtr<nsIInputStream>& in) {
   mCanary.Check();
-
   Clear();
 
   uint32_t magic;
   uint32_t read;
 
   nsresult rv =
       in->Read(reinterpret_cast<char*>(&magic), sizeof(uint32_t), &read);
   NS_ENSURE_SUCCESS(rv, rv);
@@ -513,16 +434,18 @@ uint32_t nsUrlClassifierPrefixSet::Calcu
   uint32_t deltas = mTotalPrefixes - mIndexPrefixes.Length();
   fileSize += 2 * mIndexPrefixes.Length() * sizeof(uint32_t);
   fileSize += deltas * sizeof(uint16_t);
   return fileSize;
 }
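As a quick sanity check on the preallocation estimate above (purely illustrative numbers, not from this patch): with 1,000 index prefixes and 50,000 total prefixes, deltas = 49,000, so these two terms add 2 × 1,000 × 4 + 49,000 × 2 = 106,000 bytes to the estimated file size.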
 
 nsresult nsUrlClassifierPrefixSet::WritePrefixes(
     nsCOMPtr<nsIOutputStream>& out) const {
+  MutexAutoLock lock(mLock);
+
   mCanary.Check();
 
   // In Bug 1362761, crashes happened while reading mIndexDeltas[i].
   // We suspect that this is due to memory corruption so to test this
   // hypothesis, we will crash the browser. Once we have established
   // memory corruption as the root cause, we can attempt to gracefully
   // handle this.
   uint32_t checksum;
--- a/toolkit/components/url-classifier/nsUrlClassifierPrefixSet.h
+++ b/toolkit/components/url-classifier/nsUrlClassifierPrefixSet.h
@@ -34,43 +34,40 @@ class nsUrlClassifierPrefixSet final : p
  public:
   nsUrlClassifierPrefixSet();
 
   NS_IMETHOD Init(const nsACString& aName) override;
   NS_IMETHOD SetPrefixes(const uint32_t* aArray, uint32_t aLength) override;
   NS_IMETHOD GetPrefixes(uint32_t* aCount, uint32_t** aPrefixes) override;
   NS_IMETHOD Contains(uint32_t aPrefix, bool* aFound) override;
   NS_IMETHOD IsEmpty(bool* aEmpty) override;
-  NS_IMETHOD LoadFromFile(nsIFile* aFile) override;
-  NS_IMETHOD StoreToFile(nsIFile* aFile) override;
 
   nsresult GetPrefixesNative(FallibleTArray<uint32_t>& outArray);
+  nsresult WritePrefixes(nsCOMPtr<nsIOutputStream>& out) const;
+  nsresult LoadPrefixes(nsCOMPtr<nsIInputStream>& in);
+  uint32_t CalculatePreallocateSize() const;
 
   size_t SizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEMORYREPORTER
 
   friend class mozilla::safebrowsing::VariableLengthPrefixSet;
 
  private:
   virtual ~nsUrlClassifierPrefixSet();
 
-  static const uint32_t MAX_BUFFER_SIZE = 64 * 1024;
   static const uint32_t DELTAS_LIMIT = 120;
   static const uint32_t MAX_INDEX_DIFF = (1 << 16);
   static const uint32_t PREFIXSET_VERSION_MAGIC = 1;
 
   void Clear();
   nsresult MakePrefixSet(const uint32_t* aArray, uint32_t aLength);
   uint32_t BinSearch(uint32_t start, uint32_t end, uint32_t target) const;
   bool IsEmptyInternal() const;
-  uint32_t CalculatePreallocateSize() const;
-  nsresult WritePrefixes(nsCOMPtr<nsIOutputStream>& out) const;
-  nsresult LoadPrefixes(nsCOMPtr<nsIInputStream>& in);
 
   // Lock to prevent races between the url-classifier thread (which does most
   // of the operations) and the main thread (which does memory reporting).
   // It should be held for all operations between Init() and destruction that
   // touch this class's data members.
   mutable mozilla::Mutex mLock;
   // list of fully stored prefixes that also form the
   // start of a run of deltas in mIndexDeltas.
--- a/toolkit/components/url-classifier/tests/gtest/Common.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/Common.cpp
@@ -142,16 +142,28 @@ nsCString GeneratePrefix(const nsCString
   Completion complete;
   complete.FromPlaintext(aFragment);
 
   nsCString hash;
   hash.Assign((const char*)complete.buf, aLength);
   return hash;
 }
 
+void CheckContent(LookupCacheV4* cache, PrefixStringMap& expected) {
+  PrefixStringMap vlPSetMap;
+  cache->GetPrefixes(vlPSetMap);
+
+  for (auto iter = vlPSetMap.Iter(); !iter.Done(); iter.Next()) {
+    nsCString* expectedPrefix = expected.Get(iter.Key());
+    nsCString* resultPrefix = iter.Data();
+
+    ASSERT_TRUE(resultPrefix->Equals(*expectedPrefix));
+  }
+}
+
 static nsresult BuildCache(LookupCacheV2* cache,
                            const _PrefixArray& prefixArray) {
   AddPrefixArray prefixes;
   AddCompleteArray completions;
   nsresult rv = PrefixArrayToAddPrefixArrayV2(prefixArray, prefixes);
   if (NS_FAILED(rv)) {
     return rv;
   }
--- a/toolkit/components/url-classifier/tests/gtest/Common.h
+++ b/toolkit/components/url-classifier/tests/gtest/Common.h
@@ -5,16 +5,17 @@
 #include "gtest/gtest.h"
 
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 namespace mozilla {
 namespace safebrowsing {
 class Classifier;
+class LookupCacheV4;
 }
 }  // namespace mozilla
 
 typedef nsCString _Fragment;
 typedef nsTArray<nsCString> _PrefixArray;
 
 template <typename Function>
 void RunTestInNewThread(Function&& aFunction);
@@ -38,11 +39,14 @@ void PrefixArrayToPrefixStringMap(const 
                                   PrefixStringMap& out);
 
 nsresult PrefixArrayToAddPrefixArrayV2(const nsTArray<nsCString>& prefixArray,
                                        AddPrefixArray& out);
 
 // Generate a hash prefix from string
 nsCString GeneratePrefix(const nsCString& aFragment, uint8_t aLength);
 
+// Check that the cache content matches the expected prefix map
+void CheckContent(LookupCacheV4* cache, PrefixStringMap& expected);
+
 // Create a LookupCacheV4 object with the specified prefix array.
 template <typename T>
 RefPtr<T> SetupLookupCache(const _PrefixArray& prefixArray);
--- a/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
@@ -3,17 +3,17 @@
 #include "string.h"
 #include "gtest/gtest.h"
 #include "mozilla/Unused.h"
 
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 static const char* kFilesInV2[] = {".pset", ".sbstore"};
-static const char* kFilesInV4[] = {".pset", ".metadata"};
+static const char* kFilesInV4[] = {".vlpset", ".metadata"};
 
 #define V2_TABLE "gtest-malware-simple"
 #define V4_TABLE1 "goog-malware-proto"
 #define V4_TABLE2 "goog-phish-proto"
 
 #define ROOT_DIR NS_LITERAL_STRING("safebrowsing")
 #define SB_FILE(x, y) NS_ConvertUTF8toUTF16(nsPrintfCString("%s%s", x, y))
 
@@ -58,17 +58,17 @@ TEST(UrlClassifierFailUpdate, CheckTable
   // Apply V4 update for table1
   {
     RefPtr<TableUpdateV4> update =
         new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE1));
     func(update, FULL_UPDATE, "test_prefix");
 
     ApplyUpdate(update);
 
-    // A successful V4 update should create .pset & .metadata files
+    // A successful V4 update should create .vlpset & .metadata files
     CheckFileExist(V4_TABLE1, kFilesInV4, true);
   }
 
   // Apply V4 update for table2
   {
     RefPtr<TableUpdateV4> update =
         new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE2));
     func(update, FULL_UPDATE, "test_prefix");
--- a/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
@@ -38,8 +38,90 @@ TEST(UrlClassifierLookupCacheV4, HasComp
 
 TEST(UrlClassifierLookupCacheV4, HasPrefix) {
   TestHasPrefix(_Fragment("browsing.com/"), true, false);
 }
 
 TEST(UrlClassifierLookupCacheV4, Nomatch) {
   TestHasPrefix(_Fragment("nomatch.com/"), false, false);
 }
+
+// Test that an existing .pset file is removed after the .vlpset is written
+TEST(UrlClassifierLookupCacheV4, RemoveOldPset) {
+  nsCOMPtr<nsIFile> oldPsetFile;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+                         getter_AddRefs(oldPsetFile));
+  oldPsetFile->AppendNative(NS_LITERAL_CSTRING("safebrowsing"));
+  oldPsetFile->AppendNative(GTEST_TABLE + NS_LITERAL_CSTRING(".pset"));
+
+  nsCOMPtr<nsIFile> newPsetFile;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+                         getter_AddRefs(newPsetFile));
+  newPsetFile->AppendNative(NS_LITERAL_CSTRING("safebrowsing"));
+  newPsetFile->AppendNative(GTEST_TABLE + NS_LITERAL_CSTRING(".vlpset"));
+
+  // Create the legacy .pset file
+  nsresult rv = oldPsetFile->Create(nsIFile::NORMAL_FILE_TYPE, 0666);
+  EXPECT_EQ(rv, NS_OK);
+
+  bool exists;
+  rv = oldPsetFile->Exists(&exists);
+  EXPECT_EQ(rv, NS_OK);
+  EXPECT_EQ(exists, true);
+
+  // Set up the data in the lookup cache and write it to disk
+  RefPtr<Classifier> classifier = GetClassifier();
+  _PrefixArray array = {GeneratePrefix(_Fragment("entry.com/"), 4)};
+  rv = SetupLookupCacheV4(classifier, array, GTEST_TABLE);
+  EXPECT_EQ(rv, NS_OK);
+
+  RefPtr<LookupCache> cache = classifier->GetLookupCache(GTEST_TABLE, false);
+  rv = cache->WriteFile();
+  EXPECT_EQ(rv, NS_OK);
+
+  // .vlpset should exist while .pset should be removed
+  rv = newPsetFile->Exists(&exists);
+  EXPECT_EQ(rv, NS_OK);
+  EXPECT_EQ(exists, true);
+
+  rv = oldPsetFile->Exists(&exists);
+  EXPECT_EQ(rv, NS_OK);
+  EXPECT_EQ(exists, false);
+
+  newPsetFile->Remove(false);
+}
+
+// Test the legacy load
+TEST(UrlClassifierLookupCacheV4, LoadOldPset) {
+  nsCOMPtr<nsIFile> oldPsetFile;
+
+  _PrefixArray array = {GeneratePrefix(_Fragment("entry.com/"), 4)};
+  PrefixStringMap map;
+  PrefixArrayToPrefixStringMap(array, map);
+
+  // Prepare .pset file on disk
+  {
+    NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+                           getter_AddRefs(oldPsetFile));
+    oldPsetFile->AppendNative(NS_LITERAL_CSTRING("safebrowsing"));
+    oldPsetFile->AppendNative(GTEST_TABLE + NS_LITERAL_CSTRING(".pset"));
+
+    RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+    pset->SetPrefixes(map);
+
+    nsCOMPtr<nsIOutputStream> stream;
+    nsresult rv =
+        NS_NewLocalFileOutputStream(getter_AddRefs(stream), oldPsetFile);
+    EXPECT_EQ(rv, NS_OK);
+
+    rv = pset->WritePrefixes(stream);
+    EXPECT_EQ(rv, NS_OK);
+  }
+
+  // Load data from disk
+  RefPtr<Classifier> classifier = GetClassifier();
+  RefPtr<LookupCache> cache = classifier->GetLookupCache(GTEST_TABLE, false);
+
+  RefPtr<LookupCacheV4> cacheV4 = LookupCache::Cast<LookupCacheV4>(cache);
+  CheckContent(cacheV4, map);
+
+  oldPsetFile->Remove(false);
+}
--- a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
@@ -16,17 +16,17 @@
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 typedef nsCString _Prefix;
 typedef nsTArray<_Prefix> _PrefixArray;
 
 #define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing")
 #define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto")
-#define GTEST_PREFIXFILE NS_LITERAL_CSTRING("gtest-malware-proto.pset")
+#define GTEST_PREFIXFILE NS_LITERAL_CSTRING("gtest-malware-proto.vlpset")
 
 // This function removes common elements of inArray and outArray from
 // outArray. It is used by the partial update testcase to ensure the partial
 // update data won't contain prefixes we already have.
 static void RemoveIntersection(const _PrefixArray& inArray,
                                _PrefixArray& outArray) {
   for (uint32_t i = 0; i < inArray.Length(); i++) {
     int32_t idx = outArray.BinaryIndexOf(inArray[i]);
@@ -49,31 +49,31 @@ static void MergeAndSortArray(const _Pre
                               const _PrefixArray& array2,
                               _PrefixArray& output) {
   output.Clear();
   output.AppendElements(array1);
   output.AppendElements(array2);
   output.Sort();
 }
 
-static void CalculateCheckSum(_PrefixArray& prefixArray, nsCString& checksum) {
+static void CalculateSHA256(_PrefixArray& prefixArray, nsCString& sha256) {
   prefixArray.Sort();
 
   nsresult rv;
   nsCOMPtr<nsICryptoHash> cryptoHash =
       do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
 
   cryptoHash->Init(nsICryptoHash::SHA256);
   for (uint32_t i = 0; i < prefixArray.Length(); i++) {
     const _Prefix& prefix = prefixArray[i];
     cryptoHash->Update(
         reinterpret_cast<uint8_t*>(const_cast<char*>(prefix.get())),
         prefix.Length());
   }
-  cryptoHash->Finish(false, checksum);
+  cryptoHash->Finish(false, sha256);
 }
 
 // N: Number of prefixes, MIN/MAX: minimum/maximum prefix size
 // This function will append generated prefixes to outArray.
 static void CreateRandomSortedPrefixArray(uint32_t N, uint32_t MIN,
                                           uint32_t MAX,
                                           _PrefixArray& outArray) {
   outArray.SetCapacity(outArray.Length() + N);
@@ -109,55 +109,56 @@ static void CreateRandomRemovalIndices(u
     if (!outArray.Contains(idx)) {
       outArray.InsertElementSorted(idx);
     }
   }
 }
 
 // Function to generate TableUpdateV4.
 static void GenerateUpdateData(bool fullUpdate, PrefixStringMap& add,
-                               nsTArray<uint32_t>* removal, nsCString* checksum,
+                               nsTArray<uint32_t>* removal, nsCString* sha256,
                                TableUpdateArray& tableUpdates) {
   RefPtr<TableUpdateV4> tableUpdate = new TableUpdateV4(GTEST_TABLE);
   tableUpdate->SetFullUpdate(fullUpdate);
 
   for (auto iter = add.ConstIter(); !iter.Done(); iter.Next()) {
     nsCString* pstring = iter.Data();
     tableUpdate->NewPrefixes(iter.Key(), *pstring);
   }
 
   if (removal) {
     tableUpdate->NewRemovalIndices(removal->Elements(), removal->Length());
   }
 
-  if (checksum) {
-    std::string stdChecksum;
-    stdChecksum.assign(const_cast<char*>(checksum->BeginReading()),
-                       checksum->Length());
+  if (sha256) {
+    std::string stdSHA256;
+    stdSHA256.assign(const_cast<char*>(sha256->BeginReading()),
+                     sha256->Length());
 
-    tableUpdate->NewChecksum(stdChecksum);
+    tableUpdate->SetSHA256(stdSHA256);
   }
 
   tableUpdates.AppendElement(tableUpdate);
 }
 
 static void VerifyPrefixSet(PrefixStringMap& expected) {
   // Verify the prefix set is written to disk.
   nsCOMPtr<nsIFile> file;
   NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+  file->AppendNative(GTEST_SAFEBROWSING_DIR);
 
-  file->AppendNative(GTEST_SAFEBROWSING_DIR);
+  RefPtr<LookupCacheV4> lookup =
+      new LookupCacheV4(GTEST_TABLE, NS_LITERAL_CSTRING("test"), file);
+  lookup->Init();
+
   file->AppendNative(GTEST_PREFIXFILE);
-
-  RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
-  load->Init(GTEST_TABLE);
+  lookup->LoadFromFile(file);
 
   PrefixStringMap prefixesInFile;
-  load->LoadFromFile(file);
-  load->GetPrefixes(prefixesInFile);
+  lookup->GetPrefixes(prefixesInFile);
 
   for (auto iter = expected.ConstIter(); !iter.Done(); iter.Next()) {
     nsCString* expectedPrefix = iter.Data();
     nsCString* resultPrefix = prefixesInFile.Get(iter.Key());
 
     ASSERT_TRUE(*resultPrefix == *expectedPrefix);
   }
 }
@@ -195,28 +196,28 @@ static void testUpdate(TableUpdateArray&
   RefPtr<Classifier> classifier = new Classifier();
   classifier->Open(*file);
 
   nsresult rv = SyncApplyUpdates(classifier, tableUpdates);
   ASSERT_TRUE(rv == NS_OK);
   VerifyPrefixSet(expected);
 }
 
-static void testFullUpdate(PrefixStringMap& add, nsCString* checksum) {
+static void testFullUpdate(PrefixStringMap& add, nsCString* sha256) {
   TableUpdateArray tableUpdates;
 
-  GenerateUpdateData(true, add, nullptr, checksum, tableUpdates);
+  GenerateUpdateData(true, add, nullptr, sha256, tableUpdates);
 
   testUpdate(tableUpdates, add);
 }
 
 static void testPartialUpdate(PrefixStringMap& add, nsTArray<uint32_t>* removal,
-                              nsCString* checksum, PrefixStringMap& expected) {
+                              nsCString* sha256, PrefixStringMap& expected) {
   TableUpdateArray tableUpdates;
-  GenerateUpdateData(false, add, removal, checksum, tableUpdates);
+  GenerateUpdateData(false, add, removal, sha256, tableUpdates);
 
   testUpdate(tableUpdates, expected);
 }
 
 static void testOpenLookupCache() {
   nsCOMPtr<nsIFile> file;
   NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
   file->AppendNative(GTEST_SAFEBROWSING_DIR);
@@ -228,158 +229,158 @@ static void testOpenLookupCache() {
     ASSERT_EQ(rv, NS_OK);
 
     rv = cache->Open();
     ASSERT_EQ(rv, NS_OK);
   });
 }
 
 // Tests start from here.
-TEST(UrlClassifierTableUpdateV4, FixLenghtPSetFullUpdate) {
+TEST(UrlClassifierTableUpdateV4, FixLengthPSetFullUpdate) {
   srand(time(NULL));
 
   _PrefixArray array;
   PrefixStringMap map;
-  nsCString checksum;
+  nsCString sha256;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, array);
   PrefixArrayToPrefixStringMap(array, map);
-  CalculateCheckSum(array, checksum);
+  CalculateSHA256(array, sha256);
 
-  testFullUpdate(map, &checksum);
+  testFullUpdate(map, &sha256);
 
   Clear();
 }
 
-TEST(UrlClassifierTableUpdateV4, VariableLenghtPSetFullUpdate) {
+TEST(UrlClassifierTableUpdateV4, VariableLengthPSetFullUpdate) {
   _PrefixArray array;
   PrefixStringMap map;
-  nsCString checksum;
+  nsCString sha256;
 
   CreateRandomSortedPrefixArray(5000, 5, 32, array);
   PrefixArrayToPrefixStringMap(array, map);
-  CalculateCheckSum(array, checksum);
+  CalculateSHA256(array, sha256);
 
-  testFullUpdate(map, &checksum);
+  testFullUpdate(map, &sha256);
 
   Clear();
 }
 
 // This test contains both a variable-length prefix set and a fixed-length prefix set
 TEST(UrlClassifierTableUpdateV4, MixedPSetFullUpdate) {
   _PrefixArray array;
   PrefixStringMap map;
-  nsCString checksum;
+  nsCString sha256;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, array);
   CreateRandomSortedPrefixArray(1000, 5, 32, array);
   PrefixArrayToPrefixStringMap(array, map);
-  CalculateCheckSum(array, checksum);
+  CalculateSHA256(array, sha256);
 
-  testFullUpdate(map, &checksum);
+  testFullUpdate(map, &sha256);
 
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, PartialUpdateWithRemoval) {
   _PrefixArray fArray;
 
   // Apply a full update first.
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply a partial update with removal.
   {
     _PrefixArray pArray, mergedArray;
     PrefixStringMap pMap, mergedMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     nsTArray<uint32_t> removal;
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     // Calculate the expected prefix map.
     MergeAndSortArray(fArray, pArray, mergedArray);
     PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
-    CalculateCheckSum(mergedArray, checksum);
+    CalculateSHA256(mergedArray, sha256);
 
-    testPartialUpdate(pMap, &removal, &checksum, mergedMap);
+    testPartialUpdate(pMap, &removal, &sha256, mergedMap);
   }
 
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, PartialUpdateWithoutRemoval) {
   _PrefixArray fArray;
 
   // Apply a full update first.
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply a partial update without removal
   {
     _PrefixArray pArray, mergedArray;
     PrefixStringMap pMap, mergedMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Calculate the expected prefix map.
     MergeAndSortArray(fArray, pArray, mergedArray);
     PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
-    CalculateCheckSum(mergedArray, checksum);
+    CalculateSHA256(mergedArray, sha256);
 
-    testPartialUpdate(pMap, nullptr, &checksum, mergedMap);
+    testPartialUpdate(pMap, nullptr, &sha256, mergedMap);
   }
 
   Clear();
 }
 
 // Expect failure because the partial update contains a prefix already
 // in the old prefix set.
 TEST(UrlClassifierTableUpdateV4, PartialUpdatePrefixAlreadyExist) {
   _PrefixArray fArray;
 
   // Apply a full update first.
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply a partial update which contains a prefix from the previous full update.
   // This should cause an update error.
   {
     _PrefixArray pArray;
     PrefixStringMap pMap;
     TableUpdateArray tableUpdates;
@@ -396,91 +397,91 @@ TEST(UrlClassifierTableUpdateV4, Partial
 
   Clear();
 }
 
 // Test applying a partial update directly without applying a full update first.
 TEST(UrlClassifierTableUpdateV4, OnlyPartialUpdate) {
   _PrefixArray pArray;
   PrefixStringMap pMap;
-  nsCString checksum;
+  nsCString sha256;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
   CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
   PrefixArrayToPrefixStringMap(pArray, pMap);
-  CalculateCheckSum(pArray, checksum);
+  CalculateSHA256(pArray, sha256);
 
-  testPartialUpdate(pMap, nullptr, &checksum, pMap);
+  testPartialUpdate(pMap, nullptr, &sha256, pMap);
 
   Clear();
 }
 
 // Test partial update without any ADD prefixes, only removalIndices.
 TEST(UrlClassifierTableUpdateV4, PartialUpdateOnlyRemoval) {
   _PrefixArray fArray;
 
   // Apply a full update first.
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(5000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply a partial update with no add prefixes, only removal indices.
   {
     _PrefixArray pArray;
     PrefixStringMap pMap, mergedMap;
-    nsCString checksum;
+    nsCString sha256;
 
     // Remove 1/5 of elements of original prefix set.
     nsTArray<uint32_t> removal;
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     PrefixArrayToPrefixStringMap(fArray, mergedMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testPartialUpdate(pMap, &removal, &checksum, mergedMap);
+    testPartialUpdate(pMap, &removal, &sha256, mergedMap);
   }
 
   Clear();
 }
 
 // Test that one tableupdate array contains a full update and multiple partial updates.
 TEST(UrlClassifierTableUpdateV4, MultipleTableUpdates) {
   _PrefixArray fArray, pArray, mergedArray;
   PrefixStringMap fMap, pMap, mergedMap;
-  nsCString checksum;
+  nsCString sha256;
 
   TableUpdateArray tableUpdates;
 
   // Generate the first full update
   CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
   CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
   PrefixArrayToPrefixStringMap(fArray, fMap);
-  CalculateCheckSum(fArray, checksum);
+  CalculateSHA256(fArray, sha256);
 
-  GenerateUpdateData(true, fMap, nullptr, &checksum, tableUpdates);
+  GenerateUpdateData(true, fMap, nullptr, &sha256, tableUpdates);
 
   // Generate second partial update
   CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
   CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
   RemoveIntersection(fArray, pArray);
   PrefixArrayToPrefixStringMap(pArray, pMap);
 
   MergeAndSortArray(fArray, pArray, mergedArray);
-  CalculateCheckSum(mergedArray, checksum);
+  CalculateSHA256(mergedArray, sha256);
 
-  GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
+  GenerateUpdateData(false, pMap, nullptr, &sha256, tableUpdates);
 
   // Generate the third partial update
   fArray.AppendElements(pArray);
   fArray.Sort();
   pArray.Clear();
   CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
   CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
   RemoveIntersection(fArray, pArray);
@@ -488,66 +489,66 @@ TEST(UrlClassifierTableUpdateV4, Multipl
 
   // Remove 1/5 of elements of original prefix set.
   nsTArray<uint32_t> removal;
   CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
   RemoveElements(removal, fArray);
 
   MergeAndSortArray(fArray, pArray, mergedArray);
   PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
-  CalculateCheckSum(mergedArray, checksum);
+  CalculateSHA256(mergedArray, sha256);
 
-  GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
+  GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
 
   testUpdate(tableUpdates, mergedMap);
 
   Clear();
 }
 
 // Test applying a full update first, and then applying multiple partial updates
 // in one tableupdate array.
 TEST(UrlClassifierTableUpdateV4, MultiplePartialUpdateTableUpdates) {
   _PrefixArray fArray;
 
   // Apply a full update first
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     // Generate the first full update
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(3000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply multiple partial updates in one table update
   {
     _PrefixArray pArray, mergedArray;
     PrefixStringMap pMap, mergedMap;
-    nsCString checksum;
+    nsCString sha256;
     nsTArray<uint32_t> removal;
     TableUpdateArray tableUpdates;
 
     // Generate first partial update
     CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     MergeAndSortArray(fArray, pArray, mergedArray);
-    CalculateCheckSum(mergedArray, checksum);
+    CalculateSHA256(mergedArray, sha256);
 
-    GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
+    GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
 
     fArray.AppendElements(pArray);
     fArray.Sort();
     pArray.Clear();
     removal.Clear();
 
     // Generate second partial update.
     CreateRandomSortedPrefixArray(2000, 4, 4, pArray);
@@ -556,40 +557,40 @@ TEST(UrlClassifierTableUpdateV4, Multipl
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     MergeAndSortArray(fArray, pArray, mergedArray);
     PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
-    CalculateCheckSum(mergedArray, checksum);
+    CalculateSHA256(mergedArray, sha256);
 
-    GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
+    GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
 
     testUpdate(tableUpdates, mergedMap);
   }
 
   Clear();
 }
 
 // Test removal indices that are larger than the original prefix set.
 TEST(UrlClassifierTableUpdateV4, RemovalIndexTooLarge) {
   _PrefixArray fArray;
 
   // Apply a full update first
   {
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
   // Apply a partial update with a removal indices array larger than the
   // old prefix set (fArray). This should cause an error.
   {
     _PrefixArray pArray;
     PrefixStringMap pMap;
     nsTArray<uint32_t> removal;
@@ -610,67 +611,67 @@ TEST(UrlClassifierTableUpdateV4, Removal
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, ChecksumMismatch) {
   // Apply a full update first
   {
     _PrefixArray fArray;
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
   }
 
-  // Apply a partial update with incorrect checksum
+  // Apply a partial update with incorrect sha256
   {
     _PrefixArray pArray;
     PrefixStringMap pMap;
-    nsCString checksum;
+    nsCString sha256;
     TableUpdateArray tableUpdates;
 
     CreateRandomSortedPrefixArray(200, 4, 32, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
-    // Checksum should be calculated with both old prefix set and add prefix
-    // set, here we only calculate checksum with add prefix set to check if
+    // The sha256 should be calculated from both the old prefix set and the add
+    // prefix set; here we only calculate it from the add prefix set to check if
     // applyUpdate will return failure.
-    CalculateCheckSum(pArray, checksum);
+    CalculateSHA256(pArray, sha256);
 
-    GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
+    GenerateUpdateData(false, pMap, nullptr, &sha256, tableUpdates);
     testUpdateFail(tableUpdates);
   }
 
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, ApplyUpdateThenLoad) {
-  // Apply update with checksum
+  // Apply update with sha256
   {
     _PrefixArray fArray;
     PrefixStringMap fMap;
-    nsCString checksum;
+    nsCString sha256;
 
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
-    CalculateCheckSum(fArray, checksum);
+    CalculateSHA256(fArray, sha256);
 
-    testFullUpdate(fMap, &checksum);
+    testFullUpdate(fMap, &sha256);
 
-    // Open lookup cache will load prefix set and verify the checksum
+    // Opening the lookup cache will load the prefix set and verify the sha256
     testOpenLookupCache();
   }
 
   Clear();
 
-  // Apply update without checksum
+  // Apply update without sha256
   {
     _PrefixArray fArray;
     PrefixStringMap fMap;
 
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
 
     testFullUpdate(fMap, nullptr);
@@ -701,82 +702,81 @@ TEST(UrlClassifierTableUpdateV4, ApplyUp
                          _Prefix("juno"),
                          _Prefix("mercury"),
                          _Prefix("Stheno, Euryale and Medusa")};
   fArray.Sort();
 
   PrefixStringMap fMap;
   PrefixArrayToPrefixStringMap(fArray, fMap);
 
-  nsCString checksum(
+  nsCString sha256(
       "\xae\x18\x94\xd7\xd0\x83\x5f\xc1"
       "\x58\x59\x5c\x2c\x72\xb9\x6e\x5e"
       "\xf4\xe8\x0a\x6b\xff\x5e\x6b\x81"
       "\x65\x34\x06\x16\x06\x59\xa0\x67");
 
-  testFullUpdate(fMap, &checksum);
+  testFullUpdate(fMap, &sha256);
 
-  // Open lookup cache will load prefix set and verify the checksum
+  // Opening the lookup cache will load the prefix set and verify the sha256
   testOpenLookupCache();
 
   Clear();
 }
 
 // This test ensures that an empty update works correctly. An empty update
 // should be skipped by CheckValidUpdate in Classifier::UpdateTableV4.
 TEST(UrlClassifierTableUpdateV4, EmptyUpdate) {
   PrefixStringMap emptyAddition;
   nsTArray<uint32_t> emptyRemoval;
 
   _PrefixArray array;
   PrefixStringMap map;
-  nsCString checksum;
+  nsCString sha256;
 
-  CalculateCheckSum(array, checksum);
+  CalculateSHA256(array, sha256);
 
   // Test applying an empty full/partial update before we have any
   // data in the DB.
-  testFullUpdate(emptyAddition, &checksum);
-  testPartialUpdate(emptyAddition, &emptyRemoval, &checksum, map);
+  testFullUpdate(emptyAddition, &sha256);
+  testPartialUpdate(emptyAddition, &emptyRemoval, &sha256, map);
 
   // Apply a full update.
   CreateRandomSortedPrefixArray(100, 4, 4, array);
   CreateRandomSortedPrefixArray(10, 5, 32, array);
   PrefixArrayToPrefixStringMap(array, map);
-  CalculateCheckSum(array, checksum);
+  CalculateSHA256(array, sha256);
 
-  testFullUpdate(map, &checksum);
+  testFullUpdate(map, &sha256);
 
   // Test applying an empty full/partial update when we already
   // have data in the DB
-  testPartialUpdate(emptyAddition, &emptyRemoval, &checksum, map);
-  testFullUpdate(emptyAddition, &checksum);
+  testPartialUpdate(emptyAddition, &emptyRemoval, &sha256, map);
+  testFullUpdate(emptyAddition, &sha256);
 
   Clear();
 }
 
 // This test ensures that applying an empty update directly through the update
 // algorithm works correctly.
 TEST(UrlClassifierTableUpdateV4, EmptyUpdate2) {
   // Setup LookupCache with initial data
   _PrefixArray array;
   CreateRandomSortedPrefixArray(100, 4, 4, array);
   CreateRandomSortedPrefixArray(10, 5, 32, array);
   RefPtr<LookupCacheV4> cache = SetupLookupCache<LookupCacheV4>(array);
 
-  // Setup TableUpdate object with only checksum from previous update(initial
+  // Set up a TableUpdate object with only the sha256 from the previous update (initial
   // data).
-  nsCString checksum;
-  CalculateCheckSum(array, checksum);
-  std::string stdChecksum;
-  stdChecksum.assign(const_cast<char*>(checksum.BeginReading()),
-                     checksum.Length());
+  nsCString sha256;
+  CalculateSHA256(array, sha256);
+  std::string stdSHA256;
+  stdSHA256.assign(const_cast<char*>(sha256.BeginReading()), sha256.Length());
 
   RefPtr<TableUpdateV4> tableUpdate = new TableUpdateV4(GTEST_TABLE);
-  tableUpdate->NewChecksum(stdChecksum);
+  tableUpdate->SetSHA256(stdSHA256);
 
   // Apply update directly through LookupCache interface
   PrefixStringMap input, output;
   PrefixArrayToPrefixStringMap(array, input);
   nsresult rv = cache->ApplyUpdate(tableUpdate.get(), input, output);
 
   ASSERT_TRUE(rv == NS_OK);
 
--- a/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
@@ -12,21 +12,21 @@
 #include "gtest/gtest.h"
 
 using namespace mozilla::safebrowsing;
 
 typedef nsCString _Prefix;
 typedef nsTArray<_Prefix> _PrefixArray;
 
 // Create fullhash by appending random characters.
-static nsCString* CreateFullHash(const nsACString& in) {
-  nsCString* out = new nsCString(in);
-  out->SetLength(32);
+static nsCString CreateFullHash(const nsACString& in) {
+  nsCString out(in);
+  out.SetLength(32);
   for (size_t i = in.Length(); i < 32; i++) {
-    out->SetCharAt(char(rand() % 256), i);
+    out.SetCharAt(char(rand() % 256), i);
   }
 
   return out;
 }
 
 // This function generates N prefixes with sizes between MIN and MAX.
 // The output array is not cleared; random results are appended to it
 static void RandomPrefixes(uint32_t N, uint32_t MIN, uint32_t MAX,
@@ -50,66 +50,53 @@ static void RandomPrefixes(uint32_t N, u
       if (!array.Contains(prefix)) {
         array.AppendElement(prefix);
         added = true;
       }
     }
   }
 }
 
-static void CheckContent(VariableLengthPrefixSet* pset,
-                         PrefixStringMap& expected) {
-  PrefixStringMap vlPSetMap;
-  pset->GetPrefixes(vlPSetMap);
-
-  for (auto iter = vlPSetMap.Iter(); !iter.Done(); iter.Next()) {
-    nsCString* expectedPrefix = expected.Get(iter.Key());
-    nsCString* resultPrefix = iter.Data();
-
-    ASSERT_TRUE(resultPrefix->Equals(*expectedPrefix));
-  }
-}
-
 // This test loops through all the prefixes and converts each prefix to a
 // fullhash by appending random characters; each converted fullhash
 // should match at least its original length in the prefixSet.
-static void DoExpectedLookup(VariableLengthPrefixSet* pset,
-                             _PrefixArray& array) {
+static void DoExpectedLookup(LookupCacheV4* cache, _PrefixArray& array) {
   uint32_t matchLength = 0;
   for (uint32_t i = 0; i < array.Length(); i++) {
     const nsCString& prefix = array[i];
-    UniquePtr<nsCString> fullhash(CreateFullHash(prefix));
+    Completion complete;
+    complete.Assign(CreateFullHash(prefix));
 
     // Find match for prefix-generated full hash
-    pset->Matches(*fullhash, &matchLength);
+    bool has, confirmed;
+    cache->Has(complete, &has, &matchLength, &confirmed);
     MOZ_ASSERT(matchLength != 0);
 
     if (matchLength != prefix.Length()) {
       // Return match size is not the same as prefix size.
       // In this case it could be because the generated fullhash match other
       // prefixes, check if this prefix exist.
       bool found = false;
 
       for (uint32_t j = 0; j < array.Length(); j++) {
         if (array[j].Length() != matchLength) {
           continue;
         }
 
-        if (0 == memcmp(fullhash->BeginReading(), array[j].BeginReading(),
-                        matchLength)) {
+        if (0 == memcmp(complete.buf, array[j].BeginReading(), matchLength)) {
           found = true;
           break;
         }
       }
       ASSERT_TRUE(found);
     }
   }
 }
 
-static void DoRandomLookup(VariableLengthPrefixSet* pset, uint32_t N,
+static void DoRandomLookup(LookupCacheV4* cache, uint32_t N,
                            _PrefixArray& array) {
   for (uint32_t i = 0; i < N; i++) {
     // Random 32-bytes test fullhash
     char buf[32];
     for (uint32_t j = 0; j < 32; j++) {
       buf[j] = (char)(rand() % 256);
     }
 
@@ -117,18 +104,21 @@ static void DoRandomLookup(VariableLengt
     nsTArray<uint32_t> expected;
     for (uint32_t j = 0; j < array.Length(); j++) {
       const nsACString& str = array[j];
       if (0 == memcmp(buf, str.BeginReading(), str.Length())) {
         expected.AppendElement(str.Length());
       }
     }
 
+    Completion complete;
+    complete.Assign(nsDependentCSubstring(buf, 32));
+    bool has, confirmed;
     uint32_t matchLength = 0;
-    pset->Matches(nsDependentCSubstring(buf, 32), &matchLength);
+    cache->Has(complete, &has, &matchLength, &confirmed);
 
     ASSERT_TRUE(expected.IsEmpty() ? !matchLength
                                    : expected.Contains(matchLength));
   }
 }
 
 static void SetupPrefixMap(const _PrefixArray& array, PrefixStringMap& map) {
   map.Clear();
@@ -163,99 +153,100 @@ static void SetupPrefixMap(const _Prefix
       memcpy(dst, iter.Data()->ElementAt(i).get(), size);
       dst += size;
     }
 
     map.Put(size, str);
   }
 }
 
+static already_AddRefed<LookupCacheV4> SetupLookupCache(
+    const nsACString& aName) {
+  nsCOMPtr<nsIFile> rootDir;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
+
+  nsAutoCString provider("test");
+  RefPtr<LookupCacheV4> lookup = new LookupCacheV4(aName, provider, rootDir);
+  lookup->Init();
+
+  return lookup.forget();
+}
+
 // Test setting a prefix set with only 4-byte prefixes
 TEST(UrlClassifierVLPrefixSet, FixedLengthSet) {
   srand(time(nullptr));
 
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   PrefixStringMap map;
   _PrefixArray array = {
       _Prefix("alph"), _Prefix("brav"), _Prefix("char"),
       _Prefix("delt"), _Prefix("echo"), _Prefix("foxt"),
   };
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 
   // Run random test
   array.Clear();
   map.Clear();
 
   RandomPrefixes(1500, 4, 4, array);
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 }
 
 // Test setting a prefix set with only 5~32 byte prefixes
 TEST(UrlClassifierVLPrefixSet, VariableLengthSet) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   PrefixStringMap map;
   _PrefixArray array = {
       _Prefix("bravo"),   _Prefix("charlie"),
       _Prefix("delta"),   _Prefix("EchoEchoEchoEchoEcho"),
       _Prefix("foxtrot"), _Prefix("GolfGolfGolfGolfGolfGolfGolfGolf"),
       _Prefix("hotel"),   _Prefix("november"),
       _Prefix("oscar"),   _Prefix("quebec"),
       _Prefix("romeo"),   _Prefix("sierrasierrasierrasierrasierra"),
       _Prefix("Tango"),   _Prefix("whiskey"),
       _Prefix("yankee"),  _Prefix("ZuluZuluZuluZulu")};
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 
   // Run random test
   array.Clear();
   map.Clear();
 
   RandomPrefixes(1500, 5, 32, array);
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 }
 
 // Test setting a prefix set with both 4-byte and 5~32 byte prefixes
 TEST(UrlClassifierVLPrefixSet, MixedPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   PrefixStringMap map;
   _PrefixArray array = {_Prefix("enus"),
                         _Prefix("apollo"),
                         _Prefix("mars"),
                         _Prefix("Hecatonchires cyclopes"),
                         _Prefix("vesta"),
                         _Prefix("neptunus"),
@@ -268,282 +259,323 @@ TEST(UrlClassifierVLPrefixSet, MixedPref
                         _Prefix("alcyoneus"),
                         _Prefix("hades"),
                         _Prefix("vulcanus"),
                         _Prefix("juno"),
                         _Prefix("mercury"),
                         _Prefix("Stheno, Euryale and Medusa")};
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 
   // Run random test
   array.Clear();
   map.Clear();
 
   RandomPrefixes(1500, 4, 32, array);
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 }
 
 // Test resetting prefix set
 TEST(UrlClassifierVLPrefixSet, ResetPrefix) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   // First prefix set
   _PrefixArray array1 = {
       _Prefix("Iceland"),   _Prefix("Peru"),    _Prefix("Mexico"),
       _Prefix("Australia"), _Prefix("Japan"),   _Prefix("Egypt"),
       _Prefix("America"),   _Prefix("Finland"), _Prefix("Germany"),
       _Prefix("Italy"),     _Prefix("France"),  _Prefix("Taiwan"),
   };
   {
     PrefixStringMap map;
 
     SetupPrefixMap(array1, map);
-    pset->SetPrefixes(map);
+    cache->Build(map);
 
-    DoExpectedLookup(pset, array1);
+    DoExpectedLookup(cache, array1);
   }
 
   // Second
   _PrefixArray array2 = {
       _Prefix("Pikachu"),    _Prefix("Bulbasaur"), _Prefix("Charmander"),
       _Prefix("Blastoise"),  _Prefix("Pidgey"),    _Prefix("Mewtwo"),
       _Prefix("Jigglypuff"), _Prefix("Persian"),   _Prefix("Tentacool"),
       _Prefix("Onix"),       _Prefix("Eevee"),     _Prefix("Jynx"),
   };
   {
     PrefixStringMap map;
 
     SetupPrefixMap(array2, map);
-    pset->SetPrefixes(map);
+    cache->Build(map);
 
-    DoExpectedLookup(pset, array2);
+    DoExpectedLookup(cache, array2);
   }
 
   // Should not match any of the first prefix set
   uint32_t matchLength = 0;
   for (uint32_t i = 0; i < array1.Length(); i++) {
-    UniquePtr<nsACString> fullhash(CreateFullHash(array1[i]));
+    Completion complete;
+    complete.Assign(CreateFullHash(array1[i]));
 
-    pset->Matches(*fullhash, &matchLength);
+    // Find match for prefix-generated full hash
+    bool has, confirmed;
+    cache->Has(complete, &has, &matchLength, &confirmed);
+
     ASSERT_TRUE(matchLength == 0);
   }
 }
 
 // Test setting only one 4-byte prefix and one full-length prefix
 TEST(UrlClassifierVLPrefixSet, TinyPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   PrefixStringMap map;
   _PrefixArray array = {
       _Prefix("AAAA"),
       _Prefix("11112222333344445555666677778888"),
   };
 
   SetupPrefixMap(array, map);
-  pset->SetPrefixes(map);
-
-  DoExpectedLookup(pset, array);
+  cache->Build(map);
 
-  DoRandomLookup(pset, 1000, array);
-
-  CheckContent(pset, map);
+  DoExpectedLookup(cache, array);
+  DoRandomLookup(cache, 1000, array);
+  CheckContent(cache, map);
 }
 
 // Test empty prefix set and IsEmpty function
 TEST(UrlClassifierVLPrefixSet, EmptyPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
-  bool empty;
-  pset->IsEmpty(&empty);
+  bool empty = cache->IsEmpty();
   ASSERT_TRUE(empty);
 
   PrefixStringMap map;
   _PrefixArray array1;
 
   // Lookups against an empty array should never match
-  DoRandomLookup(pset, 100, array1);
+  DoRandomLookup(cache, 100, array1);
 
   // Insert a 4-byte prefix; IsEmpty should then return false
   _PrefixArray array2 = {_Prefix("test")};
   SetupPrefixMap(array2, map);
-  pset->SetPrefixes(map);
+  cache->Build(map);
 
-  pset->IsEmpty(&empty);
+  empty = cache->IsEmpty();
   ASSERT_TRUE(!empty);
 
   _PrefixArray array3 = {_Prefix("test variable length")};
 
   // Insert a 5~32 byte prefix; IsEmpty should then return false
   SetupPrefixMap(array3, map);
-  pset->SetPrefixes(map);
+  cache->Build(map);
 
-  pset->IsEmpty(&empty);
+  empty = cache->IsEmpty();
   ASSERT_TRUE(!empty);
 }
 
 // Test that prefix sizes must be between 4 and 32 bytes
 TEST(UrlClassifierVLPrefixSet, MinMaxPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
-  pset->Init(NS_LITERAL_CSTRING("test"));
+  RefPtr<LookupCacheV4> cache = SetupLookupCache(NS_LITERAL_CSTRING("test"));
 
   PrefixStringMap map;
   {
     _PrefixArray array = {_Prefix("1234"), _Prefix("ABCDEFGHIJKKMNOP"),
                           _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh")};
 
     SetupPrefixMap(array, map);
-    nsresult rv = pset->SetPrefixes(map);
+    nsresult rv = cache->Build(map);
     ASSERT_TRUE(rv == NS_OK);
   }
 
   // A prefix size of less than 4 bytes should fail
   {
     _PrefixArray array = {_Prefix("123")};
 
     SetupPrefixMap(array, map);
-    nsresult rv = pset->SetPrefixes(map);
+    nsresult rv = cache->Build(map);
     ASSERT_TRUE(NS_FAILED(rv));
   }
 
   // A prefix size of greater than 32 bytes should fail
   {
     _PrefixArray array = {_Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh9")};
 
     SetupPrefixMap(array, map);
-    nsresult rv = pset->SetPrefixes(map);
+    nsresult rv = cache->Build(map);
     ASSERT_TRUE(NS_FAILED(rv));
   }
 }
 
 // Test saving then loading a prefix set with only 4-byte prefixes
 TEST(UrlClassifierVLPrefixSet, LoadSaveFixedLengthPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet;
-  save->Init(NS_LITERAL_CSTRING("test-save"));
-
+  nsCOMPtr<nsIFile> file;
   _PrefixArray array;
-  RandomPrefixes(10000, 4, 4, array);
-
   PrefixStringMap map;
-  SetupPrefixMap(array, map);
-  save->SetPrefixes(map);
 
-  DoExpectedLookup(save, array);
+  // Save
+  {
+    RefPtr<LookupCacheV4> save =
+        SetupLookupCache(NS_LITERAL_CSTRING("test-save"));
 
-  DoRandomLookup(save, 1000, array);
+    RandomPrefixes(10000, 4, 4, array);
 
-  CheckContent(save, map);
+    SetupPrefixMap(array, map);
+    save->Build(map);
 
-  nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-  file->Append(NS_LITERAL_STRING("test.vlpset"));
+    DoExpectedLookup(save, array);
+    DoRandomLookup(save, 1000, array);
+    CheckContent(save, map);
 
-  save->StoreToFile(file);
+    NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+    file->Append(NS_LITERAL_STRING("test.vlpset"));
+    save->StoreToFile(file);
+  }
 
-  RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
-  load->Init(NS_LITERAL_CSTRING("test-load"));
-
-  load->LoadFromFile(file);
+  // Load
+  {
+    RefPtr<LookupCacheV4> load =
+        SetupLookupCache(NS_LITERAL_CSTRING("test-load"));
+    load->LoadFromFile(file);
 
-  DoExpectedLookup(load, array);
-
-  DoRandomLookup(load, 1000, array);
-
-  CheckContent(load, map);
+    DoExpectedLookup(load, array);
+    DoRandomLookup(load, 1000, array);
+    CheckContent(load, map);
+  }
 
   file->Remove(false);
 }
 
 // Test saving then loading a prefix set with only 5~32 byte prefixes
 TEST(UrlClassifierVLPrefixSet, LoadSaveVariableLengthPrefixSet) {
-  RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet;
-  save->Init(NS_LITERAL_CSTRING("test-save"));
-
+  nsCOMPtr<nsIFile> file;
   _PrefixArray array;
-  RandomPrefixes(10000, 5, 32, array);
-
   PrefixStringMap map;
-  SetupPrefixMap(array, map);
-  save->SetPrefixes(map);
 
-  DoExpectedLookup(save, array);
+  // Save
+  {
+    RefPtr<LookupCacheV4> save =
+        SetupLookupCache(NS_LITERAL_CSTRING("test-save"));
 
-  DoRandomLookup(save, 1000, array);
+    RandomPrefixes(10000, 5, 32, array);
 
-  CheckContent(save, map);
+    SetupPrefixMap(array, map);
+    save->Build(map);
 
-  nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-  file->Append(NS_LITERAL_STRING("test.vlpset"));
+    DoExpectedLookup(save, array);
+    DoRandomLookup(save, 1000, array);
+    CheckContent(save, map);
 
-  save->StoreToFile(file);
+    NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+    file->Append(NS_LITERAL_STRING("test.vlpset"));
+    save->StoreToFile(file);
+  }
 
-  RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
-  load->Init(NS_LITERAL_CSTRING("test-load"));
-
-  load->LoadFromFile(file);
+