Merge mozilla-central to mozilla-inbound
author Carsten "Tomcat" Book <cbook@mozilla.com>
Thu, 15 Sep 2016 12:05:56 +0200
changeset 355334 841aca60dc7363ae0fa5bd24d7fdf98097be2659
parent 355333 2e502a75e1dd1ecda5126986ec456107d54b04bf (current diff)
parent 355265 29af101880db7ce7f5f87f58e1ff20988c1c5fc3 (diff)
child 355335 427fea673dd0dd614c896bdac9c67e631bd561ac
push id 6570
push user raliiev@mozilla.com
push date Mon, 14 Nov 2016 12:26:13 +0000
treeherder mozilla-beta@f455459b2ae5
milestone 51.0a1
--- a/browser/base/content/test/general/browser_bookmark_titles.js
+++ b/browser/base/content/test/general/browser_bookmark_titles.js
@@ -24,17 +24,17 @@ add_task(function* () {
     let browser = gBrowser.selectedBrowser;
     browser.stop(); // stop the about:blank load.
 
     // Test that a bookmark of each URI gets the corresponding default title.
     for (let i = 0; i < tests.length; ++i) {
         let [uri, title] = tests[i];
 
         let promiseLoaded = promisePageLoaded(browser);
-        content.location = uri;
+        BrowserTestUtils.loadURI(browser, uri);
         yield promiseLoaded;
         yield checkBookmark(uri, title);
     }
 
     // Network failure test: now that dummy_page.html is in history, bookmarking
     // it should give the last known page title as the default bookmark title.
 
     // Simulate a network outage with offline mode. (Localhost is still
@@ -48,22 +48,24 @@ add_task(function* () {
     });
 
     // LOAD_FLAGS_BYPASS_CACHE isn't good enough. So clear the cache.
     Services.cache2.clear();
 
     let [uri, title] = tests[0];
 
     let promiseLoaded = promisePageLoaded(browser);
-    content.location = uri;
+    BrowserTestUtils.loadURI(browser, uri);
     yield promiseLoaded;
 
     // The offline mode test is only good if the page failed to load.
-    is(content.document.documentURI.substring(0, 14), 'about:neterror',
-        "Offline mode successfully simulated network outage.");
+    yield ContentTask.spawn(browser, null, function() {
+      is(content.document.documentURI.substring(0, 14), 'about:neterror',
+          "Offline mode successfully simulated network outage.");
+    });
     yield checkBookmark(uri, title);
 
     gBrowser.removeCurrentTab();
 });
 
 // Bookmark the current page and confirm that the new bookmark has the expected
 // title. (Then delete the bookmark.)
 function* checkBookmark(uri, expected_title) {
--- a/browser/components/search/content/search.xml
+++ b/browser/components/search/content/search.xml
@@ -1061,16 +1061,17 @@
       </xul:deck>
       <xul:description anonid="search-panel-one-offs"
                        role="group"
                        class="search-panel-one-offs"
                        xbl:inherits="compact">
         <xul:button anonid="search-settings-compact"
                     oncommand="showSettings();"
                     class="searchbar-engine-one-off-item search-setting-button-compact"
+                    aria-label="&changeSearchSettings.button;"
                     xbl:inherits="compact"/>
       </xul:description>
       <xul:vbox anonid="add-engines"/>
       <xul:button anonid="search-settings"
                   oncommand="showSettings();"
                   class="search-setting-button search-panel-header"
                   label="&changeSearchSettings.button;"
                   xbl:inherits="compact"/>
@@ -1372,17 +1373,18 @@
           // could effectively break the urlbar popup by offering a ton of
           // engines.  We should probably make a smaller version of the buttons
           // for compact one-offs.
           if (!this.compact) {
             for (let engine of gBrowser.selectedBrowser.engines || []) {
               let button = document.createElementNS(kXULNS, "button");
               let label = this.bundle.formatStringFromName("cmd_addFoundEngine",
                                                            [engine.title], 1);
-              button.id = "searchbar-add-engine-" + engine.title.replace(/ /g, '-');
+              button.id = this.telemetryOrigin + "-add-engine-" +
+                          engine.title.replace(/ /g, '-');
               button.setAttribute("class", "addengine-item");
               button.setAttribute("label", label);
               button.setAttribute("pack", "start");
 
               button.setAttribute("crop", "end");
               button.setAttribute("tooltiptext", engine.uri);
               button.setAttribute("uri", engine.uri);
               if (engine.icon) {
@@ -1437,19 +1439,22 @@
           // of the suggestion <tree> to be hidden.
           let oneOffCount = engines.length;
           if (this.compact)
             ++oneOffCount;
           let rowCount = Math.ceil(oneOffCount / enginesPerRow);
           let height = rowCount * 33; // 32px per row, 1px border.
           list.setAttribute("height", height + "px");
 
-          // Ensure we can refer to the settings button by ID:
+          // Ensure we can refer to the settings buttons by ID:
           let settingsEl = document.getAnonymousElementByAttribute(this, "anonid", "search-settings");
-          settingsEl.id = this.id + "-anon-search-settings";
+          settingsEl.id = this.telemetryOrigin + "-anon-search-settings";
+          let compactSettingsEl = document.getAnonymousElementByAttribute(this, "anonid", "search-settings-compact");
+          compactSettingsEl.id = this.telemetryOrigin +
+                                 "-anon-search-settings-compact";
 
           let dummyItems = enginesPerRow - (oneOffCount % enginesPerRow || enginesPerRow);
           for (let i = 0; i < engines.length; ++i) {
             let engine = engines[i];
             let button = document.createElementNS(kXULNS, "button");
             button.id = this._buttonIDForEngine(engine);
             let uri = "chrome://browser/skin/search-engine-placeholder.png";
             if (engine.iconURI) {
@@ -1500,17 +1505,17 @@
             }
           }
         ]]></body>
       </method>
 
       <method name="_buttonIDForEngine">
         <parameter name="engine"/>
         <body><![CDATA[
-          return "searchbar-engine-one-off-item-" +
+          return this.telemetryOrigin + "-engine-one-off-item-" +
                  engine.name.replace(/ /g, '-');
         ]]></body>
       </method>
 
       <method name="_buttonForEngine">
         <parameter name="engine"/>
         <body><![CDATA[
           return document.getElementById(this._buttonIDForEngine(engine));
@@ -1537,21 +1542,25 @@
                 document.getAnonymousElementByAttribute(this, "anonid",
                                                         "searchbar-oneoffheader-engine");
               header.selectedIndex = 2;
               headerEngineText.value = val.engine.name;
             }
             else {
               header.selectedIndex = this.query ? 1 : 0;
             }
-            this.setAttribute("aria-activedescendant", val.id);
+            if (this.textbox) {
+              this.textbox.setAttribute("aria-activedescendant", val.id);
+            }
           } else {
             val = null;
             header.selectedIndex = this.query ? 1 : 0;
-            this.removeAttribute("aria-activedescendant");
+            if (this.textbox) {
+              this.textbox.removeAttribute("aria-activedescendant");
+            }
           }
 
           if (aUpdateLogicallySelectedButton) {
             this._selectedButton = val;
             if (val && !val.engine) {
               // If the button doesn't have an engine, then clear the popup's
               // selection to indicate that pressing Return while the button is
               // selected will do the button's command, not search.
@@ -1767,20 +1776,22 @@
           else if (event.keyCode == Ci.nsIDOMKeyEvent.DOM_VK_UP) {
             if (numListItems > 0) {
               if (this.popup.selectedIndex > 0) {
                 // The autocomplete controller should handle this case.
               } else if (this.popup.selectedIndex == 0) {
                 if (!allowEmptySelection) {
                   // Wrap around the selection to the last one-off.
                   this.selectedButton = null;
-                  stopEvent = this.advanceSelection(false, true, true);
-                  if (stopEvent) {
-                    this.popup.selectedIndex = -1;
-                  }
+                  this.popup.selectedIndex = -1;
+                  // Call advanceSelection after setting selectedIndex so that
+                  // screen readers see the newly selected one-off. Both trigger
+                  // accessibility events.
+                  this.advanceSelection(false, true, true);
+                  stopEvent = true;
                 }
               } else {
                 let firstButtonSelected =
                   this.selectedButton &&
                   this.selectedButton == this.getSelectableButtons(true)[0];
                 if (firstButtonSelected) {
                   this.selectedButton = null;
                 } else {
@@ -1794,26 +1805,27 @@
 
           else if (event.keyCode == Ci.nsIDOMKeyEvent.DOM_VK_DOWN) {
             if (numListItems > 0) {
               if (this.popup.selectedIndex >= 0 &&
                   this.popup.selectedIndex < numListItems - 1) {
                 // The autocomplete controller should handle this case.
               } else if (this.popup.selectedIndex == numListItems - 1) {
                 this.selectedButton = null;
-                stopEvent = this.advanceSelection(true, true, true);
-                if (stopEvent) {
-                  stopEvent = !allowEmptySelection;
-                  if (this.textbox && typeof(textboxUserValue) == "string") {
-                    this.textbox.value = textboxUserValue;
-                  }
-                  if (!allowEmptySelection) {
-                    this.popup.selectedIndex = -1;
-                  }
+                if (!allowEmptySelection) {
+                  this.popup.selectedIndex = -1;
+                  stopEvent = true;
                 }
+                if (this.textbox && typeof(textboxUserValue) == "string") {
+                  this.textbox.value = textboxUserValue;
+                }
+                // Call advanceSelection after setting selectedIndex so that
+                // screen readers see the newly selected one-off. Both trigger
+                // accessibility events.
+                this.advanceSelection(true, true, true);
               } else {
                 let buttons = this.getSelectableButtons(true);
                 let lastButtonSelected =
                   this.selectedButton &&
                   this.selectedButton == buttons[buttons.length - 1];
                 if (lastButtonSelected) {
                   this.selectedButton = null;
                   stopEvent = allowEmptySelection;
--- a/browser/extensions/e10srollout/bootstrap.js
+++ b/browser/extensions/e10srollout/bootstrap.js
@@ -12,27 +12,28 @@ Cu.import("resource://gre/modules/Update
 
  // The amount of people to be part of e10s
 const TEST_THRESHOLD = {
   "beta"    : 0.5,  // 50%
   "release" : 1.0,  // 100%
 };
 
 const ADDON_ROLLOUT_POLICY = {
-  "beta"    : "49a", // 10 tested add-ons + any WebExtension
+  "beta"    : "50allmpc", // Any WebExtension or addon with mpc = true
   "release" : "49a", // 10 tested add-ons + any WebExtension
 };
 
 const PREF_COHORT_SAMPLE       = "e10s.rollout.cohortSample";
 const PREF_COHORT_NAME         = "e10s.rollout.cohort";
 const PREF_E10S_OPTED_IN       = "browser.tabs.remote.autostart";
 const PREF_E10S_FORCE_ENABLED  = "browser.tabs.remote.force-enable";
 const PREF_E10S_FORCE_DISABLED = "browser.tabs.remote.force-disable";
 const PREF_TOGGLE_E10S         = "browser.tabs.remote.autostart.2";
 const PREF_E10S_ADDON_POLICY   = "extensions.e10s.rollout.policy";
+const PREF_E10S_ADDON_BLOCKLIST = "extensions.e10s.rollout.blocklist";
 const PREF_E10S_HAS_NONEXEMPT_ADDON = "extensions.e10s.rollout.hasAddon";
 
 function startup() {
   // In theory we only need to run this once (on install()), but
   // it's better to also run it on every startup. If the user has
   // made manual changes to the prefs, this will keep the data
   // reported more accurate.
   // It's also fine (and preferred) to just do it here on startup
@@ -59,19 +60,23 @@ function defineCohort() {
   if (!(updateChannel in TEST_THRESHOLD)) {
     setCohort("unsupportedChannel");
     return;
   }
 
   let addonPolicy = "unknown";
   if (updateChannel in ADDON_ROLLOUT_POLICY) {
     addonPolicy = ADDON_ROLLOUT_POLICY[updateChannel];
-    Preferences.set(PREF_E10S_ADDON_POLICY, ADDON_ROLLOUT_POLICY[updateChannel]);
+    Preferences.set(PREF_E10S_ADDON_POLICY, addonPolicy);
     // This is also the proper place to set the blocklist pref
     // in case it is necessary.
+
+    // Tab Mix Plus exception tracked at bug 1185672.
+    Preferences.set(PREF_E10S_ADDON_BLOCKLIST,
+                    "{dc572301-7619-498c-a57d-39143191b318}");
   } else {
     Preferences.reset(PREF_E10S_ADDON_POLICY);
   }
 
   let userOptedOut = optedOut();
   let userOptedIn = optedIn();
   let disqualified = (Services.appinfo.multiprocessBlockPolicy != 0);
   let testGroup = (getUserSample() < TEST_THRESHOLD[updateChannel]);
--- a/browser/extensions/pocket/skin/windows/pocket.css
+++ b/browser/extensions/pocket/skin/windows/pocket.css
@@ -6,11 +6,11 @@
 
 :-moz-any(#TabsToolbar, .widget-overflow-list) #pocket-button > .toolbarbutton-icon {
     max-width: 18px;
     padding: 0;
 }
 
 @media (-moz-windows-theme: luna-silver) and (max-resolution: 1dppx) {
   #pocket-button {
-    list-style-image: url(chrome://pocket/skin/toolbar-lunaSilver.png)
+    list-style-image: url(Toolbar-lunaSilver.png);
   }
 }
--- a/browser/modules/ContentCrashHandlers.jsm
+++ b/browser/modules/ContentCrashHandlers.jsm
@@ -371,60 +371,62 @@ this.UnsubmittedCrashHandler = {
 
   /**
    * Scans the profile directory for unsubmitted crash reports
    * within the past PENDING_CRASH_REPORT_DAYS days. If it
    * finds any, it will, if necessary, attempt to open a notification
    * bar to prompt the user to submit them.
    *
    * @returns Promise
-   *          Resolves after it tries to append a notification on
-   *          the most recent browser window. If a notification
-   *          cannot be shown, will resolve anyways.
+   *          Resolves with the <xul:notification> after it tries to
+   *          show a notification on the most recent browser window.
+   *          If a notification cannot be shown, will resolve with null.
    */
   checkForUnsubmittedCrashReports: Task.async(function*() {
     let dateLimit = new Date();
     dateLimit.setDate(dateLimit.getDate() - PENDING_CRASH_REPORT_DAYS);
 
     let reportIDs = [];
     try {
       reportIDs = yield CrashSubmit.pendingIDsAsync(dateLimit);
     } catch (e) {
       Cu.reportError(e);
-      return;
+      return null;
     }
 
     if (reportIDs.length) {
       if (CrashNotificationBar.autoSubmit) {
         CrashNotificationBar.submitReports(reportIDs);
       } else {
-        this.showPendingSubmissionsNotification(reportIDs);
+        return this.showPendingSubmissionsNotification(reportIDs);
       }
     }
+    return null;
   }),
 
   /**
    * Given an array of unsubmitted crash report IDs, try to open
    * up a notification asking the user to submit them.
    *
    * @param reportIDs (Array<string>)
    *        The Array of report IDs to offer the user to send.
+   * @returns The <xul:notification> if one is shown. null otherwise.
    */
   showPendingSubmissionsNotification(reportIDs) {
     let count = reportIDs.length;
     if (!count) {
-      return;
+      return null;
     }
 
     let messageTemplate =
       gNavigatorBundle.GetStringFromName("pendingCrashReports2.label");
 
     let message = PluralForm.get(count, messageTemplate).replace("#1", count);
 
-    CrashNotificationBar.show({
+    return CrashNotificationBar.show({
       notificationID: "pending-crash-reports",
       message,
       reportIDs,
     });
   },
 };
 
 this.CrashNotificationBar = {
@@ -445,30 +447,31 @@ this.CrashNotificationBar = {
    *        notificationID (string)
    *          The ID for the notification to be opened.
    *
    *        message (string)
    *          The message to be displayed in the notification.
    *
    *        reportIDs (Array<string>)
    *          The array of report IDs to offer to the user.
+   * @returns The <xul:notification> if one is shown. null otherwise.
    */
   show({ notificationID, message, reportIDs }) {
     let chromeWin = RecentWindow.getMostRecentBrowserWindow();
     if (!chromeWin) {
       // Can't show a notification in this case. We'll hopefully
       // get another opportunity to have the user submit their
       // crash reports later.
-      return;
+      return null;
     }
 
     let nb =  chromeWin.document.getElementById("global-notificationbox");
     let notification = nb.getNotificationWithValue(notificationID);
     if (notification) {
-      return;
+      return null;
     }
 
     let buttons = [{
       label: gNavigatorBundle.GetStringFromName("pendingCrashReports.send"),
       callback: () => {
         this.submitReports(reportIDs);
       },
     },
@@ -494,20 +497,20 @@ this.CrashNotificationBar = {
         // to submit the reports. We'll ignore these particular
         // reports going forward.
         reportIDs.forEach(function(reportID) {
           CrashSubmit.ignore(reportID);
         });
       }
     };
 
-    nb.appendNotification(message, notificationID,
-                          "chrome://browser/skin/tab-crashed.svg",
-                          nb.PRIORITY_INFO_HIGH, buttons,
-                          eventCallback);
+    return nb.appendNotification(message, notificationID,
+                                 "chrome://browser/skin/tab-crashed.svg",
+                                 nb.PRIORITY_INFO_HIGH, buttons,
+                                 eventCallback);
   },
 
   get autoSubmit() {
     return Services.prefs
                    .getBoolPref("browser.crashReports.unsubmittedCheck.autoSubmit");
   },
 
   set autoSubmit(val) {
--- a/browser/modules/test/browser.ini
+++ b/browser/modules/test/browser.ini
@@ -18,11 +18,13 @@ support-files =
   contentSearchSuggestions.xml
 [browser_NetworkPrioritizer.js]
 [browser_SelfSupportBackend.js]
 support-files =
   ../../components/uitour/test/uitour.html
   ../../components/uitour/UITour-lib.js
 [browser_taskbar_preview.js]
 skip-if = os != "win"
+[browser_UnsubmittedCrashHandler.js]
+run-if = crashreporter
 [browser_UsageTelemetry.js]
 [browser_UsageTelemetry_private_and_restore.js]
 [browser_urlBar_zoom.js]
new file mode 100644
--- /dev/null
+++ b/browser/modules/test/browser_UnsubmittedCrashHandler.js
@@ -0,0 +1,419 @@
+"use strict";
+
+/**
+ * This suite tests the "unsubmitted crash report" notification
+ * that is seen when we detect pending crash reports on startup.
+ */
+
+const { UnsubmittedCrashHandler } =
+  Cu.import("resource:///modules/ContentCrashHandlers.jsm", this);
+const { FileUtils } =
+  Cu.import("resource://gre/modules/FileUtils.jsm", this);
+const { makeFakeAppDir }  =
+  Cu.import("resource://testing-common/AppData.jsm", this);
+const { OS } =
+  Cu.import("resource://gre/modules/osfile.jsm", this);
+
+const DAY = 24 * 60 * 60 * 1000; // milliseconds
+const SERVER_URL = "http://example.com/browser/toolkit/crashreporter/test/browser/crashreport.sjs";
+
+/**
+ * Returns the directory where the browser is storing the
+ * pending crash reports.
+ *
+ * @returns nsIFile
+ */
+function getPendingCrashReportDir() {
+  // The fake UAppData directory that makeFakeAppDir provides
+  // is just UAppData under the profile directory.
+  return FileUtils.getDir("ProfD", [
+    "UAppData",
+    "Crash Reports",
+    "pending",
+  ], false);
+}
+
+/**
+ * Synchronously deletes all entries inside the pending
+ * crash report directory.
+ */
+function clearPendingCrashReports() {
+  let dir = getPendingCrashReportDir();
+  let entries = dir.directoryEntries;
+
+  while (entries.hasMoreElements()) {
+    let entry = entries.getNext().QueryInterface(Ci.nsIFile);
+    if (entry.isFile()) {
+      entry.remove(false);
+    }
+  }
+}
+
+/**
+ * Randomly generates howMany crash report .dmp and .extra files
+ * to put into the pending crash report directory. We're not
+ * actually creating real crash reports here, just stubbing
+ * out enough of the files to satisfy our notification and
+ * submission code.
+ *
+ * @param howMany (int)
+ *        How many pending crash reports to put in the pending
+ *        crash report directory.
+ * @param accessDate (Date, optional)
+ *        What date to set as the last accessed time on the created
+ *        crash reports. This defaults to the current date and time.
+ * @returns Promise
+ */
+function* createPendingCrashReports(howMany, accessDate) {
+  let dir = getPendingCrashReportDir();
+  if (!accessDate) {
+    accessDate = new Date();
+  }
+
+  /**
+   * Helper function for creating a file in the pending crash report
+   * directory.
+   *
+   * @param fileName (string)
+   *        The filename for the crash report, not including the
+   *        extension. This is usually a UUID.
+   * @param extension (string)
+   *        The file extension for the created file.
+   * @param accessDate (Date)
+   *        The date to set lastAccessed to.
+   * @param contents (string, optional)
+   *        Set this to whatever the file needs to contain, if anything.
+   * @returns Promise
+   */
+  let createFile = (fileName, extension, accessDate, contents) => {
+    let file = dir.clone();
+    file.append(fileName + "." + extension);
+    file.create(Ci.nsILocalFile.NORMAL_FILE_TYPE, FileUtils.PERMS_FILE);
+    let promises = [OS.File.setDates(file.path, accessDate)];
+
+    if (contents) {
+      let encoder = new TextEncoder();
+      let array = encoder.encode(contents);
+      promises.push(OS.File.writeAtomic(file.path, array, {
+        tmpPath: file.path + ".tmp",
+      }));
+    }
+    return Promise.all(promises);
+  }
+
+  let uuidGenerator = Cc["@mozilla.org/uuid-generator;1"]
+                      .getService(Ci.nsIUUIDGenerator);
+  // CrashSubmit expects there to be a ServerURL key-value
+  // pair in the .extra file, so we'll satisfy it.
+  let extraFileContents = "ServerURL=" + SERVER_URL;
+
+  return Task.spawn(function*() {
+    let uuids = [];
+    for (let i = 0; i < howMany; ++i) {
+      let uuid = uuidGenerator.generateUUID().toString();
+      // Strip the {}...
+      uuid = uuid.substring(1, uuid.length - 1);
+      yield createFile(uuid, "dmp", accessDate);
+      yield createFile(uuid, "extra", accessDate, extraFileContents);
+      uuids.push(uuid);
+    }
+    return uuids;
+  });
+}
+
+/**
+ * Returns a Promise that resolves once CrashSubmit starts sending
+ * success notifications for crash submission matching the reportIDs
+ * being passed in.
+ *
+ * @param reportIDs (Array<string>)
+ *        The IDs for the reports that we expect CrashSubmit to have sent.
+ * @returns Promise
+ */
+function waitForSubmittedReports(reportIDs) {
+  let promises = [];
+  for (let reportID of reportIDs) {
+    let promise = TestUtils.topicObserved("crash-report-status", (subject, data) => {
+      if (data == "success") {
+        let propBag = subject.QueryInterface(Ci.nsIPropertyBag2);
+        let dumpID = propBag.getPropertyAsAString("minidumpID");
+        if (dumpID == reportID) {
+          return true;
+        }
+      }
+      return false;
+    });
+    promises.push(promise);
+  }
+  return Promise.all(promises);
+}
+
+/**
+ * Returns a Promise that resolves once a .dmp.ignore file is created for
+ * the crashes in the pending directory matching the reportIDs being
+ * passed in.
+ *
+ * @param reportIDs (Array<string>)
+ *        The IDs for the reports that we expect CrashSubmit to have been
+ *        marked for ignoring.
+ * @returns Promise
+ */
+function waitForIgnoredReports(reportIDs) {
+  let dir = getPendingCrashReportDir();
+  let promises = [];
+  for (let reportID of reportIDs) {
+    let file = dir.clone();
+    file.append(reportID + ".dmp.ignore");
+    promises.push(OS.File.exists(file.path));
+  }
+  return Promise.all(promises);
+}
+
+let gNotificationBox;
+
+add_task(function* setup() {
+  // Pending crash reports are stored in the UAppData folder,
+  // which exists outside of the profile folder. In order to
+  // not overwrite / clear pending crash reports for the poor
+  // soul who runs this test, we use AppData.jsm to point to
+  // a special made-up directory inside the profile
+  // directory.
+  yield makeFakeAppDir();
+  // We'll assume that the notifications will be shown in the current
+  // browser window's global notification box.
+  gNotificationBox = document.getElementById("global-notificationbox");
+
+  // If we happen to already be seeing the unsent crash report
+  // notification, it's because the developer running this test
+  // happened to have some unsent reports in their UAppDir.
+  // We'll remove the notification without touching those reports.
+  let notification =
+    gNotificationBox.getNotificationWithValue("pending-crash-reports");
+  if (notification) {
+    notification.close();
+  }
+
+  let env = Cc["@mozilla.org/process/environment;1"]
+              .getService(Components.interfaces.nsIEnvironment);
+  let oldServerURL = env.get("MOZ_CRASHREPORTER_URL");
+  env.set("MOZ_CRASHREPORTER_URL", SERVER_URL);
+
+  registerCleanupFunction(function() {
+    gNotificationBox = null;
+    clearPendingCrashReports();
+    env.set("MOZ_CRASHREPORTER_URL", oldServerURL);
+  });
+});
+
+/**
+ * Tests that if there are no pending crash reports, then the
+ * notification will not show up.
+ */
+add_task(function* test_no_pending_no_notification() {
+  // Make absolutely sure there are no pending crash reports first...
+  clearPendingCrashReports();
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.equal(notification, null,
+               "There should not be a notification if there are no " +
+               "pending crash reports");
+});
+
+/**
+ * Tests that there is a notification if there is one pending
+ * crash report.
+ */
+add_task(function* test_one_pending() {
+  yield createPendingCrashReports(1);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  gNotificationBox.removeNotification(notification, true);
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that there is a notification if there is more than one
+ * pending crash report.
+ */
+add_task(function* test_several_pending() {
+  yield createPendingCrashReports(3);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  gNotificationBox.removeNotification(notification, true);
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that there is no notification if the only pending crash
+ * reports are over 28 days old. Also checks that if we put a newer
+ * crash with that older set, that we can still get a notification.
+ */
+add_task(function* test_several_pending() {
+  // Let's create some crash reports from 30 days ago.
+  let oldDate = new Date(Date.now() - (30 * DAY));
+  yield createPendingCrashReports(3, oldDate);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.equal(notification, null,
+               "There should not be a notification if there are only " +
+               "old pending crash reports");
+  // Now let's create a new one and check again
+  yield createPendingCrashReports(1);
+  notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  gNotificationBox.removeNotification(notification, true);
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that the notification can submit a report.
+ */
+add_task(function* test_can_submit() {
+  let reportIDs = yield createPendingCrashReports(1);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  // Attempt to submit the notification by clicking on the submit
+  // button
+  let buttons = notification.querySelectorAll(".notification-button");
+  // ...which should be the first button.
+  let submit = buttons[0];
+
+  let promiseReports = waitForSubmittedReports(reportIDs);
+  info("Sending crash report");
+  submit.click();
+  info("Sent!");
+  // We'll not wait for the notification to finish its transition -
+  // we'll just remove it right away.
+  gNotificationBox.removeNotification(notification, true);
+
+  info("Waiting on reports to be received.");
+  yield promiseReports;
+  info("Received!");
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that the notification can submit multiple reports.
+ */
+add_task(function* test_can_submit_several() {
+  let reportIDs = yield createPendingCrashReports(3);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  // Attempt to submit the notification by clicking on the submit
+  // button
+  let buttons = notification.querySelectorAll(".notification-button");
+  // ...which should be the first button.
+  let submit = buttons[0];
+
+  let promiseReports = waitForSubmittedReports(reportIDs);
+  info("Sending crash reports");
+  submit.click();
+  info("Sent!");
+  // We'll not wait for the notification to finish its transition -
+  // we'll just remove it right away.
+  gNotificationBox.removeNotification(notification, true);
+
+  info("Waiting on reports to be received.");
+  yield promiseReports;
+  info("Received!");
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that choosing "Send Always" flips the autoSubmit pref
+ * and sends the pending crash reports.
+ */
+add_task(function* test_can_submit_always() {
+  let pref = "browser.crashReports.unsubmittedCheck.autoSubmit";
+  Assert.equal(Services.prefs.getBoolPref(pref), false,
+               "We should not be auto-submitting by default");
+
+  let reportIDs = yield createPendingCrashReports(1);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  // Attempt to submit the notification by clicking on the send all
+  // button
+  let buttons = notification.querySelectorAll(".notification-button");
+  // ...which should be the second button.
+  let sendAll = buttons[1];
+
+  let promiseReports = waitForSubmittedReports(reportIDs);
+  info("Sending crash reports");
+  sendAll.click();
+  info("Sent!");
+  // We'll not wait for the notification to finish its transition -
+  // we'll just remove it right away.
+  gNotificationBox.removeNotification(notification, true);
+
+  info("Waiting on reports to be received.");
+  yield promiseReports;
+  info("Received!");
+
+  // Make sure the pref was set
+  Assert.equal(Services.prefs.getBoolPref(pref), true,
+               "The autoSubmit pref should have been set");
+
+  // And revert back to default now.
+  Services.prefs.clearUserPref(pref);
+
+  clearPendingCrashReports();
+});
+
+/**
+ * Tests that if the user has chosen to automatically send
+ * crash reports that no notification is displayed to the
+ * user.
+ */
+add_task(function* test_can_auto_submit() {
+  yield SpecialPowers.pushPrefEnv({ set: [
+    ["browser.crashReports.unsubmittedCheck.autoSubmit", true],
+  ]});
+
+  let reportIDs = yield createPendingCrashReports(3);
+  let promiseReports = waitForSubmittedReports(reportIDs);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.equal(notification, null, "There should be no notification");
+  info("Waiting on reports to be received.");
+  yield promiseReports;
+  info("Received!");
+
+  clearPendingCrashReports();
+  yield SpecialPowers.popPrefEnv();
+});
+
+/**
+ * Tests that if the user chooses to dismiss the notification,
+ * then the current pending requests won't cause the notification
+ * to appear again in the future.
+ */
+add_task(function* test_can_ignore() {
+  let reportIDs = yield createPendingCrashReports(3);
+  let notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.ok(notification, "There should be a notification");
+
+  // Dismiss the notification by clicking on the "X" button.
+  let anonyNodes = document.getAnonymousNodes(notification)[0];
+  let closeButton = anonyNodes.querySelector(".close-icon");
+  closeButton.click();
+  yield waitForIgnoredReports(reportIDs);
+
+  notification =
+    yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+  Assert.equal(notification, null, "There should be no notification");
+
+  clearPendingCrashReports();
+});
--- a/browser/themes/linux/browser.css
+++ b/browser/themes/linux/browser.css
@@ -573,20 +573,16 @@ menuitem:not([type]):not(.menuitem-toolt
 #menu_openHelp {
   list-style-image: url("moz-icon://stock/gtk-help?size=menu");
 }
 
 #aboutName {
   list-style-image: url("moz-icon://stock/gtk-about?size=menu");
 }
 
-#javascriptConsole {
-  list-style-image: url("chrome://global/skin/console/console.png");
-}
-
 /* Primary toolbar buttons */
 
 :-moz-any(toolbar, .widget-overflow-list) .toolbarbutton-1 > .toolbarbutton-icon,
 :-moz-any(toolbar, .widget-overflow-list) .toolbarbutton-1 > :-moz-any(.toolbarbutton-menubutton-button, .toolbarbutton-badge-stack) > .toolbarbutton-icon {
   max-width: 16px;
 }
 
 :-moz-any(toolbar, .widget-overflow-list) .toolbarbutton-1:-moz-any(@primaryToolbarButtons@, .toolbarbutton-legacy-addon) > .toolbarbutton-icon,
--- a/browser/themes/shared/plugin-doorhanger.inc.css
+++ b/browser/themes/shared/plugin-doorhanger.inc.css
@@ -13,17 +13,17 @@
 
 .center-item-label {
   margin-inline-start: 6px;
   margin-bottom: 0;
   text-overflow: ellipsis;
 }
 
 .center-item-warning-icon {
-  background-image: url("chrome://mozapps/skin/extensions/alerticon-info-negative.png");
+  background-image: url("chrome://mozapps/skin/extensions/alerticon-info-negative.svg");
   background-repeat: no-repeat;
   width: 16px;
   height: 15px;
   margin-inline-start: 6px;
 }
 
 .click-to-play-plugins-notification-button-container {
   background-color: var(--arrowpanel-dimmed);
--- a/media/libstagefright/binding/mp4parse-cargo.patch
+++ b/media/libstagefright/binding/mp4parse-cargo.patch
@@ -4,17 +4,17 @@ index 5092cd7..ecbc8c0 100644
 +++ b/media/libstagefright/binding/mp4parse_capi/Cargo.toml
 @@ -17,14 +17,9 @@ exclude = [
    "*.mp4",
  ]
  
 -build = "build.rs"
 -
  [dependencies]
- "mp4parse" = {version = "0.5.0", path = "../mp4parse"}
+ "mp4parse" = {version = "0.5.1", path = "../mp4parse"}
  
 -[build-dependencies]
 -rusty-cheddar = "0.3.2"
 -
  [features]
  fuzz = ["mp4parse/fuzz"]
  
 diff --git a/media/libstagefright/binding/mp4parse/Cargo.toml b/media/libstagefright/binding/mp4parse/Cargo.toml
--- a/media/libstagefright/binding/mp4parse/Cargo.toml
+++ b/media/libstagefright/binding/mp4parse/Cargo.toml
@@ -1,11 +1,11 @@
 [package]
 name = "mp4parse"
-version = "0.5.0"
+version = "0.5.1"
 authors = [
   "Ralph Giles <giles@mozilla.com>",
   "Matthew Gregan <kinetik@flim.org>",
 ]
 
 description = "Parser for ISO base media file format (mp4)"
 documentation = "https://mp4parse-docs.surge.sh/mp4parse/"
 license = "MPL-2.0"
--- a/media/libstagefright/binding/mp4parse/src/lib.rs
+++ b/media/libstagefright/binding/mp4parse/src/lib.rs
@@ -286,29 +286,31 @@ pub enum TrackType {
     Video,
     Unknown,
 }
 
 impl Default for TrackType {
     fn default() -> Self { TrackType::Unknown }
 }
 
-/// The media's global (mvhd) timescale.
+/// The media's global (mvhd) timescale in units per second.
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub struct MediaTimeScale(pub u64);
 
-/// A time scaled by the media's global (mvhd) timescale.
+/// A time to be scaled by the media's global (mvhd) timescale.
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub struct MediaScaledTime(pub u64);
 
 /// The track's local (mdhd) timescale.
+/// Members are timescale units per second and the track id.
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub struct TrackTimeScale(pub u64, pub usize);
 
-/// A time scaled by the track's local (mdhd) timescale.
+/// A time to be scaled by the track's local (mdhd) timescale.
+/// Members are time in scale units and the track id.
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub struct TrackScaledTime(pub u64, pub usize);
 
 /// A fragmented file contains no sample data in stts, stsc, and stco.
 #[derive(Debug, Default)]
 pub struct EmptySampleTableBoxes {
     pub empty_stts : bool,
     pub empty_stsc : bool,
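
The updated doc comments above describe a timescale as "units per second": a scaled time is converted to wall-clock time by dividing by its timescale. A minimal illustrative Rust sketch, not part of this changeset (the to_seconds helper is hypothetical):

// Illustrative only: convert a scaled time to seconds using the
// "units per second" timescale described in the doc comments above.
struct MediaTimeScale(u64);
struct MediaScaledTime(u64);

fn to_seconds(time: MediaScaledTime, scale: MediaTimeScale) -> f64 {
    assert!(scale.0 != 0);
    time.0 as f64 / scale.0 as f64
}

fn main() {
    // With a 90 kHz timescale (common in MP4 files), 180000 units is 2 seconds.
    let seconds = to_seconds(MediaScaledTime(180_000), MediaTimeScale(90_000));
    assert_eq!(seconds, 2.0);
}

The microsecond conversions added in mp4parse_capi below are the same idea with a factor of 1,000,000, computed with the overflow-aware rational_scale helper.
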
--- a/media/libstagefright/binding/mp4parse_capi/Cargo.toml
+++ b/media/libstagefright/binding/mp4parse_capi/Cargo.toml
@@ -1,11 +1,11 @@
 [package]
 name = "mp4parse_capi"
-version = "0.5.0"
+version = "0.5.1"
 authors = [
   "Ralph Giles <giles@mozilla.com>",
   "Matthew Gregan <kinetik@flim.org>",
 ]
 
 description = "Parser for ISO base media file format (mp4)"
 documentation = "https://mp4parse-docs.surge.sh/mp4parse/"
 license = "MPL-2.0"
@@ -13,16 +13,16 @@ license = "MPL-2.0"
 repository = "https://github.com/mozilla/mp4parse-rust"
 
 # Avoid complaints about trying to package test files.
 exclude = [
   "*.mp4",
 ]
 
 [dependencies]
-"mp4parse" = {version = "0.5.0", path = "../mp4parse"}
+"mp4parse" = {version = "0.5.1", path = "../mp4parse"}
 
 [features]
 fuzz = ["mp4parse/fuzz"]
 
 # Somewhat heavy-handed, but we want at least -Z force-overflow-checks=on.
 [profile.release]
 debug-assertions = true
--- a/media/libstagefright/binding/mp4parse_capi/src/lib.rs
+++ b/media/libstagefright/binding/mp4parse_capi/src/lib.rs
@@ -273,25 +273,46 @@ pub unsafe extern fn mp4parse_get_track_
     // Make sure the track count fits in a u32.
     if context.tracks.len() > u32::max_value() as usize {
         return MP4PARSE_ERROR_INVALID;
     }
     *count = context.tracks.len() as u32;
     MP4PARSE_OK
 }
 
-fn media_time_to_ms(time: MediaScaledTime, scale: MediaTimeScale) -> u64 {
-    assert!(scale.0 != 0);
-    time.0 * 1000000 / scale.0
+/// Calculate numerator * scale / denominator, if possible.
+///
+/// Applying the distributivity of integer arithmetic, we divide first
+/// and add the remainder after multiplying each term separately
+/// to preserve precision while leaving more headroom. That is,
+/// (n * s) / d is split into floor(n / d) * s + (n % d) * s / d.
+///
+/// Return None on overflow or if the denominator is zero.
+fn rational_scale(numerator: u64, denominator: u64, scale: u64) -> Option<u64> {
+    if denominator == 0 {
+        return None;
+    }
+    let integer = numerator / denominator;
+    let remainder = numerator % denominator;
+    match integer.checked_mul(scale) {
+        Some(integer) => remainder.checked_mul(scale)
+            .and_then(|remainder| (remainder/denominator).checked_add(integer)),
+        None => None,
+    }
 }
 
-fn track_time_to_ms(time: TrackScaledTime, scale: TrackTimeScale) -> u64 {
+fn media_time_to_us(time: MediaScaledTime, scale: MediaTimeScale) -> Option<u64> {
+    let microseconds_per_second = 1000000;
+    rational_scale(time.0, scale.0, microseconds_per_second)
+}
+
+fn track_time_to_us(time: TrackScaledTime, scale: TrackTimeScale) -> Option<u64> {
     assert!(time.1 == scale.1);
-    assert!(scale.0 != 0);
-    time.0 * 1000000 / scale.0
+    let microseconds_per_second = 1000000;
+    rational_scale(time.0, scale.0, microseconds_per_second)
 }
 
 /// Fill the supplied `mp4parse_track_info` with metadata for `track`.
 #[no_mangle]
 pub unsafe extern fn mp4parse_get_track_info(parser: *mut mp4parse_parser, track_index: u32, info: *mut mp4parse_track_info) -> mp4parse_error {
     if parser.is_null() || info.is_null() || (*parser).poisoned() {
         return MP4PARSE_ERROR_BADARG;
     }
@@ -328,23 +349,34 @@ pub unsafe extern fn mp4parse_get_track_
 
     let track = &context.tracks[track_index];
 
     if let (Some(track_timescale),
             Some(context_timescale),
             Some(track_duration)) = (track.timescale,
                                      context.timescale,
                                      track.duration) {
-        info.media_time = track.media_time.map_or(0, |media_time| {
-            track_time_to_ms(media_time, track_timescale) as i64
-        }) - track.empty_duration.map_or(0, |empty_duration| {
-            media_time_to_ms(empty_duration, context_timescale) as i64
-        });
+        let media_time =
+            match track.media_time.map_or(Some(0), |media_time| {
+                    track_time_to_us(media_time, track_timescale) }) {
+                Some(time) => time as i64,
+                None => return MP4PARSE_ERROR_INVALID,
+            };
+        let empty_duration =
+            match track.empty_duration.map_or(Some(0), |empty_duration| {
+                    media_time_to_us(empty_duration, context_timescale) }) {
+                Some(time) => time as i64,
+                None => return MP4PARSE_ERROR_INVALID,
+            };
+        info.media_time = media_time - empty_duration;
 
-        info.duration = track_time_to_ms(track_duration, track_timescale);
+        match track_time_to_us(track_duration, track_timescale) {
+            Some(duration) => info.duration = duration,
+            None => return MP4PARSE_ERROR_INVALID,
+        }
     } else {
         return MP4PARSE_ERROR_INVALID
     }
 
     info.track_id = match track.track_id {
         Some(track_id) => track_id,
         None => return MP4PARSE_ERROR_INVALID,
     };
@@ -742,8 +774,34 @@ fn arg_validation_with_data() {
         assert_eq!(MP4PARSE_ERROR_BADARG, mp4parse_get_track_audio_info(parser, 3, &mut audio));
         assert_eq!(audio.channels, 0);
         assert_eq!(audio.bit_depth, 0);
         assert_eq!(audio.sample_rate, 0);
 
         mp4parse_free(parser);
     }
 }
+
+#[test]
+fn rational_scale_overflow() {
+    assert_eq!(rational_scale(17, 3, 1000), Some(5666));
+    let large = 0x4000_0000_0000_0000;
+    assert_eq!(rational_scale(large, 2, 2), Some(large));
+    assert_eq!(rational_scale(large, 4, 4), Some(large));
+    assert_eq!(rational_scale(large, 2, 8), None);
+    assert_eq!(rational_scale(large, 8, 4), Some(large/2));
+    assert_eq!(rational_scale(large + 1, 4, 4), Some(large+1));
+    assert_eq!(rational_scale(large, 40, 1000), None);
+}
+
+#[test]
+fn media_time_overflow() {
+  let scale = MediaTimeScale(90000);
+  let duration = MediaScaledTime(9007199254710000);
+  assert_eq!(media_time_to_us(duration, scale), Some(100079991719000000));
+}
+
+#[test]
+fn track_time_overflow() {
+  let scale = TrackTimeScale(44100, 0);
+  let duration = TrackScaledTime(4413527634807900, 0);
+  assert_eq!(track_time_to_us(duration, scale), Some(100079991719000000));
+}
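
The arithmetic in rational_scale above can be checked in isolation: since n = (n / d) * d + (n % d) under integer division, (n * s) / d equals (n / d) * s + ((n % d) * s) / d exactly, so the full n * s product never has to be formed; both smaller products are still overflow-checked. An illustrative standalone sketch (not part of the patch):

// Illustrative sketch of the precision-preserving split used by
// rational_scale in this patch; overflow in either product yields None.
fn rational_scale(numerator: u64, denominator: u64, scale: u64) -> Option<u64> {
    if denominator == 0 {
        return None;
    }
    let integer = numerator / denominator;   // floor(n / d)
    let remainder = numerator % denominator; // n % d
    let scaled_integer = integer.checked_mul(scale)?;
    let scaled_remainder = remainder.checked_mul(scale)?;
    (scaled_remainder / denominator).checked_add(scaled_integer)
}

fn main() {
    // 17 * 1000 / 3 truncates to 5666, matching rational_scale_overflow above.
    assert_eq!(rational_scale(17, 3, 1000), Some(5666));
    // 2^62 * 2 would overflow u64 if computed naively, but dividing first
    // keeps the intermediate values in range.
    let large = 0x4000_0000_0000_0000u64;
    assert_eq!(rational_scale(large, 2, 2), Some(large));
}
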
--- a/media/libstagefright/binding/update-rust.sh
+++ b/media/libstagefright/binding/update-rust.sh
@@ -1,13 +1,13 @@
 #!/bin/sh
 # Script to update mp4parse-rust sources to latest upstream
 
 # Default version.
-VER=v0.5.0
+VER=v0.5.1
 
 # Accept version or commit from the command line.
 if test -n "$1"; then
   VER=$1
 fi
 
 echo "Fetching sources..."
 rm -rf _upstream
--- a/netwerk/base/nsStandardURL.cpp
+++ b/netwerk/base/nsStandardURL.cpp
@@ -1285,16 +1285,18 @@ nsStandardURL::GetHost(nsACString &resul
 {
     result = Host();
     return NS_OK;
 }
 
 NS_IMETHODIMP
 nsStandardURL::GetPort(int32_t *result)
 {
+    // should never be more than 16 bit
+    MOZ_ASSERT(mPort <= std::numeric_limits<uint16_t>::max());
     *result = mPort;
     return NS_OK;
 }
 
 // result may contain unescaped UTF-8 characters
 NS_IMETHODIMP
 nsStandardURL::GetPath(nsACString &result)
 {
@@ -1962,18 +1964,19 @@ nsStandardURL::SetPort(int32_t port)
 {
     ENSURE_MUTABLE();
 
     LOG(("nsStandardURL::SetPort [port=%d]\n", port));
 
     if ((port == mPort) || (mPort == -1 && port == mDefaultPort))
         return NS_OK;
 
-    // ports must be >= 0
-    if (port < -1) // -1 == use default
+    // ports must be >= 0 and 16 bit
+    // -1 == use default
+    if (port < -1 || port > std::numeric_limits<uint16_t>::max())
         return NS_ERROR_MALFORMED_URI;
 
     if (mURLType == URLTYPE_NO_AUTHORITY) {
         NS_WARNING("cannot set port on no-auth url");
         return NS_ERROR_UNEXPECTED;
     }
 
     InvalidateCache();
@@ -3118,17 +3121,18 @@ NS_IMETHODIMP
 nsStandardURL::Init(uint32_t urlType,
                     int32_t defaultPort,
                     const nsACString &spec,
                     const char *charset,
                     nsIURI *baseURI)
 {
     ENSURE_MUTABLE();
 
-    if (spec.Length() > (uint32_t) net_GetURLMaxLength()) {
+    if (spec.Length() > (uint32_t) net_GetURLMaxLength() ||
+        defaultPort > std::numeric_limits<uint16_t>::max()) {
         return NS_ERROR_MALFORMED_URI;
     }
 
     InvalidateCache();
 
     switch (urlType) {
     case URLTYPE_STANDARD:
         mParser = net_GetStdURLParser();
@@ -3169,16 +3173,21 @@ nsStandardURL::Init(uint32_t urlType,
 
 NS_IMETHODIMP
 nsStandardURL::SetDefaultPort(int32_t aNewDefaultPort)
 {
     ENSURE_MUTABLE();
 
     InvalidateCache();
 
+    // should never be more than 16 bit
+    if (aNewDefaultPort >= std::numeric_limits<uint16_t>::max()) {
+        return NS_ERROR_MALFORMED_URI;
+    }
+
     // If we're already using the new default-port as a custom port, then clear
     // it off of our mSpec & set mPort to -1, to indicate that we'll be using
     // the default from now on (which happens to match what we already had).
     if (mPort == aNewDefaultPort) {
         ReplacePortInSpec(-1);
         mPort = -1;
     }
     mDefaultPort = aNewDefaultPort;
--- a/netwerk/base/nsURLParsers.cpp
+++ b/netwerk/base/nsURLParsers.cpp
@@ -601,17 +601,17 @@ nsAuthURLParser::ParseServerInfo(const c
             }
             else {
                 const char* nondigit = NS_strspnp("0123456789", buf.get());
                 if (nondigit && *nondigit)
                     return NS_ERROR_MALFORMED_URI;
 
                 nsresult err;
                 *port = buf.ToInteger(&err);
-                if (NS_FAILED(err) || *port < 0)
+                if (NS_FAILED(err) || *port < 0 || *port > std::numeric_limits<uint16_t>::max())
                     return NS_ERROR_MALFORMED_URI;
             }
         }
     }
     else {
         // serverinfo = <hostname>
         SET_RESULT(hostname, 0, serverinfoLen);
         if (port)
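
The bound added across GetPort, SetPort, Init and ParseServerInfo above is the same rule in each place: a port is either -1 (meaning "use the scheme's default") or a value that fits in 16 bits. An illustrative Rust sketch of that rule, not part of the patch (valid_port is a hypothetical name):

// Illustrative only: the port rule this patch enforces in SetPort and
// ParseServerInfo. -1 is the "use default port" sentinel; anything else
// must fit in a u16 (0..=65535) or the URI is treated as malformed.
fn valid_port(port: i64) -> bool {
    port == -1 || (0..=u16::MAX as i64).contains(&port)
}

fn main() {
    assert!(valid_port(-1));       // default-port sentinel
    assert!(valid_port(65535));    // largest 16-bit port
    assert!(!valid_port(65536));   // rejected (NS_ERROR_MALFORMED_URI)
    assert!(!valid_port(80000));   // the value exercised by test_bug652761.js
}

SetDefaultPort is the one slightly stricter spot: as written it uses >= rather than >, so it also rejects a default port of exactly 65535.
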
--- a/netwerk/test/unit/test_bug652761.js
+++ b/netwerk/test/unit/test_bug652761.js
@@ -1,20 +1,17 @@
 // This is just a crashtest for a url that is rejected at parse time (port 80,000)
 
 Cu.import("resource://gre/modules/NetUtil.jsm");
 
-function completeTest(request, data, ctx)
+function run_test()
 {
+    // Bug 1301621 makes invalid ports throw
+    Assert.throws(() => {
+        var chan = NetUtil.newChannel({
+          uri: "http://localhost:80000/",
+          loadUsingSystemPrincipal: true
+        });
+    }, "invalid port");
+
     do_test_finished();
 }
 
-function run_test()
-{
-    var chan = NetUtil.newChannel({
-      uri: "http://localhost:80000/",
-      loadUsingSystemPrincipal: true
-    });
-    var httpChan = chan.QueryInterface(Components.interfaces.nsIHttpChannel);
-    httpChan.asyncOpen2(new ChannelListener(completeTest,httpChan, CL_EXPECT_FAILURE));
-    do_test_pending();
-}
-
deleted file mode 100644
--- a/netwerk/test/unit/test_invalidport.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// This is essentially a crashtest for accessing an out of range port
-// Perform the async open several times in order to induce exponential
-// scheduling behavior bugs.
-
-Cu.import("resource://gre/modules/NetUtil.jsm");
-
-var CC = Components.Constructor;
-
-var counter = 0;
-const iterations = 10;
-
-var listener = {
-  onStartRequest: function test_onStartR(request, ctx) {
-  },
-
-  onDataAvailable: function test_ODA() {
-    do_throw("Should not get any data!");
-  },
-
-  onStopRequest: function test_onStopR(request, ctx, status) {
-    if (counter++ == iterations)
-      do_test_finished();
-    else
-      execute_test();
-  },
-};
-
-function run_test() {
-  execute_test();
-  do_test_pending();
-}
-
-function execute_test() {
-  var chan = NetUtil.newChannel({uri: "http://localhost:75000", loadUsingSystemPrincipal: true});
-  chan.QueryInterface(Ci.nsIHttpChannel);
-  chan.asyncOpen2(listener);
-}
-
new file mode 100644
--- /dev/null
+++ b/netwerk/test/unit/test_large_port.js
@@ -0,0 +1,36 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// Ensure that URIs with ports outside the 16-bit range are rejected
+
+"use strict";
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+
+const StandardURL = Components.Constructor("@mozilla.org/network/standard-url;1",
+                                           "nsIStandardURL",
+                                           "init");
+function run_test()
+{
+    // Bug 1301621 makes invalid ports throw
+    Assert.throws(() => {
+        new StandardURL(Ci.nsIStandardURL.URLTYPE_AUTHORITY, 65536,
+                "http://localhost", "UTF-8", null)
+    }, "invalid port during creation");
+    let url = new StandardURL(Ci.nsIStandardURL.URLTYPE_AUTHORITY, 65535,
+                              "http://localhost", "UTF-8", null)
+                .QueryInterface(Ci.nsIStandardURL)
+
+    Assert.throws(() => {
+        url.setDefaultPort(65536);
+    }, "invalid port in setDefaultPort");
+    Assert.throws(() => {
+        url.port = 65536;
+    }, "invalid port in port setter");
+
+    do_check_eq(url.QueryInterface(Ci.nsIURI).port, -1);
+    do_test_finished();
+}
+
--- a/netwerk/test/unit/xpcshell.ini
+++ b/netwerk/test/unit/xpcshell.ini
@@ -227,18 +227,18 @@ skip-if = bits != 32
 [test_idn_blacklist.js]
 [test_idn_urls.js]
 [test_idna2008.js]
 # IDNA2008 depends on ICU, not available on android
 skip-if = os == "android"
 [test_immutable.js]
 skip-if = !hasNode
 run-sequentially = node server exceptions dont replay well
-[test_invalidport.js]
 [test_localstreams.js]
+[test_large_port.js]
 [test_mismatch_last-modified.js]
 [test_MIME_params.js]
 [test_mozTXTToHTMLConv.js]
 [test_multipart_byteranges.js]
 [test_multipart_streamconv.js]
 [test_multipart_streamconv_missing_lead_boundary.js]
 [test_nestedabout_serialize.js]
 [test_net_addr.js]
new file mode 100644
--- /dev/null
+++ b/python/mozlint/test/conftest.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import pytest
+
+from mozlint import LintRoller
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture
+def lint(request):
+    lintargs = getattr(request.module, 'lintargs', {})
+    return LintRoller(root=here, **lintargs)
+
+
+@pytest.fixture(scope='session')
+def filedir():
+    return os.path.join(here, 'files')
+
+
+@pytest.fixture(scope='module')
+def files(filedir, request):
+    suffix_filter = getattr(request.module, 'files', [''])
+    return [os.path.join(filedir, p) for p in os.listdir(filedir)
+            if any(p.endswith(suffix) for suffix in suffix_filter)]
+
+
+@pytest.fixture(scope='session')
+def lintdir():
+    return os.path.join(here, 'linters')
+
+
+@pytest.fixture(scope='module')
+def linters(lintdir, request):
+    suffix_filter = getattr(request.module, 'linters', ['.lint'])
+    return [os.path.join(lintdir, p) for p in os.listdir(lintdir)
+            if any(p.endswith(suffix) for suffix in suffix_filter)]
--- a/python/mozlint/test/test_formatters.py
+++ b/python/mozlint/test/test_formatters.py
@@ -1,94 +1,90 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import unicode_literals
 
 import json
-import os
+import sys
 from collections import defaultdict
-from unittest import TestCase
 
-from mozunit import main
+import pytest
 
 from mozlint import ResultContainer
 from mozlint import formatters
 
 
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture
+def results(scope='module'):
+    containers = (
+        ResultContainer(
+            linter='foo',
+            path='a/b/c.txt',
+            message="oh no foo",
+            lineno=1,
+        ),
+        ResultContainer(
+            linter='bar',
+            path='d/e/f.txt',
+            message="oh no bar",
+            hint="try baz instead",
+            level='warning',
+            lineno=4,
+            column=2,
+            rule="bar-not-allowed",
+        ),
+        ResultContainer(
+            linter='baz',
+            path='a/b/c.txt',
+            message="oh no baz",
+            lineno=4,
+            source="if baz:",
+        ),
+    )
+    results = defaultdict(list)
+    for c in containers:
+        results[c.path].append(c)
+    return results
 
 
-class TestFormatters(TestCase):
-
-    def __init__(self, *args, **kwargs):
-        TestCase.__init__(self, *args, **kwargs)
-
-        containers = (
-            ResultContainer(
-                linter='foo',
-                path='a/b/c.txt',
-                message="oh no foo",
-                lineno=1,
-            ),
-            ResultContainer(
-                linter='bar',
-                path='d/e/f.txt',
-                message="oh no bar",
-                hint="try baz instead",
-                level='warning',
-                lineno=4,
-                column=2,
-                rule="bar-not-allowed",
-            ),
-            ResultContainer(
-                linter='baz',
-                path='a/b/c.txt',
-                message="oh no baz",
-                lineno=4,
-                source="if baz:",
-            ),
-        )
-
-        self.results = defaultdict(list)
-        for c in containers:
-            self.results[c.path].append(c)
-
-    def test_stylish_formatter(self):
-        expected = """
+def test_stylish_formatter(results):
+    expected = """
 a/b/c.txt
   1  error  oh no foo  (foo)
   4  error  oh no baz  (baz)
 
 d/e/f.txt
   4:2  warning  oh no bar  bar-not-allowed (bar)
 
 \u2716 3 problems (2 errors, 1 warning)
 """.strip()
 
-        fmt = formatters.get('stylish', disable_colors=True)
-        self.assertEqual(expected, fmt(self.results))
+    fmt = formatters.get('stylish', disable_colors=True)
+    assert expected == fmt(results)
 
-    def test_treeherder_formatter(self):
-        expected = """
+
+def test_treeherder_formatter(results):
+    expected = """
 TEST-UNEXPECTED-ERROR | a/b/c.txt:1 | oh no foo (foo)
 TEST-UNEXPECTED-ERROR | a/b/c.txt:4 | oh no baz (baz)
 TEST-UNEXPECTED-WARNING | d/e/f.txt:4:2 | oh no bar (bar-not-allowed)
 """.strip()
 
-        fmt = formatters.get('treeherder')
-        self.assertEqual(expected, fmt(self.results))
+    fmt = formatters.get('treeherder')
+    assert expected == fmt(results)
 
-    def test_json_formatter(self):
-        fmt = formatters.get('json')
-        formatted = json.loads(fmt(self.results))
 
-        self.assertEqual(set(formatted.keys()), set(self.results.keys()))
+def test_json_formatter(results):
+    fmt = formatters.get('json')
+    formatted = json.loads(fmt(results))
 
-        slots = ResultContainer.__slots__
-        for errors in formatted.values():
-            for err in errors:
-                self.assertTrue(all(s in err for s in slots))
+    assert set(formatted.keys()) == set(results.keys())
+
+    slots = ResultContainer.__slots__
+    for errors in formatted.values():
+        for err in errors:
+            assert all(s in err for s in slots)
 
 
 if __name__ == '__main__':
-    main()
+    sys.exit(pytest.main(['--verbose', __file__]))
--- a/python/mozlint/test/test_parser.py
+++ b/python/mozlint/test/test_parser.py
@@ -1,68 +1,55 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
-from unittest import TestCase
+import sys
 
-from mozunit import main
+import pytest
 
 from mozlint.parser import Parser
 from mozlint.errors import (
     LinterNotFound,
     LinterParseError,
 )
 
 
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture(scope='module')
+def parse(lintdir):
+    parser = Parser()
+
+    def _parse(name):
+        path = os.path.join(lintdir, name)
+        return parser(path)
+    return _parse
 
 
-class TestParser(TestCase):
-
-    def __init__(self, *args, **kwargs):
-        TestCase.__init__(self, *args, **kwargs)
-
-        self._lintdir = os.path.join(here, 'linters')
-        self._parse = Parser()
-
-    def parse(self, name):
-        return self._parse(os.path.join(self._lintdir, name))
+def test_parse_valid_linter(parse):
+    lintobj = parse('string.lint')
+    assert isinstance(lintobj, dict)
+    assert 'name' in lintobj
+    assert 'description' in lintobj
+    assert 'type' in lintobj
+    assert 'payload' in lintobj
 
-    def test_parse_valid_linter(self):
-        linter = self.parse('string.lint')
-        self.assertIsInstance(linter, dict)
-        self.assertIn('name', linter)
-        self.assertIn('description', linter)
-        self.assertIn('type', linter)
-        self.assertIn('payload', linter)
-
-    def test_parse_invalid_type(self):
-        with self.assertRaises(LinterParseError):
-            self.parse('invalid_type.lint')
 
-    def test_parse_invalid_extension(self):
-        with self.assertRaises(LinterParseError):
-            self.parse('invalid_extension.lnt')
-
-    def test_parse_invalid_include_exclude(self):
-        with self.assertRaises(LinterParseError):
-            self.parse('invalid_include.lint')
-
-        with self.assertRaises(LinterParseError):
-            self.parse('invalid_exclude.lint')
+@pytest.mark.parametrize('linter', [
+    'invalid_type.lint',
+    'invalid_extension.lnt',
+    'invalid_include.lint',
+    'invalid_exclude.lint',
+    'missing_attrs.lint',
+    'missing_definition.lint',
+])
+def test_parse_invalid_linter(parse, linter):
+    with pytest.raises(LinterParseError):
+        parse(linter)
 
-    def test_parse_missing_attributes(self):
-        with self.assertRaises(LinterParseError):
-            self.parse('missing_attrs.lint')
 
-    def test_parse_missing_definition(self):
-        with self.assertRaises(LinterParseError):
-            self.parse('missing_definition.lint')
-
-    def test_parse_non_existent_linter(self):
-        with self.assertRaises(LinterNotFound):
-            self.parse('missing_file.lint')
+def test_parse_non_existent_linter(parse):
+    with pytest.raises(LinterNotFound):
+        parse('missing_file.lint')
 
 
 if __name__ == '__main__':
-    main()
+    sys.exit(pytest.main(['--verbose', __file__]))
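
The test_parser.py rewrite above leans on two pytest features: a module-scoped fixture that builds the Parser once per file, and pytest.mark.parametrize to turn each invalid .lint definition into its own test case. A minimal standalone sketch of that combination, with invented names, in case the pattern is unfamiliar:

import pytest


@pytest.fixture(scope='module')
def check():
    # Built once per module, like the Parser instance above.
    def _check(value):
        if value < 0:
            raise ValueError(value)
        return value
    return _check


@pytest.mark.parametrize('value', [1, 2, 3])
def test_valid_values(check, value):
    assert check(value) == value


@pytest.mark.parametrize('value', [-1, -2])
def test_invalid_values(check, value):
    with pytest.raises(ValueError):
        check(value)
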
--- a/python/mozlint/test/test_roller.py
+++ b/python/mozlint/test/test_roller.py
@@ -1,80 +1,70 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 import sys
-from unittest import TestCase
 
-from mozunit import main
+import pytest
 
-from mozlint import LintRoller, ResultContainer
+from mozlint import ResultContainer
 from mozlint.errors import LintersNotConfigured, LintException
 
 
 here = os.path.abspath(os.path.dirname(__file__))
 
 
-class TestLintRoller(TestCase):
+linters = ('string.lint', 'regex.lint', 'external.lint')
 
-    def __init__(self, *args, **kwargs):
-        TestCase.__init__(self, *args, **kwargs)
 
-        self.filedir = os.path.join(here, 'files')
-        self.files = [os.path.join(self.filedir, f) for f in os.listdir(self.filedir)]
-        self.lintdir = os.path.join(here, 'linters')
+def test_roll_no_linters_configured(lint, files):
+    with pytest.raises(LintersNotConfigured):
+        lint.roll(files)
 
-        names = ('string.lint', 'regex.lint', 'external.lint')
-        self.linters = [os.path.join(self.lintdir, n) for n in names]
+
+def test_roll_successful(lint, linters, files):
+    lint.read(linters)
 
-    def setUp(self):
-        TestCase.setUp(self)
-        self.lint = LintRoller(root=here)
+    result = lint.roll(files)
+    assert len(result) == 1
 
-    def test_roll_no_linters_configured(self):
-        with self.assertRaises(LintersNotConfigured):
-            self.lint.roll(self.files)
+    path = result.keys()[0]
+    assert os.path.basename(path) == 'foobar.js'
 
-    def test_roll_successful(self):
-        self.lint.read(self.linters)
+    errors = result[path]
+    assert isinstance(errors, list)
+    assert len(errors) == 6
 
-        result = self.lint.roll(self.files)
-        self.assertEqual(len(result), 1)
-
-        path = result.keys()[0]
-        self.assertEqual(os.path.basename(path), 'foobar.js')
+    container = errors[0]
+    assert isinstance(container, ResultContainer)
+    assert container.rule == 'no-foobar'
 
-        errors = result[path]
-        self.assertIsInstance(errors, list)
-        self.assertEqual(len(errors), 6)
+
+def test_roll_catch_exception(lint, lintdir, files):
+    lint.read(os.path.join(lintdir, 'raises.lint'))
 
-        container = errors[0]
-        self.assertIsInstance(container, ResultContainer)
-        self.assertEqual(container.rule, 'no-foobar')
+    # suppress printed traceback from test output
+    old_stderr = sys.stderr
+    sys.stderr = open(os.devnull, 'w')
+    with pytest.raises(LintException):
+        lint.roll(files)
+    sys.stderr = old_stderr
 
-    def test_roll_catch_exception(self):
-        self.lint.read(os.path.join(self.lintdir, 'raises.lint'))
 
-        # suppress printed traceback from test output
-        old_stderr = sys.stderr
-        sys.stderr = open(os.devnull, 'w')
-        with self.assertRaises(LintException):
-            self.lint.roll(self.files)
-        sys.stderr = old_stderr
+def test_roll_with_excluded_path(lint, linters, files):
+    lint.lintargs.update({'exclude': ['**/foobar.js']})
 
-    def test_roll_with_excluded_path(self):
-        self.lint.lintargs.update({'exclude': ['**/foobar.js']})
+    lint.read(linters)
+    result = lint.roll(files)
 
-        self.lint.read(self.linters)
-        result = self.lint.roll(self.files)
+    assert len(result) == 0
 
-        self.assertEqual(len(result), 0)
 
-    def test_roll_with_invalid_extension(self):
-        self.lint.read(os.path.join(self.lintdir, 'external.lint'))
-        result = self.lint.roll(os.path.join(self.filedir, 'foobar.py'))
-        self.assertEqual(len(result), 0)
+def test_roll_with_invalid_extension(lint, lintdir, filedir):
+    lint.read(os.path.join(lintdir, 'external.lint'))
+    result = lint.roll(os.path.join(filedir, 'foobar.py'))
+    assert len(result) == 0
 
 
 if __name__ == '__main__':
-    main()
+    sys.exit(pytest.main(['--verbose', __file__]))
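
The lint, lintdir, filedir, files and linters fixtures used by test_roller.py above are not defined in that file, so they presumably come from a shared conftest.py elsewhere in this change. A minimal sketch of what such a conftest could look like; the scopes, paths and names here are assumptions inferred from how the tests use them, not the actual Mozilla code:

import os

import pytest

from mozlint import LintRoller

here = os.path.abspath(os.path.dirname(__file__))


@pytest.fixture(scope='session')
def lintdir():
    # Directory holding the *.lint definition files.
    return os.path.join(here, 'linters')


@pytest.fixture(scope='session')
def filedir():
    # Directory holding the files the linters run against.
    return os.path.join(here, 'files')


@pytest.fixture(scope='session')
def files(filedir):
    return [os.path.join(filedir, f) for f in os.listdir(filedir)]


@pytest.fixture
def linters(lintdir, request):
    # Resolve a module-level 'linters' tuple (as defined at the top of
    # test_roller.py above) into absolute paths under lintdir.
    names = getattr(request.module, 'linters', ())
    return [os.path.join(lintdir, name) for name in names]


@pytest.fixture
def lint():
    # A fresh LintRoller per test, mirroring the old setUp() behaviour.
    return LintRoller(root=here)
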
--- a/python/mozlint/test/test_types.py
+++ b/python/mozlint/test/test_types.py
@@ -1,78 +1,50 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
-from unittest import TestCase
+import sys
 
-from mozunit import main
+import pytest
 
-from mozlint import LintRoller
 from mozlint.result import ResultContainer
 
 
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture
+def path(filedir):
+    def _path(name):
+        return os.path.join(filedir, name)
+    return _path
+
+
+@pytest.fixture(params=['string.lint', 'regex.lint', 'external.lint'])
+def linter(lintdir, request):
+    return os.path.join(lintdir, request.param)
 
 
-class TestLinterTypes(TestCase):
-
-    def __init__(self, *args, **kwargs):
-        TestCase.__init__(self, *args, **kwargs)
-
-        self.lintdir = os.path.join(here, 'linters')
-        self.filedir = os.path.join(here, 'files')
-        self.files = [os.path.join(self.filedir, f) for f in os.listdir(self.filedir)]
-
-    def setUp(self):
-        TestCase.setUp(self)
-        self.lint = LintRoller(root=here)
+def test_linter_types(lint, linter, files, path):
+    lint.read(linter)
+    result = lint.roll(files)
+    assert isinstance(result, dict)
+    assert path('foobar.js') in result
+    assert path('no_foobar.js') not in result
 
-    def path(self, name):
-        return os.path.join(self.filedir, name)
-
-    def test_string_linter(self):
-        self.lint.read(os.path.join(self.lintdir, 'string.lint'))
-        result = self.lint.roll(self.files)
-        self.assertIsInstance(result, dict)
-
-        self.assertIn(self.path('foobar.js'), result.keys())
-        self.assertNotIn(self.path('no_foobar.js'), result.keys())
-
-        result = result[self.path('foobar.js')][0]
-        self.assertIsInstance(result, ResultContainer)
-        self.assertEqual(result.linter, 'StringLinter')
+    result = result[path('foobar.js')][0]
+    assert isinstance(result, ResultContainer)
 
-    def test_regex_linter(self):
-        self.lint.read(os.path.join(self.lintdir, 'regex.lint'))
-        result = self.lint.roll(self.files)
-        self.assertIsInstance(result, dict)
-        self.assertIn(self.path('foobar.js'), result.keys())
-        self.assertNotIn(self.path('no_foobar.js'), result.keys())
+    name = os.path.basename(linter).split('.')[0]
+    assert result.linter.lower().startswith(name)
 
-        result = result[self.path('foobar.js')][0]
-        self.assertIsInstance(result, ResultContainer)
-        self.assertEqual(result.linter, 'RegexLinter')
 
-    def test_external_linter(self):
-        self.lint.read(os.path.join(self.lintdir, 'external.lint'))
-        result = self.lint.roll(self.files)
-        self.assertIsInstance(result, dict)
-        self.assertIn(self.path('foobar.js'), result.keys())
-        self.assertNotIn(self.path('no_foobar.js'), result.keys())
+def test_no_filter(lint, lintdir, files):
+    lint.read(os.path.join(lintdir, 'explicit_path.lint'))
+    result = lint.roll(files)
+    assert len(result) == 0
 
-        result = result[self.path('foobar.js')][0]
-        self.assertIsInstance(result, ResultContainer)
-        self.assertEqual(result.linter, 'ExternalLinter')
-
-    def test_no_filter(self):
-        self.lint.read(os.path.join(self.lintdir, 'explicit_path.lint'))
-        result = self.lint.roll(self.files)
-        self.assertEqual(len(result), 0)
-
-        self.lint.lintargs['use_filters'] = False
-        result = self.lint.roll(self.files)
-        self.assertEqual(len(result), 2)
+    lint.lintargs['use_filters'] = False
+    result = lint.roll(files)
+    assert len(result) == 2
 
 
 if __name__ == '__main__':
-    main()
+    sys.exit(pytest.main(['--verbose', __file__]))
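
test_linter_types above replaces three near-identical test methods with a single test driven by a parametrized fixture: pytest runs the test once per value in params, and request.param carries the current value. A tiny standalone illustration of that mechanism (names invented):

import pytest


@pytest.fixture(params=['string', 'regex', 'external'])
def flavour(request):
    # Every test that requests this fixture is instantiated once per value
    # in params; request.param is the value for the current instantiation.
    return request.param


def test_flavour_is_known(flavour):
    assert flavour in ('string', 'regex', 'external')
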
--- a/taskcluster/ci/desktop-test/tests.yml
+++ b/taskcluster/ci/desktop-test/tests.yml
@@ -389,32 +389,34 @@ reftest-no-accel:
 web-platform-tests:
     description: "Web platform test run"
     suite: web-platform-tests
     treeherder-symbol: tc-W()
     chunks: 12
     max-run-time: 7200
     instance-size: xlarge
     docker-image: {"in-tree": "desktop1604-test"}
+    checkout: true
     mozharness:
         script: mozharness/scripts/web_platform_tests.py
         no-read-buildbot-config: true
         config:
             - mozharness/configs/web_platform_tests/prod_config.py
             - mozharness/configs/remove_executables.py
         extra-options:
             - --test-type=testharness
 
 web-platform-tests-reftests:
     description: "Web platform reftest run"
     suite: web-platform-tests-reftests
     treeherder-symbol: tc-W(Wr)
     max-run-time: 5400
     instance-size: xlarge
     docker-image: {"in-tree": "desktop1604-test"}
+    checkout: true
     mozharness:
         script: mozharness/scripts/web_platform_tests.py
         no-read-buildbot-config: true
         config:
             - mozharness/configs/web_platform_tests/prod_config.py
             - mozharness/configs/remove_executables.py
         extra-options:
             - --test-type=reftest
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -52,16 +52,48 @@ def docker_worker_add_gecko_vcs_env_vars
     env.update({
         'GECKO_BASE_REPOSITORY': config.params['base_repository'],
         'GECKO_HEAD_REF': config.params['head_rev'],
         'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
         'GECKO_HEAD_REV': config.params['head_rev'],
     })
 
 
+def docker_worker_support_vcs_checkout(config, job, taskdesc):
+    """Update a job/task with parameters to enable a VCS checkout.
+
+    The configuration is intended for tasks using "run-task" and its
+    VCS checkout behavior.
+    """
+    level = config.params['level']
+
+    taskdesc['worker'].setdefault('caches', []).extend([
+        {
+            'type': 'persistent',
+            'name': 'level-%s-hg-shared' % level,
+            'mount-point': '/home/worker/hg-shared',
+        }, {
+            'type': 'persistent',
+            'name': 'level-%s-checkouts' % level,
+            'mount-point': '/home/worker/checkouts',
+        }
+    ])
+
+    taskdesc['worker'].setdefault('env', {}).update({
+        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+        'GECKO_HEAD_REV': config.params['head_rev'],
+    })
+
+    # Give task access to hgfingerprint secret so it can pin the certificate
+    # for hg.mozilla.org.
+    taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
+    taskdesc['worker']['taskcluster-proxy'] = True
+
+
 def docker_worker_setup_secrets(config, job, taskdesc):
     """Set up access to secrets via taskcluster-proxy.  The value of
     run['secrets'] should be a boolean or a list of secret names that
     can be accessed."""
     if not job['run'].get('secrets'):
         return
 
     taskdesc['worker']['taskcluster-proxy'] = True
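
For illustration, after docker_worker_support_vcs_checkout runs for a hypothetical level-3 push, the task description carries roughly the additions below. The repository URLs and revision are placeholders standing in for the push parameters; the cache names and mount points follow directly from the code above.

# Illustrative shape of the fields added to a taskdesc; values are assumed.
taskdesc_additions = {
    'worker': {
        'caches': [
            {'type': 'persistent',
             'name': 'level-3-hg-shared',
             'mount-point': '/home/worker/hg-shared'},
            {'type': 'persistent',
             'name': 'level-3-checkouts',
             'mount-point': '/home/worker/checkouts'},
        ],
        'env': {
            'GECKO_BASE_REPOSITORY': '<base repository from the push parameters>',
            'GECKO_HEAD_REPOSITORY': '<head repository from the push parameters>',
            'GECKO_HEAD_REV': '<head revision from the push parameters>',
        },
        'taskcluster-proxy': True,
    },
    'scopes': ['secrets:get:project/taskcluster/gecko/hgfingerprint'],
}
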
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -5,16 +5,19 @@
 Support for running jobs that are invoked via the `run-task` script.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 
 from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_support_vcs_checkout,
+)
 from voluptuous import Schema, Required, Any
 
 run_task_schema = Schema({
     Required('using'): 'run-task',
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
     Required('cache-dotcache', default=False): bool,
@@ -27,51 +30,30 @@ run_task_schema = Schema({
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 })
 
 
 @run_job_using("docker-worker", "run-task", schema=run_task_schema)
 def docker_worker_run_task(config, job, taskdesc):
     run = job['run']
-    checkout = run['checkout']
 
     worker = taskdesc['worker'] = copy.deepcopy(job['worker'])
 
-    if checkout:
-        worker['caches'] = [{
-            'type': 'persistent',
-            'name': 'level-{}-hg-shared'.format(config.params['level']),
-            'mount-point': "/home/worker/hg-shared",
-        }, {
-            'type': 'persistent',
-            'name': 'level-{}-checkouts'.format(config.params['level']),
-            'mount-point': "/home/worker/checkouts",
-        }]
+    if run['checkout']:
+        docker_worker_support_vcs_checkout(config, job, taskdesc)
 
     if run.get('cache-dotcache') and int(config.params['level']) > 1:
         worker['caches'].append({
             'type': 'persistent',
             'name': 'level-{level}-{project}-dotcache'.format(**config.params),
             'mount-point': '/home/worker/.cache',
         })
 
-    env = worker['env'] = {}
-    env.update({
-        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
-        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
-        'GECKO_HEAD_REV': config.params['head_rev'],
-    })
-
-    # give the task access to the hgfingerprint secret
-    if checkout:
-        taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
-        worker['taskcluster-proxy'] = True
-
     run_command = run['command']
     if isinstance(run_command, basestring):
         run_command = ['bash', '-cx', run_command]
     command = ['/home/worker/bin/run-task']
-    if checkout:
+    if run['checkout']:
         command.append('--vcs-checkout=/home/worker/checkouts/gecko')
     command.append('--')
     command.extend(run_command)
     worker['command'] = command
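
Concretely, for a run-task job with checkout enabled and a plain string command, the worker command assembled above ends up looking roughly like this (the command string is an invented example):

# Illustrative result of the command assembly in docker_worker_run_task.
worker_command = [
    '/home/worker/bin/run-task',
    '--vcs-checkout=/home/worker/checkouts/gecko',
    '--',
    'bash', '-cx', './mach build',
]
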
--- a/taskcluster/taskgraph/transforms/tests/make_task_description.py
+++ b/taskcluster/taskgraph/transforms/tests/make_task_description.py
@@ -15,16 +15,19 @@ The test description should be fully for
 transforms, and these transforms should not embody any specific knowledge about
 what should run where. this is the wrong place for special-casing platforms,
 for example - use `all_tests.py` instead.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.job.common import (
+    docker_worker_support_vcs_checkout,
+)
 
 import logging
 
 ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
 
 ARTIFACTS = [
     # (artifact name prefix, in-image path)
     ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
@@ -152,18 +155,16 @@ def docker_worker_setup(config, test, ta
     worker['caches'] = [{
         'type': 'persistent',
         'name': 'level-{}-{}-test-workspace'.format(
             config.params['level'], config.params['project']),
         'mount-point': "/home/worker/workspace",
     }]
 
     env = worker['env'] = {
-        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
-        'GECKO_HEAD_REV': config.params['head_rev'],
         'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
         'MOZHARNESS_SCRIPT': mozharness['script'],
         'MOZHARNESS_URL': {'task-reference': mozharness_url},
         'MOZILLA_BUILD_URL': {'task-reference': installer_url},
         'NEED_PULSEAUDIO': 'true',
         'NEED_WINDOW_MANAGER': 'true',
     }
 
@@ -187,19 +188,26 @@ def docker_worker_setup(config, test, ta
             'docker-worker:relengapi-proxy:tooltool.download.public',
         ])
 
     # assemble the command line
     command = [
         '/home/worker/bin/run-task',
         # The workspace cache/volume is default owned by root:root.
         '--chown', '/home/worker/workspace',
+    ]
+
+    if test['checkout']:
+        docker_worker_support_vcs_checkout(config, test, taskdesc)
+        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
+
+    command.extend([
         '--',
         '/home/worker/bin/test-linux.sh',
-    ]
+    ])
 
     if mozharness.get('no-read-buildbot-config'):
         command.append("--no-read-buildbot-config")
     command.extend([
         {"task-reference": "--installer-url=" + installer_url},
         {"task-reference": "--test-packages-url=" + test_packages_url},
     ])
     command.extend(mozharness.get('extra-options', []))
--- a/taskcluster/taskgraph/transforms/tests/test_description.py
+++ b/taskcluster/taskgraph/transforms/tests/test_description.py
@@ -114,16 +114,19 @@ test_description_schema = Schema({
 
     # seconds of runtime after which the task will be killed.  Like 'chunks',
     # this can be keyed by test pltaform.
     Required('max-run-time', default=3600): Any(
         int,
         {'by-test-platform': {basestring: int}},
     ),
 
+    # Whether to perform a gecko checkout.
+    Required('checkout', default=False): bool,
+
     # What to run
     Required('mozharness'): Any({
         # the mozharness script used to run this task
         Required('script'): basestring,
 
         # the config files required for the task
         Required('config'): Any(
             [basestring],
--- a/testing/docker/desktop-test/Dockerfile
+++ b/testing/docker/desktop-test/Dockerfile
@@ -22,16 +22,18 @@ RUN bash /setup/system-setup.sh
 ADD topsrcdir/testing/docker/recipes/run-task /home/worker/bin/run-task
 
 # %include taskcluster/scripts/tester/test-ubuntu1204.sh
 ADD topsrcdir/taskcluster/scripts/tester/test-ubuntu1204.sh /home/worker/bin/test-linux.sh
 
 # This will create a host mounted filesystem when the cache is stripped
 # on Try. This cancels out some of the performance losses of aufs. See
 # bug 1291940.
+VOLUME /home/worker/hg-shared
+VOLUME /home/worker/checkouts
 VOLUME /home/worker/workspace
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
 ENV           HOME          /home/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
--- a/testing/docker/desktop1604-test/Dockerfile
+++ b/testing/docker/desktop1604-test/Dockerfile
@@ -22,16 +22,18 @@ ADD topsrcdir/testing/docker/recipes/xvf
 ADD topsrcdir/testing/docker/recipes/run-task /home/worker/bin/run-task
 
 # %include taskcluster/scripts/tester/test-ubuntu1604.sh
 ADD topsrcdir/taskcluster/scripts/tester/test-ubuntu1604.sh /home/worker/bin/test-linux.sh
 
 # This will create a host mounted filesystem when the cache is stripped
 # on Try. This cancels out some of the performance losses of aufs. See
 # bug 1291940.
+VOLUME /home/worker/hg-shared
+VOLUME /home/worker/checkouts
 VOLUME /home/worker/workspace
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
 ENV           HOME          /home/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
--- a/testing/marionette/harness/marionette/tests/unit/test_navigation.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_navigation.py
@@ -1,40 +1,40 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import time
+import unittest
 import urllib
 
 from marionette import MarionetteTestCase
 from marionette_driver.errors import MarionetteException, TimeoutException
-from marionette_driver.by import By
+from marionette_driver import By, Wait
 
 
 def inline(doc):
     return "data:text/html;charset=utf-8,%s" % urllib.quote(doc)
 
 
 class TestNavigate(MarionetteTestCase):
     def setUp(self):
         MarionetteTestCase.setUp(self)
-        self.marionette.execute_script("window.location.href = 'about:blank'")
-        self.assertEqual("about:blank", self.location_href)
+        self.marionette.navigate("about:")
         self.test_doc = self.marionette.absolute_url("test.html")
         self.iframe_doc = self.marionette.absolute_url("test_iframe.html")
 
     def test_set_location_through_execute_script(self):
         self.marionette.execute_script("window.location.href = '%s'" % self.test_doc)
-        self.assertEqual(self.test_doc, self.location_href)
+        Wait(self.marionette).until(lambda _: self.test_doc == self.location_href)
         self.assertEqual("Marionette Test", self.marionette.title)
 
     def test_navigate(self):
         self.marionette.navigate(self.test_doc)
-        self.assertNotEqual("about:blank", self.location_href)
+        self.assertNotEqual("about:", self.location_href)
         self.assertEqual("Marionette Test", self.marionette.title)
 
     def test_navigate_chrome_error(self):
         with self.marionette.using_context("chrome"):
             self.assertRaisesRegexp(MarionetteException, "Cannot navigate in chrome context",
                                     self.marionette.navigate, "about:blank")
 
     def test_get_current_url_returns_top_level_browsing_context_url(self):
@@ -118,16 +118,17 @@ class TestNavigate(MarionetteTestCase):
         self.assertEqual(self.marionette.get_url(), "about:blocked")
 
     def test_find_element_state_complete(self):
         self.marionette.navigate(self.test_doc)
         state = self.marionette.execute_script("return window.document.readyState")
         self.assertEqual("complete", state)
         self.assertTrue(self.marionette.find_element(By.ID, "mozLink"))
 
+    @unittest.skip("Bug 1302707 - No timeout exception raised.")
     def test_should_throw_a_timeoutexception_when_loading_page(self):
         try:
             self.marionette.timeouts("page load", 0)
             self.marionette.navigate(self.test_doc)
             self.assertTrue(self.marionette.find_element(By.ID, "mozLink"))
             self.fail("Should have thrown a MarionetteException")
         except TimeoutException as e:
             self.assertTrue("Error loading page, timed out" in str(e))
--- a/testing/marionette/harness/marionette/tests/unit/test_window_management.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_window_management.py
@@ -1,15 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import time
 from marionette import MarionetteTestCase
-from marionette_driver.by import By
+from marionette_driver import By, Wait
 
 
 class TestSwitchWindow(MarionetteTestCase):
     def open_new_window(self):
         self.marionette.set_context("chrome")
         self.marionette.set_script_timeout(5000)
         self.marionette.execute_async_script("""
 var ww = Components.classes["@mozilla.org/embedcomp/window-watcher;1"]
@@ -36,17 +36,19 @@ if (win != null)
     def test_windows(self):
         orig_win = self.marionette.current_window_handle
         orig_available = self.marionette.window_handles
         self.open_new_window()
         #assert we're still in the original window
         self.assertEqual(self.marionette.current_window_handle, orig_win)
         now_available = self.marionette.window_handles
         #assert we can find the new window
-        self.assertEqual(len(now_available), len(orig_available) + 1)
+        Wait(self.marionette).until(
+            lambda mn: len(mn.window_handles) == len(orig_available) + 1,
+            message="The new window has not been opened.")
+        now_available = self.marionette.window_handles
         #assert that our window is there
         self.assertTrue(orig_win in now_available)
         new_win = None
         for win in now_available:
             if win != orig_win:
                 new_win = orig_win
         #switch to another window
         self.marionette.switch_to_window(new_win)
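
Both Marionette tests above now use Wait(...).until(...) rather than asserting immediately, because window creation and navigation complete asynchronously: the condition callable is re-evaluated until it returns a truthy value or a timeout elapses. Roughly the following behaviour, sketched in simplified form (the real marionette_driver Wait has more options and raises its own timeout exception):

import time


class SimplifiedWait(object):
    # Simplified stand-in for marionette_driver's Wait; assumed semantics only.

    def __init__(self, subject, timeout=5.0, interval=0.1):
        self.subject = subject
        self.timeout = timeout
        self.interval = interval

    def until(self, condition, message=None):
        end = time.time() + self.timeout
        while True:
            result = condition(self.subject)
            if result:
                return result
            if time.time() >= end:
                raise AssertionError(message or "timed out waiting for condition")
            time.sleep(self.interval)
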
--- a/testing/web-platform/meta/url/url-constructor.html.ini
+++ b/testing/web-platform/meta/url/url-constructor.html.ini
@@ -232,19 +232,16 @@
     expected: FAIL
 
   [Parsing: <sc://ñ.test/> against <about:blank>]
     expected: FAIL
 
   [Parsing: <file:..> against <http://www.example.com/test>]
     expected: FAIL
 
-  [Parsing: <http://f:999999/c> against <http://example.org/foo/bar>]
-    expected: FAIL
-
   [Parsing: <http://www/foo%2Ehtml> against <about:blank>]
     expected: FAIL
 
   [Parsing: <http://example.com/foo/%2e%2> against <about:blank>]
     expected: FAIL
 
   [Parsing: <http://example.com/foo/%2e./%2e%2e/.%2e/%2e.bar> against <about:blank>]
     expected: FAIL
--- a/testing/web-platform/meta/url/url-setters.html.ini
+++ b/testing/web-platform/meta/url/url-setters.html.ini
@@ -70,19 +70,16 @@
     expected: FAIL
 
   [Setting <view-source+http://example.net/path>.host = 'example.com\\stuff' \\ is not a delimiter for non-special schemes, and it’s invalid in a domain]
     expected: FAIL
 
   [Setting <view-source+http://example.net/path>.host = 'example.com:8080stuff2' Anything other than ASCII digit stops the port parser in a setter but is not an error]
     expected: FAIL
 
-  [Setting <http://example.net/path>.host = 'example.com:65536' Port numbers are 16 bit integers, overflowing is an error. Hostname is still set, though.]
-    expected: FAIL
-
   [Setting <view-source+http://example.net/foo>.hostname = '' The empty host is OK for non-special schemes]
     expected: FAIL
 
   [Setting <a:/foo>.hostname = 'example.net' Path-only URLs can gain a host]
     expected: FAIL
 
   [Setting <http://example.net:8080>.hostname = '0x7F000001' IPv4 address syntax is normalized]
     expected: FAIL
@@ -100,19 +97,16 @@
     expected: FAIL
 
   [Setting <http://example.net:8080>.port = '' Port number is unchanged if empty in the new value. Note: this may change, see https://github.com/whatwg/url/pull/113]
     expected: FAIL
 
   [Setting <view-source+http://example.net/path>.port = '8080stuff2' Anything other than ASCII digit stops the port parser in a setter but is not an error]
     expected: FAIL
 
-  [Setting <http://example.net:8080/path>.port = '65536' Port numbers are 16 bit integers, overflowing is an error]
-    expected: FAIL
-
   [Setting <unix:/run/foo.socket?timeout=10>.pathname = '/var/log/../run/bar.socket']
     expected: FAIL
 
   [Setting <http://example.net/home?lang=fr#nav>.pathname = '\\a\\%2E\\b\\%2e.\\c' \\ is a segment delimiter for 'special' URLs]
     expected: FAIL
 
   [Setting <view-source+http://example.net/home?lang=fr#nav>.pathname = '\\a\\%2E\\b\\%2e.\\c' \\ is *not* a segment delimiter for non-'special' URLs]
     expected: FAIL
--- a/toolkit/components/search/tests/xpcshell/test_location_error.js
+++ b/toolkit/components/search/tests/xpcshell/test_location_error.js
@@ -1,16 +1,16 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 function run_test() {
   installTestEngine();
 
-  // using a port > 2^32 causes an error to be reported.
-  let url = "http://localhost:111111111";
+  // Use a port that parses as valid but can never be connected to.
+  let url = "http://localhost:0";
 
   Services.prefs.setCharPref("browser.search.geoip.url", url);
   Services.search.init(() => {
     try {
       Services.prefs.getCharPref("browser.search.countryCode");
       ok(false, "not expecting countryCode to be set");
     } catch (ex) {}
     // should have an error recorded.
--- a/toolkit/library/rust/Cargo.lock
+++ b/toolkit/library/rust/Cargo.lock
@@ -1,28 +1,28 @@
 [root]
 name = "gkrust"
 version = "0.1.0"
 dependencies = [
- "mp4parse_capi 0.5.0",
+ "mp4parse_capi 0.5.1",
 ]
 
 [[package]]
 name = "byteorder"
 version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "mp4parse"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
  "byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "mp4parse_capi"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
- "mp4parse 0.5.0",
+ "mp4parse 0.5.1",
 ]
 
 [metadata]
 "checksum byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0fc10e8cc6b2580fda3f36eb6dc5316657f812a3df879a44a66fc9f0fdbc4855"
--- a/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm
+++ b/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm
@@ -113,16 +113,19 @@ const RolloutPolicy = {
   // Set agreed for Release 49
   "49a": { addons: set49Release, webextensions: true },
   "49b": { addons: set49Release, webextensions: false },
 
   // Smaller set that can be used for Release 49
   "49limiteda": { addons: set49PaneOnly, webextensions: true },
   "49limitedb": { addons: set49PaneOnly, webextensions: false },
 
+  // Beta testing on 50
+  "50allmpc": { addons: [], webextensions: true, mpc: true },
+
   "xpcshell-test": { addons: [ADDONS.test1, ADDONS.test2], webextensions: false },
 };
 
 Object.defineProperty(this, "isAddonPartOfE10SRollout", {
   configurable: false,
   enumerable: false,
   writable: false,
   value: function isAddonPartOfE10SRollout(aAddon) {
@@ -138,16 +141,20 @@ Object.defineProperty(this, "isAddonPart
     }
 
     let policy = RolloutPolicy[policyId];
 
     if (policy.webextensions && aAddon.type == "webextension") {
       return true;
     }
 
+    if (policy.mpc && aAddon.multiprocessCompatible) {
+      return true;
+    }
+
     for (let rolloutAddon of policy.addons) {
       if (aAddon.id == rolloutAddon.id &&
           Services.vc.compare(aAddon.version, rolloutAddon.minVersion) >= 0) {
         return true;
       }
     }
 
     return false;
--- a/toolkit/xre/nsAppRunner.cpp
+++ b/toolkit/xre/nsAppRunner.cpp
@@ -7,22 +7,23 @@
 #include "mozilla/dom/ContentChild.h"
 #include "mozilla/ipc/GeckoChildProcessHost.h"
 
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/ChaosMode.h"
 #include "mozilla/IOInterposer.h"
 #include "mozilla/Likely.h"
+#include "mozilla/MemoryChecking.h"
 #include "mozilla/Poison.h"
 #include "mozilla/Preferences.h"
+#include "mozilla/ScopeExit.h"
 #include "mozilla/Services.h"
 #include "mozilla/ServoBindings.h"
 #include "mozilla/Telemetry.h"
-#include "mozilla/MemoryChecking.h"
 
 #include "nsAppRunner.h"
 #include "mozilla/AppData.h"
 #if defined(MOZ_UPDATER) && !defined(MOZ_WIDGET_ANDROID)
 #include "nsUpdateDriver.h"
 #endif
 #include "ProfileReset.h"
 
@@ -373,16 +374,35 @@ strimatch(const char* lowerstr, const ch
     ++mixedstr;
   }
 
   if (*mixedstr) return false; // lowerstr is shorter
 
   return true;
 }
 
+static bool gIsExpectedExit = false;
+
+void MozExpectedExit() {
+  gIsExpectedExit = true;
+}
+
+/**
+ * Registered with atexit() to catch unexpected exits from third-party
+ * libraries, such as the Intel graphics driver calling exit() in an error
+ * condition. When they call exit() to report an error we won't shut down
+ * correctly and won't catch the issue with our crash reporter.
+ */
+static void UnexpectedExit() {
+  if (!gIsExpectedExit) {
+    gIsExpectedExit = true; // Don't risk re-entrancy issues when crashing.
+    MOZ_CRASH("Exit called by third party code.");
+  }
+}
+
 /**
  * Output a string to the user.  This method is really only meant to be used to
  * output last-ditch error messages designed for developers NOT END USERS.
  *
  * @param isError
  *        Pass true to indicate severe errors.
  * @param fmt
  *        printf-style format string followed by arguments.
@@ -3013,16 +3033,21 @@ public:
  */
 int
 XREMain::XRE_mainInit(bool* aExitFlag)
 {
   if (!aExitFlag)
     return 1;
   *aExitFlag = false;
 
+  atexit(UnexpectedExit);
+  auto expectedShutdown = mozilla::MakeScopeExit([&] {
+    MozExpectedExit();
+  });
+
   StartupTimeline::Record(StartupTimeline::MAIN);
 
   if (PR_GetEnv("MOZ_CHAOSMODE")) {
     ChaosFeature feature = ChaosFeature::Any;
     long featureInt = strtol(PR_GetEnv("MOZ_CHAOSMODE"), nullptr, 16);
     if (featureInt) {
       // NOTE: MOZ_CHAOSMODE=0 or a non-hex value maps to Any feature.
       feature = static_cast<ChaosFeature>(featureInt);
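
The nsAppRunner.cpp change above installs an atexit() handler that deliberately crashes if the process exits without MozExpectedExit() having been called, so exit() calls made by third-party code surface through the crash reporter instead of silently skipping Gecko shutdown. The same guard pattern, sketched in Python purely for illustration (the actual change is C++ and uses MOZ_CRASH):

import atexit
import os

_is_expected_exit = False


def mark_expected_exit():
    # Call this from a proper shutdown path to allow the exit to complete.
    global _is_expected_exit
    _is_expected_exit = True


def _unexpected_exit():
    # Runs at interpreter exit; abort hard if nobody declared the exit expected.
    if not _is_expected_exit:
        os.abort()


atexit.register(_unexpected_exit)
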
--- a/toolkit/xre/nsAppRunner.h
+++ b/toolkit/xre/nsAppRunner.h
@@ -92,16 +92,23 @@ NS_LockProfilePath(nsIFile* aPath, nsIFi
                    nsIProfileUnlocker* *aUnlocker, nsIProfileLock* *aResult);
 
 void
 WriteConsoleLog();
 
 void
 OverrideDefaultLocaleIfNeeded();
 
+/**
+ * Mark the exit as expected so that exit() calls are allowed to complete.
+ * Call this from a proper Gecko shutdown path; otherwise the exit is treated
+ * as improper and caught by the atexit() handler installed in nsAppRunner.cpp.
+ */
+void
+MozExpectedExit();
+
 #ifdef XP_WIN
 void
 UseParentConsole();
 
 BOOL
 WinLaunchChild(const wchar_t *exePath, int argc,
                char **argv, HANDLE userToken = nullptr,
                HANDLE *hProcess = nullptr);
--- a/toolkit/xre/nsNativeAppSupportUnix.cpp
+++ b/toolkit/xre/nsNativeAppSupportUnix.cpp
@@ -467,16 +467,17 @@ nsNativeAppSupportUnix::Start(bool *aRet
                      GTK_BUTTONS_OK,
                      UNSUPPORTED_GTK_MSG,
                      gtk_major_version,
                      gtk_minor_version,
                      MIN_GTK_MAJOR_VERSION,
                      MIN_GTK_MINOR_VERSION);
     gtk_dialog_run(GTK_DIALOG(versionErrDialog));
     gtk_widget_destroy(versionErrDialog);
+    MozExpectedExit();
     exit(0);
   }
 #endif
 
   *aRetVal = true;
 
 #ifdef MOZ_X11
   gboolean sm_disable = FALSE;
--- a/widget/android/nsAppShell.cpp
+++ b/widget/android/nsAppShell.cpp
@@ -99,17 +99,19 @@ public:
 NS_IMPL_ISUPPORTS(WakeLockListener, nsIDOMMozWakeLockListener)
 nsCOMPtr<nsIPowerManagerService> sPowerManagerService = nullptr;
 StaticRefPtr<WakeLockListener> sWakeLockListener;
 
 
 class GeckoThreadSupport final
     : public java::GeckoThread::Natives<GeckoThreadSupport>
 {
-    static uint32_t sPauseCount;
+    // When this number goes above 0, the app is paused. When less than or
+    // equal to zero, the app is resumed.
+    static int32_t sPauseCount;
 
 public:
     static void SpeculativeConnect(jni::String::Param aUriStr)
     {
         if (!NS_IsMainThread()) {
             // We will be on the main thread if the call was queued on the Java
             // side during startup. Otherwise, the call was not queued, which
             // means Gecko is already sufficiently loaded, and we don't really
@@ -137,18 +139,20 @@ public:
         };
         nsAppShell::SyncRunEvent(NoOpEvent());
     }
 
     static void OnPause()
     {
         MOZ_ASSERT(NS_IsMainThread());
 
-        if ((++sPauseCount) > 1) {
-            // Already paused.
+        sPauseCount++;
+        // If sPauseCount is now 1, we just crossed the threshold from "resumed"
+        // to "paused", so we should notify observers and so on.
+        if (sPauseCount != 1) {
             return;
         }
 
         nsCOMPtr<nsIObserverService> obsServ =
             mozilla::services::GetObserverService();
         obsServ->NotifyObservers(nullptr, "application-background", nullptr);
 
         NS_NAMED_LITERAL_STRING(minimize, "heap-minimize");
@@ -169,18 +173,20 @@ public:
             prefs->SavePrefFile(nullptr);
         }
     }
 
     static void OnResume()
     {
         MOZ_ASSERT(NS_IsMainThread());
 
-        if (!sPauseCount || (--sPauseCount) > 0) {
-            // Still paused.
+        sPauseCount--;
+        // If sPauseCount is now 0, we just crossed the threshold from "paused"
+        // to "resumed", so we should notify observers and so on.
+        if (sPauseCount != 0) {
             return;
         }
 
         // If we are OOM killed with the disk cache enabled, the entire
         // cache will be cleared (bug 105843), so shut down cache on backgrounding
         // and re-init here
         if (nsCacheService::GlobalInstance()) {
             nsCacheService::GlobalInstance()->Init();
@@ -211,17 +217,17 @@ public:
         if (!AndroidBridge::Bridge()) {
             return -1;
         }
 
         return AndroidBridge::Bridge()->RunDelayedUiThreadTasks();
     }
 };
 
-uint32_t GeckoThreadSupport::sPauseCount;
+int32_t GeckoThreadSupport::sPauseCount;
 
 
 class GeckoAppShellSupport final
     : public java::GeckoAppShell::Natives<GeckoAppShellSupport>
 {
 public:
     static void ReportJavaCrash(const jni::Class::LocalRef& aCls,
                                 jni::Throwable::Param aException,