Merge autoland to mozilla-central a=merge
author Andreea Pavel <apavel@mozilla.com>
Fri, 21 Jan 2022 23:41:00 +0200
changeset 605190 00753e705770d35e46096f2894203b92910e98ce
parent 605139 10f302a4e5bd63560587dfce8163ce41ab01846a (current diff)
parent 605189 bd2e82e136f6e30411c382c49f49e02b604ea6c9 (diff)
child 605191 e62280d66ce09c88ca1b0ec9d644b4a36e547ef6
child 605199 ce52a5aa027f99b49651cf171b1e8c10b5256e78
push id: 39177
push user: apavel@mozilla.com
push date: Fri, 21 Jan 2022 21:41:38 +0000
treeherder: mozilla-central@00753e705770
perfherder: talos, build metrics, platform microbench (compared to previous push)
reviewers: merge
milestone: 98.0a1
first release with: nightly linux32, linux64, mac, win32, win64
00753e705770 / 98.0a1 / 20220121214138
Merge autoland to mozilla-central a=merge
third_party/python/attrs/attrs-19.1.0.dist-info/LICENSE
third_party/python/attrs/attrs-19.1.0.dist-info/METADATA
third_party/python/attrs/attrs-19.1.0.dist-info/RECORD
third_party/python/attrs/attrs-19.1.0.dist-info/WHEEL
third_party/python/attrs/attrs-19.1.0.dist-info/top_level.txt
--- a/browser/app/profile/firefox.js
+++ b/browser/app/profile/firefox.js
@@ -574,16 +574,20 @@ pref("browser.search.separatePrivateDefa
 pref("browser.search.separatePrivateDefault.ui.banner.max", 0);
 
 // Enables the display of the Mozilla VPN banner in private browsing windows
 pref("browser.privatebrowsing.vpnpromourl", "https://vpn.mozilla.org/?utm_source=firefox-browser&utm_medium=firefox-%CHANNEL%-browser&utm_campaign=private-browsing-vpn-link");
 
 // Whether the user is opted-in to privacy segmentation.
 pref("browser.privacySegmentation.enabled", false);
 
+// Use dark theme variant for PBM windows. This is only supported if the theme
+// sets darkTheme data.
+pref("browser.theme.dark-private-windows", false);
+
 pref("browser.sessionhistory.max_entries", 50);
 
 // Built-in default permissions.
 pref("permissions.manager.defaultsUrl", "resource://app/defaults/permissions");
 
 // Set default fallback values for site permissions we want
 // the user to be able to globally change.
 pref("permissions.default.camera", 0);
--- a/browser/components/downloads/test/browser/browser_downloads_context_menu_delete_file.js
+++ b/browser/components/downloads/test/browser/browser_downloads_context_menu_delete_file.js
@@ -25,16 +25,20 @@ async function createDownloadFiles() {
     contentType: "text/javascript",
     target: await createDownloadedFile(
       PathUtils.join(gDownloadDir, "downloaded.js"),
       "Test file"
     ),
   });
 }
 
+registerCleanupFunction(async function() {
+  await task_resetState();
+});
+
 add_task(async function test_download_deleteFile() {
   await SpecialPowers.pushPrefEnv({
     set: [
       ["browser.download.improvements_to_download_panel", true],
       ["browser.download.alwaysOpenPanel", false],
       ["browser.download.clearHistoryOnDelete", 2],
     ],
   });
--- a/browser/components/enterprisepolicies/Policies.jsm
+++ b/browser/components/enterprisepolicies/Policies.jsm
@@ -2308,18 +2308,21 @@ function addAllowDenyPermissions(permiss
     try {
       Services.perms.addFromPrincipal(
         Services.scriptSecurityManager.createContentPrincipalFromOrigin(origin),
         permissionName,
         Ci.nsIPermissionManager.ALLOW_ACTION,
         Ci.nsIPermissionManager.EXPIRE_POLICY
       );
     } catch (ex) {
-      log.error(`Added by default for ${permissionName} permission in the permission
-      manager - ${origin.href}`);
+      // If the origin was invalid, we may have a string here instead of an origin object.
+      log.error(
+        `Unable to add ${permissionName} permission for ${origin.href ||
+          origin}`
+      );
     }
   }
 
   for (let origin of blockList) {
     Services.perms.addFromPrincipal(
       Services.scriptSecurityManager.createContentPrincipalFromOrigin(origin),
       permissionName,
       Ci.nsIPermissionManager.DENY_ACTION,
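
Note: with the improved logging above, a policy that lists an invalid origin in an allow list now produces a clear error rather than the old, misleading message. A sketch of a policy that would hit that path, reusing the test helper from the xpcshell tests below (the test name and the invalid origin string are assumptions):

    add_task(async function test_invalid_origin_logs_error() {
      await setupPolicyEngineWithJson({
        policies: {
          Permissions: {
            Camera: {
              // "not-a-valid-origin" cannot be turned into a content principal,
              // so the catch block logs something like
              // "Unable to add camera permission for not-a-valid-origin"
              // and continues with the remaining entries.
              Allow: ["https://example.com", "not-a-valid-origin"],
            },
          },
        },
      });
    });
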
--- a/browser/components/enterprisepolicies/schemas/policies-schema.json
+++ b/browser/components/enterprisepolicies/schemas/policies-schema.json
@@ -79,17 +79,17 @@
         },
         "PrivateBrowsing": {
           "type": "boolean"
         }
       }
     },
 
     "AutoLaunchProtocolsFromOrigins": {
-      "type": "array",
+      "type": ["array", "JSON"],
       "items": {
         "type": "object",
         "properties": {
           "allowed_origins": {
             "type": "array",
             "items": {
               "type": "origin"
             }
@@ -481,17 +481,17 @@
           "items": {
             "type": "string"
           }
         }
       }
     },
 
     "ExtensionSettings": {
-      "type": "object",
+      "type": ["object", "JSON"],
       "properties": {
         "*": {
           "type": "object",
           "properties": {
             "installation_mode": {
               "type": "string",
               "enum": ["allowed", "blocked"]
             },
@@ -601,17 +601,17 @@
 
         "Locked": {
           "type": "boolean"
         }
       }
     },
 
     "Handlers": {
-      "type": "object",
+      "type": ["object", "JSON"],
       "patternProperties": {
         "^(mimeTypes|extensions|schemes)$": {
           "type": "object",
           "patternProperties": {
             "^.*$": {
               "type": "object",
               "properties": {
                 "action": {
@@ -743,17 +743,17 @@
             "type": "string"
           },
           "url": {
             "type": "string"
           }
         },
         "type": "object"
       },
-      "type": "array"
+      "type": ["array", "JSON"]
     },
 
     "ManualAppUpdateOnly": {
         "type": "boolean"
     },
 
     "NetworkPrediction": {
       "type": "boolean"
@@ -1008,17 +1008,17 @@
 
         "Locked": {
           "type": "boolean"
         }
       }
     },
 
     "Preferences": {
-      "type": "object",
+      "type": ["object", "JSON"],
       "patternProperties": {
         "^.*$": {
           "type": ["number", "boolean", "string", "object"],
           "properties": {
             "Value": {
               "type": ["number", "boolean", "string"]
             },
             "Status": {
--- a/browser/components/enterprisepolicies/tests/xpcshell/test_extensionsettings.js
+++ b/browser/components/enterprisepolicies/tests/xpcshell/test_extensionsettings.js
@@ -193,16 +193,44 @@ add_task(async function test_addon_norma
   notEqual(
     addon.permissions & AddonManager.PERM_CAN_DISABLE,
     0,
     "Addon should be able to be disabled."
   );
   await addon.uninstall();
 });
 
+add_task(async function test_extensionsettings_string() {
+  await setupPolicyEngineWithJson({
+    policies: {
+      ExtensionSettings: '{"*": {"installation_mode": "blocked"}}',
+    },
+  });
+
+  let extensionSettings = Services.policies.getExtensionSettings("*");
+  equal(extensionSettings.installation_mode, "blocked");
+});
+
+add_task(async function test_extensionsettings_restricted_domains_string() {
+  let restrictedDomains = Services.prefs.getCharPref(
+    "extensions.webextensions.restrictedDomains"
+  );
+  await setupPolicyEngineWithJson({
+    policies: {
+      ExtensionSettings:
+        '{"*": {"restricted_domains": ["example.com","example.org"]}}',
+    },
+  });
+
+  let newRestrictedDomains = Services.prefs.getCharPref(
+    "extensions.webextensions.restrictedDomains"
+  );
+  equal(newRestrictedDomains, restrictedDomains + ",example.com,example.org");
+});
+
 add_task(async function test_theme() {
   let themeFile = AddonTestUtils.createTempWebExtensionFile({
     manifest: {
       applications: {
         gecko: {
           id: themeID,
         },
       },
--- a/browser/components/enterprisepolicies/tests/xpcshell/test_preferences.js
+++ b/browser/components/enterprisepolicies/tests/xpcshell/test_preferences.js
@@ -209,16 +209,27 @@ add_task(async function test_security_pr
         },
       },
     },
   });
 
   checkUnsetPref("security.this.should.not.work");
 });
 
+add_task(async function test_JSON_preferences() {
+  await setupPolicyEngineWithJson({
+    policies: {
+      Preferences:
+        '{"browser.policies.test.default.boolean.json": {"Value": true,"Status": "default"}}',
+    },
+  });
+
+  checkDefaultPref("browser.policies.test.default.boolean.json", true);
+});
+
 add_task(async function test_bug_1666836() {
   await setupPolicyEngineWithJson({
     policies: {
       Preferences: {
         "browser.tabs.warnOnClose": {
           Value: 0,
           Status: "default",
         },
--- a/build/python-test_virtualenv_packages.txt
+++ b/build/python-test_virtualenv_packages.txt
@@ -1,2 +1,2 @@
 vendored:third_party/python/glean_parser
-pypi:pytest==3.6.2
+pypi:pytest==4.6.6
--- a/config/mozunit/mozunit/mozunit.py
+++ b/config/mozunit/mozunit/mozunit.py
@@ -308,17 +308,17 @@ def main(*args, **kwargs):
             a.startswith("--color") for a in args
         ):
             args.append("--color=yes")
 
         module = __import__("__main__")
         args.extend(
             [
                 "--rootdir",
-                topsrcdir,
+                str(topsrcdir),
                 "-c",
                 os.path.join(here, "pytest.ini"),
                 "-vv",
                 "-p",
                 "mozlog.pytest_mozlog.plugin",
                 "-p",
                 "mozunit.pytest_plugin",
                 "-p",
--- a/devtools/client/debugger/src/actions/sources/select.js
+++ b/devtools/client/debugger/src/actions/sources/select.js
@@ -5,17 +5,17 @@
 /**
  * Redux actions for the sources state
  * @module actions/sources
  */
 
 import { isOriginalId } from "devtools-source-map";
 
 import { getSourceFromId, getSourceWithContent } from "../../reducers/sources";
-import { tabExists } from "../../reducers/tabs";
+import { tabExists } from "../../selectors/tabs";
 import { setSymbols } from "./symbols";
 import { setInScopeLines } from "../ast";
 import { closeActiveSearch, updateActiveFileSearch } from "../ui";
 import { togglePrettyPrint } from "./prettyPrint";
 import { addTab, closeTab } from "../tabs";
 import { loadSourceText } from "./loadSourceText";
 import { mapDisplayNames } from "../pause";
 import { setBreakableLines } from ".";
--- a/devtools/client/debugger/src/reducers/sources.js
+++ b/devtools/client/debugger/src/reducers/sources.js
@@ -46,17 +46,17 @@ import { prefs } from "../utils/prefs";
 
 import {
   hasSourceActor,
   getSourceActor,
   getSourceActors,
   getAllThreadsBySource,
   getBreakableLinesForSourceActors,
 } from "./source-actors";
-import { getAllThreads } from "./threads";
+import { getAllThreads } from "../selectors/threads";
 
 export function initialSourcesState(state) {
   return {
     /**
      * All currently available sources.
      *
      * See create.js: `createSourceObject` method for the description of stored objects.
      * This reducers will add an extra `content` attribute which is the source text for each source.
--- a/devtools/client/debugger/src/reducers/tabs.js
+++ b/devtools/client/debugger/src/reducers/tabs.js
@@ -2,29 +2,21 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
 /**
  * Tabs reducer
  * @module reducers/tabs
  */
 
-import { createSelector } from "reselect";
 import { isOriginalId } from "devtools-source-map";
 
 import { isSimilarTab, persistTabs } from "../utils/tabs";
-import { makeShallowQuery } from "../utils/resource";
-import { getPrettySourceURL } from "../utils/source";
 
-import {
-  getSource,
-  getSpecificSourceByURL,
-  getSources,
-  resourceAsSourceBase,
-} from "./sources";
+import { getSource, getSpecificSourceByURL } from "./sources";
 
 export function initialTabState() {
   return { tabs: [] };
 }
 
 function resetTabState(state) {
   const tabs = persistTabs(state.tabs);
   return { tabs };
@@ -244,39 +236,9 @@ function moveTab(tabs, currentIndex, new
   // Remove the item from its current location
   newTabs.splice(currentIndex, 1);
   // And add it to the new one
   newTabs.splice(newIndex, 0, item);
 
   return { tabs: newTabs };
 }
 
-// Selectors
-
-export const getTabs = state => state.tabs.tabs;
-
-export const getSourceTabs = createSelector(
-  state => state.tabs,
-  ({ tabs }) => tabs.filter(tab => tab.sourceId)
-);
-
-export const getSourcesForTabs = state => {
-  const tabs = getSourceTabs(state);
-  const sources = getSources(state);
-  return querySourcesForTabs(sources, tabs);
-};
-
-const querySourcesForTabs = makeShallowQuery({
-  filter: (_, tabs) => tabs.map(({ sourceId }) => sourceId),
-  map: resourceAsSourceBase,
-  reduce: items => items,
-});
-
-export function tabExists(state, sourceId) {
-  return !!getSourceTabs(state).find(tab => tab.sourceId == sourceId);
-}
-
-export function hasPrettyTab(state, sourceUrl) {
-  const prettyUrl = getPrettySourceURL(sourceUrl);
-  return !!getSourceTabs(state).find(tab => tab.url === prettyUrl);
-}
-
 export default update;
--- a/devtools/client/debugger/src/reducers/threads.js
+++ b/devtools/client/debugger/src/reducers/threads.js
@@ -2,18 +2,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
 /**
  * Threads reducer
  * @module reducers/threads
  */
 
-import { createSelector } from "reselect";
-
 export function initialThreadsState() {
   return {
     threads: [],
     isWebExtension: false,
   };
 }
 
 export default function update(state = initialThreadsState(), action) {
@@ -48,57 +46,8 @@ export default function update(state = i
           return t;
         }),
       };
 
     default:
       return state;
   }
 }
-
-export const getWorkerCount = state => getThreads(state).length;
-
-export function getWorkerByThread(state, thread) {
-  return getThreads(state).find(worker => worker.actor == thread);
-}
-
-function isMainThread(thread) {
-  return thread.isTopLevel;
-}
-
-export function getMainThread(state) {
-  return state.threads.threads.find(isMainThread);
-}
-
-export function getDebuggeeUrl(state) {
-  return getMainThread(state)?.url || "";
-}
-
-export const getThreads = createSelector(
-  state => state.threads.threads,
-  threads => threads.filter(thread => !isMainThread(thread))
-);
-
-export const getAllThreads = createSelector(
-  getMainThread,
-  getThreads,
-  (mainThread, threads) => {
-    const orderedThreads = Array.from(threads).sort((threadA, threadB) => {
-      if (threadA.name === threadB.name) {
-        return 0;
-      }
-      return threadA.name < threadB.name ? -1 : 1;
-    });
-    return [mainThread, ...orderedThreads].filter(Boolean);
-  }
-);
-
-export function getThread(state, threadActor) {
-  return getAllThreads(state).find(thread => thread.actor === threadActor);
-}
-
-// checks if a path begins with a thread actor
-// e.g "server1.conn0.child1/workerTarget22/context1/dbg-workers.glitch.me"
-export function startsWithThreadActor(state, path) {
-  const threadActors = getAllThreads(state).map(t => t.actor);
-  const match = path.match(new RegExp(`(${threadActors.join("|")})\/(.*)`));
-  return match?.[1];
-}
--- a/devtools/client/debugger/src/selectors/index.js
+++ b/devtools/client/debugger/src/selectors/index.js
@@ -48,16 +48,18 @@ export { getBreakpointSources } from "./
 export { isLineInScope } from "./isLineInScope";
 export { getXHRBreakpoints, shouldPauseOnAnyXHR } from "./breakpoints";
 export * from "./visibleColumnBreakpoints";
 export {
   getSelectedFrame,
   getSelectedFrames,
   getVisibleSelectedFrame,
 } from "./pause";
+export * from "./tabs";
+export * from "./threads";
 
 import { objectInspector } from "devtools/client/shared/components/reps/index";
 
 const { reducer } = objectInspector;
 
 Object.keys(reducer).forEach(function(key) {
   if (key === "default" || key === "__esModule") {
     return;
--- a/devtools/client/debugger/src/selectors/moz.build
+++ b/devtools/client/debugger/src/selectors/moz.build
@@ -10,11 +10,13 @@ CompiledModules(
     "breakpoints.js",
     "breakpointSources.js",
     "getCallStackFrames.js",
     "inComponent.js",
     "index.js",
     "isLineInScope.js",
     "isSelectedFrameVisible.js",
     "pause.js",
+    "tabs.js",
+    "threads.js",
     "visibleBreakpoints.js",
     "visibleColumnBreakpoints.js",
 )
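
Note: the net effect of the moves above is that the tab and thread selectors now live under src/selectors and are re-exported by selectors/index.js, so call sites import them like any other selector. Roughly (relative paths depend on the importing module):

    // Direct imports from the new modules:
    import { getSourceTabs } from "../selectors/tabs";
    import { getAllThreads } from "../selectors/threads";
    // Or via the barrel file, which now re-exports both:
    import { tabExists, getMainThread } from "../selectors";
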
copy from devtools/client/debugger/src/reducers/tabs.js
copy to devtools/client/debugger/src/selectors/tabs.js
--- a/devtools/client/debugger/src/reducers/tabs.js
+++ b/devtools/client/debugger/src/selectors/tabs.js
@@ -1,260 +1,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
-/**
- * Tabs reducer
- * @module reducers/tabs
- */
-
 import { createSelector } from "reselect";
-import { isOriginalId } from "devtools-source-map";
-
-import { isSimilarTab, persistTabs } from "../utils/tabs";
 import { makeShallowQuery } from "../utils/resource";
 import { getPrettySourceURL } from "../utils/source";
 
-import {
-  getSource,
-  getSpecificSourceByURL,
-  getSources,
-  resourceAsSourceBase,
-} from "./sources";
-
-export function initialTabState() {
-  return { tabs: [] };
-}
-
-function resetTabState(state) {
-  const tabs = persistTabs(state.tabs);
-  return { tabs };
-}
-
-function update(state = initialTabState(), action) {
-  switch (action.type) {
-    case "ADD_TAB":
-    case "UPDATE_TAB":
-      return updateTabList(state, action);
-
-    case "MOVE_TAB":
-      return moveTabInList(state, action);
-    case "MOVE_TAB_BY_SOURCE_ID":
-      return moveTabInListBySourceId(state, action);
-
-    case "CLOSE_TAB":
-      return removeSourceFromTabList(state, action);
-
-    case "CLOSE_TABS":
-      return removeSourcesFromTabList(state, action);
-
-    case "ADD_SOURCE":
-      return addVisibleTabs(state, [action.source]);
-
-    case "ADD_SOURCES":
-      return addVisibleTabs(state, action.sources);
-
-    case "SET_SELECTED_LOCATION": {
-      return addSelectedSource(state, action.source);
-    }
-
-    case "NAVIGATE": {
-      return resetTabState(state);
-    }
-
-    default:
-      return state;
-  }
-}
-
-/**
- * Gets the next tab to select when a tab closes. Heuristics:
- * 1. if the selected tab is available, it remains selected
- * 2. if it is gone, the next available tab to the left should be active
- * 3. if the first tab is active and closed, select the second tab
- *
- * @memberof reducers/tabs
- * @static
- */
-export function getNewSelectedSourceId(state, tabList) {
-  const { selectedLocation } = state.sources;
-  const availableTabs = state.tabs.tabs;
-  if (!selectedLocation) {
-    return "";
-  }
-
-  const selectedTab = getSource(state, selectedLocation.sourceId);
-  if (!selectedTab) {
-    return "";
-  }
-
-  const matchingTab = availableTabs.find(tab =>
-    isSimilarTab(tab, selectedTab.url, isOriginalId(selectedLocation.sourceId))
-  );
-
-  if (matchingTab) {
-    const { sources } = state.sources;
-    if (!sources) {
-      return "";
-    }
-
-    const selectedSource = getSpecificSourceByURL(
-      state,
-      selectedTab.url,
-      selectedTab.isOriginal
-    );
-
-    if (selectedSource) {
-      return selectedSource.id;
-    }
-
-    return "";
-  }
-
-  const tabUrls = tabList.map(tab => tab.url);
-  const leftNeighborIndex = Math.max(tabUrls.indexOf(selectedTab.url) - 1, 0);
-  const lastAvailbleTabIndex = availableTabs.length - 1;
-  const newSelectedTabIndex = Math.min(leftNeighborIndex, lastAvailbleTabIndex);
-  const availableTab = availableTabs[newSelectedTabIndex];
-
-  if (availableTab) {
-    const tabSource = getSpecificSourceByURL(
-      state,
-      availableTab.url,
-      availableTab.isOriginal
-    );
-
-    if (tabSource) {
-      return tabSource.id;
-    }
-  }
-
-  return "";
-}
-
-function matchesSource(tab, source) {
-  return tab.sourceId === source.id || matchesUrl(tab, source);
-}
-
-function matchesUrl(tab, source) {
-  return tab.url === source.url && tab.isOriginal == isOriginalId(source.id);
-}
-
-function addSelectedSource(state, source) {
-  if (
-    state.tabs
-      .filter(({ sourceId }) => sourceId)
-      .map(({ sourceId }) => sourceId)
-      .includes(source.id)
-  ) {
-    return state;
-  }
-
-  const isOriginal = isOriginalId(source.id);
-  return updateTabList(state, {
-    url: source.url,
-    isOriginal,
-    framework: null,
-    sourceId: source.id,
-  });
-}
-
-function addVisibleTabs(state, sources) {
-  const tabCount = state.tabs.filter(({ sourceId }) => sourceId).length;
-  const tabs = state.tabs
-    .map(tab => {
-      const source = sources.find(src => matchesUrl(tab, src));
-      if (!source) {
-        return tab;
-      }
-      return { ...tab, sourceId: source.id };
-    })
-    .filter(tab => tab.sourceId);
-
-  if (tabs.length == tabCount) {
-    return state;
-  }
-
-  return { tabs };
-}
-
-function removeSourceFromTabList(state, { source }) {
-  const { tabs } = state;
-  const newTabs = tabs.filter(tab => !matchesSource(tab, source));
-  return { tabs: newTabs };
-}
-
-function removeSourcesFromTabList(state, { sources }) {
-  const { tabs } = state;
-
-  const newTabs = sources.reduce(
-    (tabList, source) => tabList.filter(tab => !matchesSource(tab, source)),
-    tabs
-  );
-
-  return { tabs: newTabs };
-}
-
-/**
- * Adds the new source to the tab list if it is not already there
- * @memberof reducers/tabs
- * @static
- */
-function updateTabList(
-  state,
-  { url, framework = null, sourceId, isOriginal = false }
-) {
-  let { tabs } = state;
-  // Set currentIndex to -1 for URL-less tabs so that they aren't
-  // filtered by isSimilarTab
-  const currentIndex = url
-    ? tabs.findIndex(tab => isSimilarTab(tab, url, isOriginal))
-    : -1;
-
-  if (currentIndex === -1) {
-    const newTab = {
-      url,
-      framework,
-      sourceId,
-      isOriginal,
-    };
-    tabs = [newTab, ...tabs];
-  } else if (framework) {
-    tabs[currentIndex].framework = framework;
-  }
-
-  return { ...state, tabs };
-}
-
-function moveTabInList(state, { url, tabIndex: newIndex }) {
-  const { tabs } = state;
-  const currentIndex = tabs.findIndex(tab => tab.url == url);
-  return moveTab(tabs, currentIndex, newIndex);
-}
-
-function moveTabInListBySourceId(state, { sourceId, tabIndex: newIndex }) {
-  const { tabs } = state;
-  const currentIndex = tabs.findIndex(tab => tab.sourceId == sourceId);
-  return moveTab(tabs, currentIndex, newIndex);
-}
-
-function moveTab(tabs, currentIndex, newIndex) {
-  const item = tabs[currentIndex];
-
-  const newTabs = Array.from(tabs);
-  // Remove the item from its current location
-  newTabs.splice(currentIndex, 1);
-  // And add it to the new one
-  newTabs.splice(newIndex, 0, item);
-
-  return { tabs: newTabs };
-}
-
-// Selectors
+import { getSources, resourceAsSourceBase } from "../reducers/sources";
 
 export const getTabs = state => state.tabs.tabs;
 
 export const getSourceTabs = createSelector(
   state => state.tabs,
   ({ tabs }) => tabs.filter(tab => tab.sourceId)
 );
 
@@ -273,10 +30,8 @@ const querySourcesForTabs = makeShallowQ
 export function tabExists(state, sourceId) {
   return !!getSourceTabs(state).find(tab => tab.sourceId == sourceId);
 }
 
 export function hasPrettyTab(state, sourceUrl) {
   const prettyUrl = getPrettySourceURL(sourceUrl);
   return !!getSourceTabs(state).find(tab => tab.url === prettyUrl);
 }
-
-export default update;
copy from devtools/client/debugger/src/reducers/threads.js
copy to devtools/client/debugger/src/selectors/threads.js
--- a/devtools/client/debugger/src/reducers/threads.js
+++ b/devtools/client/debugger/src/selectors/threads.js
@@ -1,82 +1,14 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
-/**
- * Threads reducer
- * @module reducers/threads
- */
-
 import { createSelector } from "reselect";
 
-export function initialThreadsState() {
-  return {
-    threads: [],
-    isWebExtension: false,
-  };
-}
-
-export default function update(state = initialThreadsState(), action) {
-  switch (action.type) {
-    case "CONNECT":
-      return {
-        ...state,
-        isWebExtension: action.isWebExtension,
-      };
-    case "INSERT_THREAD":
-      return {
-        ...state,
-        threads: [...state.threads, action.newThread],
-      };
-
-    case "REMOVE_THREAD":
-      const { oldThread } = action;
-      return {
-        ...state,
-        threads: state.threads.filter(
-          thread => oldThread.actor != thread.actor
-        ),
-      };
-    case "UPDATE_SERVICE_WORKER_STATUS":
-      const { thread, status } = action;
-      return {
-        ...state,
-        threads: state.threads.map(t => {
-          if (t.actor == thread) {
-            return { ...t, serviceWorkerStatus: status };
-          }
-          return t;
-        }),
-      };
-
-    default:
-      return state;
-  }
-}
-
-export const getWorkerCount = state => getThreads(state).length;
-
-export function getWorkerByThread(state, thread) {
-  return getThreads(state).find(worker => worker.actor == thread);
-}
-
-function isMainThread(thread) {
-  return thread.isTopLevel;
-}
-
-export function getMainThread(state) {
-  return state.threads.threads.find(isMainThread);
-}
-
-export function getDebuggeeUrl(state) {
-  return getMainThread(state)?.url || "";
-}
-
 export const getThreads = createSelector(
   state => state.threads.threads,
   threads => threads.filter(thread => !isMainThread(thread))
 );
 
 export const getAllThreads = createSelector(
   getMainThread,
   getThreads,
@@ -86,16 +18,28 @@ export const getAllThreads = createSelec
         return 0;
       }
       return threadA.name < threadB.name ? -1 : 1;
     });
     return [mainThread, ...orderedThreads].filter(Boolean);
   }
 );
 
+function isMainThread(thread) {
+  return thread.isTopLevel;
+}
+
+export function getMainThread(state) {
+  return state.threads.threads.find(isMainThread);
+}
+
+export function getDebuggeeUrl(state) {
+  return getMainThread(state)?.url || "";
+}
+
 export function getThread(state, threadActor) {
   return getAllThreads(state).find(thread => thread.actor === threadActor);
 }
 
 // checks if a path begins with a thread actor
 // e.g "server1.conn0.child1/workerTarget22/context1/dbg-workers.glitch.me"
 export function startsWithThreadActor(state, path) {
   const threadActors = getAllThreads(state).map(t => t.actor);
--- a/dom/abort/AbortFollower.h
+++ b/dom/abort/AbortFollower.h
@@ -5,53 +5,54 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_dom_AbortFollower_h
 #define mozilla_dom_AbortFollower_h
 
 #include "jsapi.h"
 #include "nsISupportsImpl.h"
 #include "nsTObserverArray.h"
+#include "mozilla/WeakPtr.h"
 
 namespace mozilla {
 namespace dom {
 
 class AbortSignal;
 class AbortSignalImpl;
 
 // This class must be implemented by objects who want to follow an
 // AbortSignalImpl.
 class AbortFollower : public nsISupports {
  public:
   virtual void RunAbortAlgorithm() = 0;
 
+  // This adds a strong reference to this follower on the signal, which means
+  // you'll need to call Unfollow() to keep your object from living longer
+  // than necessary.
   void Follow(AbortSignalImpl* aSignal);
 
+  // Call this explicitly to let garbage collection happen sooner once the
+  // follower has finished its work and can no longer be aborted.
   void Unfollow();
 
   bool IsFollowing() const;
 
   AbortSignalImpl* Signal() const { return mFollowingSignal; }
 
  protected:
-  // Subclasses of this class must call these Traverse and Unlink functions
-  // during corresponding cycle collection operations.
-  static void Traverse(AbortFollower* aFollower,
-                       nsCycleCollectionTraversalCallback& cb);
-
   static void Unlink(AbortFollower* aFollower) { aFollower->Unfollow(); }
 
   virtual ~AbortFollower();
 
   friend class AbortSignalImpl;
 
-  RefPtr<AbortSignalImpl> mFollowingSignal;
+  WeakPtr<AbortSignalImpl> mFollowingSignal;
 };
 
-class AbortSignalImpl : public nsISupports {
+class AbortSignalImpl : public nsISupports, public SupportsWeakPtr {
  public:
   explicit AbortSignalImpl(bool aAborted, JS::Handle<JS::Value> aReason);
 
   bool Aborted() const;
 
   // Web IDL Layer
   void GetReason(JSContext* aCx, JS::MutableHandle<JS::Value> aReason);
   // Helper for other DOM code
@@ -62,30 +63,32 @@ class AbortSignalImpl : public nsISuppor
  protected:
   // Subclasses of this class must call these Traverse and Unlink functions
   // during corresponding cycle collection operations.
   static void Traverse(AbortSignalImpl* aSignal,
                        nsCycleCollectionTraversalCallback& cb);
 
   static void Unlink(AbortSignalImpl* aSignal);
 
-  virtual ~AbortSignalImpl() = default;
+  virtual ~AbortSignalImpl() { UnlinkFollowers(); }
 
   JS::Heap<JS::Value> mReason;
 
  private:
   friend class AbortFollower;
 
   void MaybeAssignAbortError(JSContext* aCx);
 
+  void UnlinkFollowers();
+
   // Raw pointers.  |AbortFollower::Follow| adds to this array, and
-  // |AbortFollower::Unfollow| (also callbed by the destructor) will remove
+  // |AbortFollower::Unfollow| (also called by the destructor) will remove
   // from this array.  Finally, calling |SignalAbort()| will (after running all
   // abort algorithms) empty this and make all contained followers |Unfollow()|.
-  nsTObserverArray<AbortFollower*> mFollowers;
+  nsTObserverArray<RefPtr<AbortFollower>> mFollowers;
 
   bool mAborted;
 };
 
 }  // namespace dom
 }  // namespace mozilla
 
 #endif  // mozilla_dom_AbortFollower_h
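
Note: the ownership flip above (the signal now keeps strong RefPtr references to its followers, while each follower only weakly references the signal) is what makes the explicit Unfollow() calls added to FetchDriver below necessary; the web-visible contract is unchanged. For orientation, a sketch of the JS pattern these internals back, where a listener follows a signal until abort:

    const controller = new AbortController();
    // The listener becomes an AbortFollower of controller.signal internally.
    window.addEventListener("resize", () => console.log("resized"), {
      signal: controller.signal,
    });
    // abort() runs the abort algorithms, then clears the strong
    // signal -> follower links so the listener can be collected.
    controller.abort();
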
--- a/dom/abort/AbortSignal.cpp
+++ b/dom/abort/AbortSignal.cpp
@@ -48,36 +48,33 @@ void AbortSignalImpl::SignalAbort(JS::Ha
   mAborted = true;
   mReason = aReason;
 
   // Step 3.
   // When there are multiple followers, the follower removal algorithm
   // https://dom.spec.whatwg.org/#abortsignal-remove could be invoked in an
   // earlier algorithm to remove a later algorithm, so |mFollowers| must be a
   // |nsTObserverArray| to defend against mutation.
-  for (RefPtr<AbortFollower> follower : mFollowers.ForwardRange()) {
+  for (RefPtr<AbortFollower>& follower : mFollowers.ForwardRange()) {
     MOZ_ASSERT(follower->mFollowingSignal == this);
     follower->RunAbortAlgorithm();
   }
 
   // Step 4.
-  // Clear follower->signal links, then clear signal->follower links.
-  for (AbortFollower* follower : mFollowers.ForwardRange()) {
-    follower->mFollowingSignal = nullptr;
-  }
-  mFollowers.Clear();
+  UnlinkFollowers();
 }
 
 void AbortSignalImpl::Traverse(AbortSignalImpl* aSignal,
                                nsCycleCollectionTraversalCallback& cb) {
-  // To be filled in shortly.
+  ImplCycleCollectionTraverse(cb, aSignal->mFollowers, "mFollowers", 0);
 }
 
 void AbortSignalImpl::Unlink(AbortSignalImpl* aSignal) {
   aSignal->mReason.setUndefined();
+  aSignal->UnlinkFollowers();
 }
 
 void AbortSignalImpl::MaybeAssignAbortError(JSContext* aCx) {
   MOZ_ASSERT(mAborted);
   if (!mReason.isUndefined()) {
     return;
   }
 
@@ -86,25 +83,33 @@ void AbortSignalImpl::MaybeAssignAbortEr
 
   if (NS_WARN_IF(!ToJSValue(aCx, dom, &exception))) {
     return;
   }
 
   mReason.set(exception);
 }
 
+void AbortSignalImpl::UnlinkFollowers() {
+  // Manually unlink all followers before the array is destroyed; otherwise
+  // Unfollow() would access the array while it is being destructed.
+  for (RefPtr<AbortFollower>& follower : mFollowers.ForwardRange()) {
+    follower->mFollowingSignal = nullptr;
+  }
+  mFollowers.Clear();
+}
+
 // AbortSignal
 // ----------------------------------------------------------------------------
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(AbortSignal)
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AbortSignal,
                                                   DOMEventTargetHelper)
   AbortSignalImpl::Traverse(static_cast<AbortSignalImpl*>(tmp), cb);
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(AbortSignal,
                                                 DOMEventTargetHelper)
   AbortSignalImpl::Unlink(static_cast<AbortSignalImpl*>(tmp));
   AbortFollower::Unlink(static_cast<AbortFollower*>(tmp));
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
@@ -208,15 +213,9 @@ void AbortFollower::Unfollow() {
     // that |this| will be found in |mFollowingSignal->mFollowers|.
     mFollowingSignal->mFollowers.RemoveElement(this);
     mFollowingSignal = nullptr;
   }
 }
 
 bool AbortFollower::IsFollowing() const { return !!mFollowingSignal; }
 
-/* static */ void AbortFollower::Traverse(
-    AbortFollower* aFollower, nsCycleCollectionTraversalCallback& cb) {
-  ImplCycleCollectionTraverse(cb, aFollower->mFollowingSignal,
-                              "mFollowingSignal", 0);
-}
-
 }  // namespace mozilla::dom
--- a/dom/base/Navigator.cpp
+++ b/dom/base/Navigator.cpp
@@ -869,19 +869,43 @@ uint32_t Navigator::MaxTouchPoints(Calle
 //*****************************************************************************
 //    Navigator::nsIDOMClientInformation
 //*****************************************************************************
 
 // This list should be kept up-to-date with the spec:
 // https://html.spec.whatwg.org/multipage/system-state.html#custom-handlers
 // If you change this list, please also update the copy in E10SUtils.jsm.
 static const char* const kSafeSchemes[] = {
-    "bitcoin", "geo", "im",   "irc",  "ircs",        "magnet", "mailto",
-    "matrix",  "mms", "news", "nntp", "openpgp4fpr", "sip",    "sms",
-    "smsto",   "ssh", "tel",  "urn",  "webcal",      "wtai",   "xmpp"};
+    // clang-format off
+    "bitcoin",
+    "ftp",
+    "ftps",
+    "geo",
+    "im",
+    "irc",
+    "ircs",
+    "magnet",
+    "mailto",
+    "matrix",
+    "mms",
+    "news",
+    "nntp",
+    "openpgp4fpr",
+    "sftp",
+    "sip",
+    "sms",
+    "smsto",
+    "ssh",
+    "tel",
+    "urn",
+    "webcal",
+    "wtai",
+    "xmpp",
+    // clang-format on
+};
 
 void Navigator::CheckProtocolHandlerAllowed(const nsAString& aScheme,
                                             nsIURI* aHandlerURI,
                                             nsIURI* aDocumentURI,
                                             ErrorResult& aRv) {
   auto raisePermissionDeniedHandler = [&] {
     nsAutoCString spec;
     aHandlerURI->GetSpec(spec);
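
Note: the reformatted list above also adds ftp, ftps and sftp to the schemes a page may claim with registerProtocolHandler. A minimal sketch of what a page could now do (the handler URL is a placeholder, not part of this changeset):

    // Register a web handler for one of the newly allowed schemes.
    navigator.registerProtocolHandler(
      "ftp",
      "https://example.com/open?url=%s"
    );
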
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -423,16 +423,26 @@ nsDOMWindowUtils::GetViewportFitInfo(nsA
     aViewportFit.AssignLiteral("cover");
   } else {
     aViewportFit.AssignLiteral("auto");
   }
   return NS_OK;
 }
 
 NS_IMETHODIMP
+nsDOMWindowUtils::SetMousewheelAutodir(Element* aElement, bool aEnabled,
+                                       bool aHonourRoot) {
+  aElement->SetProperty(nsGkAtoms::forceMousewheelAutodir,
+                        reinterpret_cast<void*>(aEnabled));
+  aElement->SetProperty(nsGkAtoms::forceMousewheelAutodirHonourRoot,
+                        reinterpret_cast<void*>(aHonourRoot));
+  return NS_OK;
+}
+
+NS_IMETHODIMP
 nsDOMWindowUtils::SetDisplayPortForElement(float aXPx, float aYPx,
                                            float aWidthPx, float aHeightPx,
                                            Element* aElement,
                                            uint32_t aPriority) {
   PresShell* presShell = GetPresShell();
   if (!presShell) {
     return NS_ERROR_FAILURE;
   }
--- a/dom/events/EventListenerManager.cpp
+++ b/dom/events/EventListenerManager.cpp
@@ -2139,17 +2139,16 @@ EventListenerManager::ListenerSignalFoll
 NS_IMPL_CYCLE_COLLECTION_CLASS(EventListenerManager::ListenerSignalFollower)
 
 NS_IMPL_CYCLE_COLLECTING_ADDREF(EventListenerManager::ListenerSignalFollower)
 NS_IMPL_CYCLE_COLLECTING_RELEASE(EventListenerManager::ListenerSignalFollower)
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(
     EventListenerManager::ListenerSignalFollower)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(
     EventListenerManager::ListenerSignalFollower)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
   AbortFollower::Unlink(static_cast<AbortFollower*>(tmp));
   tmp->mListenerManager = nullptr;
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
--- a/dom/events/test/window_wheel_default_action.html
+++ b/dom/events/test/window_wheel_default_action.html
@@ -2395,27 +2395,123 @@ function doTestAutoDirScroll2(aSettings,
       event: { deltaMode: WheelEvent.DOM_DELTA_PAGE,
                deltaX: -0.5, deltaY: 0.0, deltaZ: 0.0,
                lineOrPageDeltaX: 0, lineOrPageDeltaY: 0, isMomentum: false,
                expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
                shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
       adjusted: true,
       expected: kAdjustedForLeft.result,
       cleanup (cb) {
+                 SpecialPowers.pushPrefEnv({"set":
+                                               [["mousewheel.autodir.enabled",
+                                                 false]]},
+                                            cb);
+               } },
+    // Tests:   Test that autodir scrolling can be force-enabled using windowUtils.
+    //          This only tests vertical wheel scrolls being adjusted to be
+    //          horizontal, rather than re-testing all autodir behaviours just for
+    //          this way of enabling it.
+    // Results: Vertical wheel scrolls are adjusted to be horizontal whereas the
+    //          horizontal wheel scrolls are unadjusted.
+    // Reason:  Auto-dir adjustment applies to a target if the target overflows
+    //          in only one direction and the direction is orthogonal to the
+    //          wheel and deltaZ is zero.
+    { description: "force-enabled auto-dir scroll to " + kAdjustedForDown.desc +
+                   "(originally bottom) by pixel scroll even if lineOrPageDelta is 0, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 0.0, deltaY: 8.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: true,
+      expected: kAdjustedForDown.result,
+      prepare (cb) {
+                 gScrollableElement.style.overflowX = "auto";
+                 gScrollableElement.style.overflowY = "hidden";
+                 resetScrollPosition(gScrollableElement);
+                 winUtils.setMousewheelAutodir(gScrollableElement, true, kHonoursRoot);
+                 cb();
+               } },
+    { description: "force-enabled auto-dir scroll to " + kAdjustedForDown.desc +
+                   "(originally bottom) by pixel scroll when lineOrPageDelta is 1, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 0.0, deltaY: 8.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: 1, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: true,
+      expected: kAdjustedForDown.result },
+    { description: "force-enabled auto-dir scroll to " + kAdjustedForUp.desc +
+                   "(originally top) by pixel scroll even if lineOrPageDelta is 0, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 0.0, deltaY: -8.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: true,
+      expected: kAdjustedForUp.result },
+    { description: "force-enabled auto-dir scroll to " + kAdjustedForUp.desc +
+                   "(originally top) by pixel scroll when lineOrPageDelta is -1, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 0.0, deltaY: -8.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: -1, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: true,
+      expected: kAdjustedForUp.result },
+    { description: "force-enabled auto-dir scroll to right by pixel scroll even if lineOrPageDelta is 0, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 8.0, deltaY: 0.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: false,
+      expected: kScrollRight },
+    { description: "force-enabled auto-dir scroll to right by pixel scroll when lineOrPageDelta is 1, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: 8.0, deltaY: 0.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 1, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: false,
+      expected: kScrollRight },
+    { description: "force-enabled auto-dir scroll to left by pixel scroll even if lineOrPageDelta is 0, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: -8.0, deltaY: 0.0, deltaZ: 0.0,
+               lineOrPageDeltaX: 0, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: false,
+      expected: kScrollLeft },
+    { description: "force-enabled auto-dir scroll to left by pixel scroll when lineOrPageDelta is -1, " +
+                   "no vertical scrollbar",
+      event: { deltaMode: WheelEvent.DOM_DELTA_PIXEL,
+               deltaX: -8.0, deltaY: 0.0, deltaZ: 0.0,
+               lineOrPageDeltaX: -1, lineOrPageDeltaY: 0, isMomentum: false,
+               expectedOverflowDeltaX: 0, expectedOverflowDeltaY: 0,
+               shiftKey: false, ctrlKey: false, altKey: false, metaKey: false, osKey: false },
+      adjusted: false,
+      expected: kScrollLeft,
+      cleanup (cb) {
                  gScrollableElement.style.position = "static";
                  gScrollableElement.style.top = "auto";
                  gScrollableElement.style.left = "auto";
                  gScrollableElement.style.overflow = "auto";
                  Object.assign(document.body.style, kOldStyleForRoot);
                  Object.assign(gScrollableElement.style, kOldStyleForTarget);
-                 SpecialPowers.pushPrefEnv({"set":
-                                               [["mousewheel.autodir.enabled",
-                                                 false]]},
-                                            cb);
-               } },
+                 winUtils.setMousewheelAutodir(gScrollableElement, false, false);
+                 cb();
+              } },
   ];
 
   let styleDescForRoot = "";
   let styleDescForTarget = "";
   Object.keys(kStyleForRoot).forEach(function(property)
   {
     kOldStyleForRoot[property] = document.body.style[property];
     document.body.style[property] = kStyleForRoot[property];
--- a/dom/fetch/Fetch.cpp
+++ b/dom/fetch/Fetch.cpp
@@ -170,17 +170,16 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(WorkerSig
 NS_IMPL_CYCLE_COLLECTING_ADDREF(WorkerSignalFollower)
 NS_IMPL_CYCLE_COLLECTING_RELEASE(WorkerSignalFollower)
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(WorkerSignalFollower)
   AbortFollower::Unlink(static_cast<AbortFollower*>(tmp));
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(WorkerSignalFollower)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(WorkerSignalFollower)
   NS_INTERFACE_MAP_ENTRY(nsISupports)
 NS_INTERFACE_MAP_END
 
 // This class orchestrates the proxying of AbortSignal operations between the
 // main thread and a worker thread.
@@ -1811,17 +1810,16 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_IN
 #ifdef MOZ_DOM_STREAMS
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mReadableStreamBody)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mReadableStreamReader)
 #endif
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(EmptyBody,
                                                   FetchBody<EmptyBody>)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwner)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAbortSignalImpl)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mFetchStreamReader)
 #ifdef MOZ_DOM_STREAMS
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamBody)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamReader)
 #endif
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
--- a/dom/fetch/FetchDriver.cpp
+++ b/dom/fetch/FetchDriver.cpp
@@ -904,16 +904,17 @@ void FetchDriver::FailWithNetworkError(n
 
   // mObserver could be null after OnResponseAvailable().
   if (mObserver) {
     mObserver->OnResponseEnd(FetchDriverObserver::eByNetworking);
     mObserver = nullptr;
   }
 
   mChannel = nullptr;
+  Unfollow();
 }
 
 NS_IMETHODIMP
 FetchDriver::OnStartRequest(nsIRequest* aRequest) {
   AssertIsOnMainThread();
 
   // Note, this can be called multiple times if we are doing an opaqueredirect.
   // In that case we will get a simulated OnStartRequest() and then the real
@@ -1433,16 +1434,17 @@ void FetchDriver::FinishOnStopRequest(
   }
 
   if (mObserver) {
     mObserver->OnResponseEnd(FetchDriverObserver::eByNetworking);
     mObserver = nullptr;
   }
 
   mChannel = nullptr;
+  Unfollow();
 }
 
 NS_IMETHODIMP
 FetchDriver::AsyncOnChannelRedirect(nsIChannel* aOldChannel,
                                     nsIChannel* aNewChannel, uint32_t aFlags,
                                     nsIAsyncVerifyRedirectCallback* aCallback) {
   nsCOMPtr<nsIHttpChannel> oldHttpChannel = do_QueryInterface(aOldChannel);
   nsCOMPtr<nsIHttpChannel> newHttpChannel = do_QueryInterface(aNewChannel);
--- a/dom/fetch/FetchObserver.cpp
+++ b/dom/fetch/FetchObserver.cpp
@@ -9,17 +9,16 @@
 #include "mozilla/dom/EventBinding.h"
 
 namespace mozilla::dom {
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(FetchObserver)
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(FetchObserver,
                                                   DOMEventTargetHelper)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(FetchObserver,
                                                 DOMEventTargetHelper)
   AbortFollower::Unlink(static_cast<AbortFollower*>(tmp));
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(FetchObserver)
--- a/dom/fetch/Request.cpp
+++ b/dom/fetch/Request.cpp
@@ -48,17 +48,16 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(Request, FetchBody<Request>)
 #ifdef MOZ_DOM_STREAMS
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamBody)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamReader)
 #endif
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwner)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mHeaders)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSignal)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN_INHERITED(Request, FetchBody<Request>)
 #ifndef MOZ_DOM_STREAMS
   NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mReadableStreamBody)
   MOZ_DIAGNOSTIC_ASSERT(!tmp->mReadableStreamReader);
   NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mReadableStreamReader)
 #endif
--- a/dom/fetch/Response.cpp
+++ b/dom/fetch/Response.cpp
@@ -51,17 +51,16 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_IN
 #else
   tmp->mReadableStreamBody = nullptr;
   tmp->mReadableStreamReader = nullptr;
 #endif
   NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(Response, FetchBody<Response>)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwner)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mHeaders)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSignalImpl)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mFetchStreamReader)
 #ifdef MOZ_DOM_STREAMS
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamBody)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mReadableStreamReader)
 #endif
--- a/dom/interfaces/base/nsIDOMWindowUtils.idl
+++ b/dom/interfaces/base/nsIDOMWindowUtils.idl
@@ -122,16 +122,27 @@ interface nsIDOMWindowUtils : nsISupport
   AString getViewportFitInfo();
 
   /**
    * Information about the window size in device pixels.
    */
   void getContentViewerSize(out uint32_t aDisplayWidth, out uint32_t aDisplayHeight);
 
   /**
+   * For any scrollable element, this allows you to override the default
+   * scroll behaviour and force autodir (which allows a mousewheel to
+   * horizontally scroll regions that only scroll on that one axis).
+   *
+   * See the documentation for mousewheel.autodir.enabled and
+   * mousewheel.autodir.honourroot for a more thorough explanation of
+   * what these behaviours do.
+   */
+  void setMousewheelAutodir(in Element aElement, in boolean aEnabled, in boolean aHonourRoot);
+
+  /**
    * For any scrollable element, this allows you to override the
    * visible region and draw more than what is visible, which is
    * useful for asynchronous drawing. The "displayport" will be
    * <xPx, yPx, widthPx, heightPx> in units of CSS pixels,
    * regardless of the size of the enclosing container.  This
    * will *not* trigger reflow.
    *
    * For the root scroll area, pass in the root document element.
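
Note: as the wheel-event test further down shows, the new chrome-only call pairs an enable with an explicit reset. A minimal sketch from test code, assuming `scroller` is the target element and that the window utils object is obtained via SpecialPowers:

    let utils = SpecialPowers.getDOMWindowUtils(window);
    // Force autodir for this one scroller, honouring the root's direction.
    utils.setMousewheelAutodir(scroller, true, /* aHonourRoot */ true);
    // ... dispatch wheel events at `scroller` ...
    // Reset so later tests see the default, pref-controlled behaviour.
    utils.setMousewheelAutodir(scroller, false, false);
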
--- a/dom/security/test/csp/mochitest.ini
+++ b/dom/security/test/csp/mochitest.ini
@@ -247,16 +247,17 @@ support-files =
   Ahem.ttf
   file_independent_iframe_csp.html
 prefs =
   security.mixed_content.upgrade_display_content=false
 
 [test_base-uri.html]
 [test_blob_data_schemes.html]
 [test_blob_uri_blocks_modals.html]
+skip-if = xorigin && os == "linux" && (asan || tsan) # alert should be blocked by CSP - got false, expected true
 [test_connect-src.html]
 [test_CSP.html]
 [test_bug1452037.html]
 [test_allow_https_schemes.html]
 [test_bug663567.html]
 skip-if =
   fission && xorigin && debug && os == "win"  # Bug 1716406 - New fission platform triage
 [test_bug802872.html]
--- a/dom/webauthn/WebAuthnManager.cpp
+++ b/dom/webauthn/WebAuthnManager.cpp
@@ -48,17 +48,16 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_IN
                                                 WebAuthnManagerBase)
   AbortFollower::Unlink(static_cast<AbortFollower*>(tmp));
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mTransaction)
   tmp->mTransaction.reset();
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(WebAuthnManager,
                                                   WebAuthnManagerBase)
-  AbortFollower::Traverse(static_cast<AbortFollower*>(tmp), cb);
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTransaction)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 /***********************************************************************
  * Utility Functions
  **********************************************************************/
 
 static nsresult AssembleClientData(
--- a/dom/workers/test/mochitest.ini
+++ b/dom/workers/test/mochitest.ini
@@ -203,10 +203,12 @@ scheme=https
 [test_navigator_workers_hardwareConcurrency.html]
 [test_bug1278777.html]
 [test_setTimeoutWith0.html]
 [test_bug1301094.html]
 [test_subworkers_suspended.html]
 [test_bug1317725.html]
 support-files = test_bug1317725.js
 [test_sharedworker_event_listener_leaks.html]
-skip-if = (bits == 64 && os == 'linux' && asan && !debug) # Disabled on Linux64 opt asan, bug 1493563
+skip-if =
+  (bits == 64 && os == 'linux' && asan && !debug) # Disabled on Linux64 opt asan, bug 1493563
+  os == "win" && debug && xorigin # high frequency intermittent
 [test_fileReaderSync_when_closing.html]
--- a/editor/libeditor/tests/mochitest.ini
+++ b/editor/libeditor/tests/mochitest.ini
@@ -166,16 +166,18 @@ skip-if = os != "win"
 [test_bug1094000.html]
 [test_bug1102906.html]
 skip-if = os == 'android'
 [test_bug1109465.html]
 [test_bug1130651.html]
 [test_bug1140105.html]
 [test_bug1140617.html]
 [test_bug1151186.html]
+skip-if =
+  os == "win" && ccov && xorigin # high frequency intermittent
 [test_bug1153237.html]
 [test_bug1162952.html]
 [test_bug1181130-1.html]
 [test_bug1181130-2.html]
 [test_bug1186799.html]
 [test_bug1230473.html]
 [test_bug1247483.html]
 skip-if = headless
--- a/gfx/config/gfxConfigManager.cpp
+++ b/gfx/config/gfxConfigManager.cpp
@@ -30,16 +30,18 @@ void gfxConfigManager::Init() {
 
   EmplaceUserPref("gfx.webrender.compositor", mWrCompositorEnabled);
   mWrForceEnabled = gfxPlatform::WebRenderPrefEnabled();
   mWrSoftwareForceEnabled = StaticPrefs::gfx_webrender_software_AtStartup();
   mWrCompositorForceEnabled =
       StaticPrefs::gfx_webrender_compositor_force_enabled_AtStartup();
   mGPUProcessAllowSoftware =
       StaticPrefs::layers_gpu_process_allow_software_AtStartup();
+  mWrForcePartialPresent =
+      StaticPrefs::gfx_webrender_force_partial_present_AtStartup();
   mWrPartialPresent =
       StaticPrefs::gfx_webrender_max_partial_present_rects_AtStartup() > 0;
   EmplaceUserPref(StaticPrefs::GetPrefName_gfx_webrender_program_binary_disk(),
                   mWrShaderCache);
   mWrOptimizedShaders =
       StaticPrefs::gfx_webrender_use_optimized_shaders_AtStartup();
 #ifdef XP_WIN
   mWrForceAngle = StaticPrefs::gfx_webrender_force_angle_AtStartup();
@@ -325,33 +327,24 @@ void gfxConfigManager::ConfigureWebRende
   if (!mFeatureWrDComp->IsEnabled() && mWrCompositorDCompRequired) {
     mFeatureWrCompositor->ForceDisable(FeatureStatus::Unavailable,
                                        "No DirectComposition usage",
                                        mFeatureWrDComp->GetFailureId());
   }
 
   // Initialize WebRender partial present config.
   // Partial present is used only when WebRender compositor is not used.
-  if (mWrPartialPresent) {
-    if (mFeatureWr->IsEnabled() || mFeatureWrSoftware->IsEnabled()) {
-      mFeatureWrPartial->EnableByDefault();
+  mFeatureWrPartial->SetDefault(mWrPartialPresent, FeatureStatus::Disabled,
+                                "User disabled via pref");
+  if (mWrForcePartialPresent) {
+    mFeatureWrPartial->UserForceEnable("Force enabled by pref");
+  }
 
-      nsString adapter;
-      mGfxInfo->GetAdapterDeviceID(adapter);
-      // Block partial present on some devices due to rendering issues.
-      // On Mali-Txxx due to bug 1680087 and bug 1707815.
-      // On Adreno 3xx GPUs due to bug 1695771.
-      if (adapter.Find("Mali-T", /*ignoreCase*/ true) >= 0 ||
-          adapter.Find("Adreno (TM) 3", /*ignoreCase*/ true) >= 0) {
-        mFeatureWrPartial->Disable(
-            FeatureStatus::Blocked, "Partial present blocked",
-            "FEATURE_FAILURE_PARTIAL_PRESENT_BLOCKED"_ns);
-      }
-    }
-  }
+  ConfigureFromBlocklist(nsIGfxInfo::FEATURE_WEBRENDER_PARTIAL_PRESENT,
+                         mFeatureWrPartial);
 
   mFeatureWrShaderCache->SetDefaultFromPref(
       StaticPrefs::GetPrefName_gfx_webrender_program_binary_disk(), true,
       StaticPrefs::GetPrefDefault_gfx_webrender_program_binary_disk(),
       mWrShaderCache);
   ConfigureFromBlocklist(nsIGfxInfo::FEATURE_WEBRENDER_SHADER_CACHE,
                          mFeatureWrShaderCache);
   if (!mFeatureWr->IsEnabled()) {
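
For readers unfamiliar with the gfxFeature pattern used above, the new partial-present wiring is: seed the feature from the pref, let gfx.webrender.force-partial-present force it on, then apply the blocklist, with the force taking precedence (see the WebRenderForcePartialBlocked test further below). The following is a minimal Python sketch of that precedence; FeatureState here is a hypothetical stand-in, not the real gfx::FeatureState API.

    class FeatureState:
        """Toy stand-in for gfx::FeatureState, modelling only the calls used above."""

        def __init__(self):
            self.enabled = False
            self.user_forced = False
            self.reason = ""

        def set_default(self, enabled, disable_reason):
            self.enabled = enabled
            self.reason = "" if enabled else disable_reason

        def user_force_enable(self, reason):
            self.enabled = True
            self.user_forced = True
            self.reason = reason

        def configure_from_blocklist(self, blocked, reason):
            # Assumption: an ordinary blocklist entry does not override a user force.
            if blocked and not self.user_forced:
                self.enabled = False
                self.reason = reason

    # Example inputs standing in for the static prefs and nsIGfxInfo status.
    max_partial_present_rects = 1
    force_partial_present = False
    blocklisted = True

    partial = FeatureState()
    partial.set_default(max_partial_present_rects > 0, "User disabled via pref")
    if force_partial_present:
        partial.user_force_enable("Force enabled by pref")
    partial.configure_from_blocklist(blocklisted, "Partial present blocked")
    print(partial.enabled)  # False here; True if force_partial_present were set
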
--- a/gfx/config/gfxConfigManager.h
+++ b/gfx/config/gfxConfigManager.h
@@ -33,16 +33,17 @@ class gfxConfigManager {
         mFeatureGPUProcess(nullptr),
         mWrForceEnabled(false),
         mWrSoftwareForceEnabled(false),
         mWrCompositorForceEnabled(false),
         mWrForceAngle(false),
         mWrForceAngleNoGPUProcess(false),
         mWrDCompWinEnabled(false),
         mWrCompositorDCompRequired(false),
+        mWrForcePartialPresent(false),
         mWrPartialPresent(false),
         mWrOptimizedShaders(false),
         mGPUProcessAllowSoftware(false),
         mWrEnvForceEnabled(false),
         mScaledResolution(false),
         mDisableHwCompositingNoWr(false),
         mIsNightly(false),
         mIsEarlyBetaOrEarlier(false),
@@ -81,16 +82,17 @@ class gfxConfigManager {
   Maybe<bool> mWrCompositorEnabled;
   bool mWrForceEnabled;
   bool mWrSoftwareForceEnabled;
   bool mWrCompositorForceEnabled;
   bool mWrForceAngle;
   bool mWrForceAngleNoGPUProcess;
   bool mWrDCompWinEnabled;
   bool mWrCompositorDCompRequired;
+  bool mWrForcePartialPresent;
   bool mWrPartialPresent;
   Maybe<bool> mWrShaderCache;
   bool mWrOptimizedShaders;
   bool mGPUProcessAllowSoftware;
 
   /**
    * Environment variables
    */
--- a/gfx/layers/FrameMetrics.h
+++ b/gfx/layers/FrameMetrics.h
@@ -801,16 +801,18 @@ struct ScrollMetadata {
         mHasScrollgrab(false),
         mIsLayersIdRoot(false),
         mIsAutoDirRootContentRTL(false),
         mForceDisableApz(false),
         mResolutionUpdated(false),
         mIsRDMTouchSimulationActive(false),
         mDidContentGetPainted(true),
         mPrefersReducedMotion(false),
+        mForceMousewheelAutodir(false),
+        mForceMousewheelAutodirHonourRoot(false),
         mOverscrollBehavior() {}
 
   bool operator==(const ScrollMetadata& aOther) const {
     return mMetrics == aOther.mMetrics && mSnapInfo == aOther.mSnapInfo &&
            mScrollParentId == aOther.mScrollParentId &&
            mBackgroundColor == aOther.mBackgroundColor &&
            // don't compare mContentDescription
            mLineScrollAmount == aOther.mLineScrollAmount &&
@@ -818,16 +820,19 @@ struct ScrollMetadata {
            mHasScrollgrab == aOther.mHasScrollgrab &&
            mIsLayersIdRoot == aOther.mIsLayersIdRoot &&
            mIsAutoDirRootContentRTL == aOther.mIsAutoDirRootContentRTL &&
            mForceDisableApz == aOther.mForceDisableApz &&
            mResolutionUpdated == aOther.mResolutionUpdated &&
            mIsRDMTouchSimulationActive == aOther.mIsRDMTouchSimulationActive &&
            mDidContentGetPainted == aOther.mDidContentGetPainted &&
            mPrefersReducedMotion == aOther.mPrefersReducedMotion &&
+           mForceMousewheelAutodir == aOther.mForceMousewheelAutodir &&
+           mForceMousewheelAutodirHonourRoot ==
+               aOther.mForceMousewheelAutodirHonourRoot &&
            mDisregardedDirection == aOther.mDisregardedDirection &&
            mOverscrollBehavior == aOther.mOverscrollBehavior &&
            mScrollUpdates == aOther.mScrollUpdates;
   }
 
   bool operator!=(const ScrollMetadata& aOther) const {
     return !operator==(aOther);
   }
@@ -892,16 +897,28 @@ struct ScrollMetadata {
   }
   bool GetIsRDMTouchSimulationActive() const {
     return mIsRDMTouchSimulationActive;
   }
 
   void SetPrefersReducedMotion(bool aValue) { mPrefersReducedMotion = aValue; }
   bool PrefersReducedMotion() const { return mPrefersReducedMotion; }
 
+  void SetForceMousewheelAutodir(bool aValue) {
+    mForceMousewheelAutodir = aValue;
+  }
+  bool ForceMousewheelAutodir() const { return mForceMousewheelAutodir; }
+
+  void SetForceMousewheelAutodirHonourRoot(bool aValue) {
+    mForceMousewheelAutodirHonourRoot = aValue;
+  }
+  bool ForceMousewheelAutodirHonourRoot() const {
+    return mForceMousewheelAutodirHonourRoot;
+  }
+
   bool DidContentGetPainted() const { return mDidContentGetPainted; }
 
  private:
   // For use in IPC only
   void SetDidContentGetPainted(bool aValue) { mDidContentGetPainted = aValue; }
 
  public:
   // For more details about the concept of a disregarded direction, refer to the
@@ -1002,16 +1019,21 @@ struct ScrollMetadata {
   // can use the correct transforms.
   bool mDidContentGetPainted : 1;
 
   // Whether the user has requested the system minimize the amount of
   // non-essential motion it uses (see the prefers-reduced-motion
   // media query).
   bool mPrefersReducedMotion : 1;
 
+  // Whether privileged code has requested that autodir behaviour be
+  // enabled for the scroll frame.
+  bool mForceMousewheelAutodir : 1;
+  bool mForceMousewheelAutodirHonourRoot : 1;
+
   // The disregarded direction means the direction which is disregarded anyway,
   // even if the scroll frame overflows in that direction and the direction is
   // specified as scrollable. This could happen in some scenarios, for instance,
   // a single-line text control frame should disregard wheel scroll in
   // its block-flow direction even if it overflows in that direction.
   Maybe<ScrollDirection> mDisregardedDirection;
 
   // The overscroll behavior for this scroll frame.
--- a/gfx/layers/apz/src/AsyncPanZoomController.cpp
+++ b/gfx/layers/apz/src/AsyncPanZoomController.cpp
@@ -2123,22 +2123,23 @@ bool AsyncPanZoomController::CanScroll(c
   if (SCROLLWHEEL_INPUT == aEvent.mInputType) {
     const ScrollWheelInput& scrollWheelInput = aEvent.AsScrollWheelInput();
     // If it's a wheel scroll, we first check if it is an auto-dir scroll.
     // 1. For an auto-dir scroll, check if its delta should be adjusted, if it
     //    is, then we can conclude it must be scrollable; otherwise, fall back
     //    to checking if it is scrollable without adjusting its delta.
     // 2. For a non-auto-dir scroll, simply check if it is scrollable without
     //    adjusting its delta.
-    if (scrollWheelInput.IsAutoDir()) {
+    if (scrollWheelInput.IsAutoDir(mScrollMetadata.ForceMousewheelAutodir())) {
       RecursiveMutexAutoLock lock(mRecursiveMutex);
       auto deltaX = scrollWheelInput.mDeltaX;
       auto deltaY = scrollWheelInput.mDeltaY;
       bool isRTL =
-          IsContentOfHonouredTargetRightToLeft(scrollWheelInput.HonoursRoot());
+          IsContentOfHonouredTargetRightToLeft(scrollWheelInput.HonoursRoot(
+              mScrollMetadata.ForceMousewheelAutodirHonourRoot()));
       APZAutoDirWheelDeltaAdjuster adjuster(deltaX, deltaY, mX, mY, isRTL);
       if (adjuster.ShouldBeAdjusted()) {
         // If we detect that the delta values should be adjusted for an auto-dir
         // wheel scroll, then it is impossible to be an unscrollable scroll.
         return true;
       }
     }
     return CanScrollWithWheel(delta);
@@ -2283,21 +2284,22 @@ nsEventStatus AsyncPanZoomController::On
   // getting the values, we need to check if it is an auto-dir scroll and if it
   // should be adjusted, if both answers are yes, let's adjust X and Y values
   // first, and then get the delta values in parent-layer pixels based on the
   // adjusted values.
   bool adjustedByAutoDir = false;
   auto deltaX = aEvent.mDeltaX;
   auto deltaY = aEvent.mDeltaY;
   ParentLayerPoint delta;
-  if (aEvent.IsAutoDir()) {
+  if (aEvent.IsAutoDir(mScrollMetadata.ForceMousewheelAutodir())) {
     // It's an auto-dir scroll, so check if its delta should be adjusted, if so,
     // adjust it.
     RecursiveMutexAutoLock lock(mRecursiveMutex);
-    bool isRTL = IsContentOfHonouredTargetRightToLeft(aEvent.HonoursRoot());
+    bool isRTL = IsContentOfHonouredTargetRightToLeft(
+        aEvent.HonoursRoot(mScrollMetadata.ForceMousewheelAutodirHonourRoot()));
     APZAutoDirWheelDeltaAdjuster adjuster(deltaX, deltaY, mX, mY, isRTL);
     if (adjuster.ShouldBeAdjusted()) {
       adjuster.Adjust();
       adjustedByAutoDir = true;
     }
   }
   // Ensure the calls to GetScrollWheelDelta are outside the mRecursiveMutex
   // lock since these calls may acquire the APZ tree lock. Holding
@@ -5043,16 +5045,20 @@ void AsyncPanZoomController::NotifyLayer
     Metrics().SetHasNonZeroDisplayPortMargins(
         aLayerMetrics.HasNonZeroDisplayPortMargins());
     Metrics().SetMinimalDisplayPort(aLayerMetrics.IsMinimalDisplayPort());
     mScrollMetadata.SetForceDisableApz(aScrollMetadata.IsApzForceDisabled());
     mScrollMetadata.SetIsRDMTouchSimulationActive(
         aScrollMetadata.GetIsRDMTouchSimulationActive());
     mScrollMetadata.SetPrefersReducedMotion(
         aScrollMetadata.PrefersReducedMotion());
+    mScrollMetadata.SetForceMousewheelAutodir(
+        aScrollMetadata.ForceMousewheelAutodir());
+    mScrollMetadata.SetForceMousewheelAutodirHonourRoot(
+        aScrollMetadata.ForceMousewheelAutodirHonourRoot());
     mScrollMetadata.SetDisregardedDirection(
         aScrollMetadata.GetDisregardedDirection());
     mScrollMetadata.SetOverscrollBehavior(
         aScrollMetadata.GetOverscrollBehavior());
   }
 
   bool scrollOffsetUpdated = false;
   bool smoothScrollRequested = false;
--- a/gfx/layers/ipc/LayersMessageUtils.h
+++ b/gfx/layers/ipc/LayersMessageUtils.h
@@ -429,16 +429,18 @@ struct ParamTraits<mozilla::layers::Scro
     WriteParam(aMsg, aParam.mHasScrollgrab);
     WriteParam(aMsg, aParam.mIsLayersIdRoot);
     WriteParam(aMsg, aParam.mIsAutoDirRootContentRTL);
     WriteParam(aMsg, aParam.mForceDisableApz);
     WriteParam(aMsg, aParam.mResolutionUpdated);
     WriteParam(aMsg, aParam.mIsRDMTouchSimulationActive);
     WriteParam(aMsg, aParam.mDidContentGetPainted);
     WriteParam(aMsg, aParam.mPrefersReducedMotion);
+    WriteParam(aMsg, aParam.mForceMousewheelAutodir);
+    WriteParam(aMsg, aParam.mForceMousewheelAutodirHonourRoot);
     WriteParam(aMsg, aParam.mDisregardedDirection);
     WriteParam(aMsg, aParam.mOverscrollBehavior);
     WriteParam(aMsg, aParam.mScrollUpdates);
   }
 
   static bool ReadContentDescription(const Message* aMsg, PickleIterator* aIter,
                                      paramType* aResult) {
     nsCString str;
@@ -469,16 +471,21 @@ struct ParamTraits<mozilla::layers::Scro
             ReadBoolForBitfield(aMsg, aIter, aResult,
                                 &paramType::SetResolutionUpdated) &&
             ReadBoolForBitfield(aMsg, aIter, aResult,
                                 &paramType::SetIsRDMTouchSimulationActive)) &&
            ReadBoolForBitfield(aMsg, aIter, aResult,
                                &paramType::SetDidContentGetPainted) &&
            ReadBoolForBitfield(aMsg, aIter, aResult,
                                &paramType::SetPrefersReducedMotion) &&
+           ReadBoolForBitfield(aMsg, aIter, aResult,
+                               &paramType::SetForceMousewheelAutodir) &&
+           ReadBoolForBitfield(
+               aMsg, aIter, aResult,
+               &paramType::SetForceMousewheelAutodirHonourRoot) &&
            ReadParam(aMsg, aIter, &aResult->mDisregardedDirection) &&
            ReadParam(aMsg, aIter, &aResult->mOverscrollBehavior) &&
            ReadParam(aMsg, aIter, &aResult->mScrollUpdates);
   }
 };
 
 template <>
 struct ParamTraits<mozilla::layers::TextureFactoryIdentifier> {
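
As context for the ParamTraits change: the bitfield members are written in a fixed order and must be read back with ReadBoolForBitfield in exactly the same order, so the two new flags are inserted at matching positions on the write and read sides. A toy Python illustration of that order dependence (field names invented for the example):

    FLAG_ORDER = [
        "prefers_reduced_motion",
        "force_mousewheel_autodir",
        "force_mousewheel_autodir_honour_root",
    ]

    def write_flags(flags):
        # Serialize booleans in the agreed order, like the WriteParam calls above.
        return [bool(flags.get(name, False)) for name in FLAG_ORDER]

    def read_flags(wire):
        # Deserialization consumes them in the same order, like ReadBoolForBitfield.
        return dict(zip(FLAG_ORDER, wire))

    roundtrip = read_flags(write_flags({"force_mousewheel_autodir": True}))
    assert roundtrip["force_mousewheel_autodir"] is True
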
--- a/gfx/tests/gtest/TestConfigManager.cpp
+++ b/gfx/tests/gtest/TestConfigManager.cpp
@@ -18,28 +18,30 @@ namespace gfx {
 class MockGfxInfo final : public nsIGfxInfo {
  public:
   NS_DECL_THREADSAFE_ISUPPORTS
 
   int32_t mStatusWr;
   int32_t mStatusWrCompositor;
   int32_t mStatusWrShaderCache;
   int32_t mStatusWrOptimizedShaders;
+  int32_t mStatusWrPartialPresent;
   int32_t mMaxRefreshRate;
   bool mHasMixedRefreshRate;
   Maybe<bool> mHasBattery;
   const char* mVendorId;
   const char* mDeviceId;
 
   // Default allows WebRender + compositor, and is desktop NVIDIA.
   MockGfxInfo()
       : mStatusWr(nsIGfxInfo::FEATURE_ALLOW_ALWAYS),
         mStatusWrCompositor(nsIGfxInfo::FEATURE_STATUS_OK),
         mStatusWrShaderCache(nsIGfxInfo::FEATURE_STATUS_OK),
         mStatusWrOptimizedShaders(nsIGfxInfo::FEATURE_STATUS_OK),
+        mStatusWrPartialPresent(nsIGfxInfo::FEATURE_STATUS_OK),
         mMaxRefreshRate(-1),
         mHasMixedRefreshRate(false),
         mHasBattery(Some(false)),
         mVendorId("0x10de"),
         mDeviceId("") {}
 
   NS_IMETHOD GetFeatureStatus(int32_t aFeature, nsACString& aFailureId,
                               int32_t* _retval) override {
@@ -51,16 +53,19 @@ class MockGfxInfo final : public nsIGfxI
         *_retval = mStatusWrCompositor;
         break;
       case nsIGfxInfo::FEATURE_WEBRENDER_SHADER_CACHE:
         *_retval = mStatusWrShaderCache;
         break;
       case nsIGfxInfo::FEATURE_WEBRENDER_OPTIMIZED_SHADERS:
         *_retval = mStatusWrOptimizedShaders;
         break;
+      case nsIGfxInfo::FEATURE_WEBRENDER_PARTIAL_PRESENT:
+        *_retval = mStatusWrPartialPresent;
+        break;
       default:
         return NS_ERROR_NOT_IMPLEMENTED;
     }
     return NS_OK;
   }
 
   NS_IMETHOD GetHasBattery(bool* aHasBattery) override {
     if (mHasBattery.isNothing()) {
@@ -370,24 +375,32 @@ TEST_F(GfxConfigManager, WebRenderNoPart
   EXPECT_TRUE(mFeatures.mWrShaderCache.IsEnabled());
   EXPECT_TRUE(mFeatures.mWrOptimizedShaders.IsEnabled());
   EXPECT_TRUE(mFeatures.mHwCompositing.IsEnabled());
   EXPECT_TRUE(mFeatures.mGPUProcess.IsEnabled());
   EXPECT_TRUE(mFeatures.mD3D11HwAngle.IsEnabled());
   EXPECT_TRUE(mFeatures.mWrSoftware.IsEnabled());
 }
 
-TEST_F(GfxConfigManager, WebRenderPartialPresentMali) {
+TEST_F(GfxConfigManager, WebRenderPartialBlocked) {
   mWrPartialPresent = true;
-  mMockGfxInfo->mDeviceId = "Mali-T760";
+  mMockGfxInfo->mStatusWrPartialPresent = nsIGfxInfo::FEATURE_BLOCKED_DEVICE;
   ConfigureWebRender();
 
   EXPECT_FALSE(mFeatures.mWrPartial.IsEnabled());
 }
 
+TEST_F(GfxConfigManager, WebRenderForcePartialBlocked) {
+  mWrForcePartialPresent = true;
+  mMockGfxInfo->mStatusWrPartialPresent = nsIGfxInfo::FEATURE_BLOCKED_DEVICE;
+  ConfigureWebRender();
+
+  EXPECT_TRUE(mFeatures.mWrPartial.IsEnabled());
+}
+
 TEST_F(GfxConfigManager, WebRenderScaledResolutionWithHwStretching) {
   mScaledResolution = true;
   ConfigureWebRender();
 
   EXPECT_TRUE(mFeatures.mWrQualified.IsEnabled());
   EXPECT_TRUE(mFeatures.mWr.IsEnabled());
   EXPECT_TRUE(mFeatures.mWrCompositor.IsEnabled());
   EXPECT_TRUE(mFeatures.mWrAngle.IsEnabled());
--- a/gfx/wr/webrender_api/src/display_item.rs
+++ b/gfx/wr/webrender_api/src/display_item.rs
@@ -824,16 +824,21 @@ impl Rotation {
 pub enum ReferenceTransformBinding {
     /// Standard reference frame which contains a precomputed transform.
     Static {
         binding: PropertyBinding<LayoutTransform>,
     },
     /// Computed reference frame which dynamically calculates the transform
     /// based on the given parameters. The reference is the content size of
     /// the parent iframe, which is affected by snapping.
+    ///
+    /// This is used when a transform depends on the layout size of an
+    /// element; otherwise, the difference between the unsnapped size
+    /// used in the transform and the snapped size calculated during scene
+    /// building can cause seaming.
     Computed {
         scale_from: Option<LayoutSize>,
         vertical_flip: bool,
         rotation: Rotation,
     },
 }
 
 impl Default for ReferenceTransformBinding {
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -879,32 +879,28 @@ bool LazyStubTier::createOneEntryStub(ui
   MOZ_ASSERT(cr.isJitEntry());
 
   codeTier.code().setJitEntry(cr.funcIndex(), segment->base() + cr.begin());
   return true;
 }
 
 // This uses the funcIndex as the major key and the tls pointer value as the
 // minor key, the same as the < and == predicates used in RemoveDuplicates.
+// However, since we only ever use this to search tables where every entry has
+// the same tls, there is no actual code for tls comparison here.
 
-auto IndirectStubComparator = [](uint32_t funcIndex, void* tlsData,
+auto IndirectStubComparator = [](uint32_t funcIndex,
                                  const IndirectStub& stub) -> int {
   if (funcIndex < stub.funcIndex) {
     return -1;
   }
   if (funcIndex > stub.funcIndex) {
     return 1;
   }
   // Function indices are equal.
-  if (uintptr_t(tlsData) < uintptr_t(stub.tls)) {
-    return -1;
-  }
-  if (uintptr_t(tlsData) > uintptr_t(stub.tls)) {
-    return 1;
-  }
   return 0;
 };
 
 bool LazyStubTier::createManyIndirectStubs(
     const VectorOfIndirectStubTarget& targets, const CodeTier& codeTier) {
   MOZ_ASSERT(targets.length());
 
   LifoAlloc lifo(LAZY_STUB_LIFO_DEFAULT_CHUNK_SIZE);
@@ -977,34 +973,73 @@ bool LazyStubTier::createManyIndirectStu
   }
 
   if (!ExecutableAllocator::makeExecutableAndFlushICache(
           FlushICacheSpec::LocalThreadOnly, codePtr, codeLength)) {
     return false;
   }
 
   // Record the runtime info about generated indirect stubs.
-  if (!indirectStubVector_.reserve(indirectStubVector_.length() +
-                                   targets.length())) {
-    return false;
+
+  // Count the number of new slots needed for the different tls values in the
+  // table.  While there may be multiple tls values in the target set, the
+  // typical number is one or two.
+  struct Counter {
+    explicit Counter(void* tls) : tls(tls), counter(0) {}
+    void* tls;
+    size_t counter;
+  };
+  Vector<Counter, 8, SystemAllocPolicy> counters{};
+  for (const auto& target : targets) {
+    size_t i = 0;
+    while (i < counters.length() && target.tls != counters[i].tls) {
+      i++;
+    }
+    if (i == counters.length() && !counters.emplaceBack(target.tls)) {
+      return false;
+    }
+    counters[i].counter++;
   }
 
+  // Reserve space in the tables, creating new tables as necessary.  Do this
+  // first to avoid OOM while we're midway through installing stubs in the
+  // tables.
+  for (const auto& counter : counters) {
+    auto probe = indirectStubTable_.lookupForAdd(counter.tls);
+    if (!probe) {
+      IndirectStubVector v{};
+      if (!indirectStubTable_.add(probe, counter.tls, std::move(v))) {
+        return false;
+      }
+    }
+    IndirectStubVector& indirectStubVector = probe->value();
+    if (!indirectStubVector.reserve(indirectStubVector.length() +
+                                    counter.counter)) {
+      return false;
+    }
+  }
+
+  // We have storage, so now we can commit.
   for (const auto& target : targets) {
     auto stub = IndirectStub{target.functionIdx, lastStubSegmentIndex_,
-                             indirectStubRangeIndex, target.tls};
+                             indirectStubRangeIndex};
+
+    auto probe = indirectStubTable_.lookup(target.tls);
+    MOZ_RELEASE_ASSERT(probe);
+    IndirectStubVector& indirectStubVector = probe->value();
 
     size_t indirectStubIndex;
     MOZ_ALWAYS_FALSE(BinarySearchIf(
-        indirectStubVector_, 0, indirectStubVector_.length(),
+        indirectStubVector, 0, indirectStubVector.length(),
         [&stub](const IndirectStub& otherStub) {
-          return IndirectStubComparator(stub.funcIndex, stub.tls, otherStub);
+          return IndirectStubComparator(stub.funcIndex, otherStub);
         },
         &indirectStubIndex));
-    MOZ_ALWAYS_TRUE(indirectStubVector_.insert(
-        indirectStubVector_.begin() + indirectStubIndex, std::move(stub)));
+    MOZ_ALWAYS_TRUE(indirectStubVector.insert(
+        indirectStubVector.begin() + indirectStubIndex, std::move(stub)));
 
     ++indirectStubRangeIndex;
   }
   return true;
 }
 
 const CodeRange* LazyStubTier::lookupRange(const void* pc) const {
   for (const UniqueLazyStubSegment& stubSegment : stubSegments_) {
@@ -1073,26 +1108,30 @@ void* LazyStubTier::lookupInterpEntry(ui
   }
   const LazyFuncExport& fe = exports_[match];
   const LazyStubSegment& stub = *stubSegments_[fe.lazyStubSegmentIndex];
   return stub.base() + stub.codeRanges()[fe.funcCodeRangeIndex].begin();
 }
 
 void* LazyStubTier::lookupIndirectStub(uint32_t funcIndex, void* tls) const {
   size_t match;
+  auto probe = indirectStubTable_.lookup(tls);
+  if (!probe) {
+    return nullptr;
+  }
+  const IndirectStubVector& indirectStubVector = probe->value();
   if (!BinarySearchIf(
-          indirectStubVector_, 0, indirectStubVector_.length(),
-          [funcIndex, tls](const IndirectStub& stub) {
-            return IndirectStubComparator(funcIndex, tls, stub);
+          indirectStubVector, 0, indirectStubVector.length(),
+          [funcIndex](const IndirectStub& stub) {
+            return IndirectStubComparator(funcIndex, stub);
           },
           &match)) {
     return nullptr;
   }
-
-  const IndirectStub& indirectStub = indirectStubVector_[match];
+  const IndirectStub& indirectStub = indirectStubVector[match];
 
   const LazyStubSegment& segment = *stubSegments_[indirectStub.segmentIndex];
   return segment.base() +
          segment.codeRanges()[indirectStub.codeRangeIndex].begin();
 }
 
 void LazyStubTier::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code,
                                  size_t* data) const {
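
The allocation strategy in createManyIndirectStubs() is worth calling out: it first counts how many new stubs each tls value needs, reserves all of that space, and only then commits the inserts, so an allocation failure cannot leave the tables half-updated. A rough Python sketch of that grouping-and-reserve shape (names and values are illustrative; Python lists have no fallible reserve, so that step is only mimicked):

    from collections import Counter

    # Hypothetical (func_index, tls) targets standing in for VectorOfIndirectStubTarget.
    targets = [(3, "tlsA"), (7, "tlsA"), (5, "tlsB"), (9, "tlsA")]

    # Pass 1: tally stubs per tls, mirroring the small Counter vector in the C++.
    per_tls = Counter(tls for _, tls in targets)

    # Pass 2: make sure every per-tls bucket exists (and, in C++, has reserved
    # capacity) before anything is installed.
    table = {}
    for tls in per_tls:
        table.setdefault(tls, [])

    # Pass 3: commit; by now no allocation can fail part-way through.
    for func_index, tls in targets:
        table[tls].append(func_index)

    print(table)  # {'tlsA': [3, 7, 9], 'tlsB': [5]}
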
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -568,58 +568,70 @@ struct LazyFuncExport {
                  size_t funcCodeRangeIndex)
       : funcIndex(funcIndex),
         lazyStubSegmentIndex(lazyStubSegmentIndex),
         funcCodeRangeIndex(funcCodeRangeIndex) {}
 };
 
 using LazyFuncExportVector = Vector<LazyFuncExport, 0, SystemAllocPolicy>;
 
-// IndirectStub provides a mapping between function indices and
-// indirect stubs code ranges.
-
-// The function index is the index of the function *within a specific module*,
+// IndirectStub provides a mapping between a function index and an indirect stub
+// code range.
+//
+// The function index is the index of the function *within its defining module*,
 // not necessarily in the module that owns the stub.  That module's and
-// function's instance is provided by the tls field.
+// function's instance is provided by the tls field of the IndirectStubTable
+// entry within which this IndirectStub is found.
 
 struct IndirectStub {
   size_t funcIndex;
   size_t segmentIndex;
   size_t codeRangeIndex;
-  void* tls;
-  IndirectStub(size_t funcIndex, size_t segmentIndex, size_t codeRangeIndex,
-               TlsData* tls)
+  IndirectStub(size_t funcIndex, size_t segmentIndex, size_t codeRangeIndex)
       : funcIndex(funcIndex),
         segmentIndex(segmentIndex),
-        codeRangeIndex(codeRangeIndex),
-        tls(tls) {}
+        codeRangeIndex(codeRangeIndex) {}
 };
 
+// IndirectStubVector represents a set of IndirectStubs.  These stubs all belong
+// to the same IndirectStubTable entry, and so all have the same tls value.
+//
+// The IndirectStubVector is ordered by IndirectStubComparator (WasmCode.cpp):
+// the sort key is the funcIndex.  The vector is binary-searched by that
+// predicate when an entry is needed.
+//
+// Creating an indirect stub is not an idempotent operation!  There must be NO
+// duplicate entries in the table, or equivalently, an entry that is in the
+// table must always be found by a binary search.
+
 using IndirectStubVector = Vector<IndirectStub, 0, SystemAllocPolicy>;
 
+// An IndirectStubTable represents a set of indirect stubs belonging to a
+// module.  The table is keyed uniquely by tls and there is one
+// IndirectStubVector per tls value represented in the set.
+//
+// While the set is usually very small, it can grow with the product of the
+// number of instances and the number of threads in a system, and we therefore
+// use a hash table.
+
+using IndirectStubTable =
+    HashMap<void*, IndirectStubVector, DefaultHasher<void*>, SystemAllocPolicy>;
+
 // LazyStubTier contains all the necessary information for lazy function entry
 // stubs and indirect stubs that are generated at runtime.
 // None of its data are ever serialized.
 //
 // It must be protected by a lock, because the main thread can both read and
 // write lazy stubs at any time while a background thread can regenerate lazy
 // stubs for tier2 at any time.
 
 class LazyStubTier {
   LazyStubSegmentVector stubSegments_;
   LazyFuncExportVector exports_;
-  // The indirectStubVector_ is totally ordered by IndirectStubComparator (in
-  // WasmCode.cpp): the primary index is the funcIndex, the secondary index the
-  // pointer value of the tls.  The vector is binary-searched by that predicate
-  // when an entry is needed.
-  //
-  // Creating an indirect stub is not an idempotent operation!  There must be NO
-  // duplicate entries in the table, which is another way of saying that an
-  // entry that is in the table must always be found by a lookup.
-  IndirectStubVector indirectStubVector_;
+  IndirectStubTable indirectStubTable_;
   size_t lastStubSegmentIndex_;
 
   [[nodiscard]] bool createManyEntryStubs(const Uint32Vector& funcExportIndices,
                                           const CodeTier& codeTier,
                                           bool flushAllThreadsIcaches,
                                           size_t* stubSegmentIndex);
 
  public:
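
The new comments above describe the shape of the data structure: a hash map keyed by tls whose values are funcIndex-sorted vectors, binary-searched on insert and lookup, with duplicates forbidden. A small Python approximation using bisect (the real entries carry segment and code-range indices rather than the placeholder value used here):

    import bisect

    class IndirectStubTableSketch:
        """Toy model: one funcIndex-sorted list per tls key."""

        def __init__(self):
            self._table = {}

        def add(self, tls, func_index, code_ptr):
            stubs = self._table.setdefault(tls, [])
            keys = [k for k, _ in stubs]
            i = bisect.bisect_left(keys, func_index)
            # Stub creation is not idempotent: refuse duplicates so lookups stay unambiguous.
            assert i == len(stubs) or stubs[i][0] != func_index
            stubs.insert(i, (func_index, code_ptr))

        def lookup(self, tls, func_index):
            stubs = self._table.get(tls)
            if stubs is None:
                return None
            keys = [k for k, _ in stubs]
            i = bisect.bisect_left(keys, func_index)
            if i < len(stubs) and stubs[i][0] == func_index:
                return stubs[i][1]
            return None

    table = IndirectStubTableSketch()
    table.add("tlsA", 7, "stub-code-for-7")
    print(table.lookup("tlsA", 7))   # stub-code-for-7
    print(table.lookup("tlsB", 7))   # None: no bucket for that tls
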
--- a/layout/base/nsLayoutUtils.cpp
+++ b/layout/base/nsLayoutUtils.cpp
@@ -8565,16 +8565,28 @@ ScrollMetadata nsLayoutUtils::ComputeScr
     if (DisplayPortMarginsPropertyData* currentData =
             static_cast<DisplayPortMarginsPropertyData*>(
                 aContent->GetProperty(nsGkAtoms::DisplayPortMargins))) {
       if (currentData->mMargins.mMargins != ScreenMargin()) {
         metrics.SetHasNonZeroDisplayPortMargins(true);
       }
     }
 
+    // Note: GetProperty() will return nullptr both in the case where
+    // the property hasn't been set, and in the case where the property
+    // has been set to false (in which case the property value is
+    // `reinterpret_cast<void*>(false)`, which is nullptr).
+    if (aContent->GetProperty(nsGkAtoms::forceMousewheelAutodir)) {
+      metadata.SetForceMousewheelAutodir(true);
+    }
+
+    if (aContent->GetProperty(nsGkAtoms::forceMousewheelAutodirHonourRoot)) {
+      metadata.SetForceMousewheelAutodirHonourRoot(true);
+    }
+
     if (IsAPZTestLoggingEnabled()) {
       LogTestDataForPaint(aLayerManager, scrollId, "displayport",
                           metrics.GetDisplayPort());
     }
 
     metrics.SetMinimalDisplayPort(
         aContent->GetProperty(nsGkAtoms::MinimalDisplayPort));
   }
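
The note above matters because these flags are stored as raw pointer-sized property values: an unset property and a property set to false both come back as nullptr, so only "present and truthy" carries information. A tiny Python analogue of the read side (the property bag and names are only illustrative):

    # Toy property bag standing in for nsIContent properties.
    properties = {"forceMousewheelAutodir": True}

    def get_property(name):
        # None plays the role of nullptr: returned both when unset and when stored as False.
        return properties.get(name) or None

    force_autodir = bool(get_property("forceMousewheelAutodir"))                  # True
    force_honour_root = bool(get_property("forceMousewheelAutodirHonourRoot"))    # unset -> False
    print(force_autodir, force_honour_root)
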
--- a/layout/tools/reftest/selftest/conftest.py
+++ b/layout/tools/reftest/selftest/conftest.py
@@ -13,17 +13,17 @@ try:
     from cStringIO import StringIO
 except ImportError:
     # Python3
     from io import StringIO
 
 import mozinfo
 import pytest
 from manifestparser import expression
-from moztest.selftest.fixtures import binary, setup_test_harness  # noqa
+from moztest.selftest.fixtures import binary_fixture, setup_test_harness  # noqa
 
 here = os.path.abspath(os.path.dirname(__file__))
 setup_args = [False, "reftest", "reftest"]
 
 
 @pytest.fixture(scope="module")
 def normalize():
     """A function that can take a relative path and append it to the 'files'
@@ -137,13 +137,13 @@ def skip_using_mozinfo(request, setup_te
         def test_foo():
             pass
     """
 
     setup_test_harness(*setup_args)
     runreftest = pytest.importorskip("runreftest")
     runreftest.update_mozinfo()
 
-    skip_mozinfo = request.node.get_marker("skip_mozinfo")
+    skip_mozinfo = request.node.get_closest_marker("skip_mozinfo")
     if skip_mozinfo:
         value = skip_mozinfo.args[0]
         if expression.parse(value, **mozinfo.info):
             pytest.skip("skipped due to mozinfo match: \n{}".format(value))
--- a/mach.cmd
+++ b/mach.cmd
@@ -1,3 +1,9 @@
 @ECHO OFF
 SET topsrcdir=%~dp0
-python %topsrcdir%mach %*
+
+WHERE /q py
+IF %ERRORLEVEL% EQU 0 (
+  py %topsrcdir%mach %*
+) ELSE (
+  python %topsrcdir%mach %*
+)
--- a/mach.ps1
+++ b/mach.ps1
@@ -1,21 +1,29 @@
 $mypath = $MyInvocation.MyCommand.Path
-$machpath = ($mypath -replace '\\', '/').substring(0, $mypath.length - 4)
+$machpath = $mypath.substring(0, $mypath.length - 4)
+
+if (Get-Command py) {
+  $python_executable = "py"
+} else {
+  $python_executable = "python"
+}
 
 if (-not (test-path env:MACH_PS1_USE_MOZILLABUILD)) {
-  python $machpath $args
+  &$python_executable $machpath $args
   exit $lastexitcode
 }
 
 if (-not (test-path env:MOZILLABUILD)) {
   echo "No MOZILLABUILD environment variable found, terminating."
   exit 1
 }
 
+$machpath = ($machpath -replace '\\', '/')
+
 if ($machpath.contains(' ')) {
   echo @'
 The repository path contains whitespace which currently isn't supported in mach.ps1.
 Please run MozillaBuild manually for now.
 '@
   exit 1
 }
 
@@ -25,10 +33,18 @@ for ($i = 0; $i -lt $args.length; $i++) 
     echo @'
 The command contains whitespace which currently isn't supported in mach.ps1.
 Please run MozillaBuild manually for now.
 '@
     exit 1
   }
 }
 
-& "$env:MOZILLABUILD/start-shell.bat" $machpath $args
+$mozillabuild_version = Get-Content "$env:MOZILLABUILD\VERSION"
+# Remove "preX" postfix if the current MozillaBuild is a prerelease.
+$mozillabuild_version = [decimal]($mozillabuild_version -replace "pre.*")
+
+if ($mozillabuild_version -ge 4.0) {
+  & "$env:MOZILLABUILD/start-shell.bat" -no-start -defterm -c "$machpath $args"
+} else {
+  & "$env:MOZILLABUILD/start-shell.bat" $machpath $args
+}
 exit $lastexitcode
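
Two small pieces of logic recur in the mach.cmd and mach.ps1 changes: prefer the Windows py launcher when it is on PATH, and strip a trailing "preN" marker from the MozillaBuild VERSION file before comparing it against 4.0. A hedged Python equivalent of both checks (the version text is passed in rather than read from %MOZILLABUILD%):

    import re
    import shutil

    def pick_python_executable():
        # Mirrors the WHERE /q py check: use the launcher if present, else plain python.
        return "py" if shutil.which("py") else "python"

    def mozillabuild_at_least(version_text, minimum=4.0):
        # "4.0pre2" -> "4.0"; prereleases compare by their base version.
        stripped = re.sub(r"pre.*$", "", version_text.strip())
        return float(stripped) >= minimum

    print(pick_python_executable())
    print(mozillabuild_at_least("4.0pre2"))  # True -> pass -no-start -defterm -c
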
--- a/modules/libpref/init/StaticPrefList.yaml
+++ b/modules/libpref/init/StaticPrefList.yaml
@@ -5671,16 +5671,22 @@
 # Whether or not we can reuse the buffer contents using the GL buffer age
 # extension, if supported by the platform. This requires partial present
 # to be used.
 - name: gfx.webrender.allow-partial-present-buffer-age
   type: bool
   value: true
   mirror: once
 
+# Whether or not we should force partial present on.
+- name: gfx.webrender.force-partial-present
+  type: bool
+  value: false
+  mirror: once
+
 - name: gfx.webrender.enable-gpu-markers
   type: bool
 #ifdef DEBUG
   value: true
 #else
   value: false
 #endif
   mirror: once
--- a/python/mach/docs/windows-usage-outside-mozillabuild.rst
+++ b/python/mach/docs/windows-usage-outside-mozillabuild.rst
@@ -50,17 +50,18 @@ Download Python from the `the official w
 
 .. note::
 
     To avoid Mach compatibility issues with recent Python releases, it's recommended to install
     the 2nd-most recent "major version". For example, at time of writing, the current modern Python
     version is 3.10.1, so a safe version to install would be the most recent 3.9 release.
 
 You'll want to download the "Windows installer (64-bit)" associated with the release you've chosen.
-During installation, ensure that you check the "Add Python 3.x to PATH" option.
+During installation, ensure that you check the "Add Python 3.x to PATH" option; otherwise, you might
+`encounter issues running Mercurial <https://bz.mercurial-scm.org/show_bug.cgi?id=6635>`__.
 
 .. note::
 
     Due to issues with Python DLL import failures with pip-installed binaries, it's not
     recommended to use the Windows Store release of Python.
 
 2. Modify your PATH
 ~~~~~~~~~~~~~~~~~~~
--- a/python/mach/mach/util.py
+++ b/python/mach/mach/util.py
@@ -3,16 +3,19 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, unicode_literals
 
 import hashlib
 import os
 import sys
 
+from pathlib import Path, PurePosixPath
+from typing import Optional
+
 
 class UserError(Exception):
     """Represents an error caused by something the user did wrong rather than
     an internal `mach` failure. Exceptions that are subclasses of this class
     will not be reported as failures to Sentry.
     """
 
 
@@ -78,8 +81,33 @@ def get_state_dir(specific_to_topsrcdir=
         print("Creating local state directory: %s" % state_dir)
         os.makedirs(state_dir, mode=0o770)
         # Save the topsrcdir that this state dir corresponds to so we can clean
         # it up in the event its srcdir was deleted.
         with open(os.path.join(state_dir, "topsrcdir.txt"), "w") as fh:
             fh.write(topsrcdir)
 
     return state_dir
+
+
+def win_to_msys_path(path: Path):
+    """Convert a windows-style path to msys-style."""
+    drive, path = os.path.splitdrive(path)
+    path = "/".join(path.split("\\"))
+    if drive:
+        if path[0] == "/":
+            path = path[1:]
+        path = f"/{drive[:-1]}/{path}"
+    return PurePosixPath(path)
+
+
+def to_optional_path(path: Optional[Path]):
+    if path:
+        return Path(path)
+    else:
+        return None
+
+
+def to_optional_str(path: Optional[Path]):
+    if path:
+        return str(path)
+    else:
+        return None
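
To make the intent of win_to_msys_path concrete, here is a self-contained sketch of the same conversion plus its expected output. It uses ntpath explicitly so the demo behaves the same on any platform; the in-tree helper relies on os.path being ntpath because mach runs it on Windows.

    import ntpath
    from pathlib import PurePosixPath, PureWindowsPath

    def win_to_msys_path_demo(path: PureWindowsPath) -> PurePosixPath:
        drive, rest = ntpath.splitdrive(str(path))
        rest = "/".join(rest.split("\\"))
        if drive:
            if rest.startswith("/"):
                rest = rest[1:]
            rest = f"/{drive[:-1]}/{rest}"
        return PurePosixPath(rest)

    print(win_to_msys_path_demo(PureWindowsPath(r"C:\mozilla-source\mozilla-unified")))
    # /C/mozilla-source/mozilla-unified
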
--- a/python/mozboot/bin/bootstrap.py
+++ b/python/mozboot/bin/bootstrap.py
@@ -25,16 +25,17 @@ if (major < 3) or (major == 3 and minor 
 
 import os
 import shutil
 import stat
 import subprocess
 import tempfile
 import zipfile
 
+from pathlib import Path
 from optparse import OptionParser
 from urllib.request import urlopen
 
 CLONE_MERCURIAL_PULL_FAIL = """
 Failed to pull from hg.mozilla.org.
 
 This is most likely because of an unstable network connection.
 Try running `cd %s && hg pull https://hg.mozilla.org/mozilla-unified` manually,
@@ -55,199 +56,205 @@ def which(name):
     """
     # git-cinnabar.exe doesn't exist, but .exe versions of the other executables
     # do.
     if WINDOWS and name != "git-cinnabar":
         name += ".exe"
     search_dirs = os.environ["PATH"].split(os.pathsep)
 
     for path in search_dirs:
-        test = os.path.join(path, name)
-        if os.path.isfile(test) and os.access(test, os.X_OK):
+        test = Path(path) / name
+        if test.is_file() and os.access(test, os.X_OK):
             return test
 
     return None
 
 
-def validate_clone_dest(dest):
-    dest = os.path.abspath(dest)
+def validate_clone_dest(dest: Path):
+    dest = dest.resolve()
 
-    if not os.path.exists(dest):
+    if not dest.exists():
         return dest
 
-    if not os.path.isdir(dest):
-        print("ERROR! Destination %s exists but is not a directory." % dest)
+    if not dest.is_dir():
+        print(f"ERROR! Destination {dest} exists but is not a directory.")
         return None
 
-    if not os.listdir(dest):
+    if not any(dest.iterdir()):
         return dest
     else:
-        print("ERROR! Destination directory %s exists but is nonempty." % dest)
+        print(f"ERROR! Destination directory {dest} exists but is nonempty.")
         print(
-            "To re-bootstrap the existing checkout, go into '%s' and run './mach bootstrap'."
-            % dest
+            f"To re-bootstrap the existing checkout, go into '{dest}' and run './mach bootstrap'."
         )
         return None
 
 
 def input_clone_dest(vcs, no_interactive):
     repo_name = "mozilla-unified"
-    print("Cloning into %s using %s..." % (repo_name, VCS_HUMAN_READABLE[vcs]))
+    print(f"Cloning into {repo_name} using {VCS_HUMAN_READABLE[vcs]}...")
     while True:
         dest = None
         if not no_interactive:
             dest = input(
-                "Destination directory for clone (leave empty to use "
-                "default destination of %s): " % repo_name
+                f"Destination directory for clone (leave empty to use "
+                f"default destination of {repo_name}): "
             ).strip()
         if not dest:
             dest = repo_name
-        dest = validate_clone_dest(os.path.expanduser(dest))
+        dest = validate_clone_dest(Path(dest).expanduser())
         if dest:
             return dest
         if no_interactive:
             return None
 
 
-def hg_clone_firefox(hg, dest):
+def hg_clone_firefox(hg: Path, dest: Path):
     # We create an empty repo then modify the config before adding data.
     # This is necessary to ensure storage settings are optimally
     # configured.
     args = [
-        hg,
+        str(hg),
         # The unified repo is generaldelta, so ensure the client is as
         # well.
         "--config",
         "format.generaldelta=true",
         "init",
-        dest,
+        str(dest),
     ]
     res = subprocess.call(args)
     if res:
         print("unable to create destination repo; please try cloning manually")
         return None
 
     # Strictly speaking, this could overwrite a config based on a template
     # the user has installed. Let's pretend this problem doesn't exist
     # unless someone complains about it.
-    with open(os.path.join(dest, ".hg", "hgrc"), "a") as fh:
+    with open(dest / ".hg" / "hgrc", "a") as fh:
         fh.write("[paths]\n")
         fh.write("default = https://hg.mozilla.org/mozilla-unified\n")
         fh.write("\n")
 
         # The server uses aggressivemergedeltas which can blow up delta chain
         # length. This can cause performance to tank due to delta chains being
         # too long. Limit the delta chain length to something reasonable
         # to bound revlog read time.
         fh.write("[format]\n")
         fh.write("# This is necessary to keep performance in check\n")
         fh.write("maxchainlen = 10000\n")
 
     res = subprocess.call(
-        [hg, "pull", "https://hg.mozilla.org/mozilla-unified"], cwd=dest
+        [str(hg), "pull", "https://hg.mozilla.org/mozilla-unified"], cwd=str(dest)
     )
     print("")
     if res:
         print(CLONE_MERCURIAL_PULL_FAIL % dest)
         return None
 
     print('updating to "central" - the development head of Gecko and Firefox')
-    res = subprocess.call([hg, "update", "-r", "central"], cwd=dest)
+    res = subprocess.call([str(hg), "update", "-r", "central"], cwd=str(dest))
     if res:
         print(
-            "error updating; you will need to `cd %s && hg update -r central` "
-            "manually" % dest
+            f"error updating; you will need to `cd {dest} && hg update -r central` "
+            "manually"
         )
     return dest
 
 
-def git_clone_firefox(git, dest, watchman):
+def git_clone_firefox(git: Path, dest: Path, watchman: Path):
     tempdir = None
     cinnabar = None
     env = dict(os.environ)
     try:
         cinnabar = which("git-cinnabar")
         if not cinnabar:
             cinnabar_url = (
                 "https://github.com/glandium/git-cinnabar/archive/" "master.zip"
             )
             # If git-cinnabar isn't installed already, that's fine; we can
             # download a temporary copy. `mach bootstrap` will clone a full copy
             # of the repo in the state dir; we don't want to copy all that logic
             # to this tiny bootstrapping script.
-            tempdir = tempfile.mkdtemp()
-            with open(os.path.join(tempdir, "git-cinnabar.zip"), mode="w+b") as archive:
+            tempdir = Path(tempfile.mkdtemp())
+            with open(tempdir / "git-cinnabar.zip", mode="w+b") as archive:
                 with urlopen(cinnabar_url) as repo:
                     shutil.copyfileobj(repo, archive)
                 archive.seek(0)
                 with zipfile.ZipFile(archive) as zipf:
                     zipf.extractall(path=tempdir)
-            cinnabar_dir = os.path.join(tempdir, "git-cinnabar-master")
-            cinnabar = os.path.join(cinnabar_dir, "git-cinnabar")
+            cinnabar_dir = tempdir / "git-cinnabar-master"
+            cinnabar = cinnabar_dir / "git-cinnabar"
             # Make git-cinnabar and git-remote-hg executable.
             st = os.stat(cinnabar)
-            os.chmod(cinnabar, st.st_mode | stat.S_IEXEC)
-            st = os.stat(os.path.join(cinnabar_dir, "git-remote-hg"))
-            os.chmod(
-                os.path.join(cinnabar_dir, "git-remote-hg"), st.st_mode | stat.S_IEXEC
-            )
-            env["PATH"] = cinnabar_dir + os.pathsep + env["PATH"]
+            cinnabar.chmod(st.st_mode | stat.S_IEXEC)
+            st = os.stat(cinnabar_dir / "git-remote-hg")
+            (cinnabar_dir / "git-remote-hg").chmod(st.st_mode | stat.S_IEXEC)
+            env["PATH"] = str(cinnabar_dir) + os.pathsep + env["PATH"]
             subprocess.check_call(
-                ["git", "cinnabar", "download"], cwd=cinnabar_dir, env=env
+                ["git", "cinnabar", "download"], cwd=str(cinnabar_dir), env=env
             )
             print(
                 "WARNING! git-cinnabar is required for Firefox development  "
                 "with git. After the clone is complete, the bootstrapper "
                 "will ask if you would like to configure git; answer yes, "
                 "and be sure to add git-cinnabar to your PATH according to "
                 "the bootstrapper output."
             )
 
         # We're guaranteed to have `git-cinnabar` installed now.
         # Configure git per the git-cinnabar requirements.
         subprocess.check_call(
             [
-                git,
+                str(git),
                 "clone",
                 "-b",
                 "bookmarks/central",
                 "hg::https://hg.mozilla.org/mozilla-unified",
-                dest,
+                str(dest),
             ],
             env=env,
         )
-        subprocess.check_call([git, "config", "fetch.prune", "true"], cwd=dest, env=env)
-        subprocess.check_call([git, "config", "pull.ff", "only"], cwd=dest, env=env)
+        subprocess.check_call(
+            [str(git), "config", "fetch.prune", "true"], cwd=str(dest), env=env
+        )
+        subprocess.check_call(
+            [str(git), "config", "pull.ff", "only"], cwd=str(dest), env=env
+        )
 
-        watchman_sample = os.path.join(dest, ".git/hooks/fsmonitor-watchman.sample")
+        watchman_sample = dest / ".git/hooks/fsmonitor-watchman.sample"
         # Older versions of git didn't include fsmonitor-watchman.sample.
-        if watchman and os.path.exists(watchman_sample):
+        if watchman and watchman_sample.exists():
             print("Configuring watchman")
-            watchman_config = os.path.join(dest, ".git/hooks/query-watchman")
-            if not os.path.exists(watchman_config):
-                print("Copying %s to %s" % (watchman_sample, watchman_config))
+            watchman_config = dest / ".git/hooks/query-watchman"
+            if not watchman_config.exists():
+                print(f"Copying {watchman_sample} to {watchman_config}")
                 copy_args = [
                     "cp",
                     ".git/hooks/fsmonitor-watchman.sample",
                     ".git/hooks/query-watchman",
                 ]
-                subprocess.check_call(copy_args, cwd=dest)
+                subprocess.check_call(copy_args, cwd=str(dest))
 
-            config_args = [git, "config", "core.fsmonitor", ".git/hooks/query-watchman"]
-            subprocess.check_call(config_args, cwd=dest, env=env)
+            config_args = [
+                str(git),
+                "config",
+                "core.fsmonitor",
+                ".git/hooks/query-watchman",
+            ]
+            subprocess.check_call(config_args, cwd=str(dest), env=env)
         return dest
     finally:
         if not cinnabar:
             print(
                 "Failed to install git-cinnabar. Try performing a manual "
                 "installation: https://github.com/glandium/git-cinnabar/wiki/"
                 "Mozilla:-A-git-workflow-for-Gecko-development"
             )
         if tempdir:
-            shutil.rmtree(tempdir)
+            shutil.rmtree(str(tempdir))
 
 
 def clone(vcs, no_interactive):
     hg = which("hg")
     if not hg:
         print(
             "Mercurial is not installed. Mercurial is required to clone "
             "Firefox%s." % (", even when cloning with Git" if vcs == "git" else "")
@@ -275,41 +282,41 @@ def clone(vcs, no_interactive):
             print("Git is not installed.")
             print("Try installing git using your system package manager.")
             return None
 
     dest = input_clone_dest(vcs, no_interactive)
     if not dest:
         return None
 
-    print("Cloning Firefox %s repository to %s" % (VCS_HUMAN_READABLE[vcs], dest))
+    print(f"Cloning Firefox {VCS_HUMAN_READABLE[vcs]} repository to {dest}")
     if vcs == "hg":
         return hg_clone_firefox(binary, dest)
     else:
         watchman = which("watchman")
         return git_clone_firefox(binary, dest, watchman)
 
 
-def bootstrap(srcdir, application_choice, no_interactive, no_system_changes):
-    args = [sys.executable, os.path.join(srcdir, "mach")]
+def bootstrap(srcdir: Path, application_choice, no_interactive, no_system_changes):
+    args = [sys.executable, str(srcdir / "mach")]
 
     if no_interactive:
         # --no-interactive is a global argument, not a command argument,
         # so it needs to be specified before "bootstrap" is appended.
         args += ["--no-interactive"]
 
     args += ["bootstrap"]
 
     if application_choice:
         args += ["--application-choice", application_choice]
     if no_system_changes:
         args += ["--no-system-changes"]
 
     print("Running `%s`" % " ".join(args))
-    return subprocess.call(args, cwd=srcdir)
+    return subprocess.call(args, cwd=str(srcdir))
 
 
 def main(args):
     parser = OptionParser()
     parser.add_option(
         "--application-choice",
         dest="application_choice",
         help='Pass in an application choice (see "APPLICATIONS" in '
@@ -352,17 +359,17 @@ def main(args):
                 "Unless you are going to have more local copies of Firefox source code, "
                 "this 'bootstrap.py' file is no longer needed and can be deleted. "
                 "Clean up the bootstrap.py file? (Y/n)"
             )
             if not remove_bootstrap_file:
                 remove_bootstrap_file = "y"
         if options.no_interactive or remove_bootstrap_file == "y":
             try:
-                os.remove(sys.argv[0])
+                Path(sys.argv[0]).unlink()
             except FileNotFoundError:
                 print("File could not be found !")
         return bootstrap(
             srcdir,
             options.application_choice,
             options.no_interactive,
             options.no_system_changes,
         )
--- a/python/mozboot/mozboot/android.py
+++ b/python/mozboot/mozboot/android.py
@@ -7,16 +7,17 @@ from __future__ import absolute_import, 
 import errno
 import json
 import os
 import stat
 import subprocess
 import sys
 import time
 import requests
+from typing import Optional, Union
 from pathlib import Path
 from tqdm import tqdm
 
 # We need the NDK version in multiple different places, and it's inconvenient
 # to pass down the NDK version to all relevant places, so we have this global
 # variable.
 from mozboot.bootstrap import MOZCONFIG_SUGGESTION_TEMPLATE
 
@@ -32,25 +33,19 @@ LINUX_ARM_ANDROID_AVD = "linux64-android
 
 MACOS_X86_64_ANDROID_AVD = "linux64-android-avd-x86_64-repack"
 MACOS_ARM_ANDROID_AVD = "linux64-android-avd-arm-repack"
 MACOS_ARM64_ANDROID_AVD = "linux64-android-avd-arm64-repack"
 
 WINDOWS_X86_64_ANDROID_AVD = "linux64-android-avd-x86_64-repack"
 WINDOWS_ARM_ANDROID_AVD = "linux64-android-avd-arm-repack"
 
-AVD_MANIFEST_X86_64 = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), "android-avds/x86_64.json")
-)
-AVD_MANIFEST_ARM = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), "android-avds/arm.json")
-)
-AVD_MANIFEST_ARM64 = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), "android-avds/arm64.json")
-)
+AVD_MANIFEST_X86_64 = Path(__file__).resolve().parent / "android-avds/x86_64.json"
+AVD_MANIFEST_ARM = Path(__file__).resolve().parent / "android-avds/arm.json"
+AVD_MANIFEST_ARM64 = Path(__file__).resolve().parent / "android-avds/arm64.json"
 
 JAVA_VERSION_MAJOR = "17"
 JAVA_VERSION_MINOR = "0.1"
 JAVA_VERSION_PATCH = "12"
 
 ANDROID_NDK_EXISTS = """
 Looks like you have the correct version of the Android NDK installed at:
 %s
@@ -104,116 +99,117 @@ ac_add_options --enable-artifact-builds
 mk_add_options MOZ_OBJDIR=./objdir-frontend
 """
 
 
 class GetNdkVersionError(Exception):
     pass
 
 
-def install_bundletool(url, path):
+def install_bundletool(url, path: Path):
     """
     Fetch bundletool to the desired directory.
     """
-    old_path = os.getcwd()
     try:
-        os.chdir(path)
         subprocess.check_call(
-            ["wget", "--continue", url, "--output-document", "bundletool.jar"]
+            ["wget", "--continue", url, "--output-document", "bundletool.jar"],
+            cwd=str(path),
         )
     finally:
-        os.chdir(old_path)
+        pass
 
 
-def install_mobile_android_sdk_or_ndk(url, path):
+def install_mobile_android_sdk_or_ndk(url, path: Path):
     """
     Fetch an Android SDK or NDK from |url| and unpack it into the given |path|.
 
     We use, and 'requests' respects, https. We could also include SHAs for a
     small improvement in the integrity guarantee we give. But this script is
     bootstrapped over https anyway, so it's a really minor improvement.
 
     We keep a cache of the downloaded artifacts, writing into |path|/mozboot.
     We don't yet clean the cache; it's better to waste some disk space and
     not require a long re-download than to wipe the cache prematurely.
     """
 
-    old_path = os.getcwd()
+    download_path = path / "mozboot"
     try:
-        download_path = os.path.join(path, "mozboot")
-        try:
-            os.makedirs(download_path)
-        except OSError as e:
-            if e.errno == errno.EEXIST and os.path.isdir(download_path):
-                pass
-            else:
-                raise
-
-        os.chdir(download_path)
+        download_path.mkdir(parents=True)
+    except OSError as e:
+        if e.errno == errno.EEXIST and download_path.is_dir():
+            pass
+        else:
+            raise
 
-        file_name = url.split("/")[-1]
-        abspath = os.path.join(download_path, file_name)
-
-        file = Path(abspath)
-
-        with requests.Session() as session:
-            request = session.head(url)
-            remote_file_size = int(request.headers["content-length"])
+    file_name = url.split("/")[-1]
+    download_file_path = download_path / file_name
 
-            if file.is_file():
-                local_file_size = file.stat().st_size
+    with requests.Session() as session:
+        request = session.head(url)
+        remote_file_size = int(request.headers["content-length"])
 
-                if local_file_size == remote_file_size:
-                    print(f"{file} already downloaded. Skipping download...")
-                else:
-                    print(f"Partial download detected. Resuming download of {file}...")
-                    download(file_name, session, url, remote_file_size, local_file_size)
+        if download_file_path.is_file():
+            local_file_size = download_file_path.stat().st_size
+
+            if local_file_size == remote_file_size:
+                print(f"{download_file_path} already downloaded. Skipping download...")
             else:
-                print(f"Downloading {file}...")
-                download(file_name, session, url, remote_file_size)
-
-        os.chdir(path)
+                print(
+                    f"Partial download detected. Resuming download of {download_file_path}..."
+                )
+                download(
+                    download_file_path,
+                    session,
+                    url,
+                    remote_file_size,
+                    local_file_size,
+                )
+        else:
+            print(f"Downloading {download_file_path}...")
+            download(download_file_path, session, url, remote_file_size)
 
-        if file_name.endswith(".tar.gz") or file_name.endswith(".tgz"):
-            cmd = ["tar", "zxf", abspath]
-        elif file_name.endswith(".tar.bz2"):
-            cmd = ["tar", "jxf", abspath]
-        elif file_name.endswith(".zip"):
-            cmd = ["unzip", "-q", abspath]
-        elif file_name.endswith(".bin"):
-            # Execute the .bin file, which unpacks the content.
-            mode = os.stat(path).st_mode
-            os.chmod(abspath, mode | stat.S_IXUSR)
-            cmd = [abspath]
-        else:
-            raise NotImplementedError(f"Don't know how to unpack file: {file_name}")
+    if file_name.endswith(".tar.gz") or file_name.endswith(".tgz"):
+        cmd = ["tar", "zxf", str(download_file_path)]
+    elif file_name.endswith(".tar.bz2"):
+        cmd = ["tar", "jxf", str(download_file_path)]
+    elif file_name.endswith(".zip"):
+        cmd = ["unzip", "-q", str(download_file_path)]
+    elif file_name.endswith(".bin"):
+        # Execute the .bin file, which unpacks the content.
+        mode = os.stat(path).st_mode
+        download_file_path.chmod(mode | stat.S_IXUSR)
+        cmd = [str(download_file_path)]
+    else:
+        raise NotImplementedError(f"Don't know how to unpack file: {file_name}")
 
-        print(f"Unpacking {file}...")
+    print(f"Unpacking {download_file_path}...")
 
-        with open(os.devnull, "w") as stdout:
-            # These unpack commands produce a ton of output; ignore it.  The
-            # .bin files are 7z archives; there's no command line flag to quiet
-            # output, so we use this hammer.
-            subprocess.check_call(cmd, stdout=stdout)
+    with open(os.devnull, "w") as stdout:
+        # These unpack commands produce a ton of output; ignore it.  The
+        # .bin files are 7z archives; there's no command line flag to quiet
+        # output, so we use this hammer.
+        subprocess.check_call(cmd, stdout=stdout, cwd=str(path))
 
-        print(f"Unpacking {file}... DONE")
-        # Now delete the archive
-        os.unlink(abspath)
-    finally:
-        os.chdir(old_path)
+    print(f"Unpacking {download_file_path}... DONE")
+    # Now delete the archive
+    download_file_path.unlink()
 
 
 def download(
-    file_name, session, url, remote_file_size, resume_from_byte_pos: int = None
+    download_file_path: Path,
+    session,
+    url,
+    remote_file_size,
+    resume_from_byte_pos: Optional[int] = None,
 ):
     """
     Handles both a fresh SDK/NDK download, as well as resuming a partial one
     """
     # "ab" will behave same as "wb" if file does not exist
-    with open(file_name, "ab") as file:
+    with open(download_file_path, "ab") as file:
         # 64 KB/s should be fine on even the slowest internet connections
         chunk_size = 1024 * 64
         # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range#directives
         resume_header = (
             {"Range": f"bytes={resume_from_byte_pos}-"}
             if resume_from_byte_pos
             else None
         )
@@ -222,29 +218,30 @@ def download(
             url, stream=True, allow_redirects=True, headers=resume_header
         )
 
         with tqdm(
             total=int(remote_file_size),
             unit="B",
             unit_scale=True,
             unit_divisor=1024,
-            desc=file_name,
+            desc=download_file_path.name,
             initial=resume_from_byte_pos if resume_from_byte_pos else 0,
         ) as progress_bar:
             for chunk in request.iter_content(chunk_size):
                 file.write(chunk)
                 progress_bar.update(len(chunk))
 
 
-def get_ndk_version(ndk_path):
+def get_ndk_version(ndk_path: Union[str, Path]):
     """Given the path to the NDK, return the version as a 3-tuple of (major,
     minor, human).
     """
-    with open(os.path.join(ndk_path, "source.properties"), "r") as f:
+    ndk_path = Path(ndk_path)
+    with open(ndk_path / "source.properties", "r") as f:
         revision = [line for line in f if line.startswith("Pkg.Revision")]
         if not revision:
             raise GetNdkVersionError(
                 "Cannot determine NDK version from source.properties"
             )
         if len(revision) != 1:
             raise GetNdkVersionError("Too many Pkg.Revision lines in source.properties")
 
@@ -265,97 +262,84 @@ def get_ndk_version(ndk_path):
         int_minor = int(minor)
         alphas = "abcdefghijklmnop"
         ascii_minor = alphas[int_minor] if int_minor > 0 else ""
         human = "r%s%s" % (major, ascii_minor)
         return (major, minor, human)
 
 
 def get_paths(os_name):
-    mozbuild_path = os.environ.get(
-        "MOZBUILD_STATE_PATH", os.path.expanduser(os.path.join("~", ".mozbuild"))
+    mozbuild_path = Path(
+        os.environ.get("MOZBUILD_STATE_PATH", Path("~/.mozbuild").expanduser())
     )
-    sdk_path = os.environ.get(
-        "ANDROID_SDK_HOME",
-        os.path.join(mozbuild_path, "android-sdk-{0}".format(os_name)),
+    sdk_path = Path(
+        os.environ.get("ANDROID_SDK_HOME", mozbuild_path / f"android-sdk-{os_name}"),
     )
-    ndk_path = os.environ.get(
-        "ANDROID_NDK_HOME",
-        os.path.join(mozbuild_path, "android-ndk-{0}".format(NDK_VERSION)),
+    ndk_path = Path(
+        os.environ.get(
+            "ANDROID_NDK_HOME", mozbuild_path / f"android-ndk-{NDK_VERSION}"
+        ),
     )
-    avd_home_path = os.environ.get(
-        "ANDROID_AVD_HOME", os.path.join(mozbuild_path, "android-device", "avd")
+    avd_home_path = Path(
+        os.environ.get("ANDROID_AVD_HOME", mozbuild_path / "android-device" / "avd")
     )
-    emulator_path = os.environ.get(
-        "ANDROID_EMULATOR_HOME", os.path.join(mozbuild_path, "android-device")
-    )
-    return (mozbuild_path, sdk_path, ndk_path, avd_home_path, emulator_path)
+    return mozbuild_path, sdk_path, ndk_path, avd_home_path
 
 
-def sdkmanager_tool(sdk_path):
+def sdkmanager_tool(sdk_path: Path):
     # sys.platform is win32 even if Python/Win64.
     sdkmanager = "sdkmanager.bat" if sys.platform.startswith("win") else "sdkmanager"
-    return os.path.join(
-        sdk_path, "cmdline-tools", CMDLINE_TOOLS_VERSION_STRING, "bin", sdkmanager
+    return (
+        sdk_path / "cmdline-tools" / CMDLINE_TOOLS_VERSION_STRING / "bin" / sdkmanager
     )
 
 
-def avdmanager_tool(sdk_path):
+def avdmanager_tool(sdk_path: Path):
     # sys.platform is win32 even if Python/Win64.
     sdkmanager = "avdmanager.bat" if sys.platform.startswith("win") else "avdmanager"
-    return os.path.join(
-        sdk_path, "cmdline-tools", CMDLINE_TOOLS_VERSION_STRING, "bin", sdkmanager
+    return (
+        sdk_path / "cmdline-tools" / CMDLINE_TOOLS_VERSION_STRING / "bin" / sdkmanager
     )
 
 
-def adb_tool(sdk_path):
+def adb_tool(sdk_path: Path):
     adb = "adb.bat" if sys.platform.startswith("win") else "adb"
-    return os.path.join(sdk_path, "platform-tools", adb)
+    return sdk_path / "platform-tools" / adb
 
 
-def emulator_tool(sdk_path):
+def emulator_tool(sdk_path: Path):
     emulator = "emulator.bat" if sys.platform.startswith("win") else "emulator"
-    return os.path.join(sdk_path, "emulator", emulator)
-
-
-def ensure_dir(dir):
-    """Ensures the given directory exists"""
-    if dir and not os.path.exists(dir):
-        try:
-            os.makedirs(dir)
-        except OSError as error:
-            if error.errno != errno.EEXIST:
-                raise
+    return sdk_path / "emulator" / emulator
 
 
 def ensure_android(
     os_name,
     os_arch,
     artifact_mode=False,
     ndk_only=False,
     system_images_only=False,
     emulator_only=False,
-    avd_manifest_path=None,
+    avd_manifest_path: Optional[Path] = None,
     prewarm_avd=False,
     no_interactive=False,
     list_packages=False,
 ):
     """
     Ensure the Android SDK (and NDK, if `artifact_mode` is falsy) are
     installed.  If not, fetch and unpack the SDK and/or NDK from the
     given URLs.  Ensure the required Android SDK packages are
     installed.
 
     `os_name` can be 'linux', 'macosx' or 'windows'.
     """
     # The user may have an external Android SDK (in which case we
     # save them a lengthy download), or they may have already
     # completed the download. We unpack to
     # ~/.mozbuild/{android-sdk-$OS_NAME, android-ndk-$VER}.
-    mozbuild_path, sdk_path, ndk_path, avd_home_path, emulator_path = get_paths(os_name)
+    mozbuild_path, sdk_path, ndk_path, avd_home_path = get_paths(os_name)
 
     if os_name == "macosx":
         os_tag = "mac"
     elif os_name == "windows":
         os_tag = "win"
     else:
         os_tag = os_name
 
@@ -365,17 +349,16 @@ def ensure_android(
     ndk_url = android_ndk_url(os_name)
     bundletool_url = "https://github.com/google/bundletool/releases/download/{v}/bundletool-all-{v}.jar".format(  # NOQA: E501
         v=BUNDLETOOL_VERSION
     )
 
     ensure_android_sdk_and_ndk(
         mozbuild_path,
         os_name,
-        os_arch,
         sdk_path=sdk_path,
         sdk_url=sdk_url,
         ndk_path=ndk_path,
         ndk_url=ndk_url,
         bundletool_url=bundletool_url,
         artifact_mode=artifact_mode,
         ndk_only=ndk_only,
         emulator_only=emulator_only,
@@ -410,30 +393,28 @@ def ensure_android(
         return
 
     ensure_android_avd(
         avdmanager_tool=avdmanager_tool(sdk_path),
         adb_tool=adb_tool(sdk_path),
         emulator_tool=emulator_tool(sdk_path),
         avd_home_path=avd_home_path,
         sdk_path=sdk_path,
-        emulator_path=emulator_path,
         no_interactive=no_interactive,
         avd_manifest=avd_manifest,
         prewarm_avd=prewarm_avd,
     )
 
 
 def ensure_android_sdk_and_ndk(
-    mozbuild_path,
+    mozbuild_path: Path,
     os_name,
-    os_arch,
-    sdk_path,
+    sdk_path: Path,
     sdk_url,
-    ndk_path,
+    ndk_path: Path,
     ndk_url,
     bundletool_url,
     artifact_mode,
     ndk_only,
     emulator_only,
 ):
     """
     Ensure the Android SDK and NDK are found at the given paths.  If not, fetch
@@ -442,17 +423,17 @@ def ensure_android_sdk_and_ndk(
     """
 
     # It's not particularly bad to overwrite the NDK toolchain, but it does take
     # a while to unpack, so let's avoid the disk activity if possible.  The SDK
     # may prompt about licensing, so we do this first.
     # Check for Android NDK only if we are not in artifact mode.
     if not artifact_mode and not emulator_only:
         install_ndk = True
-        if os.path.isdir(ndk_path):
+        if ndk_path.is_dir():
             try:
                 _, _, human = get_ndk_version(ndk_path)
                 if human == NDK_VERSION:
                     print(ANDROID_NDK_EXISTS % ndk_path)
                     install_ndk = False
             except GetNdkVersionError:
                 pass  # Just do the install.
         if install_ndk:
@@ -461,71 +442,67 @@ def ensure_android_sdk_and_ndk(
 
     if ndk_only:
         return
 
     # We don't want to blindly overwrite, since we use the
     # |sdkmanager| tool to install additional parts of the Android
     # toolchain.  If we overwrite, we lose whatever Android packages
     # the user may have already installed.
-    if os.path.isfile(sdkmanager_tool(sdk_path)):
+    if sdkmanager_tool(sdk_path).is_file():
         print(ANDROID_SDK_EXISTS % sdk_path)
-    elif os.path.isdir(sdk_path):
+    elif sdk_path.is_dir():
         raise NotImplementedError(ANDROID_SDK_TOO_OLD % sdk_path)
     else:
         # The SDK archive used to include a top-level
         # android-sdk-$OS_NAME directory; it no longer does so.  We
         # preserve the old convention to smooth detecting existing SDK
         # installations.
-        cmdline_tools_path = os.path.join(
-            mozbuild_path, "android-sdk-{0}".format(os_name), "cmdline-tools"
-        )
+        cmdline_tools_path = mozbuild_path / f"android-sdk-{os_name}" / "cmdline-tools"
         install_mobile_android_sdk_or_ndk(sdk_url, cmdline_tools_path)
         # The tools package *really* wants to be in
         # <sdk>/cmdline-tools/$CMDLINE_TOOLS_VERSION_STRING
-        os.rename(
-            os.path.join(cmdline_tools_path, "cmdline-tools"),
-            os.path.join(cmdline_tools_path, CMDLINE_TOOLS_VERSION_STRING),
+        (cmdline_tools_path / "cmdline-tools").rename(
+            cmdline_tools_path / CMDLINE_TOOLS_VERSION_STRING
         )
         install_bundletool(bundletool_url, mozbuild_path)
 
 
 def get_packages_to_install(packages_file_content, avd_manifest):
     packages = []
     packages += map(lambda package: package.strip(), packages_file_content)
     if avd_manifest is not None:
         packages += [avd_manifest["emulator_package"]]
     return packages
 
 
 def ensure_android_avd(
-    avdmanager_tool,
-    adb_tool,
-    emulator_tool,
-    avd_home_path,
-    sdk_path,
-    emulator_path,
+    avdmanager_tool: Path,
+    adb_tool: Path,
+    emulator_tool: Path,
+    avd_home_path: Path,
+    sdk_path: Path,
     no_interactive=False,
     avd_manifest=None,
     prewarm_avd=False,
 ):
     """
     Use the given sdkmanager tool (like 'sdkmanager') to install required
     Android packages.
     """
     if avd_manifest is None:
         return
 
-    ensure_dir(avd_home_path)
+    avd_home_path.mkdir(parents=True, exist_ok=True)
     # The AVD needs this folder to boot, so make sure it exists here.
-    ensure_dir(os.path.join(sdk_path, "platforms"))
+    (sdk_path / "platforms").mkdir(parents=True, exist_ok=True)
 
     avd_name = avd_manifest["emulator_avd_name"]
     args = [
-        avdmanager_tool,
+        str(avdmanager_tool),
         "--verbose",
         "create",
         "avd",
         "--force",
         "--name",
         avd_name,
         "--package",
         avd_manifest["emulator_package"],
@@ -533,71 +510,73 @@ def ensure_android_avd(
 
     if not no_interactive:
         subprocess.check_call(args)
         return
 
     # Flush outputs before running sdkmanager.
     sys.stdout.flush()
     env = os.environ.copy()
-    env["ANDROID_AVD_HOME"] = avd_home_path
+    env["ANDROID_AVD_HOME"] = str(avd_home_path)
     proc = subprocess.Popen(args, stdin=subprocess.PIPE, env=env)
     proc.communicate("no\n".encode("UTF-8"))
 
     retcode = proc.poll()
     if retcode:
         cmd = args[0]
         e = subprocess.CalledProcessError(retcode, cmd)
         raise e
 
-    avd_path = os.path.join(avd_home_path, avd_name + ".avd")
-    config_file_name = os.path.join(avd_path, "config.ini")
+    avd_path = avd_home_path / (str(avd_name) + ".avd")
+    config_file_name = avd_path / "config.ini"
 
-    print("Writing config at %s" % config_file_name)
+    print(f"Writing config at {config_file_name}")
 
-    if os.path.isfile(config_file_name):
+    if config_file_name.is_file():
         with open(config_file_name, "a") as config:
             for key, value in avd_manifest["emulator_extra_config"].items():
                 config.write("%s=%s\n" % (key, value))
     else:
         raise NotImplementedError(
-            "Could not find config file at %s, something went wrong" % config_file_name
+            f"Could not find config file at {config_file_name}, something went wrong"
         )
     if prewarm_avd:
-        run_prewarm_avd(
-            adb_tool, emulator_tool, env, avd_name, avd_manifest, no_interactive
-        )
+        run_prewarm_avd(adb_tool, emulator_tool, env, avd_name, avd_manifest)
     # When running in headless mode, the emulator does not run the cleanup
     # step, and thus doesn't delete lock files. On some platforms, left-over
     # lock files can cause the emulator to not start, so we remove them here.
     for lock_file in ["hardware-qemu.ini.lock", "multiinstance.lock"]:
-        lock_file_path = os.path.join(avd_path, lock_file)
+        lock_file_path = avd_path / lock_file
         try:
-            os.remove(lock_file_path)
-            print("Removed lock file %s" % lock_file_path)
+            lock_file_path.unlink()
+            print(f"Removed lock file {lock_file_path}")
         except OSError:
             # The lock file is not there, nothing to do.
             pass
 
 
 def run_prewarm_avd(
-    adb_tool, emulator_tool, env, avd_name, avd_manifest, no_interactive=False
+    adb_tool: Path,
+    emulator_tool: Path,
+    env,
+    avd_name,
+    avd_manifest,
 ):
     """
     Ensures the emulator is fully booted to save time on future iterations.
     """
-    args = [emulator_tool, "-avd", avd_name] + avd_manifest["emulator_extra_args"]
+    args = [str(emulator_tool), "-avd", avd_name] + avd_manifest["emulator_extra_args"]
 
     # Flush outputs before running emulator.
     sys.stdout.flush()
     proc = subprocess.Popen(args, env=env)
 
     booted = False
     for i in range(100):
-        boot_completed_cmd = [adb_tool, "shell", "getprop", "sys.boot_completed"]
+        boot_completed_cmd = [str(adb_tool), "shell", "getprop", "sys.boot_completed"]
         completed_proc = subprocess.Popen(
             boot_completed_cmd, env=env, stdout=subprocess.PIPE
         )
         try:
             out, err = completed_proc.communicate(timeout=30)
             boot_completed = out.decode("UTF-8").strip()
             print("sys.boot_completed = %s" % boot_completed)
             time.sleep(30)
@@ -607,24 +586,24 @@ def run_prewarm_avd(
         except subprocess.TimeoutExpired:
             # Sometimes the adb command hangs, that's ok
             print("sys.boot_completed = Timeout")
 
     if not booted:
         raise NotImplementedError("Could not prewarm emulator")
 
     # Wait until the emulator completely shuts down
-    subprocess.Popen([adb_tool, "emu", "kill"], env=env).wait()
+    subprocess.Popen([str(adb_tool), "emu", "kill"], env=env).wait()
     proc.wait()
 
 
 def ensure_android_packages(
     os_name,
     os_arch,
-    sdkmanager_tool,
+    sdkmanager_tool: Path,
     emulator_only=False,
     system_images_only=False,
     avd_manifest=None,
     no_interactive=False,
     list_packages=False,
 ):
     """
     Use the given sdkmanager tool (like 'sdkmanager') to install required
@@ -635,35 +614,34 @@ def ensure_android_packages(
     # may be prompted to agree to the Android license.
     if system_images_only:
         packages_file_name = "android-system-images-packages.txt"
     elif emulator_only:
         packages_file_name = "android-emulator-packages.txt"
     else:
         packages_file_name = "android-packages.txt"
 
-    packages_file_path = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), packages_file_name)
-    )
+    packages_file_path = (Path(__file__).parent / packages_file_name).resolve()
+
     with open(packages_file_path) as packages_file:
         packages_file_content = packages_file.readlines()
 
     packages = get_packages_to_install(packages_file_content, avd_manifest)
     print(INSTALLING_ANDROID_PACKAGES % "\n".join(packages))
 
-    args = [sdkmanager_tool]
+    args = [str(sdkmanager_tool)]
     if os_name == "macosx" and os_arch == "arm64":
         # Support for Apple Silicon is still in nightly
         args.append("--channel=3")
     args.extend(packages)
 
     # sdkmanager needs JAVA_HOME
     java_bin_path = ensure_java(os_name, os_arch)
     env = os.environ.copy()
-    env["JAVA_HOME"] = os.path.dirname(java_bin_path)
+    env["JAVA_HOME"] = str(java_bin_path.parent)
 
     if not no_interactive:
         subprocess.check_call(args, env=env)
         return
 
     # Flush outputs before running sdkmanager.
     sys.stdout.flush()
     sys.stderr.flush()
@@ -674,21 +652,21 @@ def ensure_android_packages(
     proc.communicate(yes)
 
     retcode = proc.poll()
     if retcode:
         cmd = args[0]
         e = subprocess.CalledProcessError(retcode, cmd)
         raise e
     if list_packages:
-        subprocess.check_call([sdkmanager_tool, "--list"])
+        subprocess.check_call([str(sdkmanager_tool), "--list"])
 
 
 def generate_mozconfig(os_name, artifact_mode=False):
-    moz_state_dir, sdk_path, ndk_path, avd_home_path, emulator_path = get_paths(os_name)
+    moz_state_dir, sdk_path, ndk_path, avd_home_path = get_paths(os_name)
 
     extra_lines = []
     if extra_lines:
         extra_lines.append("")
 
     if artifact_mode:
         template = MOBILE_ANDROID_ARTIFACT_MODE_MOZCONFIG_TEMPLATE
     else:
@@ -803,24 +781,28 @@ def main(argv):
         )
 
     os_arch = platform.machine()
 
     if options.jdk_only:
         ensure_java(os_name, os_arch)
         return 0
 
+    avd_manifest_path = (
+        Path(options.avd_manifest_path) if options.avd_manifest_path else None
+    )
+
     ensure_android(
         os_name,
         os_arch,
         artifact_mode=options.artifact_mode,
         ndk_only=options.ndk_only,
         system_images_only=options.system_images_only,
         emulator_only=options.emulator_only,
-        avd_manifest_path=options.avd_manifest_path,
+        avd_manifest_path=avd_manifest_path,
         prewarm_avd=options.prewarm_avd,
         no_interactive=options.no_interactive,
         list_packages=options.list_packages,
     )
     mozconfig = generate_mozconfig(os_name, options.artifact_mode)
 
     # |./mach bootstrap| automatically creates a mozconfig file for you if it doesn't
     # exist. However, here, we don't know where the "topsrcdir" is, and it's not worth
@@ -828,17 +810,17 @@ def main(argv):
     # So, instead, we'll politely ask users to create (or update) the file themselves.
     suggestion = MOZCONFIG_SUGGESTION_TEMPLATE % ("$topsrcdir/mozconfig", mozconfig)
     print("\n" + suggestion)
 
     return 0
 
 
 def ensure_java(os_name, os_arch):
-    mozbuild_path, _, _, _, _ = get_paths(os_name)
+    mozbuild_path, _, _, _ = get_paths(os_name)
 
     if os_name == "macosx":
         os_tag = "mac"
     else:
         os_tag = os_name
 
     if os_arch == "x86_64":
         arch = "x64"
@@ -848,70 +830,70 @@ def ensure_java(os_name, os_arch):
         arch = os_arch
 
     ext = "zip" if os_name == "windows" else "tar.gz"
 
     java_path = java_bin_path(os_name, mozbuild_path)
     if not java_path:
         raise NotImplementedError(f"Could not bootstrap java for {os_name}.")
 
-    if not os.path.exists(java_path):
+    if not java_path.exists():
         # e.g. https://github.com/adoptium/temurin17-binaries/releases/
         #      download/jdk-17.0.1%2B12/OpenJDK17U-jdk_x64_linux_hotspot_17.0.1_12.tar.gz
         java_url = (
             "https://github.com/adoptium/temurin{major}-binaries/releases/"
             "download/jdk-{major}.{minor}%2B{patch}/"
             "OpenJDK{major}U-jdk_{arch}_{os}_hotspot_{major}.{minor}_{patch}.{ext}"
         ).format(
             major=JAVA_VERSION_MAJOR,
             minor=JAVA_VERSION_MINOR,
             patch=JAVA_VERSION_PATCH,
             os=os_tag,
             arch=arch,
             ext=ext,
         )
-        install_mobile_android_sdk_or_ndk(java_url, os.path.join(mozbuild_path, "jdk"))
+        install_mobile_android_sdk_or_ndk(java_url, mozbuild_path / "jdk")
     return java_path
 
 
-def java_bin_path(os_name, toolchain_path):
+def java_bin_path(os_name, toolchain_path: Path):
     # Like jdk-17.0.1+12
     jdk_folder = "jdk-{major}.{minor}+{patch}".format(
         major=JAVA_VERSION_MAJOR, minor=JAVA_VERSION_MINOR, patch=JAVA_VERSION_PATCH
     )
 
-    java_path = os.path.join(toolchain_path, "jdk", jdk_folder)
+    java_path = toolchain_path / "jdk" / jdk_folder
 
     if os_name == "macosx":
-        return os.path.join(java_path, "Contents", "Home", "bin")
+        return java_path / "Contents" / "Home" / "bin"
     elif os_name == "linux":
-        return os.path.join(java_path, "bin")
+        return java_path / "bin"
     elif os_name == "windows":
-        return os.path.join(java_path, "bin")
+        return java_path / "bin"
     else:
         return None
 
 
-def locate_java_bin_path(host_kernel, toolchain_path):
+def locate_java_bin_path(host_kernel, toolchain_path: Union[str, Path]):
     if host_kernel == "WINNT":
         os_name = "windows"
     elif host_kernel == "Darwin":
         os_name = "macosx"
     elif host_kernel == "Linux":
         os_name = "linux"
     else:
         # Default to Linux
         os_name = "linux"
-    path = java_bin_path(os_name, toolchain_path)
-    if not os.path.isdir(path):
+    path = java_bin_path(os_name, Path(toolchain_path))
+    if not path.is_dir():
         raise JavaLocationFailedException(
-            "Could not locate Java at {}, please run "
-            "./mach bootstrap --no-system-changes".format(path)
+            f"Could not locate Java at {path}, please run "
+            "./mach bootstrap --no-system-changes"
         )
-    return path
+    return str(path)
 
 
 class JavaLocationFailedException(Exception):
     pass
 
 
 if __name__ == "__main__":
     sys.exit(main(sys.argv))
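For illustration, a minimal sketch of how the resumable-download logic in download() above derives its starting offset from a partially written file. resume_header_for is a hypothetical helper, not part of this patch, and it assumes the server honors byte-range requests:

from pathlib import Path


def resume_header_for(download_file_path: Path) -> dict:
    # Resume from however many bytes are already on disk; an empty dict means
    # a fresh download with no Range header.
    if download_file_path.exists() and download_file_path.stat().st_size > 0:
        return {"Range": f"bytes={download_file_path.stat().st_size}-"}
    return {}

For example, resume_header_for(Path("android-ndk.zip")) would return {"Range": "bytes=1048576-"} when 1 MiB of the archive is already present, matching the header built in download() above.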
--- a/python/mozboot/mozboot/archlinux.py
+++ b/python/mozboot/mozboot/archlinux.py
@@ -4,16 +4,18 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import sys
 import tempfile
 import subprocess
 
+from pathlib import Path
+
 from mozboot.base import BaseBootstrapper
 from mozboot.linux_common import LinuxBootstrapper
 
 # NOTE: This script is intended to be run with a vanilla Python install.  We
 # have to rely on the standard library instead of Python 2+3 helpers like
 # the six module.
 if sys.version_info < (3,):
     input = raw_input  # noqa
@@ -64,16 +66,20 @@ class ArchlinuxBootstrapper(LinuxBootstr
     def install_browser_packages(self, mozconfig_builder, artifact_mode=False):
         # TODO: Figure out what not to install for artifact mode
         self.aur_install(*self.BROWSER_AUR_PACKAGES)
         self.pacman_install(*self.BROWSER_PACKAGES)
 
     def install_browser_artifact_mode_packages(self, mozconfig_builder):
         self.install_browser_packages(mozconfig_builder, artifact_mode=True)
 
+    def ensure_nasm_packages(self, state_dir: Path, checkout_root: Path):
+        # installed via install_browser_packages
+        pass
+
     def install_mobile_android_packages(self, mozconfig_builder, artifact_mode=False):
         # Multi-part process:
         # 1. System packages.
         # 2. Android SDK. Android NDK only if we are not in artifact mode. Android packages.
 
         # 1. This is hard to believe, but the Android SDK binaries are 32-bit
         # and that conflicts with 64-bit Arch installations out of the box.  The
         # solution is to add the multilibs repository; unfortunately, this
@@ -121,25 +127,25 @@ class ArchlinuxBootstrapper(LinuxBootstr
 
     def run(self, command, env=None):
         subprocess.check_call(command, stdin=sys.stdin, env=env)
 
     def download(self, uri):
         command = ["curl", "-L", "-O", uri]
         self.run(command)
 
-    def unpack(self, path, name, ext):
+    def unpack(self, path: Path, name, ext):
         if ext == ".gz":
             compression = "-z"
         else:
             print(f"unsupported compression extension: {ext}", file=sys.stderr)
             sys.exit(1)
 
-        name = os.path.join(path, name) + ".tar" + ext
-        command = ["tar", "-x", compression, "-f", name, "-C", path]
+        name = path / (name + ".tar" + ext)
+        command = ["tar", "-x", compression, "-f", str(name), "-C", str(path)]
         self.run(command)
 
     def makepkg(self, name):
         command = ["makepkg", "-sri"]
         if self.no_interactive:
             command.append("--noconfirm")
         makepkg_env = os.environ.copy()
         makepkg_env["PKGDEST"] = "."
@@ -156,33 +162,33 @@ class ArchlinuxBootstrapper(LinuxBootstr
                 )
             else:
                 needed.append(package)
 
         # all required AUR packages are already installed!
         if not needed:
             return
 
-        path = tempfile.mkdtemp(prefix="mozboot-")
+        path = Path(tempfile.mkdtemp(prefix="mozboot-"))
         if not self.no_interactive:
             print(
                 "WARNING! This script requires to install packages from the AUR "
                 "This is potentially insecure so I recommend that you carefully "
                 "read each package description and check the sources."
                 f"These packages will be built in {path}: " + ", ".join(needed),
                 file=sys.stderr,
             )
             choice = input("Do you want to continue? (yes/no) [no]")
             if choice != "yes":
                 sys.exit(1)
 
-        base_dir = os.getcwd()
+        base_dir = Path.cwd()
         os.chdir(path)
         for name in needed:
             url = AUR_URL_TEMPLATE.format(package)
-            ext = os.path.splitext(url)[-1]
-            directory = os.path.join(path, name)
+            ext = Path(url).suffix
+            directory = path / name
             self.download(url)
             self.unpack(path, name, ext)
             os.chdir(directory)
             self.makepkg(name)
 
         os.chdir(base_dir)
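A minimal sketch of the os.path-to-pathlib conversion pattern applied throughout these files; the paths below are made up purely for illustration:

import subprocess
from pathlib import Path

# os.path.join(a, b)             ->  the "/" operator
# os.path.splitext(p)[-1]        ->  Path(p).suffix
# os.path.isdir / os.path.isfile ->  Path.is_dir() / Path.is_file()
archive = Path("/tmp/mozboot-example") / "pkg.tar.gz"
assert archive.suffix == ".gz"

if archive.is_file():
    # subprocess arguments are still passed as strings, hence the explicit str().
    subprocess.check_call(["tar", "-xzf", str(archive), "-C", str(archive.parent)])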
--- a/python/mozboot/mozboot/base.py
+++ b/python/mozboot/mozboot/base.py
@@ -5,23 +5,26 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import hashlib
 import os
 import re
 import subprocess
 import sys
 
+from pathlib import Path
+
 from distutils.version import LooseVersion
 from mozboot import rust
 from mozboot.util import (
     get_mach_virtualenv_binary,
     MINIMUM_RUST_VERSION,
 )
 from mozfile import which
+from mach.util import to_optional_path, win_to_msys_path
 
 # NOTE: This script is intended to be run with a vanilla Python install.  We
 # have to rely on the standard library instead of Python 2+3 helpers like
 # the six module.
 if sys.version_info < (3,):
     from urllib2 import urlopen
 
     input = raw_input  # noqa
@@ -373,19 +376,19 @@ class BaseBootstrapper(object):
 
     def ensure_minidump_stackwalk_packages(self):
         """
         Install minidump_stackwalk.
         """
         pass
 
     def install_toolchain_static_analysis(self, toolchain_job):
-        clang_tools_path = os.path.join(self.state_dir, "clang-tools")
-        if not os.path.exists(clang_tools_path):
-            os.mkdir(clang_tools_path)
+        clang_tools_path = self.state_dir / "clang-tools"
+        if not clang_tools_path.exists():
+            clang_tools_path.mkdir()
         self.install_toolchain_artifact_impl(clang_tools_path, toolchain_job)
 
     def install_toolchain_artifact(self, toolchain_job, no_unpack=False):
         if no_unpack:
             return self.install_toolchain_artifact_impl(
                 self.state_dir, toolchain_job, no_unpack
             )
 
@@ -393,59 +396,58 @@ class BaseBootstrapper(object):
             from mozbuild.configure import ConfigureSandbox
 
             # Here, we don't want an existing mozconfig to interfere with what we
             # do, neither do we want the default for --enable-bootstrap (which is not
             # always on) to prevent this from doing something.
             self.configure_sandbox = sandbox = ConfigureSandbox(
                 {}, argv=["configure", "--enable-bootstrap", f"MOZCONFIG={os.devnull}"]
             )
-            moz_configure = os.path.join(self.srcdir, "build", "moz.configure")
-            sandbox.include_file(os.path.join(moz_configure, "init.configure"))
+            moz_configure = self.srcdir / "build" / "moz.configure"
+            sandbox.include_file(str(moz_configure / "init.configure"))
             # bootstrap_search_path_order has a dependency on developer_options, which
             # is not defined in init.configure. Its value doesn't matter for us, though.
             sandbox["developer_options"] = sandbox["always"]
-            sandbox.include_file(os.path.join(moz_configure, "bootstrap.configure"))
+            sandbox.include_file(str(moz_configure / "bootstrap.configure"))
 
         # Expand the `bootstrap_path` template for the given toolchain_job, and execute the
         # expanded function via `_value_for`, which will trigger autobootstrap.
         self.configure_sandbox._value_for(
             self.configure_sandbox["bootstrap_path"](toolchain_job)
         )
 
     def install_toolchain_artifact_impl(
-        self, install_dir, toolchain_job, no_unpack=False
+        self, install_dir: Path, toolchain_job, no_unpack=False
     ):
-        mach_binary = os.path.join(self.srcdir, "mach")
-        mach_binary = os.path.abspath(mach_binary)
-        if not os.path.exists(mach_binary):
-            raise ValueError("mach not found at %s" % mach_binary)
+        mach_binary = (self.srcdir / "mach").resolve()
+        if not mach_binary.exists():
+            raise ValueError(f"mach not found at {mach_binary}")
 
         if not self.state_dir:
             raise ValueError(
                 "Need a state directory (e.g. ~/.mozbuild) to download " "artifacts"
             )
         python_location = get_mach_virtualenv_binary()
-        if not os.path.exists(python_location):
-            raise ValueError("python not found at %s" % python_location)
+        if not python_location.exists():
+            raise ValueError(f"python not found at {python_location}")
 
         cmd = [
-            python_location,
-            mach_binary,
+            str(python_location),
+            str(mach_binary),
             "artifact",
             "toolchain",
             "--bootstrap",
             "--from-build",
             toolchain_job,
         ]
 
         if no_unpack:
             cmd += ["--no-unpack"]
 
-        subprocess.check_call(cmd, cwd=install_dir)
+        subprocess.check_call(cmd, cwd=str(install_dir))
 
     def run_as_root(self, command):
         if os.geteuid() != 0:
             if which("sudo"):
                 command.insert(0, "sudo")
             else:
                 command = ["su", "root", "-c", " ".join(command)]
 
@@ -608,38 +610,38 @@ class BaseBootstrapper(object):
         self.package_manager_updated = True
 
     def _update_package_manager(self):
         """Updates the package manager's manifests/package list.
 
         This should be defined in child classes.
         """
 
-    def _parse_version_impl(self, path, name, env, version_param):
+    def _parse_version_impl(self, path: Path, name, env, version_param):
         """Execute the given path, returning the version.
 
         Invokes the path argument with the --version switch
         and returns a LooseVersion representing the output
         if successful. If not, returns None.
 
         An optional name argument gives the expected program
         name returned as part of the version string, if it's
         different from the basename of the executable.
 
         An optional env argument allows modifying environment
         variable during the invocation to set options, PATH,
         etc.
         """
         if not name:
-            name = os.path.basename(path)
+            name = path.name
         if name.lower().endswith(".exe"):
             name = name[:-4]
 
         process = subprocess.run(
-            [path, version_param],
+            [str(path), version_param],
             env=env,
             universal_newlines=True,
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
         )
         if process.returncode != 0:
             # This can happen e.g. if the user has an inactive pyenv shim in
             # their path. Just silently treat this as a failure to parse the
@@ -648,17 +650,17 @@ class BaseBootstrapper(object):
 
         match = re.search(name + " ([a-z0-9\.]+)", process.stdout)
         if not match:
             print("ERROR! Unable to identify %s version." % name)
             return None
 
         return LooseVersion(match.group(1))
 
-    def _parse_version(self, path, name=None, env=None):
+    def _parse_version(self, path: Path, name=None, env=None):
         return self._parse_version_impl(path, name, env, "--version")
 
     def _hg_cleanenv(self, load_hgrc=False):
         """Returns a copy of the current environment updated with the HGPLAIN
         and HGRCPATH environment variables.
 
         HGPLAIN prevents Mercurial from applying locale variations to the output
         making it suitable for use in scripts.
@@ -669,17 +671,17 @@ class BaseBootstrapper(object):
         env = os.environ.copy()
         env["HGPLAIN"] = "1"
         if not load_hgrc:
             env["HGRCPATH"] = ""
 
         return env
 
     def is_mercurial_modern(self):
-        hg = which("hg")
+        hg = to_optional_path(which("hg"))
         if not hg:
             print(NO_MERCURIAL)
             return False, False, None
 
         our = self._parse_version(hg, "version", self._hg_cleanenv())
         if not our:
             return True, False, None
 
@@ -727,73 +729,62 @@ class BaseBootstrapper(object):
     def warn_if_pythonpath_is_set(self):
         if "PYTHONPATH" in os.environ:
             print(
                 "WARNING: Your PYTHONPATH environment variable is set. This can "
                 "cause flaky installations of the requirements, and other unexpected "
                 "issues with mach. It is recommended to unset this variable."
             )
 
-    def is_rust_modern(self, cargo_bin):
-        rustc = which("rustc", extra_search_dirs=[cargo_bin])
+    def is_rust_modern(self, cargo_bin: Path):
+        rustc = to_optional_path(which("rustc", extra_search_dirs=[str(cargo_bin)]))
         if not rustc:
             print("Could not find a Rust compiler.")
             return False, None
 
         our = self._parse_version(rustc)
         if not our:
             return False, None
 
         return our >= MODERN_RUST_VERSION, our
 
     def cargo_home(self):
-        cargo_home = os.environ.get(
-            "CARGO_HOME", os.path.expanduser(os.path.join("~", ".cargo"))
-        )
-        cargo_bin = os.path.join(cargo_home, "bin")
+        cargo_home = Path(os.environ.get("CARGO_HOME", Path("~/.cargo").expanduser()))
+        cargo_bin = cargo_home / "bin"
         return cargo_home, cargo_bin
 
-    def win_to_msys_path(self, path):
-        """Convert a windows-style path to msys style."""
-        drive, path = os.path.splitdrive(path)
-        path = "/".join(path.split("\\"))
-        if drive:
-            if path[0] == "/":
-                path = path[1:]
-            path = "/%s/%s" % (drive[:-1], path)
-        return path
-
-    def print_rust_path_advice(self, template, cargo_home, cargo_bin):
+    def print_rust_path_advice(self, template, cargo_home: Path, cargo_bin: Path):
         # Suggest ~/.cargo/env if it exists.
-        if os.path.exists(os.path.join(cargo_home, "env")):
-            cmd = "source %s/env" % cargo_home
+        if (cargo_home / "env").exists():
+            cmd = f"source {cargo_home}/env"
         else:
             # On Windows rustup doesn't write out ~/.cargo/env
             # so fall back to a manual PATH update. Bootstrap
             # only runs under msys, so a unix-style shell command
             # is appropriate there.
-            cargo_bin = self.win_to_msys_path(cargo_bin)
-            cmd = "export PATH=%s:$PATH" % cargo_bin
+            cargo_bin = win_to_msys_path(cargo_bin)
+            cmd = f"export PATH={cargo_bin}:$PATH"
         print(template % {"cargo_bin": cargo_bin, "cmd": cmd})
 
     def ensure_rust_modern(self):
         cargo_home, cargo_bin = self.cargo_home()
         modern, version = self.is_rust_modern(cargo_bin)
 
+        rustup = to_optional_path(which("rustup", extra_search_dirs=[str(cargo_bin)]))
+
         if modern:
             print("Your version of Rust (%s) is new enough." % version)
-            rustup = which("rustup", extra_search_dirs=[cargo_bin])
+
             if rustup:
                 self.ensure_rust_targets(rustup, version)
             return
 
         if version:
             print("Your version of Rust (%s) is too old." % version)
 
-        rustup = which("rustup", extra_search_dirs=[cargo_bin])
         if rustup:
             rustup_version = self._parse_version(rustup)
             if not rustup_version:
                 print(RUSTUP_OLD)
                 sys.exit(1)
             print("Found rustup. Will try to upgrade.")
             self.upgrade_rust(rustup)
 
@@ -801,112 +792,113 @@ class BaseBootstrapper(object):
             if not modern:
                 print(RUST_UPGRADE_FAILED % (MODERN_RUST_VERSION, after))
                 sys.exit(1)
         else:
             # No rustup. Download and run the installer.
             print("Will try to install Rust.")
             self.install_rust()
 
-    def ensure_rust_targets(self, rustup, rust_version):
+    def ensure_rust_targets(self, rustup: Path, rust_version):
         """Make sure appropriate cross target libraries are installed."""
         target_list = subprocess.check_output(
-            [rustup, "target", "list"], universal_newlines=True
+            [str(rustup), "target", "list"], universal_newlines=True
         )
         targets = [
             line.split()[0]
             for line in target_list.splitlines()
             if "installed" in line or "default" in line
         ]
         print("Rust supports %s targets." % ", ".join(targets))
 
         # Support 32-bit Windows on 64-bit Windows.
         win32 = "i686-pc-windows-msvc"
         win64 = "x86_64-pc-windows-msvc"
         if rust.platform() == win64 and win32 not in targets:
-            subprocess.check_call([rustup, "target", "add", win32])
+            subprocess.check_call([str(rustup), "target", "add", win32])
 
         if "mobile_android" in self.application:
             # Let's add the most common targets.
             if rust_version < LooseVersion("1.33"):
                 arm_target = "armv7-linux-androideabi"
             else:
                 arm_target = "thumbv7neon-linux-androideabi"
             android_targets = (
                 arm_target,
                 "aarch64-linux-android",
                 "i686-linux-android",
                 "x86_64-linux-android",
             )
             for target in android_targets:
                 if target not in targets:
-                    subprocess.check_call([rustup, "target", "add", target])
+                    subprocess.check_call([str(rustup), "target", "add", target])
 
-    def upgrade_rust(self, rustup):
+    def upgrade_rust(self, rustup: Path):
         """Upgrade Rust.
 
         Invoke rustup from the given path to update the rust install."""
-        subprocess.check_call([rustup, "update"])
+        subprocess.check_call([str(rustup), "update"])
         # This installs rustfmt when not already installed, or nothing
         # otherwise, while the update above would have taken care of upgrading
         # it.
-        subprocess.check_call([rustup, "component", "add", "rustfmt"])
+        subprocess.check_call([str(rustup), "component", "add", "rustfmt"])
 
     def install_rust(self):
         """Download and run the rustup installer."""
         import errno
         import stat
         import tempfile
 
         platform = rust.platform()
         url = rust.rustup_url(platform)
         checksum = rust.rustup_hash(platform)
         if not url or not checksum:
             print("ERROR: Could not download installer.")
             sys.exit(1)
         print("Downloading rustup-init... ", end="")
-        fd, rustup_init = tempfile.mkstemp(prefix=os.path.basename(url))
+        fd, rustup_init = tempfile.mkstemp(prefix=Path(url).name)
+        rustup_init = Path(rustup_init)
         os.close(fd)
         try:
             self.http_download_and_save(url, rustup_init, checksum)
-            mode = os.stat(rustup_init).st_mode
-            os.chmod(rustup_init, mode | stat.S_IRWXU)
+            mode = rustup_init.stat().st_mode
+            rustup_init.chmod(mode | stat.S_IRWXU)
             print("Ok")
             print("Running rustup-init...")
             subprocess.check_call(
                 [
-                    rustup_init,
+                    str(rustup_init),
                     "-y",
                     "--default-toolchain",
                     "stable",
                     "--default-host",
                     platform,
                     "--component",
                     "rustfmt",
                 ]
             )
             cargo_home, cargo_bin = self.cargo_home()
             self.print_rust_path_advice(RUST_INSTALL_COMPLETE, cargo_home, cargo_bin)
         finally:
             try:
-                os.remove(rustup_init)
+                rustup_init.unlink()
             except OSError as e:
                 if e.errno != errno.ENOENT:
                     raise
 
-    def http_download_and_save(self, url, dest, hexhash, digest="sha256"):
+    def http_download_and_save(self, url, dest: Path, hexhash, digest="sha256"):
         """Download the given url and save it to dest.  hexhash is a checksum
         that will be used to validate the downloaded file using the given
         digest algorithm.  The value of digest can be any value accepted by
         hashlib.new.  The default digest used is 'sha256'."""
         f = urlopen(url)
         h = hashlib.new(digest)
         with open(dest, "wb") as out:
             while True:
                 data = f.read(4096)
                 if data:
                     out.write(data)
                     h.update(data)
                 else:
                     break
         if h.hexdigest() != hexhash:
-            os.remove(dest)
+            dest.unlink()
             raise ValueError("Hash of downloaded file does not match expected hash")
--- a/python/mozboot/mozboot/bootstrap.py
+++ b/python/mozboot/mozboot/bootstrap.py
@@ -7,35 +7,42 @@ from __future__ import absolute_import, 
 from collections import OrderedDict
 
 import os
 import platform
 import re
 import sys
 import subprocess
 import time
+from typing import Optional
+from pathlib import Path
 from distutils.version import LooseVersion
-from mozfile import which
-from mach.util import get_state_dir, UserError
+from mach.util import (
+    get_state_dir,
+    UserError,
+    to_optional_path,
+    to_optional_str,
+    win_to_msys_path,
+)
 from mach.telemetry import initialize_telemetry_setting
-
 from mozboot.base import MODERN_RUST_VERSION
 from mozboot.centosfedora import CentOSFedoraBootstrapper
 from mozboot.opensuse import OpenSUSEBootstrapper
 from mozboot.debian import DebianBootstrapper
 from mozboot.freebsd import FreeBSDBootstrapper
 from mozboot.gentoo import GentooBootstrapper
 from mozboot.osx import OSXBootstrapper, OSXBootstrapperLight
 from mozboot.openbsd import OpenBSDBootstrapper
 from mozboot.archlinux import ArchlinuxBootstrapper
 from mozboot.solus import SolusBootstrapper
 from mozboot.void import VoidBootstrapper
 from mozboot.windows import WindowsBootstrapper
 from mozboot.mozillabuild import MozillaBuildBootstrapper
 from mozboot.mozconfig import find_mozconfig, MozconfigBuilder
+from mozfile import which
 
 # Use distro package to retrieve linux platform information
 import distro
 
 APPLICATION_CHOICE = """
 Note on Artifact Mode:
 
 Artifact builds download prebuilt C++ components rather than building
@@ -163,17 +170,17 @@ class Bootstrapper(object):
             elif dist_id in DEBIAN_DISTROS:
                 cls = DebianBootstrapper
                 args["distro"] = dist_id
                 args["codename"] = codename
             elif dist_id in ("gentoo", "funtoo"):
                 cls = GentooBootstrapper
             elif dist_id in ("solus"):
                 cls = SolusBootstrapper
-            elif dist_id in ("arch") or os.path.exists("/etc/arch-release"):
+            elif dist_id in ("arch") or Path("/etc/arch-release").exists():
                 cls = ArchlinuxBootstrapper
             elif dist_id in ("void"):
                 cls = VoidBootstrapper
             elif dist_id in (
                 "opensuse",
                 "opensuse-leap",
                 "opensuse-tumbleweed",
                 "suse",
@@ -231,30 +238,30 @@ class Bootstrapper(object):
         if not self.instance.artifact_mode:
             self.instance.ensure_stylo_packages()
             self.instance.ensure_clang_static_analysis_package()
             self.instance.ensure_nasm_packages()
             self.instance.ensure_sccache_packages()
         # Like 'ensure_browser_packages' or 'ensure_mobile_android_packages'
         getattr(self.instance, "ensure_%s_packages" % application)()
 
-    def check_code_submission(self, checkout_root):
+    def check_code_submission(self, checkout_root: Path):
         if self.instance.no_interactive or which("moz-phab"):
             return
 
         # Skip moz-phab install until bug 1696357 is fixed and makes it to a moz-phab
         # release.
         if sys.platform.startswith("darwin") and platform.machine() == "arm64":
             return
 
         if not self.instance.prompt_yesno("Will you be submitting commits to Mozilla?"):
             return
 
-        mach_binary = os.path.join(checkout_root, "mach")
-        subprocess.check_call((sys.executable, mach_binary, "install-moz-phab"))
+        mach_binary = checkout_root / "mach"
+        subprocess.check_call((sys.executable, str(mach_binary), "install-moz-phab"))
 
     def bootstrap(self, settings):
         if self.choice is None:
             applications = APPLICATIONS
             # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
             labels = [
                 "%s. %s" % (i, name) for i, name in enumerate(applications.keys(), 1)
             ]
@@ -300,23 +307,26 @@ class Bootstrapper(object):
                 print(
                     "Python is being emulated under Rosetta. Please use a native "
                     "Python instead. If you still really want to go ahead, set "
                     "the MACH_I_DO_WANT_TO_USE_ROSETTA environment variable.",
                     file=sys.stderr,
                 )
                 return 1
 
-        state_dir = get_state_dir()
+        state_dir = Path(get_state_dir())
         self.instance.state_dir = state_dir
 
+        hg = to_optional_path(which("hg"))
+
         # We need to enable the loading of hgrc in case extensions are
         # required to open the repo.
         (checkout_type, checkout_root) = current_firefox_checkout(
-            env=self.instance._hg_cleanenv(load_hgrc=True), hg=which("hg")
+            env=self.instance._hg_cleanenv(load_hgrc=True),
+            hg=hg,
         )
         self.instance.srcdir = checkout_root
         self.instance.validate_environment()
         self._validate_python_environment()
 
         if self.instance.no_system_changes:
             self.maybe_install_private_packages_or_exit(application)
             self._output_mozconfig(application, mozconfig_builder)
@@ -326,52 +336,57 @@ class Bootstrapper(object):
 
         # Like 'install_browser_packages' or 'install_mobile_android_packages'.
         getattr(self.instance, "install_%s_packages" % application)(mozconfig_builder)
 
         hg_installed, hg_modern = self.instance.ensure_mercurial_modern()
         if not self.instance.artifact_mode:
             self.instance.ensure_rust_modern()
 
+        git = to_optional_path(which("git"))
+
         # Possibly configure Mercurial, but not if the current checkout or repo
         # type is Git.
         if hg_installed and checkout_type == "hg":
             if not self.instance.no_interactive:
                 configure_hg = self.instance.prompt_yesno(prompt=CONFIGURE_MERCURIAL)
             else:
                 configure_hg = self.hg_configure
 
             if configure_hg:
-                configure_mercurial(which("hg"), state_dir)
+                configure_mercurial(hg, state_dir)
 
         # Offer to configure Git, if the current checkout or repo type is Git.
-        elif which("git") and checkout_type == "git":
+        elif git and checkout_type == "git":
             should_configure_git = False
             if not self.instance.no_interactive:
                 should_configure_git = self.instance.prompt_yesno(prompt=CONFIGURE_GIT)
             else:
                 # Assuming default configuration setting applies to all VCS.
                 should_configure_git = self.hg_configure
 
             if should_configure_git:
                 configure_git(
-                    which("git"), which("git-cinnabar"), state_dir, checkout_root
+                    git,
+                    to_optional_path(which("git-cinnabar")),
+                    state_dir,
+                    checkout_root,
                 )
 
         self.maybe_install_private_packages_or_exit(application)
         self.check_code_submission(checkout_root)
         # Wait until after moz-phab setup to check telemetry so that employees
         # will be automatically opted-in.
         if not self.instance.no_interactive and not settings.mach_telemetry.is_set_up:
-            initialize_telemetry_setting(settings, checkout_root, state_dir)
+            initialize_telemetry_setting(settings, str(checkout_root), str(state_dir))
 
         print(FINISHED % name)
         if not (
             which("rustc")
-            and self.instance._parse_version("rustc") >= MODERN_RUST_VERSION
+            and self.instance._parse_version(Path("rustc")) >= MODERN_RUST_VERSION
         ):
             print(
                 "To build %s, please restart the shell (Start a new terminal window)"
                 % name
             )
 
         self._output_mozconfig(application, mozconfig_builder)
 
@@ -380,25 +395,24 @@ class Bootstrapper(object):
         additional_mozconfig = getattr(
             self.instance, "generate_%s_mozconfig" % application
         )()
         if additional_mozconfig:
             mozconfig_builder.append(additional_mozconfig)
         raw_mozconfig = mozconfig_builder.generate()
 
         if raw_mozconfig:
-            mozconfig_path = find_mozconfig(self.mach_context.topdir)
+            mozconfig_path = find_mozconfig(Path(self.mach_context.topdir))
             if not mozconfig_path:
                 # No mozconfig file exists yet
-                mozconfig_path = os.path.join(self.mach_context.topdir, "mozconfig")
+                mozconfig_path = Path(self.mach_context.topdir) / "mozconfig"
                 with open(mozconfig_path, "w") as mozconfig_file:
                     mozconfig_file.write(raw_mozconfig)
                 print(
-                    'Your requested configuration has been written to "%s".'
-                    % mozconfig_path
+                    f'Your requested configuration has been written to "{mozconfig_path}".'
                 )
             else:
                 suggestion = MOZCONFIG_SUGGESTION_TEMPLATE % (
                     mozconfig_path,
                     raw_mozconfig,
                 )
                 print(suggestion, end="")
 
@@ -415,222 +429,233 @@ class Bootstrapper(object):
         except ImportError as e:
             print("ERROR: Could not import package %s" % e.name, file=sys.stderr)
             self.instance.suggest_install_distutils()
             valid = False
         except AssertionError:
             print("ERROR: distutils is not behaving as expected.", file=sys.stderr)
             self.instance.suggest_install_distutils()
             valid = False
-        pip3 = which("pip3")
+        pip3 = to_optional_path(which("pip3"))
         if not pip3:
             print("ERROR: Could not find pip3.", file=sys.stderr)
             self.instance.suggest_install_pip3()
             valid = False
         if not valid:
             print(
                 "ERROR: Your Python installation will not be able to run "
                 "`mach bootstrap`. `mach bootstrap` cannot maintain your "
                 "Python environment for you; fix the errors shown here, and "
                 "then re-run `mach bootstrap`.",
                 file=sys.stderr,
             )
             sys.exit(1)
 
 
-def update_vct(hg, root_state_dir):
+def update_vct(hg: Path, root_state_dir: Path):
     """Ensure version-control-tools in the state directory is up to date."""
-    vct_dir = os.path.join(root_state_dir, "version-control-tools")
+    vct_dir = root_state_dir / "version-control-tools"
 
     # Ensure the latest revision of version-control-tools is present.
     update_mercurial_repo(
         hg, "https://hg.mozilla.org/hgcustom/version-control-tools", vct_dir, "@"
     )
 
     return vct_dir
 
 
-def configure_mercurial(hg, root_state_dir):
+def configure_mercurial(hg: Optional[Path], root_state_dir: Path):
     """Run the Mercurial configuration wizard."""
     vct_dir = update_vct(hg, root_state_dir)
 
+    hg = to_optional_str(hg)
+
     # Run the config wizard from v-c-t.
     args = [
         hg,
         "--config",
-        "extensions.configwizard=%s/hgext/configwizard" % vct_dir,
+        f"extensions.configwizard={vct_dir}/hgext/configwizard",
         "configwizard",
     ]
     subprocess.call(args)
 
 
-def update_mercurial_repo(hg, url, dest, revision):
+def update_mercurial_repo(hg: Path, url, dest: Path, revision):
     """Perform a clone/pull + update of a Mercurial repository."""
     # Disable common extensions whose older versions may cause `hg`
     # invocations to abort.
-    pull_args = [hg]
-    if os.path.exists(dest):
+    pull_args = [str(hg)]
+    if dest.exists():
         pull_args.extend(["pull", url])
         cwd = dest
     else:
-        pull_args.extend(["clone", "--noupdate", url, dest])
+        pull_args.extend(["clone", "--noupdate", url, str(dest)])
         cwd = "/"
 
-    update_args = [hg, "update", "-r", revision]
+    update_args = [str(hg), "update", "-r", revision]
 
     print("=" * 80)
-    print("Ensuring %s is up to date at %s" % (url, dest))
+    print(f"Ensuring {url} is up to date at {dest}")
 
     env = os.environ.copy()
     env.update({"HGPLAIN": "1"})
 
     try:
-        subprocess.check_call(pull_args, cwd=cwd, env=env)
-        subprocess.check_call(update_args, cwd=dest, env=env)
+        subprocess.check_call(pull_args, cwd=str(cwd), env=env)
+        subprocess.check_call(update_args, cwd=str(dest), env=env)
     finally:
         print("=" * 80)
 
 
-def current_firefox_checkout(env, hg=None):
+def current_firefox_checkout(env, hg: Optional[Path] = None):
     """Determine whether we're in a Firefox checkout.
 
     Returns one of None, ``git``, or ``hg``.
     """
     HG_ROOT_REVISIONS = set(
         [
             # From mozilla-unified.
             "8ba995b74e18334ab3707f27e9eb8f4e37ba3d29"
         ]
     )
 
-    path = os.getcwd()
+    path = Path.cwd()
     while path:
-        hg_dir = os.path.join(path, ".hg")
-        git_dir = os.path.join(path, ".git")
-        if hg and os.path.exists(hg_dir):
+        hg_dir = path / ".hg"
+        git_dir = path / ".git"
+        if hg and hg_dir.exists():
             # Verify the hg repo is a Firefox repo by looking at rev 0.
             try:
                 node = subprocess.check_output(
-                    [hg, "log", "-r", "0", "--template", "{node}"],
-                    cwd=path,
+                    [str(hg), "log", "-r", "0", "--template", "{node}"],
+                    cwd=str(path),
                     env=env,
                     universal_newlines=True,
                 )
                 if node in HG_ROOT_REVISIONS:
                     _warn_if_risky_revision(path)
-                    return ("hg", path)
+                    return "hg", path
                 # Else the root revision is different. There could be nested
                 # repos. So keep traversing the parents.
             except subprocess.CalledProcessError:
                 pass
 
         # Just check for known-good files in the checkout, to prevent attempted
         # foot-shootings.  Determining a canonical git checkout of mozilla-unified
         # is...complicated
-        elif os.path.exists(git_dir):
-            moz_configure = os.path.join(path, "moz.configure")
-            if os.path.exists(moz_configure):
+        elif git_dir.exists():
+            moz_configure = path / "moz.configure"
+            if moz_configure.exists():
                 _warn_if_risky_revision(path)
-                return ("git", path)
+                return "git", path
 
-        path, child = os.path.split(path)
-        if child == "":
+        if not len(path.parents):
             break
+        path = path.parent
 
     raise UserError(
         "Could not identify the root directory of your checkout! "
         "Are you running `mach bootstrap` in an hg or git clone?"
     )
 
 
-def update_git_tools(git, root_state_dir):
+def update_git_tools(git: Optional[Path], root_state_dir: Path):
     """Update git tools, hooks and extensions"""
     # Ensure git-cinnabar is up to date.
-    cinnabar_dir = os.path.join(root_state_dir, "git-cinnabar")
+    cinnabar_dir = root_state_dir / "git-cinnabar"
 
     # Ensure the latest revision of git-cinnabar is present.
     update_git_repo(git, "https://github.com/glandium/git-cinnabar.git", cinnabar_dir)
 
+    git = to_optional_str(git)
+
     # Perform a download of cinnabar.
     download_args = [git, "cinnabar", "download"]
 
     try:
-        subprocess.check_call(download_args, cwd=cinnabar_dir)
+        subprocess.check_call(download_args, cwd=str(cinnabar_dir))
     except subprocess.CalledProcessError as e:
         print(e)
     return cinnabar_dir
 
 
-def update_git_repo(git, url, dest):
+def update_git_repo(git: Optional[Path], url, dest: Path):
     """Perform a clone/pull + update of a Git repository."""
-    pull_args = [git]
+    git_str = to_optional_str(git)
 
-    if os.path.exists(dest):
+    pull_args = [git_str]
+
+    if dest.exists():
         pull_args.extend(["pull"])
         cwd = dest
     else:
-        pull_args.extend(["clone", "--no-checkout", url, dest])
-        cwd = "/"
+        pull_args.extend(["clone", "--no-checkout", url, str(dest)])
+        cwd = Path("/")
 
-    update_args = [git, "checkout"]
+    update_args = [git_str, "checkout"]
 
     print("=" * 80)
-    print("Ensuring %s is up to date at %s" % (url, dest))
+    print(f"Ensuring {url} is up to date at {dest}")
 
     try:
-        subprocess.check_call(pull_args, cwd=cwd)
-        subprocess.check_call(update_args, cwd=dest)
+        subprocess.check_call(pull_args, cwd=str(cwd))
+        subprocess.check_call(update_args, cwd=str(dest))
     finally:
         print("=" * 80)
 
 
-def configure_git(git, cinnabar, root_state_dir, top_src_dir):
+def configure_git(
+    git: Optional[Path],
+    cinnabar: Optional[Path],
+    root_state_dir: Path,
+    top_src_dir: Path,
+):
     """Run the Git configuration steps."""
 
+    git_str = to_optional_str(git)
+
     match = re.search(
         r"(\d+\.\d+\.\d+)",
-        subprocess.check_output([git, "--version"], universal_newlines=True),
+        subprocess.check_output([git_str, "--version"], universal_newlines=True),
     )
     if not match:
         raise Exception("Could not find git version")
     git_version = LooseVersion(match.group(1))
 
     if git_version < MINIMUM_RECOMMENDED_GIT_VERSION:
         print(
             OLD_GIT_WARNING.format(
                 old_version=git_version,
                 minimum_recommended_version=MINIMUM_RECOMMENDED_GIT_VERSION,
             )
         )
 
     if git_version >= LooseVersion("2.17"):
         # "core.untrackedCache" has a bug before 2.17
         subprocess.check_call(
-            [git, "config", "core.untrackedCache", "true"], cwd=top_src_dir
+            [git_str, "config", "core.untrackedCache", "true"], cwd=str(top_src_dir)
         )
 
-    cinnabar_dir = update_git_tools(git, root_state_dir)
+    cinnabar_dir = str(update_git_tools(git, root_state_dir))
 
     if not cinnabar:
         if "MOZILLABUILD" in os.environ:
             # Slightly modify the path on Windows to be correct
             # for the copy/paste into the .bash_profile
-            cinnabar_dir = "/" + cinnabar_dir
-            cinnabar_dir = cinnabar_dir.replace(":", "")
+            cinnabar_dir = win_to_msys_path(cinnabar_dir)
 
             print(
                 ADD_GIT_CINNABAR_PATH.format(
                     prefix="%USERPROFILE%", cinnabar_dir=cinnabar_dir
                 )
             )
         else:
             print(ADD_GIT_CINNABAR_PATH.format(prefix="~", cinnabar_dir=cinnabar_dir))
 
 
-def _warn_if_risky_revision(path):
+def _warn_if_risky_revision(path: Path):
     # Warn the user if they're trying to bootstrap from an obviously old
     # version of tree as reported by the version control system (a month in
     # this case). This is an approximate calculation but is probably good
     # enough for our purposes.
     NUM_SECONDS_IN_MONTH = 60 * 60 * 24 * 30
     from mozversioncontrol import get_repository_object
 
     repo = get_repository_object(path)
--- a/python/mozboot/mozboot/mach_commands.py
+++ b/python/mozboot/mozboot/mach_commands.py
@@ -2,16 +2,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this,
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import errno
 import sys
 
+from pathlib import Path
 from mach.decorators import CommandArgument, Command
 from mozboot.bootstrap import APPLICATIONS
 
 
 @Command(
     "bootstrap",
     category="devenv",
     description="Install required system packages for building.",
@@ -66,43 +67,45 @@ def vcs_setup(command_context, update_on
 
     If "--update-only" is used, the interactive wizard is disabled
     and this command only ensures that remote repositories providing
     VCS extensions are up to date.
     """
     import mozboot.bootstrap as bootstrap
     import mozversioncontrol
     from mozfile import which
+    from mach.util import to_optional_path
 
     repo = mozversioncontrol.get_repository_object(command_context._mach_context.topdir)
     tool = "hg"
     if repo.name == "git":
         tool = "git"
 
     # "hg" is an executable script with a shebang, which will be found by
     # which. We need to pass a win32 executable to the function because we
     # spawn a process from it.
     if sys.platform in ("win32", "msys"):
         tool += ".exe"
 
-    vcs = which(tool)
+    vcs = to_optional_path(which(tool))
     if not vcs:
         raise OSError(errno.ENOENT, "Could not find {} on $PATH".format(tool))
 
     if update_only:
         if repo.name == "git":
             bootstrap.update_git_tools(
                 vcs,
-                command_context._mach_context.state_dir,
-                command_context._mach_context.topdir,
+                Path(command_context._mach_context.state_dir),
             )
         else:
-            bootstrap.update_vct(vcs, command_context._mach_context.state_dir)
+            bootstrap.update_vct(vcs, Path(command_context._mach_context.state_dir))
     else:
         if repo.name == "git":
             bootstrap.configure_git(
                 vcs,
-                which("git-cinnabar"),
-                command_context._mach_context.state_dir,
-                command_context._mach_context.topdir,
+                to_optional_path(which("git-cinnabar")),
+                Path(command_context._mach_context.state_dir),
+                Path(command_context._mach_context.topdir),
             )
         else:
-            bootstrap.configure_mercurial(vcs, command_context._mach_context.state_dir)
+            bootstrap.configure_mercurial(
+                vcs, Path(command_context._mach_context.state_dir)
+            )
--- a/python/mozboot/mozboot/mozconfig.py
+++ b/python/mozboot/mozboot/mozconfig.py
@@ -2,16 +2,19 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 import filecmp
 import os
 
+from pathlib import Path
+from typing import Union
+
 
 MOZ_MYCONFIG_ERROR = """
 The MOZ_MYCONFIG environment variable to define the location of mozconfigs
 is deprecated. If you wish to define the mozconfig path via an environment
 variable, use MOZCONFIG instead.
 """.strip()
 
 MOZCONFIG_LEGACY_PATH_ERROR = """
@@ -35,116 +38,123 @@ class MozconfigBuilder(object):
 
     def append(self, block):
         self._lines.extend([line.strip() for line in block.split("\n") if line.strip()])
 
     def generate(self):
         return "".join(line + "\n" for line in self._lines)
 
 
-def find_mozconfig(topsrcdir, env=os.environ):
+def find_mozconfig(topsrcdir: Union[str, Path], env=os.environ):
     """Find the active mozconfig file for the current environment.
 
     This emulates the logic in mozconfig-find.
 
     1) If ENV[MOZCONFIG] is set, use that
     2) If $TOPSRCDIR/mozconfig or $TOPSRCDIR/.mozconfig exists, use it.
     3) If both exist or if there are legacy locations detected, error out.
 
     The absolute path to the found mozconfig will be returned on success.
     None will be returned if no mozconfig could be found. A
     MozconfigFindException will be raised if there is a bad state,
     including conditions from #3 above.
     """
+    topsrcdir = Path(topsrcdir)
+
     # Check for legacy methods first.
     if "MOZ_MYCONFIG" in env:
         raise MozconfigFindException(MOZ_MYCONFIG_ERROR)
 
     env_path = env.get("MOZCONFIG", None) or None
+
     if env_path is not None:
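+        # Normalize MOZCONFIG to a Path up front so the rest of the lookup can
+        # use pathlib operations.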
-        if not os.path.isabs(env_path):
-            potential_roots = [topsrcdir, os.getcwd()]
+        env_path = Path(env_path)
+
+    if env_path is not None:
+        if not env_path.is_absolute():
+            potential_roots = [topsrcdir, Path.cwd()]
             # Attempt to eliminate duplicates for e.g.
-            # self.topsrcdir == os.curdir.
-            potential_roots = set(os.path.abspath(p) for p in potential_roots)
+            # self.topsrcdir == Path.cwd().
+            potential_roots_strings = set(str(p.resolve()) for p in potential_roots)
             existing = [
                 root
-                for root in potential_roots
-                if os.path.exists(os.path.join(root, env_path))
+                for root in potential_roots_strings
+                if (Path(root) / env_path).exists()
             ]
             if len(existing) > 1:
                 # There are multiple files, but we might have a setup like:
                 #
                 # somedirectory/
                 #   srcdir/
                 #   objdir/
                 #
                 # MOZCONFIG=../srcdir/some/path/to/mozconfig
                 #
                 # and be configuring from the objdir.  So even though we
                 # have multiple existing files, they are actually the same
                 # file.
-                mozconfigs = [os.path.join(root, env_path) for root in existing]
+                mozconfigs = [root / env_path for root in existing]
                 if not all(
                     map(
                         lambda p1, p2: filecmp.cmp(p1, p2, shallow=False),
                         mozconfigs[:-1],
                         mozconfigs[1:],
                     )
                 ):
                     raise MozconfigFindException(
                         "MOZCONFIG environment variable refers to a path that "
                         + "exists in more than one of "
-                        + ", ".join(potential_roots)
+                        + ", ".join(potential_roots_strings)
                         + ". Remove all but one."
                     )
             elif not existing:
                 raise MozconfigFindException(
                     "MOZCONFIG environment variable refers to a path that "
                     + "does not exist in any of "
-                    + ", ".join(potential_roots)
+                    + ", ".join(potential_roots_strings)
                 )
 
-            env_path = os.path.join(existing[0], env_path)
-        elif not os.path.exists(env_path):  # non-relative path
+            env_path = existing[0] / env_path
+        elif not env_path.exists():  # non-relative path
             raise MozconfigFindException(
                 "MOZCONFIG environment variable refers to a path that "
-                "does not exist: " + env_path
+                f"does not exist: {env_path}"
             )
 
-        if not os.path.isfile(env_path):
+        if not env_path.is_file():
             raise MozconfigFindException(
-                "MOZCONFIG environment variable refers to a " "non-file: " + env_path
+                "MOZCONFIG environment variable refers to a " f"non-file: {env_path}"
             )
 
-    srcdir_paths = [os.path.join(topsrcdir, p) for p in DEFAULT_TOPSRCDIR_PATHS]
-    existing = [p for p in srcdir_paths if os.path.isfile(p)]
+    srcdir_paths = [topsrcdir / p for p in DEFAULT_TOPSRCDIR_PATHS]
+    existing = [p for p in srcdir_paths if p.is_file()]
 
     if env_path is None and len(existing) > 1:
         raise MozconfigFindException(
             "Multiple default mozconfig files "
-            "present. Remove all but one. " + ", ".join(existing)
+            "present. Remove all but one. " + ", ".join(str(p) for p in existing)
         )
 
     path = None
 
     if env_path is not None:
         path = env_path
     elif len(existing):
         assert len(existing) == 1
         path = existing[0]
 
     if path is not None:
-        return os.path.abspath(path)
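+        # Joining onto Path.cwd() leaves an absolute ``path`` unchanged and
+        # anchors a relative one to the current working directory.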
+        return str(Path.cwd() / path)
 
-    deprecated_paths = [os.path.join(topsrcdir, s) for s in DEPRECATED_TOPSRCDIR_PATHS]
+    deprecated_paths = [topsrcdir / s for s in DEPRECATED_TOPSRCDIR_PATHS]
 
     home = env.get("HOME", None)
     if home is not None:
-        deprecated_paths.extend([os.path.join(home, s) for s in DEPRECATED_HOME_PATHS])
+        home = Path(home)
+        deprecated_paths.extend([home / s for s in DEPRECATED_HOME_PATHS])
 
     for path in deprecated_paths:
-        if os.path.exists(path):
+        if path.exists():
             raise MozconfigFindException(
                 MOZCONFIG_LEGACY_PATH_ERROR % (path, topsrcdir)
             )
 
     return None
--- a/python/mozboot/mozboot/mozillabuild.py
+++ b/python/mozboot/mozboot/mozillabuild.py
@@ -5,16 +5,17 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import ctypes
 import os
 import platform
 import sys
 import subprocess
 
+from pathlib import Path
 from mozboot.base import BaseBootstrapper
 
 
 def is_aarch64_host():
     from ctypes import wintypes
 
     kernel32 = ctypes.windll.kernel32
     IMAGE_FILE_MACHINE_UNKNOWN = 0
@@ -63,47 +64,47 @@ def get_windefender_exclusion_paths():
     try:
         with winreg.OpenKeyEx(
             winreg.HKEY_LOCAL_MACHINE,
             r"SOFTWARE\Microsoft\Windows Defender\Exclusions\Paths",
         ) as exclusions_key:
             _, values_count, __ = winreg.QueryInfoKey(exclusions_key)
             for i in range(0, values_count):
                 path, _, __ = winreg.EnumValue(exclusions_key, i)
-                paths.append(path)
+                paths.append(Path(path))
     except FileNotFoundError:
         pass
 
     return paths
 
 
-def is_windefender_affecting_srcdir(srcdir):
+def is_windefender_affecting_srcdir(src_dir: Path):
     if get_is_windefender_disabled():
         return False
 
     # When there's a match, but path cases aren't the same between srcdir and exclusion_path,
     # commonpath will use the casing of the first path provided.
     # To avoid surprises here, we resolve(...) both paths so we don't get unexpected
     # breakage if we change the path order.
-    srcdir = os.path.normcase(os.path.abspath(srcdir))
+    src_dir = src_dir.resolve()
 
     try:
         exclusion_paths = get_windefender_exclusion_paths()
     except OSError as e:
         if e.winerror == 5:
             # A version of Windows 10 released in 2021 raises an "Access is denied"
             # error (ERROR_ACCESS_DENIED == 5) to un-elevated processes when they
             # query Windows Defender's exclusions. Skip the exclusion path checking.
             return
         raise
 
     for exclusion_path in exclusion_paths:
-        exclusion_path = os.path.normcase(os.path.abspath(exclusion_path))
+        exclusion_path = exclusion_path.resolve()
         try:
-            if os.path.commonpath([exclusion_path, srcdir]) == exclusion_path:
+            if Path(os.path.commonpath((exclusion_path, src_dir))) == exclusion_path:
                 # exclusion_path is an ancestor of srcdir
                 return False
         except ValueError:
             # ValueError: Paths don't have the same drive - can't be ours
             pass
     return True
 
 
@@ -242,14 +243,13 @@ class MozillaBuildBootstrapper(BaseBoots
 
     def _update_package_manager(self):
         pass
 
     def run(self, command):
         subprocess.check_call(command, stdin=sys.stdin)
 
     def pip_install(self, *packages):
-        pip_dir = os.path.join(
-            os.environ["MOZILLABUILD"], "python", "Scripts", "pip.exe"
-        )
-        command = [pip_dir, "install", "--upgrade"]
+        pip_dir = Path(os.environ["MOZILLABUILD"]) / "python" / "Scripts" / "pip.exe"
+
+        command = [str(pip_dir), "install", "--upgrade"]
         command.extend(packages)
         self.run(command)
--- a/python/mozboot/mozboot/osx.py
+++ b/python/mozboot/mozboot/osx.py
@@ -10,20 +10,22 @@ import subprocess
 import sys
 import tempfile
 
 try:
     from urllib2 import urlopen
 except ImportError:
     from urllib.request import urlopen
 
+from pathlib import Path
 from distutils.version import StrictVersion
 
 from mozboot.base import BaseBootstrapper
 from mozfile import which
+from mach.util import to_optional_path, to_optional_str
 
 HOMEBREW_BOOTSTRAP = (
     "https://raw.githubusercontent.com/Homebrew/install/master/install.sh"
 )
 
 BREW_INSTALL = """
 We will install the Homebrew package manager to install required packages.
 
@@ -223,26 +225,26 @@ class OSXBootstrapper(OSXAndroidBootstra
 
     def install_browser_packages(self, mozconfig_builder):
         pass
 
     def install_browser_artifact_mode_packages(self, mozconfig_builder):
         pass
 
     def _ensure_homebrew_found(self):
-        self.brew = which("brew")
+        self.brew = to_optional_path(which("brew"))
 
         return self.brew is not None
 
     def _ensure_homebrew_packages(self, packages, is_for_cask=False):
         package_type_flag = "--cask" if is_for_cask else "--formula"
         self.ensure_homebrew_installed()
 
         def create_homebrew_cmd(*parameters):
-            base_cmd = [self.brew]
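+            # ``self.brew`` is an Optional[Path]; subprocess wants plain
+            # strings, so convert it when building the command.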
+            base_cmd = [to_optional_str(self.brew)]
             base_cmd.extend(parameters)
             return base_cmd + [package_type_flag]
 
         installed = set(
             subprocess.check_output(
                 create_homebrew_cmd("list"), universal_newlines=True
             ).split()
         )
@@ -260,28 +262,32 @@ class OSXBootstrapper(OSXAndroidBootstra
         if to_install:
             subprocess.check_call(create_homebrew_cmd("install") + list(to_install))
         if to_upgrade:
             subprocess.check_call(create_homebrew_cmd("upgrade") + list(to_upgrade))
 
     def _ensure_homebrew_casks(self, casks):
         self._ensure_homebrew_found()
 
-        known_taps = subprocess.check_output([self.brew, "tap"])
+        known_taps = subprocess.check_output([to_optional_str(self.brew), "tap"])
 
         # Ensure that we can access old versions of packages.
         if b"homebrew/cask-versions" not in known_taps:
-            subprocess.check_output([self.brew, "tap", "homebrew/cask-versions"])
+            subprocess.check_output(
+                [to_optional_str(self.brew), "tap", "homebrew/cask-versions"]
+            )
 
         # "caskroom/versions" has been renamed to "homebrew/cask-versions", so
         # it is safe to remove the old tap. Removing the old tap is necessary
         # to avoid the error "Cask [name of cask] exists in multiple taps".
         # See https://bugzilla.mozilla.org/show_bug.cgi?id=1544981
         if b"caskroom/versions" in known_taps:
-            subprocess.check_output([self.brew, "untap", "caskroom/versions"])
+            subprocess.check_output(
+                [to_optional_str(self.brew), "untap", "caskroom/versions"]
+            )
 
         self._ensure_homebrew_packages(casks, is_for_cask=True)
 
     def ensure_homebrew_browser_packages(self):
         # TODO: Figure out what not to install for artifact mode
         packages = ["yasm"]
         self._ensure_homebrew_packages(packages)
 
@@ -290,19 +296,19 @@ class OSXBootstrapper(OSXAndroidBootstra
         Search for Homebrew on the PATH; if not found, prompt the user to install it.
         Then assert our PATH ordering is correct.
         """
         homebrew_found = self._ensure_homebrew_found()
         if not homebrew_found:
             self.install_homebrew()
 
         # Check for correct $PATH ordering.
-        brew_dir = os.path.dirname(self.brew)
+        brew_dir = self.brew.resolve().parent
         for path in os.environ["PATH"].split(os.pathsep):
-            if path == brew_dir:
+            if Path(path) == brew_dir:
                 break
 
             for check in ("/bin", "/usr/bin"):
                 if path == check:
                     print(BAD_PATH_ORDER % (check, brew_dir, brew_dir, check, brew_dir))
                     sys.exit(1)
 
     def ensure_clang_static_analysis_package(self):
@@ -346,23 +352,24 @@ class OSXBootstrapper(OSXAndroidBootstra
         if not homebrew_found:
             print(
                 "Homebrew was just installed but can't be found on PATH. "
                 "Please file a bug."
             )
             sys.exit(1)
 
     def _update_package_manager(self):
-        subprocess.check_call([self.brew, "-v", "update"])
+        subprocess.check_call([to_optional_str(self.brew), "-v", "update"])
 
     def _upgrade_package(self, package):
         self._ensure_homebrew_installed()
 
         try:
             subprocess.check_output(
-                [self.brew, "-v", "upgrade", package], stderr=subprocess.STDOUT
+                [to_optional_str(self.brew), "-v", "upgrade", package],
+                stderr=subprocess.STDOUT,
             )
         except subprocess.CalledProcessError as e:
             if b"already installed" not in e.output:
                 raise
 
     def upgrade_mercurial(self, current):
         self._upgrade_package("mercurial")
--- a/python/mozboot/mozboot/rust.py
+++ b/python/mozboot/mozboot/rust.py
@@ -1,18 +1,18 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this,
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-import os
 import platform as platform_mod
 import sys
 
+
 # Base url for pulling the rustup installer.
 # Use the no-CNAME host for compatibility with Python 2.7
 # which doesn't support SNI.
 RUSTUP_URL_BASE = "https://static-rust-lang-org.s3.amazonaws.com/rustup"
 
 # Pull this to get the latest stable version number.
 RUSTUP_MANIFEST = RUSTUP_URL_BASE + "/release-stable.toml"
 
@@ -146,24 +146,16 @@ def make_checksums(version, validate=Fal
             print("OK")
         hashes.append((platform, checksum))
     return hashes
 
 
 if __name__ == "__main__":
     """Allow invoking the module as a utility to update checksums."""
 
-    # Hook the requests module from the greater source tree. We can't import
-    # this at the module level since we might be imported into the bootstrap
-    # script in standalone mode.
-    #
-    # This module is necessary for correct https certificate verification.
-    mod_path = os.path.dirname(__file__)
-    sys.path.insert(0, os.path.join(mod_path, "..", "..", "requests"))
-
     update = False
     if len(sys.argv) > 1:
         if sys.argv[1] == "--update":
             update = True
         else:
             print(USAGE)
             sys.exit(1)
 
--- a/python/mozboot/mozboot/test/test_mozconfig.py
+++ b/python/mozboot/mozboot/test/test_mozconfig.py
@@ -1,17 +1,20 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import unittest
+import sys
+import pytest
 
+from pathlib import Path
 from shutil import rmtree
 
 from tempfile import (
     gettempdir,
     mkdtemp,
 )
 
 from mozboot.mozconfig import (
@@ -32,197 +35,201 @@ class TestFindMozconfig(unittest.TestCas
         os.environ.pop("CC", None)
         os.environ.pop("CXX", None)
         self._temp_dirs = set()
 
     def tearDown(self):
         os.environ.clear()
         os.environ.update(self._old_env)
 
-        for d in self._temp_dirs:
-            rmtree(d)
+        for temp_dir in self._temp_dirs:
+            rmtree(str(temp_dir))
 
     def get_temp_dir(self):
-        d = mkdtemp()
-        self._temp_dirs.add(d)
+        new_temp_dir = Path(mkdtemp())
+        self._temp_dirs.add(new_temp_dir)
 
-        return d
+        return new_temp_dir
 
     def test_find_legacy_env(self):
         """Ensure legacy mozconfig path definitions result in error."""
 
         os.environ["MOZ_MYCONFIG"] = "/foo"
 
         with self.assertRaises(MozconfigFindException) as e:
             find_mozconfig(self.get_temp_dir())
 
         self.assertTrue(str(e.exception).startswith("The MOZ_MYCONFIG"))
 
     def test_find_multiple_configs(self):
         """Ensure multiple relative-path MOZCONFIGs result in error."""
         relative_mozconfig = ".mconfig"
         os.environ["MOZCONFIG"] = relative_mozconfig
 
-        srcdir = self.get_temp_dir()
-        curdir = self.get_temp_dir()
-        dirs = [srcdir, curdir]
-        for d in dirs:
-            path = os.path.join(d, relative_mozconfig)
-            with open(path, "w") as f:
-                f.write(path)
+        src_dir = self.get_temp_dir()
+        cur_dir = self.get_temp_dir()
+        dirs = [src_dir, cur_dir]
+        for iter_dir in dirs:
+            path = iter_dir / relative_mozconfig
+            with open(path, "w") as file:
+                file.write(str(path))
 
-        orig_dir = os.getcwd()
+        orig_dir = Path.cwd()
         try:
-            os.chdir(curdir)
+            os.chdir(cur_dir)
             with self.assertRaises(MozconfigFindException) as e:
-                find_mozconfig(srcdir)
+                find_mozconfig(src_dir)
         finally:
             os.chdir(orig_dir)
 
         self.assertIn("exists in more than one of", str(e.exception))
-        for d in dirs:
-            self.assertIn(d, str(e.exception))
+        for iter_dir in dirs:
+            self.assertIn(str(iter_dir.resolve()), str(e.exception))
 
     def test_find_multiple_but_identical_configs(self):
         """Ensure multiple relative-path MOZCONFIGs pointing at the same file are OK."""
         relative_mozconfig = "../src/.mconfig"
         os.environ["MOZCONFIG"] = relative_mozconfig
 
-        topdir = self.get_temp_dir()
-        srcdir = os.path.join(topdir, "src")
-        os.mkdir(srcdir)
-        curdir = os.path.join(topdir, "obj")
-        os.mkdir(curdir)
+        top_dir = self.get_temp_dir()
+        src_dir = top_dir / "src"
+        src_dir.mkdir()
+        cur_dir = top_dir / "obj"
+        cur_dir.mkdir()
 
-        path = os.path.join(srcdir, relative_mozconfig)
+        path = src_dir / relative_mozconfig
         with open(path, "w"):
             pass
 
-        orig_dir = os.getcwd()
+        orig_dir = Path.cwd()
         try:
-            os.chdir(curdir)
-            self.assertEqual(
-                os.path.realpath(find_mozconfig(srcdir)), os.path.realpath(path)
-            )
+            os.chdir(cur_dir)
+            self.assertEqual(Path(find_mozconfig(src_dir)).resolve(), path.resolve())
         finally:
             os.chdir(orig_dir)
 
     def test_find_no_relative_configs(self):
         """Ensure a missing relative-path MOZCONFIG is detected."""
         relative_mozconfig = ".mconfig"
         os.environ["MOZCONFIG"] = relative_mozconfig
 
-        srcdir = self.get_temp_dir()
-        curdir = self.get_temp_dir()
-        dirs = [srcdir, curdir]
+        src_dir = self.get_temp_dir()
+        cur_dir = self.get_temp_dir()
+        dirs = [src_dir, cur_dir]
 
-        orig_dir = os.getcwd()
+        orig_dir = Path.cwd()
         try:
-            os.chdir(curdir)
+            os.chdir(cur_dir)
             with self.assertRaises(MozconfigFindException) as e:
-                find_mozconfig(srcdir)
+                find_mozconfig(src_dir)
         finally:
             os.chdir(orig_dir)
 
         self.assertIn("does not exist in any of", str(e.exception))
-        for d in dirs:
-            self.assertIn(d, str(e.exception))
+        for iter_dir in dirs:
+            self.assertIn(str(iter_dir.resolve()), str(e.exception))
 
     def test_find_relative_mozconfig(self):
         """Ensure a relative MOZCONFIG can be found in the srcdir."""
         relative_mozconfig = ".mconfig"
         os.environ["MOZCONFIG"] = relative_mozconfig
 
-        srcdir = self.get_temp_dir()
-        curdir = self.get_temp_dir()
+        src_dir = self.get_temp_dir()
+        cur_dir = self.get_temp_dir()
 
-        path = os.path.join(srcdir, relative_mozconfig)
+        path = src_dir / relative_mozconfig
         with open(path, "w"):
             pass
 
-        orig_dir = os.getcwd()
+        orig_dir = Path.cwd()
         try:
-            os.chdir(curdir)
+            os.chdir(cur_dir)
             self.assertEqual(
-                os.path.normpath(find_mozconfig(srcdir)), os.path.normpath(path)
+                str(Path(find_mozconfig(src_dir)).resolve()), str(path.resolve())
             )
         finally:
             os.chdir(orig_dir)
 
+    @pytest.mark.skipif(
+        sys.platform.startswith("win"),
+        reason="This test uses unix-style absolute paths, since we now use Pathlib, and "
+        "`is_absolute()` always returns `False` on Windows if there isn't a drive"
+        " letter, this test is invalid for Windows.",
+    )
     def test_find_abs_path_not_exist(self):
         """Ensure a missing absolute path is detected."""
         os.environ["MOZCONFIG"] = "/foo/bar/does/not/exist"
 
         with self.assertRaises(MozconfigFindException) as e:
             find_mozconfig(self.get_temp_dir())
 
         self.assertIn("path that does not exist", str(e.exception))
-        self.assertTrue(str(e.exception).endswith("/foo/bar/does/not/exist"))
+        self.assertIn("/foo/bar/does/not/exist", str(e.exception))
 
     def test_find_path_not_file(self):
         """Ensure non-file paths are detected."""
 
         os.environ["MOZCONFIG"] = gettempdir()
 
         with self.assertRaises(MozconfigFindException) as e:
             find_mozconfig(self.get_temp_dir())
 
         self.assertIn("refers to a non-file", str(e.exception))
         self.assertTrue(str(e.exception).endswith(gettempdir()))
 
     def test_find_default_files(self):
         """Ensure default paths are used when present."""
-        for p in DEFAULT_TOPSRCDIR_PATHS:
-            d = self.get_temp_dir()
-            path = os.path.join(d, p)
+        for default_dir in DEFAULT_TOPSRCDIR_PATHS:
+            temp_dir = self.get_temp_dir()
+            path = temp_dir / default_dir
 
             with open(path, "w"):
                 pass
 
-            self.assertEqual(find_mozconfig(d), path)
+            self.assertEqual(Path(find_mozconfig(temp_dir)), path)
 
     def test_find_multiple_defaults(self):
         """Ensure we error when multiple default files are present."""
         self.assertGreater(len(DEFAULT_TOPSRCDIR_PATHS), 1)
 
-        d = self.get_temp_dir()
-        for p in DEFAULT_TOPSRCDIR_PATHS:
-            with open(os.path.join(d, p), "w"):
+        temp_dir = self.get_temp_dir()
+        for default_dir in DEFAULT_TOPSRCDIR_PATHS:
+            with open(temp_dir / default_dir, "w"):
                 pass
 
         with self.assertRaises(MozconfigFindException) as e:
-            find_mozconfig(d)
+            find_mozconfig(temp_dir)
 
         self.assertIn("Multiple default mozconfig files present", str(e.exception))
 
     def test_find_deprecated_path_srcdir(self):
         """Ensure we error when deprecated path locations are present."""
-        for p in DEPRECATED_TOPSRCDIR_PATHS:
-            d = self.get_temp_dir()
-            with open(os.path.join(d, p), "w"):
+        for deprecated_dir in DEPRECATED_TOPSRCDIR_PATHS:
+            temp_dir = self.get_temp_dir()
+            with open(temp_dir / deprecated_dir, "w"):
                 pass
 
             with self.assertRaises(MozconfigFindException) as e:
-                find_mozconfig(d)
+                find_mozconfig(temp_dir)
 
             self.assertIn("This implicit location is no longer", str(e.exception))
-            self.assertIn(d, str(e.exception))
+            self.assertIn(str(temp_dir), str(e.exception))
 
     def test_find_deprecated_home_paths(self):
         """Ensure we error when deprecated home directory paths are present."""
 
-        for p in DEPRECATED_HOME_PATHS:
+        for deprecated_path in DEPRECATED_HOME_PATHS:
             home = self.get_temp_dir()
-            os.environ["HOME"] = home
-            path = os.path.join(home, p)
+            os.environ["HOME"] = str(home)
+            path = home / deprecated_path
 
             with open(path, "w"):
                 pass
 
             with self.assertRaises(MozconfigFindException) as e:
                 find_mozconfig(self.get_temp_dir())
 
             self.assertIn("This implicit location is no longer", str(e.exception))
-            self.assertIn(path, str(e.exception))
+            self.assertIn(str(path), str(e.exception))
 
 
 if __name__ == "__main__":
     main()
--- a/python/mozboot/mozboot/util.py
+++ b/python/mozboot/mozboot/util.py
@@ -1,30 +1,30 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 
+from pathlib import Path
+
 from mach.site import PythonVirtualenv
 from mach.util import get_state_dir
 
 
 MINIMUM_RUST_VERSION = "1.57.0"
 
 
 def get_tools_dir(srcdir=False):
     if os.environ.get("MOZ_AUTOMATION") and "MOZ_FETCHES_DIR" in os.environ:
         return os.environ["MOZ_FETCHES_DIR"]
     return get_state_dir(srcdir)
 
 
 def get_mach_virtualenv_root():
-    return os.path.join(
-        get_state_dir(specific_to_topsrcdir=True), "_virtualenvs", "mach"
-    )
+    return Path(get_state_dir(specific_to_topsrcdir=True)) / "_virtualenvs" / "mach"
 
 
 def get_mach_virtualenv_binary():
     root = get_mach_virtualenv_root()
-    return PythonVirtualenv(root).python_path
+    return Path(PythonVirtualenv(str(root)).python_path)
--- a/remote/marionette/driver.js
+++ b/remote/marionette/driver.js
@@ -1058,17 +1058,17 @@ GeckoDriver.prototype.getWindowRect = as
  * @throws {NoSuchWindowError}
  *     Top-level browsing context has been discarded.
  * @throws {UnexpectedAlertOpenError}
  *     A modal dialog is open, blocking this operation.
  * @throws {UnsupportedOperationError}
  *     Not applicable to application.
  */
 GeckoDriver.prototype.setWindowRect = async function(cmd) {
-  assert.firefox();
+  assert.desktop();
   assert.open(this.getBrowsingContext({ top: true }));
   await this._handleUserPrompts();
 
   let { x, y, width, height } = cmd.parameters;
 
   const win = this.getCurrentWindow();
   switch (WindowState.from(win.windowState)) {
     case WindowState.Fullscreen:
@@ -2073,17 +2073,17 @@ GeckoDriver.prototype.close = async func
  *
  * @return {Array.<string>}
  *     Unique chrome window handles of remaining chrome windows.
  *
  * @throws {NoSuchWindowError}
  *     Top-level browsing context has been discarded.
  */
 GeckoDriver.prototype.closeChromeWindow = async function() {
-  assert.firefox();
+  assert.desktop();
   assert.open(this.getBrowsingContext({ context: Context.Chrome, top: true }));
 
   let nwins = 0;
 
   // eslint-disable-next-line
   for (let _ of windowManager.windows) {
     nwins++;
   }
@@ -2265,17 +2265,17 @@ GeckoDriver.prototype.setScreenOrientati
  * @throws {NoSuchWindowError}
  *     Top-level browsing context has been discarded.
  * @throws {UnexpectedAlertOpenError}
  *     A modal dialog is open, blocking this operation.
  * @throws {UnsupportedOperationError}
  *     Not available for current application.
  */
 GeckoDriver.prototype.minimizeWindow = async function() {
-  assert.firefox();
+  assert.desktop();
   assert.open(this.getBrowsingContext({ top: true }));
   await this._handleUserPrompts();
 
   const win = this.getCurrentWindow();
   switch (WindowState.from(win.windowState)) {
     case WindowState.Fullscreen:
       await exitFullscreen(win);
       break;
@@ -2318,17 +2318,17 @@ GeckoDriver.prototype.minimizeWindow = a
  * @throws {NoSuchWindowError}
  *     Top-level browsing context has been discarded.
  * @throws {UnexpectedAlertOpenError}
  *     A modal dialog is open, blocking this operation.
  * @throws {UnsupportedOperationError}
  *     Not available for current application.
  */
 GeckoDriver.prototype.maximizeWindow = async function() {
-  assert.firefox();
+  assert.desktop();
   assert.open(this.getBrowsingContext({ top: true }));
   await this._handleUserPrompts();
 
   const win = this.getCurrentWindow();
   switch (WindowState.from(win.windowState)) {
     case WindowState.Fullscreen:
       await exitFullscreen(win);
       break;
@@ -2370,17 +2370,17 @@ GeckoDriver.prototype.maximizeWindow = a
  * @throws {NoSuchWindowError}
  *     Top-level browsing context has been discarded.
  * @throws {UnexpectedAlertOpenError}
  *     A modal dialog is open, blocking this operation.
  * @throws {UnsupportedOperationError}
  *     Not available for current application.
  */
 GeckoDriver.prototype.fullscreenWindow = async function() {
-  assert.firefox();
+  assert.desktop();
   assert.open(this.getBrowsingContext({ top: true }));
   await this._handleUserPrompts();
 
   const win = this.getCurrentWindow();
   switch (WindowState.from(win.windowState)) {
     case WindowState.Maximized:
     case WindowState.Minimized:
       await restoreWindow(win);
@@ -2687,17 +2687,17 @@ GeckoDriver.prototype.installAddon = fun
   ) {
     throw new error.InvalidArgumentError();
   }
 
   return Addon.install(path, temp);
 };
 
 GeckoDriver.prototype.uninstallAddon = function(cmd) {
-  assert.firefox();
+  assert.desktop();
 
   let id = cmd.parameters.id;
   if (typeof id == "undefined" || typeof id != "string") {
     throw new error.InvalidArgumentError();
   }
 
   return Addon.uninstall(id);
 };
--- a/taskcluster/ci/fetch/visual-metrics.yml
+++ b/taskcluster/ci/fetch/visual-metrics.yml
@@ -1,11 +1,11 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 ---
 visual-metrics:
     description: "Browsertime visual metrics analsyis script"
     fetch:
         type: static-url
-        url: https://raw.githubusercontent.com/sitespeedio/browsertime/f27fdac113108f0da72c347b5585f0e49da0f54a/browsertime/visualmetrics.py
-        sha256: bf179468dff2572e9ad8ec6c32c793521536c4e9bebeeadd8aab0954b1a4f28c
-        size: 91634
+        url: https://raw.githubusercontent.com/sitespeedio/browsertime/c547875d4b5e2982c2b3a7fc823bde2d6a59aed6/browsertime/visualmetrics.py
+        sha256: 8063f3428e2824a2dbad5b474baf364e4816827a93337bf0c63d1ed606f1022b
+        size: 93169
--- a/taskcluster/ci/source-test/python.yml
+++ b/taskcluster/ci/source-test/python.yml
@@ -31,16 +31,17 @@ job-defaults:
         using: mach
     when:
         files-changed:
             - 'config/mozunit/**'
             - 'python/mach_commands.py'
 
 firefox-ci:
     description: taskcluster/gecko_taskgraph unit tests
+    always-target: false
     python-version: [3]
     treeherder:
         symbol: ci
     run:
         using: python-test
         subsuite: ci
     when:
         files-changed:
--- a/taskcluster/ci/test/mochitest.yml
+++ b/taskcluster/ci/test/mochitest.yml
@@ -77,17 +77,18 @@ mochitest-plain:
                     (linux.*64|macosx.*64|windows10-64-2004)-qr/debug: ['autoland', 'mozilla-central']
                     (linux.*64|macosx.*64|windows10-64-2004)-qr/opt: ['autoland']
                     (linux.*64|macosx.*64|windows10-64-2004)-shippable-qr/opt: ['autoland', 'mozilla-central']
                     linux.*64(-asan|-tsan)-qr/opt: ['autoland', 'mozilla-central']
                     linux.*64-ccov-qr/opt: ['mozilla-central']
                     default: []
             fission-xorigin:
                 by-test-platform:
-                    (linux.*64|macosx.*64|windows10-64-2004)(-shippable)?-qr/(opt|debug): ['trunk']
+                    (linux.*64|macosx.*64|windows10.*2004)(-shippable)?-qr/(opt|debug|asan|tsan): ['trunk']
+                    windows.*64-ccov-qr/opt: ['mozilla-central']
                     linux.*64-ccov-qr/opt: ['mozilla-central']
                     default: []
             socketprocess_networking:
                 by-test-platform:
                     (linux.*64|macosx.*64|windows10-64-2004)(-shippable)?-qr/(opt|debug): ['autoland', 'mozilla-central']
                     default: []
             socketprocess_networking+fission:
                 by-test-platform:
--- a/taskcluster/gecko_taskgraph/config.py
+++ b/taskcluster/gecko_taskgraph/config.py
@@ -1,23 +1,20 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 import os
 import logging
-import sys
 
-import attr
-from mozpack import path
+from taskgraph.config import GraphConfig
 from taskgraph.util.yaml import load_yaml
 from voluptuous import Required, Optional, Any
 
-from .util.python_path import find_object
 from .util.schema import validate_schema, Schema, optionally_keyed_by
 
 logger = logging.getLogger(__name__)
 
 graph_config_schema = Schema(
     {
         # The trust-domain for this graph.
         # (See https://firefox-source-docs.mozilla.org/taskcluster/taskcluster/taskgraph.html#taskgraph-trust-domain)  # noqa
@@ -133,58 +130,16 @@ graph_config_schema = Schema(
                 description="Python function to call to register extensions.",
             ): str,
             Optional("decision-parameters"): str,
         },
     }
 )
 
 
-@attr.s(frozen=True, cmp=False)
-class GraphConfig:
-    _config = attr.ib()
-    root_dir = attr.ib()
-
-    _PATH_MODIFIED = False
-
-    def __getitem__(self, name):
-        return self._config[name]
-
-    def register(self):
-        """
-        Add the project's taskgraph directory to the python path, and register
-        any extensions present.
-        """
-        modify_path = os.path.dirname(self.root_dir)
-        if GraphConfig._PATH_MODIFIED:
-            if GraphConfig._PATH_MODIFIED == modify_path:
-                # Already modified path with the same root_dir.
-                # We currently need to do this to enable actions to call
-                # taskgraph_decision, e.g. relpro.
-                return
-            raise Exception("Can't register multiple directories on python path.")
-        GraphConfig._PATH_MODIFIED = modify_path
-        sys.path.insert(0, modify_path)
-        register_path = self["taskgraph"].get("register")
-        if register_path:
-            find_object(register_path)(self)
-
-    @property
-    def taskcluster_yml(self):
-        if path.split(self.root_dir)[-2:] != ["taskcluster", "ci"]:
-            raise Exception(
-                "Not guessing path to `.taskcluster.yml`. "
-                "Graph config in non-standard location."
-            )
-        return os.path.join(
-            os.path.dirname(os.path.dirname(self.root_dir)),
-            ".taskcluster.yml",
-        )
-
-
 def validate_graph_config(config):
     validate_schema(graph_config_schema, config, "Invalid graph configuration:")
 
 
 def load_graph_config(root_dir):
     config_yml = os.path.join(root_dir, "config.yml")
     if not os.path.exists(config_yml):
         raise Exception(f"Couldn't find taskgraph configuration: {config_yml}")
--- a/taskcluster/gecko_taskgraph/generator.py
+++ b/taskcluster/gecko_taskgraph/generator.py
@@ -2,32 +2,33 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import logging
 import os
 import copy
 
 import attr
+from taskgraph.config import GraphConfig
 from taskgraph.util.yaml import load_yaml
 
 from . import filter_tasks
 from .graph import Graph
 from .taskgraph import TaskGraph
 from .task import Task
 from .optimize import optimize_task_graph
 from .morph import morph
 from .parameters import Parameters
 from .util.python_path import find_object
 from .transforms.base import TransformSequence, TransformConfig
 from .util.verify import (
     verify_docs,
     verifications,
 )
-from .config import load_graph_config, GraphConfig
+from .config import load_graph_config
 
 logger = logging.getLogger(__name__)
 
 
 class KindNotFound(Exception):
     """
     Raised when trying to load kind from a directory without a kind.yml.
     """
--- a/taskcluster/gecko_taskgraph/main.py
+++ b/taskcluster/gecko_taskgraph/main.py
@@ -424,27 +424,30 @@ def show_taskgraph(options):
             cur_path = os.path.join(diffdir, f"{options['graph_attr']}_{cur_ref}")
 
             params_name = None
             if len(parameters) > 1:
                 params_name = Parameters.format_spec(spec)
                 base_path += f"_{params_name}"
                 cur_path += f"_{params_name}"
 
-            # We only capture errors when the 'base' generation fails. This is
-            # because if the 'current' generation passed, the failure is likely
-            # due to a difference in the set of revisions being tested and
-            # harmless. We'll still log a warning to notify that the diff is
-            # not available. But if the current generation failed, the error
-            # needs to be addressed.
-            if not os.path.isfile(base_path):
+            # If the base or cur files are missing it means that generation
+            # failed. If one of them failed but not the other, the failure is
+            # likely due to the patch making changes to taskgraph in modules
+            # that don't get reloaded (safe to ignore). If both generations
+            # failed, there's likely a real issue.
+            base_missing = not os.path.isfile(base_path)
+            cur_missing = not os.path.isfile(cur_path)
+            if base_missing != cur_missing:  # != is equivalent to XOR for booleans
                 non_fatal_failures.append(os.path.basename(base_path))
                 continue
 
             try:
+                # If the output file(s) are missing, this command will raise
+                # CalledProcessError with a returncode > 1.
                 proc = subprocess.run(
                     diffcmd + [base_path, cur_path],
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE,
                     universal_newlines=True,
                     check=True,
                 )
                 diff_output = proc.stdout
--- a/taskcluster/gecko_taskgraph/test/conftest.py
+++ b/taskcluster/gecko_taskgraph/test/conftest.py
@@ -1,25 +1,26 @@
 # Any copyright is dedicated to the public domain.
 # http://creativecommons.org/publicdomain/zero/1.0/
 
 import os
 
 import pytest
 from mach.logging import LoggingManager
 from responses import RequestsMock
+from taskgraph.config import GraphConfig
 
 from gecko_taskgraph import (
     GECKO,
     generator,
     optimize as optimize_mod,
     target_tasks as target_tasks_mod,
 )
 from gecko_taskgraph.actions import render_actions_json
-from gecko_taskgraph.config import load_graph_config, GraphConfig
+from gecko_taskgraph.config import load_graph_config
 from gecko_taskgraph.generator import TaskGraphGenerator, Kind
 from gecko_taskgraph.optimize import OptimizationStrategy
 from gecko_taskgraph.parameters import Parameters
 from gecko_taskgraph.util.templates import merge
 
 
 @pytest.fixture
 def responses():
--- a/taskcluster/gecko_taskgraph/test/test_create.py
+++ b/taskcluster/gecko_taskgraph/test/test_create.py
@@ -1,19 +1,19 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 import unittest
 
+from taskgraph.config import GraphConfig
 from unittest import mock
 
 from gecko_taskgraph import create
-from gecko_taskgraph.config import GraphConfig
 from gecko_taskgraph.graph import Graph
 from gecko_taskgraph.taskgraph import TaskGraph
 from gecko_taskgraph.task import Task
 
 from mozunit import main
 
 GRAPH_CONFIG = GraphConfig({"trust-domain": "domain"}, "/var/empty")
 
--- a/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
+++ b/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
@@ -112,17 +112,20 @@ disperse_tasks = list(
             }
         },
     )
 )
 
 
 def idfn(param):
     if isinstance(param, tuple):
-        return param[0].__name__
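+        # Some parametrized tuples start with plain values that have no
+        # ``__name__``; returning None falls back to pytest's default test id.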
+        try:
+            return param[0].__name__
+        except AttributeError:
+            return None
     return None
 
 
 @pytest.mark.parametrize(
     "opt,tasks,arg,expected",
     [
         # debug
         pytest.param(
--- a/taskcluster/gecko_taskgraph/transforms/base.py
+++ b/taskcluster/gecko_taskgraph/transforms/base.py
@@ -1,16 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 import attr
+from taskgraph.config import GraphConfig
 
-from ..config import GraphConfig
 from ..parameters import Parameters
 from ..util.schema import Schema, validate_schema
 
 
 @attr.s(frozen=True)
 class TransformConfig:
     """
     A container for configuration affecting transforms.  The `config` argument
--- a/taskcluster/test/python.ini
+++ b/taskcluster/test/python.ini
@@ -1,8 +1,9 @@
 [DEFAULT]
 subsuite = ci
 skip-if = python == 2
 
 [test_autoland.py]
 [test_autoland_backstop.py]
+[test_generate_params.py]
 [test_mach_try_auto.py]
 [test_mozilla_central.py]
new file mode 100644
--- /dev/null
+++ b/taskcluster/test/test_generate_params.py
@@ -0,0 +1,58 @@
+import json
+import os
+import subprocess
+
+import pytest
+from mozunit import main
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.taskgraph import TaskGraph
+
+pytestmark = pytest.mark.slow
+PARAMS_DIR = os.path.join(GECKO, "taskcluster", "test", "params")
+
+
+@pytest.fixture(scope="module")
+def get_graph_from_spec(tmpdir_factory):
+    outdir = tmpdir_factory.mktemp("graphs")
+
+    # Use a mach subprocess to leverage the auto parallelization of
+    # parameters when specifying a directory.
+    cmd = [
+        "./mach",
+        "taskgraph",
+        "morphed",
+        "--json",
+        f"--parameters={PARAMS_DIR}",
+        f"--output-file={outdir}/graph.json",
+    ]
+    subprocess.run(cmd, cwd=GECKO)
+    assert len(outdir.listdir()) > 0
+
+    def inner(param_spec):
+        outfile = f"{outdir}/graph_{param_spec}.json"
+        with open(outfile) as fh:
+            output = fh.read()
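+            # If the contents aren't valid JSON, hand back the raw text so the
+            # test can print it in its failure message.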
+            try:
+                return TaskGraph.from_json(json.loads(output))[1]
+            except ValueError:
+                return output
+
+    return inner
+
+
+@pytest.mark.parametrize(
+    "param_spec", [os.path.splitext(p)[0] for p in os.listdir(PARAMS_DIR)]
+)
+def test_generate_graphs(get_graph_from_spec, param_spec):
+    ret = get_graph_from_spec(param_spec)
+    if isinstance(ret, str):
+        print(ret)
+        pytest.fail("An exception was raised during graph generation!")
+
+    assert isinstance(ret, TaskGraph)
+    assert len(ret.tasks) > 0
+
+
+if __name__ == "__main__":
+    main()
--- a/testing/marionette/harness/marionette_harness/tests/harness_unit/conftest.py
+++ b/testing/marionette/harness/marionette_harness/tests/harness_unit/conftest.py
@@ -92,10 +92,10 @@ def mock_httpd(request):
     return httpd
 
 
 @pytest.fixture
 def mock_marionette(request):
     """Mock marionette instance"""
     marionette = MagicMock(spec=dir(Marionette()))
     if "has_crashed" in request.funcargnames:
-        marionette.check_for_crash.return_value = request.getfuncargvalue("has_crashed")
+        marionette.check_for_crash.return_value = request.getfixturevalue("has_crashed")
     return marionette
--- a/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_harness.py
+++ b/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_harness.py
@@ -17,17 +17,17 @@ from marionette_harness.runtests import 
 
 @pytest.fixture
 def harness_class(request):
     """
     Mock based on MarionetteHarness whose run method just returns a number of
     failures according to the supplied test parameter
     """
     if "num_fails_crashed" in request.funcargnames:
-        num_fails_crashed = request.getfuncargvalue("num_fails_crashed")
+        num_fails_crashed = request.getfixturevalue("num_fails_crashed")
     else:
         num_fails_crashed = (0, 0)
     harness_cls = Mock(spec=MarionetteHarness)
     harness = harness_cls.return_value
     if num_fails_crashed is None:
         harness.run.side_effect = Exception
     else:
         harness.run.return_value = sum(num_fails_crashed)
@@ -36,17 +36,17 @@ def harness_class(request):
 
 @pytest.fixture
 def runner_class(request):
     """
     Mock based on MarionetteTestRunner, wherein the runner.failed,
     runner.crashed attributes are provided by a test parameter
     """
     if "num_fails_crashed" in request.funcargnames:
-        failures, crashed = request.getfuncargvalue("num_fails_crashed")
+        failures, crashed = request.getfixturevalue("num_fails_crashed")
     else:
         failures = 0
         crashed = 0
     mock_runner_class = Mock(spec=MarionetteTestRunner)
     runner = mock_runner_class.return_value
     runner.failed = failures
     runner.crashed = crashed
     return mock_runner_class
--- a/testing/mochitest/tests/python/conftest.py
+++ b/testing/mochitest/tests/python/conftest.py
@@ -7,17 +7,17 @@ import six
 import json
 import os
 from argparse import Namespace
 
 import pytest
 
 import mozinfo
 from manifestparser import TestManifest, expression
-from moztest.selftest.fixtures import binary, setup_test_harness  # noqa
+from moztest.selftest.fixtures import binary_fixture, setup_test_harness  # noqa
 
 here = os.path.abspath(os.path.dirname(__file__))
 setup_args = [os.path.join(here, "files"), "mochitest", "testing/mochitest"]
 
 
 @pytest.fixture(scope="function")
 def parser(request):
     parser = pytest.importorskip("mochitest_options")
@@ -135,13 +135,13 @@ def skip_using_mozinfo(request, setup_te
         def test_foo():
             pass
     """
 
     setup_test_harness(*setup_args)
     runtests = pytest.importorskip("runtests")
     runtests.update_mozinfo()
 
-    skip_mozinfo = request.node.get_marker("skip_mozinfo")
+    skip_mozinfo = request.node.get_closest_marker("skip_mozinfo")
     if skip_mozinfo:
         value = skip_mozinfo.args[0]
         if expression.parse(value, **mozinfo.info):
             pytest.skip("skipped due to mozinfo match: \n{}".format(value))
--- a/testing/mozbase/mozcrash/tests/conftest.py
+++ b/testing/mozbase/mozcrash/tests/conftest.py
@@ -45,17 +45,16 @@ def check_for_crashes(tmpdir, stackwalk,
 @pytest.fixture
 def check_for_java_exception():
     def wrapper(logcat=None, test_name=None, quiet=True):
         return mozcrash.check_for_java_exception(logcat, test_name, quiet)
 
     return wrapper
 
 
-@pytest.fixture
 def minidump_files(request, tmpdir):
     files = []
 
     for i in range(getattr(request, "param", 1)):
         name = uuid.uuid4()
 
         dmp = tmpdir.join("{}.dmp".format(name))
         dmp.write("foo")
@@ -99,16 +98,21 @@ def minidump_files(request, tmpdir):
             encoding="utf-8",
         )
 
         files.append({"dmp": dmp, "extra": extra})
 
     return files
 
 
+@pytest.fixture(name="minidump_files")
+def minidump_files_fixture(request, tmpdir):
+    return minidump_files(request, tmpdir)
+
+
 @pytest.fixture(autouse=True)
 def mock_popen(monkeypatch):
     """Generate a class that can mock subprocess.Popen.
 
     :param stdouts: Iterable that should return an iterable for the
                     stdout of each process in turn.
     """
 
--- a/testing/mozbase/moztest/moztest/selftest/fixtures.py
+++ b/testing/mozbase/moztest/moztest/selftest/fixtures.py
@@ -85,17 +85,16 @@ def setup_test_harness(request, flavor="
             # We are purposefully not failing here because running |mach python-test|
             # without a build is a perfectly valid use case.
             pass
         return test_root
 
     return inner
 
 
-@pytest.fixture(scope="session")
 def binary():
     """Return a Firefox binary"""
     try:
         return build.get_binary_path()
     except Exception:
         pass
 
     app = "firefox"
@@ -103,8 +102,13 @@ def binary():
     if os.path.isdir(bindir):
         try:
             return mozinstall.get_binary(bindir, app_name=app)
         except Exception:
             pass
 
     if "GECKO_BINARY_PATH" in os.environ:
         return os.environ["GECKO_BINARY_PATH"]
+
+
+@pytest.fixture(name="binary", scope="session")
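+# Expose the plain ``binary`` helper above as a session-scoped fixture under its
+# original name (fixture functions cannot be called directly in modern pytest).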
+def binary_fixture():
+    return binary()
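
The same split is applied to moztest's binary helper: binary() stays a plain function and binary_fixture registers it under the fixture name "binary". A minimal consumer sketch, mirroring how the mozversion tests below import it:

import pytest
from moztest.selftest.fixtures import binary_fixture  # noqa: F401


def test_have_binary(binary):
    if not binary:
        pytest.skip("No Firefox binary found in this environment")
    assert binary  # path to the binary, e.g. from GECKO_BINARY_PATH
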
--- a/testing/mozbase/mozversion/tests/test_binary.py
+++ b/testing/mozbase/mozversion/tests/test_binary.py
@@ -7,26 +7,26 @@
 from __future__ import absolute_import
 
 import os
 import sys
 import shutil
 
 import mozunit
 import pytest
-from moztest.selftest.fixtures import binary as real_binary  # noqa: F401
+from moztest.selftest.fixtures import binary_fixture  # noqa: F401
 
 from mozversion import errors, get_version
 
 
 """test getting application version information from a binary path"""
 
 
-@pytest.fixture(name="binary")
-def fixure_binary(tmpdir):
+@pytest.fixture()
+def fake_binary(tmpdir):
     binary = tmpdir.join("binary")
     binary.write("foobar")
     return str(binary)
 
 
 @pytest.fixture(name="application_ini")
 def fixture_application_ini(tmpdir):
     ini = tmpdir.join("application.ini")
@@ -52,97 +52,97 @@ def fixture_platform_ini(tmpdir):
 BuildID = PlatformBuildID
 Milestone = PlatformMilestone
 SourceStamp = PlatformSourceStamp
 SourceRepository = PlatformSourceRepo"""
     )
     return str(ini)
 
 
-def test_real_binary(real_binary):  # noqa: F811
-    if not real_binary:
+def test_real_binary(binary):  # noqa: F811
+    if not binary:
         pytest.skip("No binary found")
-    v = get_version(real_binary)
+    v = get_version(binary)
     assert isinstance(v, dict)
 
 
-def test_binary(binary, application_ini, platform_ini):
-    _check_version(get_version(binary))
+def test_binary(fake_binary, application_ini, platform_ini):
+    _check_version(get_version(fake_binary))
 
 
 @pytest.mark.skipif(
     not hasattr(os, "symlink"), reason="os.symlink not supported on this platform"
 )
-def test_symlinked_binary(binary, application_ini, platform_ini, tmpdir):
+def test_symlinked_binary(fake_binary, application_ini, platform_ini, tmpdir):
     # create a symlink of the binary in another directory and check
     # version against this symlink
     symlink = str(tmpdir.join("symlink"))
-    os.symlink(binary, symlink)
+    os.symlink(fake_binary, symlink)
     _check_version(get_version(symlink))
 
 
-def test_binary_in_current_path(binary, application_ini, platform_ini, tmpdir):
+def test_binary_in_current_path(fake_binary, application_ini, platform_ini, tmpdir):
     os.chdir(str(tmpdir))
     _check_version(get_version())
 
 
 def test_with_ini_files_on_osx(
-    binary, application_ini, platform_ini, monkeypatch, tmpdir
+    fake_binary, application_ini, platform_ini, monkeypatch, tmpdir
 ):
     monkeypatch.setattr(sys, "platform", "darwin")
     # get_version is working with ini files next to the binary
-    _check_version(get_version(binary=binary))
+    _check_version(get_version(binary=fake_binary))
 
     # or if they are in the Resources dir
     # in this case the binary must be in a Contents dir, next
     # to the Resources dir
     contents_dir = tmpdir.mkdir("Contents")
-    moved_binary = str(contents_dir.join(os.path.basename(binary)))
-    shutil.move(binary, moved_binary)
+    moved_binary = str(contents_dir.join(os.path.basename(fake_binary)))
+    shutil.move(fake_binary, moved_binary)
 
     resources_dir = str(tmpdir.mkdir("Resources"))
     shutil.move(application_ini, resources_dir)
     shutil.move(platform_ini, resources_dir)
 
     _check_version(get_version(binary=moved_binary))
 
 
 def test_invalid_binary_path(tmpdir):
     with pytest.raises(IOError):
         get_version(str(tmpdir.join("invalid")))
 
 
-def test_without_ini_files(binary):
+def test_without_ini_files(fake_binary):
     """With missing ini files an exception should be thrown"""
     with pytest.raises(errors.AppNotFoundError):
-        get_version(binary)
+        get_version(fake_binary)
 
 
-def test_without_platform_ini_file(binary, application_ini):
+def test_without_platform_ini_file(fake_binary, application_ini):
     """With a missing platform.ini file an exception should be thrown"""
     with pytest.raises(errors.AppNotFoundError):
-        get_version(binary)
+        get_version(fake_binary)
 
 
-def test_without_application_ini_file(binary, platform_ini):
+def test_without_application_ini_file(fake_binary, platform_ini):
     """With a missing application.ini file an exception should be thrown"""
     with pytest.raises(errors.AppNotFoundError):
-        get_version(binary)
+        get_version(fake_binary)
 
 
 def test_with_exe(application_ini, platform_ini, tmpdir):
     """Test that we can resolve .exe files"""
     binary = tmpdir.join("binary.exe")
     binary.write("foobar")
     _check_version(get_version(os.path.splitext(str(binary))[0]))
 
 
-def test_not_found_with_binary_specified(binary):
+def test_not_found_with_binary_specified(fake_binary):
     with pytest.raises(errors.LocalAppNotFoundError):
-        get_version(binary)
+        get_version(fake_binary)
 
 
 def _check_version(version):
     assert version.get("application_id") == "AppID"
     assert version.get("application_name") == "AppName"
     assert version.get("application_display_name") == "AppCodeName"
     assert version.get("application_version") == "AppVersion"
     assert version.get("application_buildid") == "AppBuildID"
--- a/testing/mozharness/scripts/web_platform_tests.py
+++ b/testing/mozharness/scripts/web_platform_tests.py
@@ -289,17 +289,16 @@ class WebPlatformTest(TestingMixin, Merc
         )
 
         cmd += [
             "--log-raw=-",
             "--log-raw=%s" % raw_log_file,
             "--log-wptreport=%s"
             % os.path.join(dirs["abs_blob_upload_dir"], "wptreport.json"),
             "--log-errorsummary=%s" % error_summary_file,
-            "--binary=%s" % self.binary_path,
             "--symbols-path=%s" % self.symbols_path,
             "--stackwalk-binary=%s" % self.query_minidump_stackwalk(),
             "--stackfix-dir=%s" % os.path.join(dirs["abs_test_install_dir"], "bin"),
             "--no-pause-after-test",
             "--instrument-to-file=%s"
             % os.path.join(dirs["abs_blob_upload_dir"], "wpt_instruments.txt"),
             "--specialpowers-path=%s"
             % os.path.join(
@@ -325,16 +324,18 @@ class WebPlatformTest(TestingMixin, Merc
             processes = 2
         cmd.append("--processes=%s" % processes)
 
         if self.is_android:
             cmd += [
                 "--device-serial=%s" % self.device_serial,
                 "--package-name=%s" % self.query_package_name(),
             ]
+        else:
+            cmd.append("--binary=%s" % self.binary_path)
 
         if is_windows_7:
             # On Windows 7 --install-fonts fails, so fall back to a Firefox-specific codepath
             self._install_fonts()
         else:
             cmd += ["--install-fonts"]
 
         for test_type in test_types:
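
The mozharness hunk moves --binary out of the unconditional argument list so that only desktop (non-Android) runs pass it, while Android runs pass device and package flags instead. A standalone sketch of the resulting command construction with hypothetical values:

def build_wpt_cmd(is_android, binary_path=None, device_serial=None, package_name=None):
    cmd = ["--log-raw=-", "--no-pause-after-test"]
    if is_android:
        cmd += [
            "--device-serial=%s" % device_serial,
            "--package-name=%s" % package_name,
        ]
    else:
        cmd.append("--binary=%s" % binary_path)
    return cmd


print(build_wpt_cmd(False, binary_path="/builds/firefox/firefox"))
print(build_wpt_cmd(True, device_serial="emulator-5554", package_name="org.example.testapp"))
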
--- a/testing/web-platform/meta/webaudio/the-audio-api/the-oscillatornode-interface/osc-basic-waveform.html.ini
+++ b/testing/web-platform/meta/webaudio/the-audio-api/the-oscillatornode-interface/osc-basic-waveform.html.ini
@@ -1,9 +1,12 @@
 [osc-basic-waveform.html]
+  expected:
+    if os == "linux" and fission: ["OK", "FAIL"]
+
   [< [Test 0\] 2 out of 2 assertions were failed.]
     expected: FAIL
 
   [X Custom: 100 Hz does not equal [1,1.0141456127166748,1.0280853509902954,1.0418163537979126,1.0553359985351562,1.0686413049697876,1.081729769706726,1.094598650932312,1.1072453260421753,1.1196671724319458,1.1318618059158325,1.1438266038894653,1.1555593013763428,1.1670573949813843,1.1783186197280884,1.189340591430664...\] with an element-wise tolerance of {"absoluteThreshold":0.0000018478,"relativeThreshold":0}.\n\tIndex\tActual\t\t\tExpected\t\tAbsError\t\tRelError\t\tTest threshold\n\t[132\]\t6.4742153882980347e-1\t6.4741957187652588e-1\t1.9669532775878906e-6\t3.0381430575024735e-6\t1.8477999999999999e-6\n\t[134\]\t6.1133599281311035e-1\t6.1133408546447754e-1\t1.9073486328125000e-6\t3.1199775673612907e-6\t1.8477999999999999e-6\n\t[135\]\t5.9310543537139893e-1\t5.9310334920883179e-1\t2.0861625671386719e-6\t3.5173677065245060e-6\t1.8477999999999999e-6\n\t[136\]\t5.7475429773330688e-1\t5.7475227117538452e-1\t2.0265579223632813e-6\t3.5259676629357436e-6\t1.8477999999999999e-6\n\t[137\]\t5.5628657341003418e-1\t5.5628448724746704e-1\t2.0861625671386719e-6\t3.7501721061127269e-6\t1.8477999999999999e-6\n\t...and 97 more errors.\n\tMax AbsError of 4.5299530029296875e-6 at index of 197.\n\t[197\]\t-6.1586797237396240e-1\t-6.1587250232696533e-1\t4.5299530029296875e-6\t7.3553421947141031e-6\t1.8477999999999999e-6\n\tMax RelError of 4.6893454976520917e-4 at index of 165.\n\t[165\]\t7.5594307854771614e-3\t7.5558875687420368e-3\t3.5432167351245880e-6\t4.6893454976520917e-4\t1.8477999999999999e-6\n]
     expected: FAIL
 
   [< [Test 5\] 1 out of 2 assertions were failed.]
     expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/fetch/api/crashtests/request.html
@@ -0,0 +1,8 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/common/utils.js"></script>
+<script>
+  // Cycle collection test for a case where the Request object is alive and accessible globally.
+  var req = new Request(`/`);
+  fetch(req)
+</script>
--- a/testing/web-platform/tests/webdriver/tests/support/helpers.py
+++ b/testing/web-platform/tests/webdriver/tests/support/helpers.py
@@ -53,17 +53,18 @@ def cleanup_session(session):
 
     @ignore_exceptions
     def _restore_window_state(session):
         """Reset window to an acceptable size.
 
         This also includes bringing it out of maximized, minimized,
         or fullscreened state.
         """
-        session.window.size = defaults.WINDOW_SIZE
+        if session.capabilities.get("setWindowRect"):
+            session.window.size = defaults.WINDOW_SIZE
 
     @ignore_exceptions
     def _restore_windows(session):
         """Close superfluous windows opened by the test.
 
         It will not end the session implicitly by closing the last window.
         """
         current_window = session.window_handle
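
The helpers.py change guards the window-resize cleanup on the setWindowRect capability, which some remote ends (for example on Android) do not advertise. A runnable sketch of the same guard using a hypothetical stand-in session object:

class FakeWindow:
    size = None


class FakeSession:
    """Hypothetical stand-in for the WebDriver session used above."""

    def __init__(self, capabilities):
        self.capabilities = capabilities
        self.window = FakeWindow()


def restore_window_size(session, size=(800, 600)):
    # Only resize when the remote end advertises setWindowRect.
    if session.capabilities.get("setWindowRect"):
        session.window.size = size


desktop = FakeSession({"setWindowRect": True})
android = FakeSession({"setWindowRect": False})
restore_window_size(desktop)
restore_window_size(android)
print(desktop.window.size, android.window.size)  # (800, 600) None
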
--- a/third_party/python/attrs/attr/__init__.py
+++ b/third_party/python/attrs/attr/__init__.py
@@ -11,19 +11,21 @@ from ._make import (
     Factory,
     attrib,
     attrs,
     fields,
     fields_dict,
     make_class,
     validate,
 )
+from ._version import VersionInfo
 
 
-__version__ = "19.1.0"
+__version__ = "19.2.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
 
 __title__ = "attrs"
 __description__ = "Classes Without Boilerplate"
 __url__ = "https://www.attrs.org/"
 __uri__ = __url__
 __doc__ = __description__ + " <" + __uri__ + ">"
 
 __author__ = "Hynek Schlawack"
@@ -32,16 +34,17 @@ from ._make import (
 __license__ = "MIT"
 __copyright__ = "Copyright (c) 2015 Hynek Schlawack"
 
 
 s = attributes = attrs
 ib = attr = attrib
 dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)
 
+
 __all__ = [
     "Attribute",
     "Factory",
     "NOTHING",
     "asdict",
     "assoc",
     "astuple",
     "attr",
--- a/third_party/python/attrs/attr/__init__.pyi
+++ b/third_party/python/attrs/attr/__init__.pyi
@@ -15,22 +15,37 @@ from typing import (
 )
 
 # `import X as X` is required to make these public
 from . import exceptions as exceptions
 from . import filters as filters
 from . import converters as converters
 from . import validators as validators
 
+from ._version import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
 _T = TypeVar("_T")
 _C = TypeVar("_C", bound=type)
 
 _ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
 _ConverterType = Callable[[Any], _T]
 _FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
 # FIXME: in reality, if multiple validators are passed they must be in a list or tuple,
 # but those are invariant and so would prevent subtypes of _ValidatorType from working
 # when passed in a list or tuple.
 _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
 
 # _make --
 
 NOTHING: object
@@ -44,28 +59,26 @@ def Factory(
     factory: Union[Callable[[Any], _T], Callable[[], _T]],
     takes_self: bool = ...,
 ) -> _T: ...
 
 class Attribute(Generic[_T]):
     name: str
     default: Optional[_T]
     validator: Optional[_ValidatorType[_T]]
-    repr: bool
+    repr: _ReprArgType
     cmp: bool
+    eq: bool
+    order: bool
     hash: Optional[bool]
     init: bool
     converter: Optional[_ConverterType[_T]]
     metadata: Dict[Any, Any]
     type: Optional[Type[_T]]
     kw_only: bool
-    def __lt__(self, x: Attribute[_T]) -> bool: ...
-    def __le__(self, x: Attribute[_T]) -> bool: ...
-    def __gt__(self, x: Attribute[_T]) -> bool: ...
-    def __ge__(self, x: Attribute[_T]) -> bool: ...
 
 # NOTE: We had several choices for the annotation to use for type arg:
 # 1) Type[_T]
 #   - Pros: Handles simple cases correctly
 #   - Cons: Might produce less informative errors in the case of conflicting TypeVars
 #   e.g. `attr.ib(default='bad', type=int)`
 # 2) Callable[..., _T]
 #   - Pros: Better error messages than #1 for conflicting TypeVars
@@ -84,113 +97,121 @@ class Attribute(Generic[_T]):
 # This makes this type of assignments possible:
 #     x: int = attr(8)
 #
 # This form catches explicit None or no default but with no other arguments returns Any.
 @overload
 def attrib(
     default: None = ...,
     validator: None = ...,
-    repr: bool = ...,
-    cmp: bool = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
-    convert: None = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: None = ...,
     converter: None = ...,
     factory: None = ...,
     kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> Any: ...
 
 # This form catches an explicit None or no default and infers the type from the other arguments.
 @overload
 def attrib(
     default: None = ...,
     validator: Optional[_ValidatorArgType[_T]] = ...,
-    repr: bool = ...,
-    cmp: bool = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
-    convert: Optional[_ConverterType[_T]] = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: Optional[Type[_T]] = ...,
     converter: Optional[_ConverterType[_T]] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> _T: ...
 
 # This form catches an explicit default argument.
 @overload
 def attrib(
     default: _T,
     validator: Optional[_ValidatorArgType[_T]] = ...,
-    repr: bool = ...,
-    cmp: bool = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
-    convert: Optional[_ConverterType[_T]] = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: Optional[Type[_T]] = ...,
     converter: Optional[_ConverterType[_T]] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> _T: ...
 
 # This form covers type=non-Type: e.g. forward references (str), Any
 @overload
 def attrib(
     default: Optional[_T] = ...,
     validator: Optional[_ValidatorArgType[_T]] = ...,
-    repr: bool = ...,
-    cmp: bool = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
-    convert: Optional[_ConverterType[_T]] = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: object = ...,
     converter: Optional[_ConverterType[_T]] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> Any: ...
 @overload
 def attrs(
     maybe_cls: _C,
     these: Optional[Dict[str, Any]] = ...,
     repr_ns: Optional[str] = ...,
     repr: bool = ...,
-    cmp: bool = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
     slots: bool = ...,
     frozen: bool = ...,
     weakref_slot: bool = ...,
     str: bool = ...,
     auto_attribs: bool = ...,
     kw_only: bool = ...,
     cache_hash: bool = ...,
     auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> _C: ...
 @overload
 def attrs(
     maybe_cls: None = ...,
     these: Optional[Dict[str, Any]] = ...,
     repr_ns: Optional[str] = ...,
     repr: bool = ...,
-    cmp: bool = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
     slots: bool = ...,
     frozen: bool = ...,
     weakref_slot: bool = ...,
     str: bool = ...,
     auto_attribs: bool = ...,
     kw_only: bool = ...,
     cache_hash: bool = ...,
     auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> Callable[[_C], _C]: ...
 
 # TODO: add support for returning NamedTuple from the mypy plugin
 class _Fields(Tuple[Attribute[Any], ...]):
     def __getattr__(self, name: str) -> Attribute[Any]: ...
 
 def fields(cls: type) -> _Fields: ...
 def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
@@ -199,27 +220,29 @@ def validate(inst: Any) -> None: ...
 # TODO: add support for returning a proper attrs class from the mypy plugin
 # we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid
 def make_class(
     name: str,
     attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
     bases: Tuple[type, ...] = ...,
     repr_ns: Optional[str] = ...,
     repr: bool = ...,
-    cmp: bool = ...,
+    cmp: Optional[bool] = ...,
     hash: Optional[bool] = ...,
     init: bool = ...,
     slots: bool = ...,
     frozen: bool = ...,
     weakref_slot: bool = ...,
     str: bool = ...,
     auto_attribs: bool = ...,
     kw_only: bool = ...,
     cache_hash: bool = ...,
     auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
 ) -> type: ...
 
 # _funcs --
 
 # TODO: add support for returning TypedDict from the mypy plugin
 # FIXME: asdict/astuple do not honor their factory args.  waiting on one of these:
 # https://github.com/python/mypy/issues/4236
 # https://github.com/python/typing/issues/253
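
The stub changes mirror the 19.2.0 runtime API: cmp is superseded by eq/order, and repr may be a per-attribute callable. A short usage sketch assuming attrs >= 19.2.0:

import attr


@attr.s(eq=True, order=False)
class Account:
    user = attr.ib()
    # repr can now be a callable that formats just this attribute's value.
    token = attr.ib(repr=lambda value: "***", eq=False)


print(Account("ada", "s3cret"))                     # Account(user='ada', token=***)
print(Account("ada", "x") == Account("ada", "y"))   # True: token excluded from eq
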
--- a/third_party/python/attrs/attr/_compat.py
+++ b/third_party/python/attrs/attr/_compat.py
@@ -15,17 +15,17 @@ if PYPY or sys.version_info[:2] >= (3, 6
 else:
     from collections import OrderedDict
 
     ordered_dict = OrderedDict
 
 
 if PY2:
     from UserDict import IterableUserDict
-    from collections import Mapping, Sequence  # noqa
+    from collections import Mapping, Sequence
 
     # We 'bundle' isclass instead of using inspect as importing inspect is
     # fairly expensive (order of 10-15 ms for a modern machine in 2016)
     def isclass(klass):
         return isinstance(klass, (type, types.ClassType))
 
     # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
     TYPE = "type"
@@ -101,17 +101,18 @@ else:  # Python 3 and later.
     from collections.abc import Mapping, Sequence  # noqa
 
     def just_warn(*args, **kw):
         """
         We only warn on Python 3 because we are not aware of any concrete
         consequences of not setting the cell on Python 2.
         """
         warnings.warn(
-            "Missing ctypes.  Some features like bare super() or accessing "
+            "Running interpreter doesn't sufficiently support code object "
+            "introspection.  Some features like bare super() or accessing "
             "__class__ will not work with slotted classes.",
             RuntimeWarning,
             stacklevel=2,
         )
 
     def isclass(klass):
         return isinstance(klass, type)
 
@@ -119,41 +120,111 @@ else:  # Python 3 and later.
 
     def iteritems(d):
         return d.items()
 
     def metadata_proxy(d):
         return types.MappingProxyType(dict(d))
 
 
-def import_ctypes():
-    """
-    Moved into a function for testability.
+def make_set_closure_cell():
+    """Return a function of two arguments (cell, value) which sets
+    the value stored in the closure cell `cell` to `value`.
     """
-    import ctypes
-
-    return ctypes
-
-
-def make_set_closure_cell():
-    """
-    Moved into a function for testability.
-    """
+    # pypy makes this easy. (It also supports the logic below, but
+    # why not do the easy/fast thing?)
     if PYPY:  # pragma: no cover
 
         def set_closure_cell(cell, value):
             cell.__setstate__((value,))
 
-    else:
-        try:
-            ctypes = import_ctypes()
+        return set_closure_cell
+
+    # Otherwise gotta do it the hard way.
+
+    # Create a function that will set its first cellvar to `value`.
+    def set_first_cellvar_to(value):
+        x = value
+        return
+
+        # This function will be eliminated as dead code, but
+        # not before its reference to `x` forces `x` to be
+        # represented as a closure cell rather than a local.
+        def force_x_to_be_a_cell():  # pragma: no cover
+            return x
+
+    try:
+        # Extract the code object and make sure our assumptions about
+        # the closure behavior are correct.
+        if PY2:
+            co = set_first_cellvar_to.func_code
+        else:
+            co = set_first_cellvar_to.__code__
+        if co.co_cellvars != ("x",) or co.co_freevars != ():
+            raise AssertionError  # pragma: no cover
 
-            set_closure_cell = ctypes.pythonapi.PyCell_Set
-            set_closure_cell.argtypes = (ctypes.py_object, ctypes.py_object)
-            set_closure_cell.restype = ctypes.c_int
-        except Exception:
-            # We try best effort to set the cell, but sometimes it's not
-            # possible.  For example on Jython or on GAE.
-            set_closure_cell = just_warn
-    return set_closure_cell
+        # Convert this code object to a code object that sets the
+        # function's first _freevar_ (not cellvar) to the argument.
+        if sys.version_info >= (3, 8):
+            # CPython 3.8+ has an incompatible CodeType signature
+            # (added a posonlyargcount argument) but also added
+            # CodeType.replace() to do this without counting parameters.
+            set_first_freevar_code = co.replace(
+                co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
+            )
+        else:
+            args = [co.co_argcount]
+            if not PY2:
+                args.append(co.co_kwonlyargcount)
+            args.extend(
+                [
+                    co.co_nlocals,
+                    co.co_stacksize,
+                    co.co_flags,
+                    co.co_code,
+                    co.co_consts,
+                    co.co_names,
+                    co.co_varnames,
+                    co.co_filename,
+                    co.co_name,
+                    co.co_firstlineno,
+                    co.co_lnotab,
+                    # These two arguments are reversed:
+                    co.co_cellvars,
+                    co.co_freevars,
+                ]
+            )
+            set_first_freevar_code = types.CodeType(*args)
+
+        def set_closure_cell(cell, value):
+            # Create a function using the set_first_freevar_code,
+            # whose first closure cell is `cell`. Calling it will
+            # change the value of that cell.
+            setter = types.FunctionType(
+                set_first_freevar_code, {}, "setter", (), (cell,)
+            )
+            # And call it to set the cell.
+            setter(value)
+
+        # Make sure it works on this interpreter:
+        def make_func_with_cell():
+            x = None
+
+            def func():
+                return x  # pragma: no cover
+
+            return func
+
+        if PY2:
+            cell = make_func_with_cell().func_closure[0]
+        else:
+            cell = make_func_with_cell().__closure__[0]
+        set_closure_cell(cell, 100)
+        if cell.cell_contents != 100:
+            raise AssertionError  # pragma: no cover
+
+    except Exception:
+        return just_warn
+    else:
+        return set_closure_cell
 
 
 set_closure_cell = make_set_closure_cell()
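
The rewritten make_set_closure_cell drops ctypes.pythonapi.PyCell_Set in favor of a generated code object whose cellvars and freevars are swapped. A standalone illustration of the closure cells being manipulated; note that the direct cell_contents assignment shown here works on CPython 3.7+ only, which is one reason attrs keeps the more portable code-object approach:

def make_getter():
    x = "original"

    def get():
        return x

    return get


getter = make_getter()
cell = getter.__closure__[0]
print(cell.cell_contents)       # 'original'
cell.cell_contents = "patched"  # CPython 3.7+; older interpreters need another mechanism
print(getter())                 # 'patched'
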
--- a/third_party/python/attrs/attr/_funcs.py
+++ b/third_party/python/attrs/attr/_funcs.py
@@ -19,17 +19,17 @@ def asdict(
 
     Optionally recurse into other ``attrs``-decorated classes.
 
     :param inst: Instance of an ``attrs``-decorated class.
     :param bool recurse: Recurse into classes that are also
         ``attrs``-decorated.
     :param callable filter: A callable whose return code determines whether an
         attribute or element is included (``True``) or dropped (``False``).  Is
-        called with the :class:`attr.Attribute` as the first argument and the
+        called with the `attr.Attribute` as the first argument and the
         value as the second argument.
     :param callable dict_factory: A callable to produce dictionaries from.  For
         example, to produce ordered dictionaries instead of normal Python
         dictionaries, pass in ``collections.OrderedDict``.
     :param bool retain_collection_types: Do not convert to ``list`` when
         encountering an attribute whose type is ``tuple`` or ``set``.  Only
         meaningful if ``recurse`` is ``True``.
 
@@ -125,17 +125,17 @@ def astuple(
 
     Optionally recurse into other ``attrs``-decorated classes.
 
     :param inst: Instance of an ``attrs``-decorated class.
     :param bool recurse: Recurse into classes that are also
         ``attrs``-decorated.
     :param callable filter: A callable whose return code determines whether an
         attribute or element is included (``True``) or dropped (``False``).  Is
-        called with the :class:`attr.Attribute` as the first argument and the
+        called with the `attr.Attribute` as the first argument and the
         value as the second argument.
     :param callable tuple_factory: A callable to produce tuples from.  For
         example, to produce lists instead of tuples.
     :param bool retain_collection_types: Do not convert to ``list``
         or ``dict`` when encountering an attribute which type is
         ``tuple``, ``dict`` or ``set``.  Only meaningful if ``recurse`` is
         ``True``.
 
@@ -214,17 +214,17 @@ def astuple(
 
 def has(cls):
     """
     Check whether *cls* is a class with ``attrs`` attributes.
 
     :param type cls: Class to introspect.
     :raise TypeError: If *cls* is not a class.
 
-    :rtype: :class:`bool`
+    :rtype: bool
     """
     return getattr(cls, "__attrs_attrs__", None) is not None
 
 
 def assoc(inst, **changes):
     """
     Copy *inst* and apply *changes*.
 
@@ -234,17 +234,17 @@ def assoc(inst, **changes):
     :return: A copy of inst with *changes* incorporated.
 
     :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
         be found on *cls*.
     :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
         class.
 
     ..  deprecated:: 17.1.0
-        Use :func:`evolve` instead.
+        Use `evolve` instead.
     """
     import warnings
 
     warnings.warn(
         "assoc is deprecated and will be removed after 2018/01.",
         DeprecationWarning,
         stacklevel=2,
     )
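
The docstring edits above describe the filter contract for asdict()/astuple(): the callable receives the attr.Attribute first and the value second, and returns True to keep the field. A runnable example:

import attr


@attr.s
class User:
    name = attr.ib()
    password = attr.ib()


u = User("ada", "hunter2")
# Drop the password field when serializing.
print(attr.asdict(u, filter=lambda attribute, value: attribute.name != "password"))
# -> {'name': 'ada'}
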
--- a/third_party/python/attrs/attr/_make.py
+++ b/third_party/python/attrs/attr/_make.py
@@ -1,15 +1,15 @@
 from __future__ import absolute_import, division, print_function
 
 import copy
-import hashlib
 import linecache
 import sys
 import threading
+import uuid
 import warnings
 
 from operator import itemgetter
 
 from . import _config
 from ._compat import (
     PY2,
     isclass,
@@ -37,16 +37,19 @@ from .exceptions import (
 _classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar")
 # we don't use a double-underscore prefix because that triggers
 # name mangling when trying to create a slot for the field
 # (when slots=True)
 _hash_cache_field = "_attrs_cached_hash"
 
 _empty_metadata_singleton = metadata_proxy({})
 
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
 
 class _Nothing(object):
     """
     Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
 
     ``_Nothing`` is a singleton. There is only ever one of it.
     """
 
@@ -66,137 +69,141 @@ NOTHING = _Nothing()
 Sentinel to indicate the lack of a value when ``None`` is ambiguous.
 """
 
 
 def attrib(
     default=NOTHING,
     validator=None,
     repr=True,
-    cmp=True,
+    cmp=None,
     hash=None,
     init=True,
-    convert=None,
     metadata=None,
     type=None,
     converter=None,
     factory=None,
     kw_only=False,
+    eq=None,
+    order=None,
 ):
     """
     Create a new attribute on a class.
 
     ..  warning::
 
         Does *not* do anything unless the class is also decorated with
-        :func:`attr.s`!
+        `attr.s`!
 
     :param default: A value that is used if an ``attrs``-generated ``__init__``
         is used and no value is passed while instantiating or the attribute is
         excluded using ``init=False``.
 
-        If the value is an instance of :class:`Factory`, its callable will be
+        If the value is an instance of `Factory`, its callable will be
         used to construct a new value (useful for mutable data types like lists
         or dicts).
 
         If a default is not set (or set manually to ``attr.NOTHING``), a value
-        *must* be supplied when instantiating; otherwise a :exc:`TypeError`
+        *must* be supplied when instantiating; otherwise a `TypeError`
         will be raised.
 
         The default can also be set using decorator notation as shown below.
 
-    :type default: Any value.
+    :type default: Any value
 
     :param callable factory: Syntactic sugar for
         ``default=attr.Factory(callable)``.
 
-    :param validator: :func:`callable` that is called by ``attrs``-generated
+    :param validator: `callable` that is called by ``attrs``-generated
         ``__init__`` methods after the instance has been initialized.  They
-        receive the initialized instance, the :class:`Attribute`, and the
+        receive the initialized instance, the `Attribute`, and the
         passed value.
 
         The return value is *not* inspected so the validator has to throw an
         exception itself.
 
         If a ``list`` is passed, its items are treated as validators and must
         all pass.
 
         Validators can be globally disabled and re-enabled using
-        :func:`get_run_validators`.
+        `get_run_validators`.
 
         The validator can also be set using decorator notation as shown below.
 
     :type validator: ``callable`` or a ``list`` of ``callable``\\ s.
 
-    :param bool repr: Include this attribute in the generated ``__repr__``
-        method.
-    :param bool cmp: Include this attribute in the generated comparison methods
-        (``__eq__`` et al).
+    :param repr: Include this attribute in the generated ``__repr__``
+        method. If ``True``, include the attribute; if ``False``, omit it. By
+        default, the built-in ``repr()`` function is used. To override how the
+        attribute value is formatted, pass a ``callable`` that takes a single
+        value and returns a string. Note that the resulting string is used
+        as-is, i.e. it will be used directly *instead* of calling ``repr()``
+        (the default).
+    :type repr: a ``bool`` or a ``callable`` to use a custom function.
+    :param bool eq: If ``True`` (default), include this attribute in the
+        generated ``__eq__`` and ``__ne__`` methods that check two instances
+        for equality.
+    :param bool order: If ``True`` (default), include this attribute in the
+        generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+    :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True,
+        order=True``. Deprecated in favor of *eq* and *order*.
     :param hash: Include this attribute in the generated ``__hash__``
-        method.  If ``None`` (default), mirror *cmp*'s value.  This is the
+        method.  If ``None`` (default), mirror *eq*'s value.  This is the
         correct behavior according the Python spec.  Setting this value to
         anything else than ``None`` is *discouraged*.
     :type hash: ``bool`` or ``None``
     :param bool init: Include this attribute in the generated ``__init__``
         method.  It is possible to set this to ``False`` and set a default
         value.  In that case this attributed is unconditionally initialized
         with the specified default value or factory.
-    :param callable converter: :func:`callable` that is called by
+    :param callable converter: `callable` that is called by
         ``attrs``-generated ``__init__`` methods to converter attribute's value
         to the desired format.  It is given the passed-in value, and the
         returned value will be used as the new value of the attribute.  The
         value is converted before being passed to the validator, if any.
     :param metadata: An arbitrary mapping, to be used by third-party
-        components.  See :ref:`extending_metadata`.
+        components.  See `extending_metadata`.
     :param type: The type of the attribute.  In Python 3.6 or greater, the
         preferred method to specify the type is using a variable annotation
         (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
         This argument is provided for backward compatibility.
         Regardless of the approach used, the type will be stored on
         ``Attribute.type``.
 
         Please note that ``attrs`` doesn't do anything with this metadata by
         itself. You can use it as part of your own code or for
-        :doc:`static type checking <types>`.
+        `static type checking <types>`.
     :param kw_only: Make this attribute keyword-only (Python 3+)
         in the generated ``__init__`` (if ``init`` is ``False``, this
         parameter is ignored).
 
     .. versionadded:: 15.2.0 *convert*
     .. versionadded:: 16.3.0 *metadata*
     .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
     .. versionchanged:: 17.1.0
-       *hash* is ``None`` and therefore mirrors *cmp* by default.
+       *hash* is ``None`` and therefore mirrors *eq* by default.
     .. versionadded:: 17.3.0 *type*
     .. deprecated:: 17.4.0 *convert*
     .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
        *convert* to achieve consistency with other noun-based arguments.
     .. versionadded:: 18.1.0
        ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
     .. versionadded:: 18.2.0 *kw_only*
+    .. versionchanged:: 19.2.0 *convert* keyword argument removed
+    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
     """
+    eq, order = _determine_eq_order(cmp, eq, order)
+
     if hash is not None and hash is not True and hash is not False:
         raise TypeError(
             "Invalid value for hash.  Must be True, False, or None."
         )
 
-    if convert is not None:
-        if converter is not None:
-            raise RuntimeError(
-                "Can't pass both `convert` and `converter`.  "
-                "Please use `converter` only."
-            )
-        warnings.warn(
-            "The `convert` argument is deprecated in favor of `converter`.  "
-            "It will be removed after 2019/01.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        converter = convert
-
     if factory is not None:
         if default is not NOTHING:
             raise ValueError(
                 "The `default` and `factory` arguments are mutually "
                 "exclusive."
             )
         if not callable(factory):
             raise ValueError("The `factory` argument must be a callable.")
@@ -204,23 +211,25 @@ def attrib(
 
     if metadata is None:
         metadata = {}
 
     return _CountingAttr(
         default=default,
         validator=validator,
         repr=repr,
-        cmp=cmp,
+        cmp=None,
         hash=hash,
         init=init,
         converter=converter,
         metadata=metadata,
         type=type,
         kw_only=kw_only,
+        eq=eq,
+        order=order,
     )
 
 
 def _make_attr_tuple_class(cls_name, attr_names):
     """
     Create a tuple subclass to hold `Attribute`s for an `attrs` class.
 
     The subclass is a bare tuple with properties for names.
@@ -380,48 +389,30 @@ def _transform_attrs(cls, these, auto_at
     AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
 
     if kw_only:
         own_attrs = [a._assoc(kw_only=True) for a in own_attrs]
         base_attrs = [a._assoc(kw_only=True) for a in base_attrs]
 
     attrs = AttrsClass(base_attrs + own_attrs)
 
+    # Mandatory vs non-mandatory attr order only matters when they are part of
+    # the __init__ signature and when they aren't kw_only (which are moved to
+    # the end and can be mandatory or non-mandatory in any order, as they will
+    # be specified as keyword args anyway). Check the order of those attrs:
     had_default = False
-    was_kw_only = False
-    for a in attrs:
-        if (
-            was_kw_only is False
-            and had_default is True
-            and a.default is NOTHING
-            and a.init is True
-            and a.kw_only is False
-        ):
+    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+        if had_default is True and a.default is NOTHING:
             raise ValueError(
                 "No mandatory attributes allowed after an attribute with a "
                 "default value or factory.  Attribute in question: %r" % (a,)
             )
-        elif (
-            had_default is False
-            and a.default is not NOTHING
-            and a.init is not False
-            and
-            # Keyword-only attributes without defaults can be specified
-            # after keyword-only attributes with defaults.
-            a.kw_only is False
-        ):
+
+        if had_default is False and a.default is not NOTHING:
             had_default = True
-        if was_kw_only is True and a.kw_only is False and a.init is True:
-            raise ValueError(
-                "Non keyword-only attributes are not allowed after a "
-                "keyword-only attribute (unless they are init=False).  "
-                "Attribute in question: {a!r}".format(a=a)
-            )
-        if was_kw_only is False and a.init is True and a.kw_only is True:
-            was_kw_only = True
 
     return _Attributes((attrs, base_attrs, base_attr_map))
 
 
 def _frozen_setattrs(self, name, value):
     """
     Attached to frozen classes as __setattr__.
     """
@@ -513,17 +504,17 @@ class _ClassBuilder(object):
         cls = self._cls
         base_names = self._base_names
 
         # Clean class of attribute definitions (`attr.ib()`s).
         if self._delete_attribs:
             for name in self._attr_names:
                 if (
                     name not in base_names
-                    and getattr(cls, name, None) is not None
+                    and getattr(cls, name, _sentinel) != _sentinel
                 ):
                     try:
                         delattr(cls, name)
                     except AttributeError:
                         # This can happen if a base class defines a class
                         # variable and we want to set an attribute with the
                         # same name by using only a type annotation.
                         pass
@@ -671,44 +662,57 @@ class _ClassBuilder(object):
 
     def make_unhashable(self):
         self._cls_dict["__hash__"] = None
         return self
 
     def add_hash(self):
         self._cls_dict["__hash__"] = self._add_method_dunders(
             _make_hash(
-                self._attrs, frozen=self._frozen, cache_hash=self._cache_hash
+                self._cls,
+                self._attrs,
+                frozen=self._frozen,
+                cache_hash=self._cache_hash,
             )
         )
 
         return self
 
     def add_init(self):
         self._cls_dict["__init__"] = self._add_method_dunders(
             _make_init(
+                self._cls,
                 self._attrs,
                 self._has_post_init,
                 self._frozen,
                 self._slots,
                 self._cache_hash,
                 self._base_attr_map,
                 self._is_exc,
             )
         )
 
         return self
 
-    def add_cmp(self):
+    def add_eq(self):
         cd = self._cls_dict
 
-        cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd[
-            "__gt__"
-        ], cd["__ge__"] = (
-            self._add_method_dunders(meth) for meth in _make_cmp(self._attrs)
+        cd["__eq__"], cd["__ne__"] = (
+            self._add_method_dunders(meth)
+            for meth in _make_eq(self._cls, self._attrs)
+        )
+
+        return self
+
+    def add_order(self):
+        cd = self._cls_dict
+
+        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+            self._add_method_dunders(meth)
+            for meth in _make_order(self._cls, self._attrs)
         )
 
         return self
 
     def _add_method_dunders(self, method):
         """
         Add __module__ and __qualname__ to a *method* if possible.
         """
@@ -722,150 +726,196 @@ class _ClassBuilder(object):
                 (self._cls.__qualname__, method.__name__)
             )
         except AttributeError:
             pass
 
         return method
 
 
+_CMP_DEPRECATION = (
+    "The usage of `cmp` is deprecated and will be removed on or after "
+    "2021-06-01.  Please use `eq` and `order` instead."
+)
+
+
+def _determine_eq_order(cmp, eq, order):
+    """
+    Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+    values of eq and order.
+    """
+    if cmp is not None and any((eq is not None, order is not None)):
+        raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3)
+
+        return cmp, cmp
+
+    # If left None, equality is on and ordering mirrors equality.
+    if eq is None:
+        eq = True
+
+    if order is None:
+        order = eq
+
+    if eq is False and order is True:
+        raise ValueError("`order` can only be True if `eq` is True too.")
+
+    return eq, order
+
+
 def attrs(
     maybe_cls=None,
     these=None,
     repr_ns=None,
     repr=True,
-    cmp=True,
+    cmp=None,
     hash=None,
     init=True,
     slots=False,
     frozen=False,
     weakref_slot=True,
     str=False,
     auto_attribs=False,
     kw_only=False,
     cache_hash=False,
     auto_exc=False,
+    eq=None,
+    order=None,
 ):
     r"""
     A class decorator that adds `dunder
     <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
-    specified attributes using :func:`attr.ib` or the *these* argument.
+    specified attributes using `attr.ib` or the *these* argument.
 
-    :param these: A dictionary of name to :func:`attr.ib` mappings.  This is
+    :param these: A dictionary of name to `attr.ib` mappings.  This is
         useful to avoid the definition of your attributes within the class body
         because you can't (e.g. if you want to add ``__repr__`` methods to
         Django models) or don't want to.
 
         If *these* is not ``None``, ``attrs`` will *not* search the class body
         for attributes and will *not* remove any attributes from it.
 
-        If *these* is an ordered dict (:class:`dict` on Python 3.6+,
-        :class:`collections.OrderedDict` otherwise), the order is deduced from
+        If *these* is an ordered dict (`dict` on Python 3.6+,
+        `collections.OrderedDict` otherwise), the order is deduced from
         the order of the attributes inside *these*.  Otherwise the order
         of the definition of the attributes is used.
 
-    :type these: :class:`dict` of :class:`str` to :func:`attr.ib`
+    :type these: `dict` of `str` to `attr.ib`
 
     :param str repr_ns: When using nested classes, there's no way in Python 2
         to automatically detect that.  Therefore it's possible to set the
         namespace explicitly for a more meaningful ``repr`` output.
     :param bool repr: Create a ``__repr__`` method with a human readable
         representation of ``attrs`` attributes..
     :param bool str: Create a ``__str__`` method that is identical to
         ``__repr__``.  This is usually not necessary except for
-        :class:`Exception`\ s.
-    :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
-        ``__gt__``, and ``__ge__`` methods that compare the class as if it were
-        a tuple of its ``attrs`` attributes.  But the attributes are *only*
-        compared, if the types of both classes are *identical*!
+        `Exception`\ s.
+    :param bool eq: If ``True`` or ``None`` (default), add ``__eq__`` and
+        ``__ne__`` methods that check two instances for equality.
+
+        They compare the instances as if they were tuples of their ``attrs``
+        attributes, but only iff the types of both classes are *identical*!
+    :type eq: `bool` or `None`
+    :param bool order: If ``True``, add ``__lt__``, ``__le__``, ``__gt__``,
+        and ``__ge__`` methods that behave like *eq* above and allow instances
+        to be ordered. If ``None`` (default) mirror value of *eq*.
+    :type order: `bool` or `None`
+    :param cmp: Setting to ``True`` is equivalent to setting ``eq=True,
+        order=True``. Deprecated in favor of *eq* and *order*, has precedence
+        over them for backward-compatibility though. Must not be mixed with
+        *eq* or *order*.
+    :type cmp: `bool` or `None`
     :param hash: If ``None`` (default), the ``__hash__`` method is generated
-        according how *cmp* and *frozen* are set.
+        according how *eq* and *frozen* are set.
 
         1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
-        2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
+        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
            None, marking it unhashable (which it is).
-        3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
+        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
            ``__hash__`` method of the base class will be used (if base class is
            ``object``, this means it will fall back to id-based hashing.).
 
         Although not recommended, you can decide for yourself and force
         ``attrs`` to create one (e.g. if the class is immutable even though you
         didn't freeze it programmatically) by passing ``True`` or not.  Both of
         these cases are rather special and should be used carefully.
 
-        See the `Python documentation \
-        <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
-        and the `GitHub issue that led to the default behavior \
-        <https://github.com/python-attrs/attrs/issues/136>`_ for more details.
+        See our documentation on `hashing`, Python's documentation on
+        `object.__hash__`, and the `GitHub issue that led to the default \
+        behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+        details.
     :type hash: ``bool`` or ``None``
     :param bool init: Create a ``__init__`` method that initializes the
         ``attrs`` attributes.  Leading underscores are stripped for the
         argument name.  If a ``__attrs_post_init__`` method exists on the
         class, it will be called after the class is fully initialized.
-    :param bool slots: Create a slots_-style class that's more
-        memory-efficient.  See :ref:`slots` for further ramifications.
+    :param bool slots: Create a `slotted class <slotted classes>` that's more
+        memory-efficient.
     :param bool frozen: Make instances immutable after initialization.  If
         someone attempts to modify a frozen instance,
-        :exc:`attr.exceptions.FrozenInstanceError` is raised.
+        `attr.exceptions.FrozenInstanceError` is raised.
 
         Please note:
 
             1. This is achieved by installing a custom ``__setattr__`` method
                on your class so you can't implement an own one.
 
             2. True immutability is impossible in Python.
 
-            3. This *does* have a minor a runtime performance :ref:`impact
+            3. This *does* have a minor runtime performance `impact
                <how-frozen>` when initializing new instances.  In other words:
                ``__init__`` is slightly slower with ``frozen=True``.
 
             4. If a class is frozen, you cannot modify ``self`` in
                ``__attrs_post_init__`` or a self-written ``__init__``. You can
                circumvent that limitation by using
                ``object.__setattr__(self, "attribute_name", value)``.
 
-        ..  _slots: https://docs.python.org/3/reference/datamodel.html#slots
     :param bool weakref_slot: Make instances weak-referenceable.  This has no
         effect unless ``slots`` is also enabled.
     :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes
         (Python 3.6 and later only) from the class body.
 
         In this case, you **must** annotate every field.  If ``attrs``
-        encounters a field that is set to an :func:`attr.ib` but lacks a type
-        annotation, an :exc:`attr.exceptions.UnannotatedAttributeError` is
+        encounters a field that is set to an `attr.ib` but lacks a type
+        annotation, an `attr.exceptions.UnannotatedAttributeError` is
         raised.  Use ``field_name: typing.Any = attr.ib(...)`` if you don't
         want to set a type.
 
         If you assign a value to those attributes (e.g. ``x: int = 42``), that
         value becomes the default value like if it were passed using
-        ``attr.ib(default=42)``.  Passing an instance of :class:`Factory` also
+        ``attr.ib(default=42)``.  Passing an instance of `Factory` also
         works as expected.
 
-        Attributes annotated as :data:`typing.ClassVar` are **ignored**.
+        Attributes annotated as `typing.ClassVar`, and attributes that are
+        neither annotated nor set to an `attr.ib` are **ignored**.
 
         .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
     :param bool kw_only: Make all attributes keyword-only (Python 3+)
         in the generated ``__init__`` (if ``init`` is ``False``, this
         parameter is ignored).
     :param bool cache_hash: Ensure that the object's hash code is computed
         only once and stored on the object.  If this is set to ``True``,
         hashing must be either explicitly or implicitly enabled for this
         class.  If the hash code is cached, avoid any reassignments of
         fields involved in hash code computation or mutations of the objects
         those fields point to after object creation.  If such changes occur,
         the behavior of the object's hash code is undefined.
-    :param bool auto_exc: If the class subclasses :class:`BaseException`
+    :param bool auto_exc: If the class subclasses `BaseException`
         (which implicitly includes any subclass of any exception), the
         following happens to behave like a well-behaved Python exceptions
         class:
 
-        - the values for *cmp* and *hash* are ignored and the instances compare
-          and hash by the instance's ids (N.B. ``attrs`` will *not* remove
-          existing implementations of ``__hash__`` or the equality methods. It
-          just won't add own ones.),
+        - the values for *eq*, *order*, and *hash* are ignored and the
+          instances compare and hash by the instance's ids (N.B. ``attrs`` will
+          *not* remove existing implementations of ``__hash__`` or the equality
+          methods. It just won't add own ones.),
         - all attributes that are either passed into ``__init__`` or have a
           default value are additionally available as a tuple in the ``args``
           attribute,
         - the value of *str* is ignored leaving ``__str__`` to base classes.
 
     .. versionadded:: 16.0.0 *slots*
     .. versionadded:: 16.1.0 *frozen*
     .. versionadded:: 16.3.0 *str*
@@ -874,23 +924,29 @@ def attrs(
        *hash* supports ``None`` as value which is also the default now.
     .. versionadded:: 17.3.0 *auto_attribs*
     .. versionchanged:: 18.1.0
        If *these* is passed, no attributes are deleted from the class body.
     .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
     .. versionadded:: 18.2.0 *weakref_slot*
     .. deprecated:: 18.2.0
        ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
-       :class:`DeprecationWarning` if the classes compared are subclasses of
+       `DeprecationWarning` if the classes compared are subclasses of
        each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
        to each other.
+    .. versionchanged:: 19.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+       subclasses comparable anymore.
     .. versionadded:: 18.2.0 *kw_only*
     .. versionadded:: 18.2.0 *cache_hash*
     .. versionadded:: 19.1.0 *auto_exc*
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
     """
+    eq, order = _determine_eq_order(cmp, eq, order)
 
     def wrap(cls):
 
         if getattr(cls, "__class__", None) is None:
             raise TypeError("attrs only works with new-style classes.")
 
         is_exc = auto_exc is True and issubclass(cls, BaseException)
 
@@ -905,38 +961,40 @@ def attrs(
             cache_hash,
             is_exc,
         )
 
         if repr is True:
             builder.add_repr(repr_ns)
         if str is True:
             builder.add_str()
-        if cmp is True and not is_exc:
-            builder.add_cmp()
+        if eq is True and not is_exc:
+            builder.add_eq()
+        if order is True and not is_exc:
+            builder.add_order()
 
         if hash is not True and hash is not False and hash is not None:
             # Can't use `hash in` because 1 == True for example.
             raise TypeError(
                 "Invalid value for hash.  Must be True, False, or None."
             )
-        elif hash is False or (hash is None and cmp is False):
+        elif hash is False or (hash is None and eq is False) or is_exc:
+            # Don't do anything. Should fall back to __object__'s __hash__
+            # which is by id.
             if cache_hash:
                 raise TypeError(
                     "Invalid value for cache_hash.  To use hash caching,"
                     " hashing must be either explicitly or implicitly "
                     "enabled."
                 )
-        elif (
-            hash is True
-            or (hash is None and cmp is True and frozen is True)
-            and is_exc is False
-        ):
+        elif hash is True or (hash is None and eq is True and frozen is True):
+            # Build a __hash__ if told so, or if it's safe.
             builder.add_hash()
         else:
+            # Raise TypeError on attempts to hash.
             if cache_hash:
                 raise TypeError(
                     "Invalid value for cache_hash.  To use hash caching,"
                     " hashing must be either explicitly or implicitly "
                     "enabled."
                 )
             builder.make_unhashable()
 
@@ -992,29 +1050,54 @@ else:
 
 def _attrs_to_tuple(obj, attrs):
     """
     Create a tuple of all values of *obj*'s *attrs*.
     """
     return tuple(getattr(obj, a.name) for a in attrs)
 
 
-def _make_hash(attrs, frozen, cache_hash):
+def _generate_unique_filename(cls, func_name):
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    unique_id = uuid.uuid4()
+    extra = ""
+    count = 1
+
+    while True:
+        unique_filename = "<attrs generated {0} {1}.{2}{3}>".format(
+            func_name,
+            cls.__module__,
+            getattr(cls, "__qualname__", cls.__name__),
+            extra,
+        )
+        # To handle concurrency we essentially "reserve" our spot in
+        # the linecache with a dummy line.  The caller can then
+        # set this value correctly.
+        cache_line = (1, None, (str(unique_id),), unique_filename)
+        if (
+            linecache.cache.setdefault(unique_filename, cache_line)
+            == cache_line
+        ):
+            return unique_filename
+
+        # Looks like this spot is taken. Try again.
+        count += 1
+        extra = "-{0}".format(count)
+
+
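+# For illustration (hypothetical module and class names), derived from the
+# format string above: _generate_unique_filename(SomeClass, "eq") produces
+# something like "<attrs generated eq mypkg.SomeClass>", or
+# "<attrs generated eq mypkg.SomeClass-2>" if that linecache entry is already
+# taken.
+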
+def _make_hash(cls, attrs, frozen, cache_hash):
     attrs = tuple(
-        a
-        for a in attrs
-        if a.hash is True or (a.hash is None and a.cmp is True)
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
     )
 
     tab = "        "
 
-    # We cache the generated hash methods for the same kinds of attributes.
-    sha1 = hashlib.sha1()
-    sha1.update(repr(attrs).encode("utf-8"))
-    unique_filename = "<attrs generated hash %s>" % (sha1.hexdigest(),)
+    unique_filename = _generate_unique_filename(cls, "hash")
     type_hash = hash(unique_filename)
 
     method_lines = ["def __hash__(self):"]
 
     def append_hash_computation_lines(prefix, indent):
         """
         Generate the code for actually computing the hash code.
         Below this will either be returned directly or used to compute
@@ -1061,45 +1144,36 @@ def _make_hash(attrs, frozen, cache_hash
 
     return locs["__hash__"]
 
 
 def _add_hash(cls, attrs):
     """
     Add a hash method to *cls*.
     """
-    cls.__hash__ = _make_hash(attrs, frozen=False, cache_hash=False)
+    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
     return cls
 
 
 def __ne__(self, other):
     """
     Check equality and either forward a NotImplemented or return the result
     negated.
     """
     result = self.__eq__(other)
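+    # Forward NotImplemented untouched so that Python can try the reflected
+    # operation on *other* instead of treating "can't compare" as inequality.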
     if result is NotImplemented:
         return NotImplemented
 
     return not result
 
 
-WARNING_CMP_ISINSTANCE = (
-    "Comparision of subclasses using __%s__ is deprecated and will be removed "
-    "in 2019."
-)
-
+def _make_eq(cls, attrs):
+    attrs = [a for a in attrs if a.eq]
 
-def _make_cmp(attrs):
-    attrs = [a for a in attrs if a.cmp]
-
-    # We cache the generated eq methods for the same kinds of attributes.
-    sha1 = hashlib.sha1()
-    sha1.update(repr(attrs).encode("utf-8"))
-    unique_filename = "<attrs generated eq %s>" % (sha1.hexdigest(),)
+    unique_filename = _generate_unique_filename(cls, "eq")
     lines = [
         "def __eq__(self, other):",
         "    if other.__class__ is not self.__class__:",
         "        return NotImplemented",
     ]
     # We can't just do a big self.x = other.x and... clause due to
     # irregularities like nan == nan is false but (nan,) == (nan,) is true.
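+    # (For example, float("nan") == float("nan") is False, yet
+    # (float("nan"),) == (float("nan"),) is True, because tuple comparison
+    # checks element identity before falling back to __eq__.)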
     if attrs:
@@ -1122,102 +1196,95 @@ def _make_cmp(attrs):
     # In order for debuggers like PDB to be able to step through the code,
     # we add a fake linecache entry.
     linecache.cache[unique_filename] = (
         len(script),
         None,
         script.splitlines(True),
         unique_filename,
     )
-    eq = locs["__eq__"]
-    ne = __ne__
+    return locs["__eq__"], __ne__
+
+
+def _make_order(cls, attrs):
+    attrs = [a for a in attrs if a.order]
 
     def attrs_to_tuple(obj):
         """
         Save us some typing.
         """
         return _attrs_to_tuple(obj, attrs)
 
     def __lt__(self, other):
         """
         Automatically created by attrs.
         """
-        if isinstance(other, self.__class__):
-            if other.__class__ is not self.__class__:
-                warnings.warn(
-                    WARNING_CMP_ISINSTANCE % ("lt",), DeprecationWarning
-                )
+        if other.__class__ is self.__class__:
             return attrs_to_tuple(self) < attrs_to_tuple(other)
-        else:
-            return NotImplemented
+
+        return NotImplemented
 
     def __le__(self, other):
         """
         Automatically created by attrs.
         """
-        if isinstance(other, self.__class__):
-            if other.__class__ is not self.__class__:
-                warnings.warn(
-                    WARNING_CMP_ISINSTANCE % ("le",), DeprecationWarning
-                )
+        if other.__class__ is self.__class__:
             return attrs_to_tuple(self) <= attrs_to_tuple(other)
-        else:
-            return NotImplemented
+
+        return NotImplemented
 
     def __gt__(self, other):
         """
         Automatically created by attrs.
         """
-        if isinstance(other, self.__class__):
-            if other.__class__ is not self.__class__:
-                warnings.warn(
-                    WARNING_CMP_ISINSTANCE % ("gt",), DeprecationWarning
-                )
+        if other.__class__ is self.__class__:
             return attrs_to_tuple(self) > attrs_to_tuple(other)
-        else:
-            return NotImplemented
+
+        return NotImplemented
 
     def __ge__(self, other):
         """
         Automatically created by attrs.
         """
-        if isinstance(other, self.__class__):
-            if other.__class__ is not self.__class__:
-                warnings.warn(
-                    WARNING_CMP_ISINSTANCE % ("ge",), DeprecationWarning
-                )
+        if other.__class__ is self.__class__:
             return attrs_to_tuple(self) >= attrs_to_tuple(other)
-        else:
-            return NotImplemented
 
-    return eq, ne, __lt__, __le__, __gt__, __ge__
+        return NotImplemented
+
+    return __lt__, __le__, __gt__, __ge__
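+
+# A small behavioural sketch (hypothetical class): with C = attr.make_class(
+# "C", ["x"], order=True), C(1) < C(2) compares the attribute tuples, while
+# comparing C(1) against an instance of a plain subclass of C returns
+# NotImplemented from these methods, which typically surfaces as a TypeError.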
 
 
-def _add_cmp(cls, attrs=None):
+def _add_eq(cls, attrs=None):
     """
-    Add comparison methods to *cls*.
+    Add equality methods to *cls* with *attrs*.
     """
     if attrs is None:
         attrs = cls.__attrs_attrs__
 
-    cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = _make_cmp(  # noqa
-        attrs
-    )
+    cls.__eq__, cls.__ne__ = _make_eq(cls, attrs)
 
     return cls
 
 
 _already_repring = threading.local()
 
 
 def _make_repr(attrs, ns):
     """
-    Make a repr method for *attr_names* adding *ns* to the full name.
+    Make a repr method that includes relevant *attrs*, adding *ns* to the full
+    name.
     """
-    attr_names = tuple(a.name for a in attrs if a.repr)
+
+    # Figure out which attributes to include, and which function to use to
+    # format them. The a.repr value can be either bool or a custom callable.
+    attr_names_with_reprs = tuple(
+        (a.name, repr if a.repr is True else a.repr)
+        for a in attrs
+        if a.repr is not False
+    )
 
     def __repr__(self):
         """
         Automatically created by attrs.
         """
         try:
             working_set = _already_repring.working_set
         except AttributeError:
@@ -1239,22 +1306,24 @@ def _make_repr(attrs, ns):
         # Since 'self' remains on the stack (i.e.: strongly referenced) for the
         # duration of this call, it's safe to depend on id(...) stability, and
         # there is no need to track the instance or worry about properties
         # like weakref- or hash-ability.
         working_set.add(id(self))
         try:
             result = [class_name, "("]
             first = True
-            for name in attr_names:
+            for name, attr_repr in attr_names_with_reprs:
                 if first:
                     first = False
                 else:
                     result.append(", ")
-                result.extend((name, "=", repr(getattr(self, name, NOTHING))))
+                result.extend(
+                    (name, "=", attr_repr(getattr(self, name, NOTHING)))
+                )
             return "".join(result) + ")"
         finally:
             working_set.remove(id(self))
 
     return __repr__
 
 
 def _add_repr(cls, ns=None, attrs=None):
@@ -1264,24 +1333,21 @@ def _add_repr(cls, ns=None, attrs=None):
     if attrs is None:
         attrs = cls.__attrs_attrs__
 
     cls.__repr__ = _make_repr(attrs, ns)
     return cls
 
 
 def _make_init(
-    attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc
+    cls, attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc
 ):
     attrs = [a for a in attrs if a.init or a.default is not NOTHING]
 
-    # We cache the generated init methods for the same kinds of attributes.
-    sha1 = hashlib.sha1()
-    sha1.update(repr(attrs).encode("utf-8"))
-    unique_filename = "<attrs generated init {0}>".format(sha1.hexdigest())
+    unique_filename = _generate_unique_filename(cls, "init")
 
     script, globs, annotations = _attrs_to_init_script(
         attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc
     )
     locs = {}
     bytecode = compile(script, unique_filename, "exec")
     attr_dict = dict((a.name, a) for a in attrs)
     globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
@@ -1316,17 +1382,17 @@ def fields(cls):
     examples).
 
     :param type cls: Class to introspect.
 
     :raise TypeError: If *cls* is not a class.
     :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
         class.
 
-    :rtype: tuple (with name accessors) of :class:`attr.Attribute`
+    :rtype: tuple (with name accessors) of `attr.Attribute`
 
     ..  versionchanged:: 16.2.0 Returned tuple allows accessing the fields
         by name.
     """
     if not isclass(cls):
         raise TypeError("Passed object must be a class.")
     attrs = getattr(cls, "__attrs_attrs__", None)
     if attrs is None:
@@ -1343,17 +1409,17 @@ def fields_dict(cls):
 
     :param type cls: Class to introspect.
 
     :raise TypeError: If *cls* is not a class.
     :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
         class.
 
     :rtype: an ordered dict where keys are attribute names and values are
-        :class:`attr.Attribute`\\ s. This will be a :class:`dict` if it's
+        `attr.Attribute`\\ s. This will be a `dict` if it's
         naturally ordered like on Python 3.6+ or an
         :class:`~collections.OrderedDict` otherwise.
 
     .. versionadded:: 18.1.0
     """
     if not isclass(cls):
         raise TypeError("Passed object must be a class.")
     attrs = getattr(cls, "__attrs_attrs__", None)
@@ -1673,74 +1739,66 @@ def __init__(self, {args}):
 
 
 class Attribute(object):
     """
     *Read-only* representation of an attribute.
 
     :attribute name: The name of the attribute.
 
-    Plus *all* arguments of :func:`attr.ib`.
+    Plus *all* arguments of `attr.ib` (except for ``factory``
+    which is only syntactic sugar for ``default=Factory(...)``).
 
-    For the version history of the fields, see :func:`attr.ib`.
+    For the version history of the fields, see `attr.ib`.
     """
 
     __slots__ = (
         "name",
         "default",
         "validator",
         "repr",
-        "cmp",
+        "eq",
+        "order",
         "hash",
         "init",
         "metadata",
         "type",
         "converter",
         "kw_only",
     )
 
     def __init__(
         self,
         name,
         default,
         validator,
         repr,
-        cmp,
+        cmp,  # XXX: unused, remove along with other cmp code.
         hash,
         init,
-        convert=None,
         metadata=None,
         type=None,
         converter=None,
         kw_only=False,
+        eq=None,
+        order=None,
     ):
+        eq, order = _determine_eq_order(cmp, eq, order)
+
         # Cache this descriptor here to speed things up later.
         bound_setattr = _obj_setattr.__get__(self, Attribute)
 
         # Despite the big red warning, people *do* instantiate `Attribute`
         # themselves.
-        if convert is not None:
-            if converter is not None:
-                raise RuntimeError(
-                    "Can't pass both `convert` and `converter`.  "
-                    "Please use `converter` only."
-                )
-            warnings.warn(
-                "The `convert` argument is deprecated in favor of `converter`."
-                "  It will be removed after 2019/01.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            converter = convert
-
         bound_setattr("name", name)
         bound_setattr("default", default)
         bound_setattr("validator", validator)
         bound_setattr("repr", repr)
-        bound_setattr("cmp", cmp)
+        bound_setattr("eq", eq)
+        bound_setattr("order", order)
         bound_setattr("hash", hash)
         bound_setattr("init", init)
         bound_setattr("converter", converter)
         bound_setattr(
             "metadata",
             (
                 metadata_proxy(metadata)
                 if metadata
@@ -1748,26 +1806,16 @@ class Attribute(object):
             ),
         )
         bound_setattr("type", type)
         bound_setattr("kw_only", kw_only)
 
     def __setattr__(self, name, value):
         raise FrozenInstanceError()
 
-    @property
-    def convert(self):
-        warnings.warn(
-            "The `convert` attribute is deprecated in favor of `converter`.  "
-            "It will be removed after 2019/01.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return self.converter
-
     @classmethod
     def from_counting_attr(cls, name, ca, type=None):
         # type holds the annotated value. deal with conflicts:
         if type is None:
             type = ca.type
         elif ca.type is not None:
             raise ValueError(
                 "Type annotation and type argument cannot both be present"
@@ -1776,27 +1824,36 @@ class Attribute(object):
             k: getattr(ca, k)
             for k in Attribute.__slots__
             if k
             not in (
                 "name",
                 "validator",
                 "default",
                 "type",
-                "convert",
             )  # exclude methods and deprecated alias
         }
         return cls(
             name=name,
             validator=ca._validator,
             default=ca._default,
             type=type,
+            cmp=None,
             **inst_dict
         )
 
+    @property
+    def cmp(self):
+        """
+        Simulate the presence of a cmp attribute and warn.
+        """
+        warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)
+
+        return self.eq and self.order
+
     # Don't use attr.assoc since fields(Attribute) doesn't work
     def _assoc(self, **changes):
         """
         Copy *self* and apply *changes*.
         """
         new = copy.copy(self)
 
         new._setattrs(changes.items())
@@ -1834,26 +1891,27 @@ class Attribute(object):
 
 
 _a = [
     Attribute(
         name=name,
         default=NOTHING,
         validator=None,
         repr=True,
-        cmp=True,
+        cmp=None,
+        eq=True,
+        order=False,
         hash=(name != "metadata"),
         init=True,
     )
     for name in Attribute.__slots__
-    if name != "convert"  # XXX: remove once `convert` is gone
 ]
 
 Attribute = _add_hash(
-    _add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
+    _add_eq(_add_repr(Attribute, attrs=_a), attrs=_a),
     attrs=[a for a in _a if a.hash],
 )
 
 
 class _CountingAttr(object):
     """
     Intermediate representation of attributes that uses a counter to preserve
     the order in which the attributes have been defined.
@@ -1861,74 +1919,90 @@ class _CountingAttr(object):
     *Internal* data structure of the attrs library.  Running into it is most
     likely the result of a bug like a forgotten `@attr.s` decorator.
     """
 
     __slots__ = (
         "counter",
         "_default",
         "repr",
-        "cmp",
+        "eq",
+        "order",
         "hash",
         "init",
         "metadata",
         "_validator",
         "converter",
         "type",
         "kw_only",
     )
     __attrs_attrs__ = tuple(
         Attribute(
             name=name,
             default=NOTHING,
             validator=None,
             repr=True,
-            cmp=True,
+            cmp=None,
             hash=True,
             init=True,
             kw_only=False,
+            eq=True,
+            order=False,
         )
-        for name in ("counter", "_default", "repr", "cmp", "hash", "init")
+        for name in (
+            "counter",
+            "_default",
+            "repr",
+            "eq",
+            "order",
+            "hash",
+            "init",
+        )
     ) + (
         Attribute(
             name="metadata",
             default=None,
             validator=None,
             repr=True,
-            cmp=True,
+            cmp=None,
             hash=False,
             init=True,
             kw_only=False,
+            eq=True,
+            order=False,
         ),
     )
     cls_counter = 0
 
     def __init__(
         self,
         default,
         validator,
         repr,
-        cmp,
+        cmp,  # XXX: unused, remove along with cmp
         hash,
         init,
         converter,
         metadata,
         type,
         kw_only,
+        eq,
+        order,
     ):
         _CountingAttr.cls_counter += 1
         self.counter = _CountingAttr.cls_counter
         self._default = default
         # If validator is a list/tuple, wrap it using helper validator.
         if validator and isinstance(validator, (list, tuple)):
             self._validator = and_(*validator)
         else:
             self._validator = validator
         self.repr = repr
-        self.cmp = cmp
+        self.eq = eq
+        self.order = order
         self.hash = hash
         self.init = init
         self.converter = converter
         self.metadata = metadata
         self.type = type
         self.kw_only = kw_only
 
     def validator(self, meth):
@@ -1958,25 +2032,25 @@ class _CountingAttr(object):
         if self._default is not NOTHING:
             raise DefaultAlreadySetError()
 
         self._default = Factory(meth, takes_self=True)
 
         return meth
 
 
-_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
 
 
 @attrs(slots=True, init=False, hash=True)
 class Factory(object):
     """
     Stores a factory callable.
 
-    If passed as the default value to :func:`attr.ib`, the factory is used to
+    If passed as the default value to `attr.ib`, the factory is used to
     generate a new value.
 
     :param callable factory: A callable that takes either none or exactly one
         mandatory positional argument depending on *takes_self*.
     :param bool takes_self: Pass the partially initialized instance that is
         being initialized as a positional argument.
 
     .. versionadded:: 17.1.0  *takes_self*
@@ -1999,25 +2073,25 @@ def make_class(name, attrs, bases=(objec
     A quick way to create a new class called *name* with *attrs*.
 
     :param name: The name for the new class.
     :type name: str
 
     :param attrs: A list of names or a dictionary of mappings of names to
         attributes.
 
-        If *attrs* is a list or an ordered dict (:class:`dict` on Python 3.6+,
-        :class:`collections.OrderedDict` otherwise), the order is deduced from
+        If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+        `collections.OrderedDict` otherwise), the order is deduced from
         the order of the names or attributes inside *attrs*.  Otherwise the
         order of the definition of the attributes is used.
-    :type attrs: :class:`list` or :class:`dict`
+    :type attrs: `list` or `dict`
 
     :param tuple bases: Classes that the new class will subclass.
 
-    :param attributes_arguments: Passed unmodified to :func:`attr.s`.
+    :param attributes_arguments: Passed unmodified to `attr.s`.
 
     :return: A new class with *attrs*.
     :rtype: type
 
     .. versionadded:: 17.1.0 *bases*
     .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
     """
     if isinstance(attrs, dict):
@@ -2039,16 +2113,24 @@ def make_class(name, attrs, bases=(objec
     # defined for arguments greater than 0 (IronPython).
     try:
         type_.__module__ = sys._getframe(1).f_globals.get(
             "__name__", "__main__"
         )
     except (AttributeError, ValueError):
         pass
 
+    # We do it here for proper warnings with meaningful stacklevel.
+    cmp = attributes_arguments.pop("cmp", None)
+    attributes_arguments["eq"], attributes_arguments[
+        "order"
+    ] = _determine_eq_order(
+        cmp, attributes_arguments.get("eq"), attributes_arguments.get("order")
+    )
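+    # Sketch of the resulting mapping (this relies on _determine_eq_order's
+    # behaviour as used elsewhere in this module): make_class("C", ["x"],
+    # cmp=False) ends up with eq=False and order=False plus a
+    # DeprecationWarning, while omitting cmp keeps the eq/order defaults.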
+
     return _attrs(these=cls_dict, **attributes_arguments)(type_)
 
 
 # These are required within this module, so we define them here and merely
 # import into .validators.
 
 
 @attrs(slots=True, hash=True)
new file mode 100644
--- /dev/null
+++ b/third_party/python/attrs/attr/_version.py
@@ -0,0 +1,85 @@
+from __future__ import absolute_import, division, print_function
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo(object):
+    """
+    A version object that can be compared to tuples of length 1--4:
+
+    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+    True
+    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+    True
+    >>> vi = attr.VersionInfo(19, 2, 0, "final")
+    >>> vi < (19, 1, 1)
+    False
+    >>> vi < (19,)
+    False
+    >>> vi == (19, 2,)
+    True
+    >>> vi == (19, 2, 1)
+    False
+
+    .. versionadded:: 19.2
+    """
+
+    year = attrib(type=int)
+    minor = attrib(type=int)
+    micro = attrib(type=int)
+    releaselevel = attrib(type=str)
+
+    @classmethod
+    def _from_version_string(cls, s):
+        """
+        Parse *s* and return a ``VersionInfo``.
+        """
+        v = s.split(".")
+        if len(v) == 3:
+            v.append("final")
+
+        return cls(
+            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+        )
+
+    def _ensure_tuple(self, other):
+        """
+        Ensure *other* is a tuple of a valid length.
+
+        Returns ourselves as a tuple truncated to the same length as *other*,
+        together with a possibly transformed *other*.
+        """
+
+        if self.__class__ is other.__class__:
+            other = astuple(other)
+
+        if not isinstance(other, tuple):
+            raise NotImplementedError
+
+        if not (1 <= len(other) <= 4):
+            raise NotImplementedError
+
+        return astuple(self)[: len(other)], other
+
+    def __eq__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        return us == them
+
+    def __lt__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+        # have to do anything special with releaselevel for now.
+        return us < them
new file mode 100644
--- /dev/null
+++ b/third_party/python/attrs/attr/_version.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+    @property
+    def year(self) -> int: ...
+    @property
+    def minor(self) -> int: ...
+    @property
+    def micro(self) -> int: ...
+    @property
+    def releaselevel(self) -> str: ...
--- a/third_party/python/attrs/attr/converters.py
+++ b/third_party/python/attrs/attr/converters.py
@@ -27,24 +27,24 @@ def optional(converter):
 
 
 def default_if_none(default=NOTHING, factory=None):
     """
     A converter that allows ``None`` values to be replaced by *default* or the
     result of *factory*.
 
     :param default: Value to be used if ``None`` is passed. Passing an instance
-       of :class:`attr.Factory` is supported, however the ``takes_self`` option
+       of `attr.Factory` is supported, however the ``takes_self`` option
        is *not*.
     :param callable factory: A callable that takes no parameters and whose result
        is used if ``None`` is passed.
 
     :raises TypeError: If **neither** *default* or *factory* is passed.
     :raises TypeError: If **both** *default* and *factory* are passed.
-    :raises ValueError: If an instance of :class:`attr.Factory` is passed with
+    :raises ValueError: If an instance of `attr.Factory` is passed with
        ``takes_self=True``.
 
     .. versionadded:: 18.2.0
     """
     if default is NOTHING and factory is None:
         raise TypeError("Must pass either `default` or `factory`.")
 
     if default is not NOTHING and factory is not None:
--- a/third_party/python/attrs/attr/exceptions.py
+++ b/third_party/python/attrs/attr/exceptions.py
@@ -1,17 +1,17 @@
 from __future__ import absolute_import, division, print_function
 
 
 class FrozenInstanceError(AttributeError):
     """
     An attempt has been made to modify a frozen/immutable instance.
 
     It mirrors the behavior of ``namedtuples`` by using the same error message
-    and subclassing :exc:`AttributeError`.
+    and subclassing `AttributeError`.
 
     .. versionadded:: 16.1.0
     """
 
     msg = "can't set attribute"
     args = [msg]
 
 
@@ -50,8 +50,25 @@ class UnannotatedAttributeError(RuntimeE
 
 
 class PythonTooOldError(RuntimeError):
     """
     An ``attrs`` feature requiring a more recent python version has been used.
 
     .. versionadded:: 18.2.0
     """
+
+
+class NotCallableError(TypeError):
+    """
+    A ``attr.ib()`` requiring a callable has been set with a value
+    that is not callable.
+
+    .. versionadded:: 19.2.0
+    """
+
+    def __init__(self, msg, value):
+        super(TypeError, self).__init__(msg, value)
+        self.msg = msg
+        self.value = value
+
+    def __str__(self):
+        return str(self.msg)
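+
+# Note: because NotCallableError subclasses TypeError, code written against the
+# pre-19.2.0 behaviour of ``is_callable`` (which raised a plain TypeError) keeps
+# working; the ``msg`` and ``value`` attributes are available for more detailed
+# error reporting.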
--- a/third_party/python/attrs/attr/exceptions.pyi
+++ b/third_party/python/attrs/attr/exceptions.pyi
@@ -1,7 +1,15 @@
+from typing import Any
+
 class FrozenInstanceError(AttributeError):
     msg: str = ...
 
 class AttrsAttributeNotFoundError(ValueError): ...
 class NotAnAttrsClassError(ValueError): ...
 class DefaultAlreadySetError(RuntimeError): ...
 class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+    msg: str = ...
+    value: Any = ...
+    def __init__(self, msg: str, value: Any) -> None: ...
--- a/third_party/python/attrs/attr/filters.py
+++ b/third_party/python/attrs/attr/filters.py
@@ -1,10 +1,10 @@
 """
-Commonly useful filters for :func:`attr.asdict`.
+Commonly useful filters for `attr.asdict`.
 """
 
 from __future__ import absolute_import, division, print_function
 
 from ._compat import isclass
 from ._make import Attribute
 
 
@@ -18,35 +18,35 @@ def _split_what(what):
     )
 
 
 def include(*what):
     """
     Whitelist *what*.
 
     :param what: What to whitelist.
-    :type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\\ s
+    :type what: `list` of `type` or `attr.Attribute`\\ s
 
-    :rtype: :class:`callable`
+    :rtype: `callable`
     """
     cls, attrs = _split_what(what)
 
     def include_(attribute, value):
         return value.__class__ in cls or attribute in attrs
 
     return include_
 
 
 def exclude(*what):
     """
     Blacklist *what*.
 
     :param what: What to blacklist.
-    :type what: :class:`list` of classes or :class:`attr.Attribute`\\ s.
+    :type what: `list` of classes or `attr.Attribute`\\ s.
 
-    :rtype: :class:`callable`
+    :rtype: `callable`
     """
     cls, attrs = _split_what(what)
 
     def exclude_(attribute, value):
         return value.__class__ not in cls and attribute not in attrs
 
     return exclude_
--- a/third_party/python/attrs/attr/validators.py
+++ b/third_party/python/attrs/attr/validators.py
@@ -1,18 +1,31 @@
 """
 Commonly useful validators.
 """
 
 from __future__ import absolute_import, division, print_function
 
+import re
+
 from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
 
 
-__all__ = ["and_", "in_", "instance_of", "optional", "provides"]
+__all__ = [
+    "and_",
+    "deep_iterable",
+    "deep_mapping",
+    "in_",
+    "instance_of",
+    "is_callable",
+    "matches_re",
+    "optional",
+    "provides",
+]
 
 
 @attrs(repr=False, slots=True, hash=True)
 class _InstanceOfValidator(object):
     type = attrib()
 
     def __call__(self, inst, attr, value):
         """
@@ -35,30 +48,102 @@ class _InstanceOfValidator(object):
     def __repr__(self):
         return "<instance_of validator for type {type!r}>".format(
             type=self.type
         )
 
 
 def instance_of(type):
     """
-    A validator that raises a :exc:`TypeError` if the initializer is called
+    A validator that raises a `TypeError` if the initializer is called
     with a wrong type for this particular attribute (checks are performed using
-    :func:`isinstance` therefore it's also valid to pass a tuple of types).
+    `isinstance` therefore it's also valid to pass a tuple of types).
 
     :param type: The type to check for.
     :type type: type or tuple of types
 
     :raises TypeError: With a human readable error message, the attribute
-        (of type :class:`attr.Attribute`), the expected type, and the value it
+        (of type `attr.Attribute`), the expected type, and the value it
         got.
     """
     return _InstanceOfValidator(type)
 
 
+@attrs(repr=False, frozen=True)
+class _MatchesReValidator(object):
+    regex = attrib()
+    flags = attrib()
+    match_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.match_func(value):
+            raise ValueError(
+                "'{name}' must match regex {regex!r}"
+                " ({value!r} doesn't)".format(
+                    name=attr.name, regex=self.regex.pattern, value=value
+                ),
+                attr,
+                self.regex,
+                value,
+            )
+
+    def __repr__(self):
+        return "<matches_re validator for pattern {regex!r}>".format(
+            regex=self.regex
+        )
+
+
+def matches_re(regex, flags=0, func=None):
+    r"""
+    A validator that raises `ValueError` if the initializer is called
+    with a string that doesn't match *regex*.
+
+    :param str regex: a regex string to match against
+    :param int flags: flags that will be passed to the underlying re function
+        (default 0)
+    :param callable func: which underlying `re` function to call (options
+        are `re.fullmatch`, `re.search`, `re.match`, default
+        is ``None`` which means either `re.fullmatch` or an emulation of
+    it on Python 2). For performance reasons, the given function is not
+    called directly; instead, the corresponding method of a
+    pre-`re.compile`\ ed pattern is used.
+
+    .. versionadded:: 19.2.0
+    """
+    fullmatch = getattr(re, "fullmatch", None)
+    valid_funcs = (fullmatch, None, re.search, re.match)
+    if func not in valid_funcs:
+        raise ValueError(
+            "'func' must be one of %s."
+            % (
+                ", ".join(
+                    sorted(
+                        e and e.__name__ or "None" for e in set(valid_funcs)
+                    )
+                ),
+            )
+        )
+
+    pattern = re.compile(regex, flags)
+    if func is re.match:
+        match_func = pattern.match
+    elif func is re.search:
+        match_func = pattern.search
+    else:
+        if fullmatch:
+            match_func = pattern.fullmatch
+        else:
+            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
+            match_func = pattern.match
+
+    return _MatchesReValidator(pattern, flags, match_func)
+
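+# A minimal usage sketch (hypothetical class and regex), assuming the default
+# fullmatch semantics documented above:
+#
+#   @attr.s
+#   class User(object):
+#       email = attr.ib(validator=matches_re(r"[^@]+@[^@]+"))
+#
+#   User("jane@example.com")   # passes
+#   User("not-an-email")       # raises ValueError from the validator
+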
+
 @attrs(repr=False, slots=True, hash=True)
 class _ProvidesValidator(object):
     interface = attrib()
 
     def __call__(self, inst, attr, value):
         """
         We use a callable class to be able to change the ``__repr__``.
         """
@@ -76,25 +161,25 @@ class _ProvidesValidator(object):
     def __repr__(self):
         return "<provides validator for interface {interface!r}>".format(
             interface=self.interface
         )
 
 
 def provides(interface):
     """
-    A validator that raises a :exc:`TypeError` if the initializer is called
+    A validator that raises a `TypeError` if the initializer is called
     with an object that does not provide the requested *interface* (checks are
     performed using ``interface.providedBy(value)``; see `zope.interface
     <https://zopeinterface.readthedocs.io/en/latest/>`_).
 
     :param zope.interface.Interface interface: The interface to check for.
 
     :raises TypeError: With a human readable error message, the attribute
-        (of type :class:`attr.Attribute`), the expected interface, and the
+        (of type `attr.Attribute`), the expected interface, and the
         value it got.
     """
     return _ProvidesValidator(interface)
 
 
 @attrs(repr=False, slots=True, hash=True)
 class _OptionalValidator(object):
     validator = attrib()
@@ -114,17 +199,17 @@ class _OptionalValidator(object):
 def optional(validator):
     """
     A validator that makes an attribute optional.  An optional attribute is one
     which can be set to ``None`` in addition to satisfying the requirements of
     the sub-validator.
 
     :param validator: A validator (or a list of validators) that is used for
         non-``None`` values.
-    :type validator: callable or :class:`list` of callables.
+    :type validator: callable or `list` of callables.
 
     .. versionadded:: 15.1.0
     .. versionchanged:: 17.1.0 *validator* can be a list of validators.
     """
     if isinstance(validator, list):
         return _OptionalValidator(_AndValidator(validator))
     return _OptionalValidator(validator)
 
@@ -149,54 +234,65 @@ class _InValidator(object):
     def __repr__(self):
         return "<in_ validator with options {options!r}>".format(
             options=self.options
         )
 
 
 def in_(options):
     """
-    A validator that raises a :exc:`ValueError` if the initializer is called
+    A validator that raises a `ValueError` if the initializer is called
     with a value that does not belong in the options provided.  The check is
     performed using ``value in options``.
 
     :param options: Allowed options.
-    :type options: list, tuple, :class:`enum.Enum`, ...
+    :type options: list, tuple, `enum.Enum`, ...
 
     :raises ValueError: With a human readable error message, the attribute (of
-       type :class:`attr.Attribute`), the expected options, and the value it
+       type `attr.Attribute`), the expected options, and the value it
        got.
 
     .. versionadded:: 17.1.0
     """
     return _InValidator(options)
 
 
 @attrs(repr=False, slots=False, hash=True)
 class _IsCallableValidator(object):
     def __call__(self, inst, attr, value):
         """
         We use a callable class to be able to change the ``__repr__``.
         """
         if not callable(value):
-            raise TypeError("'{name}' must be callable".format(name=attr.name))
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
 
     def __repr__(self):
         return "<is_callable validator>"
 
 
 def is_callable():
     """
-    A validator that raises a :class:`TypeError` if the initializer is called
-    with a value for this particular attribute that is not callable.
+    A validator that raises an `attr.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute
+    that is not callable.
 
     .. versionadded:: 19.1.0
 
-    :raises TypeError: With a human readable error message containing the
-        attribute (of type :class:`attr.Attribute`) name.
+    :raises `attr.exceptions.NotCallableError`: With a human readable error
+        message containing the attribute (`attr.Attribute`) name,
+        and the value it got.
     """
     return _IsCallableValidator()
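+
+# A minimal usage sketch (hypothetical attribute and class names); a
+# non-callable value raises the NotCallableError documented above:
+#
+#   @attr.s
+#   class Task(object):
+#       callback = attr.ib(validator=is_callable())
+#
+#   Task(callback=print)    # passes
+#   Task(callback="nope")   # raises attr.exceptions.NotCallableError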
 
 
 @attrs(repr=False, slots=True, hash=True)
 class _DeepIterable(object):
     member_validator = attrib(validator=is_callable())
     iterable_validator = attrib(
--- a/third_party/python/attrs/attr/validators.pyi
+++ b/third_party/python/attrs/attr/validators.pyi
@@ -1,24 +1,66 @@
-from typing import Container, List, Union, TypeVar, Type, Any, Optional, Tuple
+from typing import (
+    Container,
+    List,
+    Union,
+    TypeVar,
+    Type,
+    Any,
+    Optional,
+    Tuple,
+    Iterable,
+    Mapping,
+    Callable,
+    Match,
+    AnyStr,
+    overload,
+)
 from . import _ValidatorType
 
 _T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
 
+# To be more precise on instance_of, we use some overloads.
+# If there are more than 3 items in the tuple, we fall back to Any.
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
 def instance_of(
-    type: Union[Tuple[Type[_T], ...], Type[_T]]
-) -> _ValidatorType[_T]: ...
+    type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
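+# For example, a type checker applying these overloads infers (sketch):
+#   instance_of(int)                       -> _ValidatorType[int]
+#   instance_of((int, str))                -> _ValidatorType[Union[int, str]]
+#   instance_of((int, str, bytes, float))  -> _ValidatorType[Any]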
 def provides(interface: Any) -> _ValidatorType[Any]: ...
 def optional(
     validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
 ) -> _ValidatorType[Optional[_T]]: ...
 def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
 def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+    regex: AnyStr,
+    flags: int = ...,
+    func: Optional[
+        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+    ] = ...,
+) -> _ValidatorType[AnyStr]: ...
 def deep_iterable(
     member_validator: _ValidatorType[_T],
-    iterable_validator: Optional[_ValidatorType[_T]],
-) -> _ValidatorType[_T]: ...
+    iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
 def deep_mapping(
-    key_validator: _ValidatorType[_T],
-    value_validator: _ValidatorType[_T],
-    mapping_validator: Optional[_ValidatorType[_T]],
-) -> _ValidatorType[_T]: ...
+    key_validator: _ValidatorType[_K],
+    value_validator: _ValidatorType[_V],
+    mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
 def is_callable() -> _ValidatorType[_T]: ...
deleted file mode 100644
--- a/third_party/python/attrs/attrs-19.1.0.dist-info/METADATA
+++ /dev/null
@@ -1,249 +0,0 @@
-Metadata-Version: 2.1
-Name: attrs
-Version: 19.1.0
-Summary: Classes Without Boilerplate
-Home-page: https://www.attrs.org/
-Author: Hynek Schlawack
-Author-email: hs@ox.cx
-Maintainer: Hynek Schlawack
-Maintainer-email: hs@ox.cx
-License: MIT
-Project-URL: Documentation, https://www.attrs.org/
-Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
-Project-URL: Source Code, https://github.com/python-attrs/attrs
-Keywords: class,attribute,boilerplate
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: Natural Language :: English
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
-Provides-Extra: dev
-Requires-Dist: coverage ; extra == 'dev'
-Requires-Dist: hypothesis ; extra == 'dev'
-Requires-Dist: pympler ; extra == 'dev'
-Requires-Dist: pytest ; extra == 'dev'
-Requires-Dist: six ; extra == 'dev'
-Requires-Dist: zope.interface ; extra == 'dev'
-Requires-Dist: sphinx ; extra == 'dev'
-Requires-Dist: pre-commit ; extra == 'dev'
-Provides-Extra: docs
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: zope.interface ; extra == 'docs'
-Provides-Extra: tests
-Requires-Dist: coverage ; extra == 'tests'
-Requires-Dist: hypothesis ; extra == 'tests'
-Requires-Dist: pympler ; extra == 'tests'
-Requires-Dist: pytest ; extra == 'tests'
-Requires-Dist: six ; extra == 'tests'
-Requires-Dist: zope.interface ; extra == 'tests'
-
-.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
-   :alt: attrs Logo
-
-======================================
-``attrs``: Classes Without Boilerplate
-======================================
-
-.. image:: https://readthedocs.org/projects/attrs/badge/?version=stable
-   :target: https://www.attrs.org/en/stable/?badge=stable
-   :alt: Documentation Status
-
-.. image:: https://travis-ci.org/python-attrs/attrs.svg?branch=master
-   :target: https://travis-ci.org/python-attrs/attrs
-   :alt: CI Status
-
-.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg
-   :target: https://codecov.io/github/python-attrs/attrs
-   :alt: Test Coverage
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/ambv/black
-   :alt: Code style: black
-
-.. teaser-begin
-
-``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder <https://nedbatchelder.com/blog/200605/dunder.html>`_ methods).
-
-Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
-
-.. -spiel-end-
-
-For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
-
-.. -code-begin-
-
-.. code-block:: pycon
-
-   >>> import attr
-
-   >>> @attr.s
-   ... class SomeClass(object):
-   ...     a_number = attr.ib(default=42)
-   ...     list_of_numbers = attr.ib(factory=list)
-   ...
-   ...     def hard_math(self, another_number):
-   ...         return self.a_number + sum(self.list_of_numbers) * another_number
-
-
-   >>> sc = SomeClass(1, [1, 2, 3])
-   >>> sc
-   SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
-
-   >>> sc.hard_math(3)
-   19
-   >>> sc == SomeClass(1, [1, 2, 3])
-   True
-   >>> sc != SomeClass(2, [3, 2, 1])
-   True
-
-   >>> attr.asdict(sc)
-   {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
-
-   >>> SomeClass()
-   SomeClass(a_number=42, list_of_numbers=[])
-
-   >>> C = attr.make_class("C", ["a", "b"])
-   >>> C("foo", "bar")
-   C(a='foo', b='bar')
-
-
-After *declaring* your attributes ``attrs`` gives you:
-
-- a concise and explicit overview of the class's attributes,
-- a nice human-readable ``__repr__``,
-- a complete set of comparison methods,
-- an initializer,
-- and much more,
-
-*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
-
-On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations <https://www.attrs.org/en/latest/types.html>`_.
-
-This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving <https://www.attrs.org/en/stable/why.html#namedtuples>`_ ``namedtuple``\ s.
-Which in turn encourages you to write *small classes* that do `one thing well <https://www.destroyallsoftware.com/talks/boundaries>`_.
-Never again violate the `single responsibility principle <https://en.wikipedia.org/wiki/Single_responsibility_principle>`_ just because implementing ``__init__`` et al is a painful drag.
-
-
-.. -testimonials-
-
-Testimonials
-============
-
-**Amber Hawkie Brown**, Twisted Release Manager and Computer Owl:
-
-  Writing a fully-functional class using attrs takes me less time than writing this testimonial.
-
-
-**Glyph Lefkowitz**, creator of `Twisted <https://twistedmatrix.com/>`_, `Automat <https://pypi.org/project/Automat/>`_, and other open source software, in `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_:
-
-  I’m looking forward to is being able to program in Python-with-attrs everywhere.
-  It exerts a subtle, but positive, design influence in all the codebases I’ve see it used in.
-
-
-**Kenneth Reitz**, author of `Requests <http://www.python-requests.org/>`_ and Developer Advocate at DigitalOcean, (`on paper no less <https://twitter.com/hynek/status/866817877650751488>`_!):
-
-  attrs—classes for humans.  I like it.
-
-
-**Łukasz Langa**, prolific CPython core developer and Production Engineer at Facebook:
-
-  I'm increasingly digging your attr.ocity. Good job!
-
-
-.. -end-
-
-.. -project-information-
-
-Getting Help
-============
-
-Please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
-
-Answering questions of your fellow developers is also great way to help the project!
-
-
-Project Information
-===================
-
-``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
-its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
-the code on `GitHub <https://github.com/python-attrs/attrs>`_,
-and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
-It’s rigorously tested on Python 2.7, 3.4+, and PyPy.
-
-We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
-Feel free to browse and add your own!
-
-If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://www.attrs.org/en/latest/contributing.html>`_ to get you started!
-
-
-Release Information
-===================
-
-19.1.0 (2019-03-03)
--------------------
-
-Backward-incompatible Changes
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-- Fixed a bug where deserialized objects with ``cache_hash=True`` could have incorrect hash code values.
-  This change breaks classes with ``cache_hash=True`` when a custom ``__setstate__`` is present.
-  An exception will be thrown when applying the ``attrs`` annotation to such a class.
-  This limitation is tracked in issue `#494 <https://github.com/python-attrs/attrs/issues/494>`_.
-  `#482 <https://github.com/python-attrs/attrs/issues/482>`_
-
-
-Changes
-^^^^^^^
-
-- Add ``is_callable``, ``deep_iterable``, and ``deep_mapping`` validators.
-
-  * ``is_callable``: validates that a value is callable
-  * ``deep_iterable``: Allows recursion down into an iterable,
-    applying another validator to every member in the iterable
-    as well as applying an optional validator to the iterable itself.
-  * ``deep_mapping``: Allows recursion down into the items in a mapping object,
-    applying a key validator and a value validator to the key and value in every item.
-    Also applies an optional validator to the mapping object itself.
-
-  You can find them in the ``attr.validators`` package.
-  `#425 <https://github.com/python-attrs/attrs/issues/425>`_
-- Fixed stub files to prevent errors raised by mypy's ``disallow_any_generics = True`` option.
-  `#443 <https://github.com/python-attrs/attrs/issues/443>`_
-- Attributes with ``init=False`` now can follow after ``kw_only=True`` attributes.
-  `#450 <https://github.com/python-attrs/attrs/issues/450>`_
-- ``attrs`` now has first class support for defining exception classes.
-
-  If you define a class using ``@attr.s(auto_exc=True)`` and subclass an exception, the class will behave like a well-behaved exception class including an appropriate ``__str__`` method, and all attributes additionally available in an ``args`` attribute.
-  `#500 <https://github.com/python-attrs/attrs/issues/500>`_
-- Clarified documentation for hashing to warn that hashable objects should be deeply immutable (in their usage, even if this is not enforced).
-  `#503 <https://github.com/python-attrs/attrs/issues/503>`_
-
-`Full changelog <https://www.attrs.org/en/stable/changelog.html>`_.
-
-Credits
-=======
-
-``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
-
-The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
-
-A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
-
-It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of it clunkiness and unfortunate decisions.
-Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
-
-
deleted file mode 100644
--- a/third_party/python/attrs/attrs-19.1.0.dist-info/RECORD
+++ /dev/null
@@ -1,20 +0,0 @@
-attr/__init__.py,sha256=3XomfUfit8bVVEmSf1bRhLnRMPKauPbzFqPUnVRPgXw,1244
-attr/__init__.pyi,sha256=OON4rNWdgL69frd_WdrxtuQe8CEczl3aFpgifFeESN8,7769
-attr/_compat.py,sha256=GcjqWHrwUWGVCbDKY7twYt-Rr_4nPJqBnfrf5SeHsIY,4583
-attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514
-attr/_funcs.py,sha256=7v3MNMHdOUP2NkiLPwEiWAorBs3uNQq5Rn70Odr5uqo,9725
-attr/_make.py,sha256=be1PmzR8EDGfVA2Cx6ljsTIuXRxW2tEWPpTqtQXde0Y,68317
-attr/converters.py,sha256=SFPiz6-hAs2pw3kn7SzkBcdpE9AjW8iT9wjpe2eLDrQ,2155
-attr/converters.pyi,sha256=wAhCoOT1MFV8t323rpD87O7bxQ8CYLTPiBQd-29BieI,351
-attr/exceptions.py,sha256=N0WQfKvBVd4GWgDxTbFScg4ajy7-HlyvXiwlSQBA0jA,1272
-attr/exceptions.pyi,sha256=sq7TbBEGGSf81uFXScW9_aO62vd0v6LAvqz0a8Hrsxw,257
-attr/filters.py,sha256=s6NrcRWJKlCQauPEH0S4lmgFwlCdUQcHKcNkDHpptN4,1153
-attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214
-attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-attr/validators.py,sha256=ZAf_y5wNHyq2Rdlin_fwplQnU2u5wZnvmYJq1JddPtM,8750
-attr/validators.pyi,sha256=p2xr2ob8RaKW3PqlKDrQQVAyl8ZH4pNdlZzWXapGPjk,897
-attrs-19.1.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082
-attrs-19.1.0.dist-info/METADATA,sha256=5yXp3BTFGRkY2hQDs18h-2dT7xnSlExRUfxvujCtHTE,10275
-attrs-19.1.0.dist-info/WHEEL,sha256=_wJFdOYk7i3xxT8ElOkUJvOdOvfNGbR9g-bf6UQT6sU,110
-attrs-19.1.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5
-attrs-19.1.0.dist-info/RECORD,,
deleted file mode 100644
--- a/third_party/python/attrs/attrs-19.1.0.dist-info/WHEEL
+++ /dev/null
@@ -1,6 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.32.3)
-Root-Is-Purelib: true
-Tag: py2-none-any
-Tag: py3-none-any
-
rename from third_party/python/attrs/attrs-19.1.0.dist-info/LICENSE
rename to third_party/python/attrs/attrs-19.2.0.dist-info/LICENSE
new file mode 100644
--- /dev/null
+++ b/third_party/python/attrs/attrs-19.2.0.dist-info/METADATA
@@ -0,0 +1,284 @@
+Metadata-Version: 2.1
+Name: attrs
+Version: 19.2.0
+Summary: Classes Without Boilerplate
+Home-page: https://www.attrs.org/
+Author: Hynek Schlawack
+Author-email: hs@ox.cx
+Maintainer: Hynek Schlawack
+Maintainer-email: hs@ox.cx
+License: MIT
+Project-URL: Documentation, https://www.attrs.org/
+Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
+Project-URL: Source Code, https://github.com/python-attrs/attrs
+Keywords: class,attribute,boilerplate
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Description-Content-Type: text/x-rst
+Provides-Extra: azure-pipelines
+Requires-Dist: coverage ; extra == 'azure-pipelines'
+Requires-Dist: hypothesis ; extra == 'azure-pipelines'
+Requires-Dist: pympler ; extra == 'azure-pipelines'
+Requires-Dist: pytest (>=4.3.0) ; extra == 'azure-pipelines'
+Requires-Dist: six ; extra == 'azure-pipelines'
+Requires-Dist: zope.interface ; extra == 'azure-pipelines'
+Requires-Dist: pytest-azurepipelines ; extra == 'azure-pipelines'
+Provides-Extra: dev
+Requires-Dist: coverage ; extra == 'dev'
+Requires-Dist: hypothesis ; extra == 'dev'
+Requires-Dist: pympler ; extra == 'dev'
+Requires-Dist: pytest (>=4.3.0) ; extra == 'dev'
+Requires-Dist: six ; extra == 'dev'
+Requires-Dist: zope.interface ; extra == 'dev'
+Requires-Dist: sphinx ; extra == 'dev'
+Requires-Dist: pre-commit ; extra == 'dev'
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: zope.interface ; extra == 'docs'
+Provides-Extra: tests
+Requires-Dist: coverage ; extra == 'tests'
+Requires-Dist: hypothesis ; extra == 'tests'
+Requires-Dist: pympler ; extra == 'tests'
+Requires-Dist: pytest (>=4.3.0) ; extra == 'tests'
+Requires-Dist: six ; extra == 'tests'
+Requires-Dist: zope.interface ; extra == 'tests'
+
+.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+   :alt: attrs Logo
+
+======================================
+``attrs``: Classes Without Boilerplate
+======================================
+
+.. image:: https://readthedocs.org/projects/attrs/badge/?version=stable
+   :target: https://www.attrs.org/en/stable/?badge=stable
+   :alt: Documentation Status
+
+.. image:: https://attrs.visualstudio.com/attrs/_apis/build/status/python-attrs.attrs?branchName=master
+   :target: https://attrs.visualstudio.com/attrs/_build/latest?definitionId=1&branchName=master
+   :alt: CI Status
+
+.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg
+   :target: https://codecov.io/github/python-attrs/attrs
+   :alt: Test Coverage
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: black
+
+.. teaser-begin
+
+``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder <https://nedbatchelder.com/blog/200605/dunder.html>`_ methods).
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+.. -spiel-end-
+
+For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. -code-begin-
+
+.. code-block:: pycon
+
+   >>> import attr
+
+   >>> @attr.s
+   ... class SomeClass(object):
+   ...     a_number = attr.ib(default=42)
+   ...     list_of_numbers = attr.ib(factory=list)
+   ...
+   ...     def hard_math(self, another_number):
+   ...         return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+   >>> sc = SomeClass(1, [1, 2, 3])
+   >>> sc
+   SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+   >>> sc.hard_math(3)
+   19
+   >>> sc == SomeClass(1, [1, 2, 3])
+   True
+   >>> sc != SomeClass(2, [3, 2, 1])
+   True
+
+   >>> attr.asdict(sc)
+   {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+   >>> SomeClass()
+   SomeClass(a_number=42, list_of_numbers=[])
+
+   >>> C = attr.make_class("C", ["a", "b"])
+   >>> C("foo", "bar")
+   C(a='foo', b='bar')
+
+
+After *declaring* your attributes ``attrs`` gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable ``__repr__``,
+- a complete set of comparison methods (equality and ordering),
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations <https://www.attrs.org/en/latest/types.html>`_.
+
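+For example, a minimal sketch of the annotation-based style (using ``auto_attribs=True``; ``Point`` is just an illustrative class) might look like this:
+
+.. code-block:: pycon
+
+   >>> import attr
+
+   >>> @attr.s(auto_attribs=True)
+   ... class Point:
+   ...     x: int
+   ...     y: int = 0
+
+   >>> Point(1)
+   Point(x=1, y=0)
+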
+This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving <https://www.attrs.org/en/stable/why.html#namedtuples>`_ ``namedtuple``\ s.
+Which in turn encourages you to write *small classes* that do `one thing well <https://www.destroyallsoftware.com/talks/boundaries>`_.
+Never again violate the `single responsibility principle <https://en.wikipedia.org/wiki/Single_responsibility_principle>`_ just because implementing ``__init__`` et al is a painful drag.
+
+
+.. -testimonials-
+
+Testimonials
+============
+
+**Amber Hawkie Brown**, Twisted Release Manager and Computer Owl:
+
+  Writing a fully-functional class using attrs takes me less time than writing this testimonial.
+
+
+**Glyph Lefkowitz**, creator of `Twisted <https://twistedmatrix.com/>`_, `Automat <https://pypi.org/project/Automat/>`_, and other open source software, in `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_:
+
+  I’m looking forward to being able to program in Python-with-attrs everywhere.
+  It exerts a subtle, but positive, design influence in all the codebases I’ve seen it used in.
+
+
+**Kenneth Reitz**, creator of `Requests <https://github.com/psf/requests>`_ (`on paper no less <https://twitter.com/hynek/status/866817877650751488>`_!):
+
+  attrs—classes for humans.  I like it.
+
+
+**Łukasz Langa**, creator of `Black <https://github.com/psf/black>`_, prolific Python core developer, and release manager for Python 3.8 and 3.9:
+
+  I'm increasingly digging your attr.ocity. Good job!
+
+
+.. -end-
+
+.. -project-information-
+
+Getting Help
+============
+
+Please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
+
+Answering questions of your fellow developers is also a great way to help the project!
+
+
+Project Information
+===================
+
+``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
+its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
+the code on `GitHub <https://github.com/python-attrs/attrs>`_,
+and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
+It’s rigorously tested on Python 2.7, 3.4+, and PyPy.
+
+We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
+Feel free to browse and add your own!
+
+If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://www.attrs.org/en/latest/contributing.html>`_ to get you started!
+
+
+Release Information
+===================
+
+19.2.0 (2019-10-01)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Removed deprecated ``Attribute`` attribute ``convert`` per scheduled removal on 2019/1.
+  This planned deprecation is tracked in issue `#307 <https://github.com/python-attrs/attrs/issues/307>`_.
+  `#504 <https://github.com/python-attrs/attrs/issues/504>`_
+- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` do not consider subclasses comparable anymore.
+
+  This has been deprecated since 18.2.0 and was raising a ``DeprecationWarning`` for over a year.
+  `#570 <https://github.com/python-attrs/attrs/issues/570>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``cmp`` argument to ``attr.s()`` and ``attr.ib()`` is now deprecated.
+
+  Please use ``eq`` to add equality methods (``__eq__`` and ``__ne__``) and ``order`` to add ordering methods (``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``) instead – just like with `dataclasses <https://docs.python.org/3/library/dataclasses.html>`_.
+
+  Both are effectively ``True`` by default, but it's enough to set ``eq=False`` to disable both at once (see the sketch below).
+  Passing ``eq=False, order=True`` explicitly will raise a ``ValueError`` though.
+
+  Since this is arguably a deeper backward-compatibility break, it will have an extended deprecation period until 2021-06-01.
+  After that day, the ``cmp`` argument will be removed.
+
+  ``attr.Attribute`` also isn't orderable anymore.
+  `#574 <https://github.com/python-attrs/attrs/issues/574>`_
+
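+A brief sketch of how the replacement arguments are used (``C`` here is only an illustrative class):
+
+.. code-block:: pycon
+
+   >>> import attr
+
+   >>> @attr.s(eq=True, order=False)  # replaces the deprecated cmp argument
+   ... class C:
+   ...     x = attr.ib()
+
+   >>> C(1) == C(1)
+   True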
+
+Changes
+^^^^^^^
+
+- Updated ``attr.validators.__all__`` to include new validators added in `#425`_.
+  `#517 <https://github.com/python-attrs/attrs/issues/517>`_
+- Slotted classes now use a pure Python mechanism to rewrite the ``__class__`` cell when rebuilding the class, so ``super()`` works even in environments where ``ctypes`` is not installed.
+  `#522 <https://github.com/python-attrs/attrs/issues/522>`_
+- When collecting attributes using ``@attr.s(auto_attribs=True)``, attributes with a default of ``None`` are now deleted too.
+  `#523 <https://github.com/python-attrs/attrs/issues/523>`_,
+  `#556 <https://github.com/python-attrs/attrs/issues/556>`_
+- Fixed ``attr.validators.deep_iterable()`` and ``attr.validators.deep_mapping()`` type stubs.
+  `#533 <https://github.com/python-attrs/attrs/issues/533>`_
+- ``attr.validators.is_callable()`` validator now raises an exception ``attr.exceptions.NotCallableError``, a subclass of ``TypeError``, that reports the received value.
+  `#536 <https://github.com/python-attrs/attrs/issues/536>`_
+- ``@attr.s(auto_exc=True)`` now generates classes that are hashable by ID, as the documentation always claimed it would.
+  `#543 <https://github.com/python-attrs/attrs/issues/543>`_,
+  `#563 <https://github.com/python-attrs/attrs/issues/563>`_
+- Added ``attr.validators.matches_re()`` that checks whether string attributes match a regular expression (see the sketch after this list).
+  `#552 <https://github.com/python-attrs/attrs/issues/552>`_
+- Keyword-only attributes (``kw_only=True``) and attributes that are excluded from the generated ``__init__`` (``init=False``) can now appear before mandatory attributes.
+  `#559 <https://github.com/python-attrs/attrs/issues/559>`_
+- The fake filename for generated methods is now more stable.
+  It won't change when you restart the process.
+  `#560 <https://github.com/python-attrs/attrs/issues/560>`_
+- The value passed to ``@attr.ib(repr=…)`` can now be either a boolean (as before) or a callable.
+  That callable must return a string and is then used for formatting the attribute by the generated ``__repr__()`` method.
+  `#568 <https://github.com/python-attrs/attrs/issues/568>`_
+- Added ``attr.__version_info__`` that can be used to reliably check the version of ``attrs`` and write forward- and backward-compatible code (also covered in the sketch after this list).
+  Please check out the `section on deprecated APIs <http://www.attrs.org/en/stable/api.html#deprecated-apis>`_ on how to use it.
+  `#580 <https://github.com/python-attrs/attrs/issues/580>`_
+
+ .. _`#425`: https://github.com/python-attrs/attrs/issues/425
+
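+A brief combined sketch of the new ``matches_re()`` validator and the ``__version_info__`` check described above (``User`` is only an illustrative class):
+
+.. code-block:: pycon
+
+   >>> import attr
+
+   >>> attr.__version_info__ >= (19, 2)
+   True
+
+   >>> @attr.s
+   ... class User(object):
+   ...     email = attr.ib(validator=attr.validators.matches_re(r".+@.+"))
+
+   >>> User("jane@example.org")
+   User(email='jane@example.org')
+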
+`Full changelog <https://www.attrs.org/en/stable/changelog.html>`_.
+
+Credits
+=======
+
+``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
+
+The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
+
+
new file mode 100644
--- /dev/null
+++ b/third_party/python/attrs/attrs-19.2.0.dist-info/RECORD
@@ -0,0 +1,22 @@
+attr/__init__.py,sha256=nRvEecOWLaJsMraOK89f4hMYThTUZHWj1B0jl249M-0,1344
+attr/__init__.pyi,sha256=5AVtEEzK-g3HO1SUll44hTL8LFoM8TYD7Gn9vEMFGzk,8252
+attr/_compat.py,sha256=-pJtdtqgCg0K6rH_BWf3wKuTum58GD-WWPclQQ2SUaU,7326
+attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514
+attr/_funcs.py,sha256=unAJfNGSTOzxyFzkj7Rs3O1bfsQodmXyir9uZKen-vY,9696
+attr/_make.py,sha256=4pdTus8d4OkitzlwytTPP7TNLZK6pVIoKg6KdAZMwYQ,70804
+attr/_version.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162
+attr/_version.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
+attr/converters.py,sha256=5QJRYSXE8G7PW0289y_SPwvvZIcw-nJIuBlfYVdB4BQ,2141
+attr/converters.pyi,sha256=wAhCoOT1MFV8t323rpD87O7bxQ8CYLTPiBQd-29BieI,351
+attr/exceptions.py,sha256=hbhOa3b4W8_mRrbj3FsMTR4Bt5xzbJs5xaFTWn8s6h4,1635
+attr/exceptions.pyi,sha256=4zuaJyl2axxWbqnZgxo_2oTpPNbyowEw3A4hqV5PmAc,458
+attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098
+attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214
+attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attr/validators.py,sha256=8AsxgdDgh3sGPseiUIMPGcTr6PvaDYfH3AK46tsvs8U,11460
+attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868
+attrs-19.2.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082
+attrs-19.2.0.dist-info/METADATA,sha256=qPqvhqvovqyvpsQebMPTXsOi8pv2xuzUDkUzAxz-wvM,12750
+attrs-19.2.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+attrs-19.2.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5
+attrs-19.2.0.dist-info/RECORD,,
new file mode 100644
--- /dev/null
+++ b/third_party/python/attrs/attrs-19.2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
rename from third_party/python/attrs/attrs-19.1.0.dist-info/top_level.txt
rename to third_party/python/attrs/attrs-19.2.0.dist-info/top_level.txt
--- a/third_party/python/requirements.in
+++ b/third_party/python/requirements.in
@@ -1,10 +1,10 @@
 appdirs==1.4.4
-attrs==19.1.0
+attrs==19.2.0
 blessings==1.7
 cbor2==4.0.1
 # Though we don't depend on colorama directly, we need to explicitly
 # define it here because it's needed by other dependencies on
 # Windows systems.
 colorama==0.4.4
 compare-locales==8.2.1
 cookies==2.2.1
--- a/third_party/python/requirements.txt
+++ b/third_party/python/requirements.txt
@@ -45,19 +45,19 @@ appdirs==1.4.4 \
     #   glean-parser
     #   taskcluster-taskgraph
 async-timeout==3.0.1 \
     --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
     --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
     # via
     #   aiohttp
     #   taskcluster
-attrs==19.1.0 \
-    --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \
-    --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399
+attrs==19.2.0 \
+    --hash=sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2 \
+    --hash=sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396
     # via
     #   -r requirements-mach-vendor-python.in
     #   aiohttp
     #   jsonschema
     #   mozilla-version
     #   taskcluster-taskgraph
 blessings==1.7 \
     --hash=sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d \
--- a/toolkit/components/enterprisepolicies/WindowsGPOParser.jsm
+++ b/toolkit/components/enterprisepolicies/WindowsGPOParser.jsm
@@ -92,18 +92,19 @@ function registryToObject(wrk, policies)
     }
   }
   return policies;
 }
 
 function readRegistryValue(wrk, value) {
   switch (wrk.getValueType(value)) {
     case 7: // REG_MULTI_SZ
-      // We only use REG_MULTI_SZ for JSON in the registry. By parsing it here,
-      // we get the benefit of having JSONSchemaValidator properly validate.
+      // While we support JSON in both REG_SZ and REG_MULTI_SZ, a REG_MULTI_SZ
+      // value must be JSON, so we JSON.parse it here so that it goes through
+      // the schema validator.
       try {
         return JSON.parse(wrk.readStringValue(value).replace(/\0/g, "\n"));
       } catch (e) {
         log.error(`Unable to parse JSON for ${value}`);
         return undefined;
       }
     case 2: // REG_EXPAND_SZ
     case wrk.TYPE_STRING:
--- a/toolkit/components/extensions/Extension.jsm
+++ b/toolkit/components/extensions/Extension.jsm
@@ -3138,31 +3138,27 @@ class Langpack extends ExtensionData {
 
     resourceProtocol.setSubstitution(this.startupData.langpackId, null);
   }
 }
 
 class SitePermission extends ExtensionData {
   constructor(addonData, startupReason) {
     super(addonData.resourceURI);
-
+    this.id = addonData.id;
     this.hasShutdown = false;
   }
 
   async loadManifest() {
     let [manifestData] = await Promise.all([this.parseManifest()]);
 
     if (!manifestData) {
       return;
     }
 
-    if (!this.id) {
-      this.id = manifestData.id;
-    }
-
     this.manifest = manifestData.manifest;
     this.type = manifestData.type;
     this.sitePermissions = this.manifest.site_permissions;
     // install_origins is mandatory for this addon type
     this.siteOrigin = this.manifest.install_origins[0];
 
     return this.manifest;
   }
--- a/toolkit/components/extensions/ExtensionCommon.jsm
+++ b/toolkit/components/extensions/ExtensionCommon.jsm
@@ -2279,17 +2279,16 @@ class EventManager {
         // The runtime module no longer implements primed listeners, drop them.
         extension.persistentListeners.delete(module);
         EventManager._writePersistentListeners(extension);
         continue;
       }
       for (let [event, eventEntry] of moduleEntry) {
         for (let listener of eventEntry.values()) {
           let primed = { pendingEvents: [] };
-          listener.primed = primed;
 
           let fireEvent = (...args) =>
             new Promise((resolve, reject) => {
               if (!listener.primed) {
                 reject(new Error("primed listener not re-registered"));
                 return;
               }
               primed.pendingEvents.push({ args, resolve, reject });
@@ -2297,23 +2296,26 @@ class EventManager {
             });
 
           let fire = {
             wakeup: () => extension.wakeupBackground(),
             sync: fireEvent,
             async: fireEvent,
           };
 
-          let { unregister, convert } = api.primeListener(
+          let handler = api.primeListener(
             extension,
             event,
             fire,
             listener.params
           );
-          Object.assign(primed, { unregister, convert });
+          if (handler) {
+            listener.primed = primed;
+            Object.assign(primed, handler);
+          }
         }
       }
     }
   }
 
   // Remove any primed listeners that were not re-registered.
   // This function is called after the background page has started.
   // The removed listeners are removed from the set of saved listeners, unless
@@ -2441,20 +2443,20 @@ class EventManager {
       let key = uneval(args);
       EventManager._initPersistentListeners(extension);
       let listener = extension.persistentListeners
         .get(module)
         .get(event)
         .get(key);
 
       if (listener) {
-        // If extensions.webextensions.background-delayed-startup is disabled,
-        // we can have stored info here but no primed listener.  This check
-        // can be removed if/when we make delayed background startup the only
-        // supported setting.
+        // During startup, only a subset of persisted listeners is primed, and
+        // each API determines whether to prime a specific listener.
+        // Additionally, if extensions.webextensions.background-delayed-startup
+        // is disabled, we may not have primed listeners.
         let { primed } = listener;
         if (primed) {
           listener.primed = null;
 
           primed.convert(fire, this.context);
           unregister = primed.unregister;
 
           for (let evt of primed.pendingEvents) {
--- a/toolkit/components/extensions/ExtensionContent.jsm
+++ b/toolkit/components/extensions/ExtensionContent.jsm
@@ -1133,16 +1133,27 @@ var ExtensionContent = {
       encoding: doc.characterSet,
     });
     return result.language === "un" ? "und" : result.language;
   },
 
   // Used to executeScript, insertCSS and removeCSS.
   async handleActorExecute({ options, windows }) {
     let policy = WebExtensionPolicy.getByID(options.extensionId);
+    // `WebExtensionContentScript` uses `MozDocumentMatcher::Matches` to ensure
+    // that a script can be run in a document. That requires either `frameId`
+    // or `allFrames` to be set. When `frameIds` (plural) is used, we force
+    // `allFrames` to be `true` in order to match any frame. This is OK because
+    // `executeInWin()` below looks up the window for the given `frameIds`
+    // immediately before `script.injectInto()`. Due to this, we won't run
+    // scripts in windows with non-matching `frameId`, despite `allFrames`
+    // being set to `true`.
+    if (options.frameIds) {
+      options.allFrames = true;
+    }
     let matcher = new WebExtensionContentScript(policy, options);
 
     Object.assign(matcher, {
       wantReturnValue: options.wantReturnValue,
       removeCSS: options.removeCSS,
       cssOrigin: options.cssOrigin,
       jsCode: options.jsCode,
     });
@@ -1150,22 +1161,45 @@ var ExtensionContent = {
 
     // Add the cssCode to the script, so that it can be converted into a cached URL.
     await script.addCSSCode(options.cssCode);
     delete options.cssCode;
 
     const executeInWin = innerId => {
       let wg = WindowGlobalChild.getByInnerWindowId(innerId);
       if (wg?.isCurrentGlobal && script.matchesWindowGlobal(wg)) {
-        return script.injectInto(wg.browsingContext.window);
+        let bc = wg.browsingContext;
+
+        return {
+          frameId: bc.parent ? bc.id : 0,
+          promise: script.injectInto(bc.window),
+        };
       }
     };
 
-    let all = Promise.all(windows.map(executeInWin).filter(p => p));
-    let result = await all.catch(e => Promise.reject({ message: e.message }));
+    let promisesWithFrameIds = windows.map(executeInWin).filter(obj => obj);
+
+    let result = await Promise.all(
+      promisesWithFrameIds.map(async ({ frameId, promise }) => {
+        if (!options.returnResultsWithFrameIds) {
+          return promise;
+        }
+
+        try {
+          const result = await promise;
+
+          return { frameId, result };
+        } catch ({ message }) {
+          // Errors cannot be cloned, so return an object with a message
+          // property.
+          // TODO bug 1740608: also support non-Error rejections.
+          return { frameId, error: { message } };
+        }
+      })
+    ).catch(e => Promise.reject({ message: e.message }));
 
     try {
       // Check if the result can be structured-cloned before sending back.
       return Cu.cloneInto(result, this);
     } catch (e) {
       let path = options.jsPaths.slice(-1)[0] ?? "<anonymous code>";
       let message = `Script '${path}' result is non-structured-clonable data`;
       return Promise.reject({ message, fileName: path });
--- a/toolkit/components/extensions/child/ext-scripting.js
+++ b/toolkit/components/extensions/child/ext-scripting.js
@@ -8,43 +8,39 @@
 
 var { ExtensionError } = ExtensionUtils;
 
 this.scripting = class extends ExtensionAPI {
   getAPI(context) {
     return {
       scripting: {
         executeScript: async details => {
-          if (
-            (details.files !== null && details.func !== null) ||
-            (!details.files && !details.func)
-          ) {
-            throw new ExtensionError(
-              "Exactly one of files and func must be specified."
-            );
-          }
-
           let { func, args, ...parentDetails } = details;
 
           if (details.files) {
             if (details.args) {
               throw new ExtensionError(
                 "'args' may not be used with file injections."
               );
             }
-          } else {
+          }
+          // `files` and `func` are mutually exclusive but that is checked in
+          // the parent (in `execute()`).
+          if (func) {
             try {
               const serializedArgs = args
                 ? JSON.stringify(args).slice(1, -1)
                 : "";
               // This is a prop that we compute here and pass to the parent.
-              parentDetails.codeToExecute = `(${func.toString()})(${serializedArgs});`;
+              parentDetails.func = `(${func.toString()})(${serializedArgs});`;
             } catch (e) {
               throw new ExtensionError("Unserializable arguments.");
             }
+          } else {
+            parentDetails.func = null;
           }
 
           return context.childManager.callParentAsyncFunction(
             "scripting.executeScriptInternal",
             [parentDetails]
           );
         },
       },
--- a/toolkit/components/extensions/parent/ext-scripting.js
+++ b/toolkit/components/extensions/parent/ext-scripting.js
@@ -4,128 +4,114 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
 var { ExtensionError } = ExtensionUtils;
 
 /**
- * Inserts a script in the given tab, and returns a promise which resolves when
- * the operation has completed.
+ * Inserts a script or style in the given tab, and returns a promise which
+ * resolves when the operation has completed.
  *
- * @param {TabBase} tab
- *        The tab in which to perform the injection.
  * @param {BaseContext} context
  *        The extension context for which to perform the injection.
  * @param {Object} details
  *        The details object, specifying what to inject, where, and when.
- *        Derived from the ScriptInjection type.
+ *        Derived from the ScriptInjection or CSSInjection types.
  * @param {string} kind
- *        The kind of data being injected. Possible choices: "js".
+ *        The kind of data being injected. Possible choices: "js" or "css".
  * @param {string} method
  *        The name of the method which was called to trigger the injection.
  *        Used to generate appropriate error messages on failure.
  *
  * @returns {Promise}
  *        Resolves to the result of the execution, once it has completed.
  */
-const execute = (tab, context, details, kind, method) => {
+const execute = (context, details, kind, method) => {
+  const { tabManager } = context.extension;
+
   let options = {
     jsPaths: [],
+    cssPaths: [],
+    removeCSS: method == "removeCSS",
     extensionId: context.extension.id,
   };
 
+  const { tabId, frameIds, allFrames } = details.target;
+  const tab = tabManager.get(tabId);
+
   // TODO: Bug 1750765 - Add test coverage for this option.
   options.hasActiveTabPermission = tab.hasActiveTabPermission;
   options.matches = tab.extension.allowedOrigins.patterns.map(
     host => host.pattern
   );
 
-  if (details.code) {
-    options[`${kind}Code`] = details.code;
+  const codeKey = kind === "js" ? "func" : "css";
+  if ((details.files === null) == (details[codeKey] === null)) {
+    throw new ExtensionError(
+      `Exactly one of files and ${codeKey} must be specified.`
+    );
+  }
+
+  if (details[codeKey]) {
+    options[`${kind}Code`] = details[codeKey];
   }
 
   if (details.files) {
     for (const file of details.files) {
       let url = context.uri.resolve(file);
       if (!tab.extension.isExtensionURL(url)) {
-        return Promise.reject({
-          message: "Files to be injected must be within the extension",
-        });
+        throw new ExtensionError(
+          "Files to be injected must be within the extension"
+        );
       }
       options[`${kind}Paths`].push(url);
     }
   }
 
-  // TODO: Bug 1736574 - Add support for multiple frame IDs and `allFrames`.
-  if (details.frameId) {
-    options.frameID = details.frameId;
+  if (allFrames && frameIds) {
+    throw new ExtensionError("Cannot specify both 'allFrames' and 'frameIds'.");
+  }
+
+  if (allFrames) {
+    options.allFrames = allFrames;
+  } else if (frameIds) {
+    options.frameIds = frameIds;
   }
 
   options.runAt = "document_idle";
   options.wantReturnValue = true;
+  // With this option set to `true`, we'll receive executeScript() results with
+  // `frameId`/`result` properties, and an `error` property will also be
+  // returned in case of an error.
+  options.returnResultsWithFrameIds = kind === "js";
 
-  // TODO: Bug 1736579 - Configure options for CSS injection, e.g., `cssPaths`
-  // and `cssOrigin`.
+  if (details.origin) {
+    options.cssOrigin = details.origin.toLowerCase();
+  } else {
+    options.cssOrigin = "author";
+  }
 
   // This function is derived from `_execute()` in `parent/ext-tabs-base.js`,
   // make sure to keep both in sync when relevant.
   return tab.queryContent("Execute", options);
 };
 
 this.scripting = class extends ExtensionAPI {
   getAPI(context) {
-    const { extension } = context;
-    const { tabManager } = extension;
-
     return {
       scripting: {
         executeScriptInternal: async details => {
-          let { tabId, frameIds } = details.target;
-
-          let tab = tabManager.get(tabId);
-
-          let executeDetails = {
-            // Defined in `child/ext-scripting.js`.
-            code: details.codeToExecute,
-            files: details.files,
-          };
-
-          const promises = [];
-
-          if (!frameIds) {
-            // We use the top-level frame by default.
-            frameIds = [0];
-          }
+          return execute(context, details, "js", "executeScript");
+        },
 
-          for (const frameId of frameIds) {
-            const details = { ...executeDetails, frameId };
-            promises.push(
-              execute(tab, context, details, "js", "executeScript")
-                // We return `null` when the result value is falsey.
-                .then(results => ({ frameId, result: results[0] || null }))
-                .catch(error => ({ frameId, result: null, error }))
-            );
-          }
-
-          const results = await Promise.all(promises);
+        insertCSS: async details => {
+          return execute(context, details, "css", "insertCSS").then(() => {});
+        },
 
-          return results.map(({ frameId, result, error }) => {
-            if (error) {
-              // TODO Bug 1740608: we re-throw extension errors coming from
-              // `tab.executeScript()` and only log runtime errors, but this
-              // might change because error handling needs to be more
-              // well-defined.
-              if (error instanceof ExtensionError) {
-                throw error;
-              }
-
-              Cu.reportError(error.message || error);
-            }
-
-            return { frameId, result };
-          });
+        removeCSS: async details => {
+          return execute(context, details, "css", "removeCSS").then(() => {});
         },
       },
     };
   }
 };
--- a/toolkit/components/extensions/parent/ext-tabs-base.js
+++ b/toolkit/components/extensions/parent/ext-tabs-base.js
@@ -671,35 +671,43 @@ class TabBase {
       }
     }
 
     return result;
   }
 
   /**
    * Query each content process hosting subframes of the tab, return results.
+   *
    * @param {string} message
    * @param {object} options
-   * @param {number} options.frameID
-   * @param {boolean} options.allFrames
+   *        These options are also sent to the message handler in the
+   *        `ExtensionContentChild`.
+   * @param {number[]} options.frameIds
+   * @param {boolean} options.returnResultsWithFrameIds
    * @returns {Promise[]}
    */
   async queryContent(message, options) {
-    let { allFrames, frameID } = options;
+    let { frameIds } = options;
 
     /** @type {Map<nsIDOMProcessParent, innerWindowId[]>} */
     let byProcess = new DefaultMap(() => []);
+    let framesFound = 0;
 
     // Recursively walk the tab's BC tree, find all frames, group by process.
     function visit(bc) {
       let win = bc.currentWindowGlobal;
-      if (win?.domProcess && (!frameID || frameID === bc.id)) {
+      let frameId = bc.parent ? bc.id : 0;
+
+      if (win?.domProcess && (!frameIds || frameIds.includes(frameId))) {
         byProcess.get(win.domProcess).push(win.innerWindowId);
+        framesFound++;
       }
-      if (allFrames || (frameID && !byProcess.size)) {
+
+      if (!frameIds || framesFound < frameIds.length) {
         bc.children.forEach(visit);
       }
     }
     visit(this.browsingContext);
 
     let promises = Array.from(byProcess.entries(), ([proc, windows]) =>
       proc.getActor("ExtensionContent").sendQuery(message, { windows, options })
     );
@@ -710,25 +718,25 @@ class TabBase {
         let message = `Script '${fileName}' result is non-structured-clonable data`;
         return Promise.reject({ message, fileName });
       }
       throw err;
     });
     results = results.flat();
 
     if (!results.length) {
-      if (frameID) {
+      if (frameIds && frameIds.length === 1 && frameIds[0] !== 0) {
         throw new ExtensionError("Frame not found, or missing host permission");
       }
 
-      let frames = allFrames ? ", and any iframes" : "";
+      let frames = framesFound > 1 ? ", and any iframes" : "";
       throw new ExtensionError(`Missing host permission for the tab${frames}`);
     }
 
-    if (!allFrames && results.length > 1) {
+    if (frameIds && frameIds.length === 1 && results.length > 1) {
       throw new ExtensionError("Internal error: multiple windows matched");
     }
 
     return results;
   }
 
   /**
    * Inserts a script or stylesheet in the given tab, and returns a promise
@@ -782,22 +790,25 @@ class TabBase {
       let url = context.uri.resolve(details.file);
       if (!this.extension.isExtensionURL(url)) {
         return Promise.reject({
           message: "Files to be injected must be within the extension",
         });
       }
       options[`${kind}Paths`].push(url);
     }
+
     if (details.allFrames) {
-      options.allFrames = details.allFrames;
+      options.allFrames = true;
+    } else if (details.frameId !== null) {
+      options.frameIds = [details.frameId];
+    } else if (!details.allFrames) {
+      options.frameIds = [0];
     }
-    if (details.frameId !== null) {
-      options.frameID = details.frameId;
-    }
+
     if (details.matchAboutBlank) {
       options.matchAboutBlank = details.matchAboutBlank;
     }
     if (details.runAt !== null) {
       options.runAt = details.runAt;
     } else {
       options.runAt = "document_idle";
     }
--- a/toolkit/components/extensions/parent/ext-webRequest.js
+++ b/toolkit/components/extensions/parent/ext-webRequest.js
@@ -122,17 +122,20 @@ function makeWebRequestEvent(context, na
         context.xulBrowser.frameLoader.remoteTab
       ).unregister;
     },
   }).api();
 }
 
 this.webRequest = class extends ExtensionAPI {
   primeListener(extension, event, fire, params) {
-    return registerEvent(extension, event, fire, ...params);
+    // During early startup, if the listener does not use blocking, we do not prime it.
+    if (params[1]?.includes("blocking")) {
+      return registerEvent(extension, event, fire, ...params);
+    }
   }
 
   getAPI(context) {
     return {
       webRequest: {
         onBeforeRequest: makeWebRequestEvent(context, "onBeforeRequest"),
         onBeforeSendHeaders: makeWebRequestEvent(
           context,
--- a/toolkit/components/extensions/schemas/scripting.json
+++ b/toolkit/components/extensions/schemas/scripting.json
@@ -31,17 +31,17 @@
             "type": "array",
             "optional": true,
             "description": "The arguments to curry into a provided function. This is only valid if the <code>func</code> parameter is specified. These arguments must be JSON-serializable.",
             "items": { "type": "any" }
           },
           "files": {
             "type": "array",
             "optional": true,
-            "description": "The path of the JS or CSS files to inject, relative to the extension's root directory. Exactly one of <code>files</code> and <code>func</code> must be specified.",
+            "description": "The path of the JS files to inject, relative to the extension's root directory. Exactly one of <code>files</code> and <code>func</code> must be specified.",
             "minItems": 1,
             "items": { "type": "string" }
           },
           "func": {
             "type": "function",
             "optional": true,
             "description": "A JavaScript function to inject. This function will be serialized, and then deserialized for injection. This means that any bound parameters and execution context will be lost. Exactly one of <code>files</code> and <code>func</code> must be specified."
           },
@@ -52,41 +52,86 @@
         }
       },
       {
         "id": "InjectionResult",
         "type": "object",
         "description": "Result of a script injection.",
         "properties": {
           "frameId": {
-            "type": "number",
+            "type": "integer",
             "description": "The frame ID associated with the injection."
           },
           "result": {
             "type": "any",
             "optional": true,
             "description": "The result of the script execution."
+          },
+          "error": {
+            "type": "object",
+            "optional": true,
+            "description": "When the injection has failed, the error is exposed to the caller with this property.",
+            "properties": {
+              "message": {
+                "type": "string",
+                "description": "A message explaining why the injection has failed."
+              }
+            }
           }
         }
       },
       {
         "id": "InjectionTarget",
         "type": "object",
         "properties": {
           "frameIds": {
             "type": "array",
             "optional": true,
             "description": "The IDs of specific frames to inject into.",
             "items": { "type": "number" }
           },
+          "allFrames": {
+            "type": "boolean",
+            "optional": true,
+            "description": "Whether the script should inject into all frames within the tab. Defaults to false. This must not be true if <code>frameIds</code> is specified."
+          },
           "tabId": {
             "type": "number",
             "description": "The ID of the tab into which to inject."
           }
         }
+      },
+      {
+        "id": "CSSInjection",
+        "type": "object",
+        "properties": {
+          "css": {
+            "type": "string",
+            "optional": true,
+            "description": "A string containing the CSS to inject. Exactly one of <code>files</code> and <code>css</code> must be specified."
+          },
+          "files": {
+            "type": "array",
+            "optional": true,
+            "description": "The path of the CSS files to inject, relative to the extension's root directory. Exactly one of <code>files</code> and <code>css</code> must be specified.",
+            "minItems": 1,
+            "items": { "type": "string" }
+          },
+          "origin": {
+            "type": "string",
+            "optional": true,
+            "enum": ["USER", "AUTHOR"],
+            "default": "AUTHOR",
+            "description": "The style origin for the injection. Defaults to <code>'AUTHOR'</code>."
+          },
+          "target": {
+            "$ref": "InjectionTarget",
+            "description": "Details specifying the target into which to inject the CSS."
+          }
+        }
       }
     ],
     "functions": [
       {
         "name": "executeScript",
         "type": "function",
         "description": "Injects a script into a target context. The script will be run at <code>document_idle</code>.",
         "async": "callback",
@@ -104,12 +149,50 @@
               {
                 "name": "results",
                 "type": "array",
                 "items": { "$ref": "InjectionResult" }
               }
             ]
           }
         ]
+      },
+      {
+        "name": "insertCSS",
+        "type": "function",
+        "description": "Inserts a CSS stylesheet into a target context. If multiple frames are specified, unsuccessful injections are ignored.",
+        "async": "callback",
+        "parameters": [
+          {
+            "name": "injection",
+            "$ref": "CSSInjection",
+            "description": "The details of the styles to insert."
+          },
+          {
+            "name": "callback",
+            "type": "function",
+            "description": "Invoked upon completion of the injection.",
+            "parameters": []
+          }
+        ]
+      },
+      {
+        "name": "removeCSS",
+        "type": "function",
+        "description": "Removes a CSS stylesheet that was previously inserted by this extension from a target context.",
+        "async": "callback",
+        "parameters": [
+          {
+            "name": "injection",
+            "$ref": "CSSInjection",
+            "description": "The details of the styles to remove. Note that the <code>css</code>, <code>files</code>, and <code>origin</code> properties must exactly match the stylesheet inserted through <code>insertCSS</code>. Attempting to remove a non-existent stylesheet is a no-op."
+          },
+          {
+            "name": "callback",
+            "type": "function",
+            "description": "Invoked upon completion of the injection.",
+            "parameters": []
+          }
+        ]
       }
     ]
   }
 ]
--- a/toolkit/components/extensions/test/browser/browser.ini
+++ b/toolkit/components/extensions/test/browser/browser.ini
@@ -22,16 +22,17 @@ skip-if = verify
 [browser_ext_themes_findbar.js]
 [browser_ext_themes_getCurrent_differentExt.js]
 [browser_ext_themes_highlight.js]
 [browser_ext_themes_incognito.js]
 [browser_ext_themes_lwtsupport.js]
 [browser_ext_themes_multiple_backgrounds.js]
 [browser_ext_themes_ntp_colors.js]
 [browser_ext_themes_ntp_colors_perwindow.js]
+[browser_ext_themes_pbm.js]
 [browser_ext_themes_persistence.js]
 [browser_ext_themes_reset.js]
 [browser_ext_themes_sanitization.js]
 [browser_ext_themes_separators.js]
 [browser_ext_themes_sidebars.js]
 [browser_ext_themes_static_onUpdated.js]
 [browser_ext_themes_tab_line.js]
 [browser_ext_themes_tab_loading.js]
--- a/toolkit/components/extensions/test/browser/browser_ext_themes_ntp_colors.js
+++ b/toolkit/components/extensions/test/browser/browser_ext_themes_ntp_colors.js
@@ -158,16 +158,19 @@ add_task(async function test_support_ntp
     set: [
       // BrowserTestUtils.withNewTab waits for about:newtab to load
       // so we disable preloading before running the test.
       ["browser.newtab.preload", false],
       // Force prefers-color-scheme to "system", as otherwise it is derived
       // from the newtab background colors, but we hard-code the light styles
       // on this test.
       ["layout.css.prefers-color-scheme.content-override", 2],
+      // Override the system color scheme to light so this test passes on
+      // machines with dark system color scheme.
+      ["ui.systemUsesDarkTheme", 0],
     ],
   });
   NewTabPagePreloading.removePreloadedBrowser(window);
   for (let url of ["about:newtab", "about:home"]) {
     info("Opening url: " + url);
     await BrowserTestUtils.withNewTab({ gBrowser, url }, async browser => {
       await waitForAboutNewTabReady(browser, url);
       await test_ntp_theme(
new file mode 100644
--- /dev/null
+++ b/toolkit/components/extensions/test/browser/browser_ext_themes_pbm.js
@@ -0,0 +1,314 @@
+/* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
+/* vim: set sts=2 sw=2 et tw=80: */
+"use strict";
+
+/**
+ * Tests that we apply dark theme variants to PBM windows where applicable.
+ */
+
+const { BuiltInThemes } = ChromeUtils.import(
+  "resource:///modules/BuiltInThemes.jsm"
+);
+
+const IS_LINUX = AppConstants.platform == "linux";
+
+const LIGHT_THEME_ID = "firefox-compact-light@mozilla.org";
+const DARK_THEME_ID = "firefox-compact-dark@mozilla.org";
+
+/**
+ * Test a window's theme color scheme.
+ * @param {Object} options - Test options.
+ * @param {Window} options.win - Window object to test.
+ * @param {boolean} options.expectDark - Whether the expected chrome color scheme
+ * is dark (true) or light (false).
+ * @param {boolean} options.expectLWTAttributes - Whether the window should
+ * have the LWT attributes set matching the color scheme.
+ * @param {boolean} options.expectDefaultDarkAttribute - Whether the window
+ * should have the "lwt-default-theme-in-dark-mode" attribute.
+ */
+async function testWindowColorScheme({
+  win,
+  expectDark,
+  expectLWTAttributes,
+  expectDefaultDarkAttribute,
+}) {
+  let docEl = win.document.documentElement;
+
+  is(
+    docEl.hasAttribute("lwt-default-theme-in-dark-mode"),
+    expectDefaultDarkAttribute,
+    `Window should${
+      expectDefaultDarkAttribute ? "" : " not"
+    } have lwt-default-theme-in-dark-mode attribute.`
+  );
+
+  if (expectLWTAttributes) {
+    ok(docEl.hasAttribute("lwtheme"), "Window should have LWT attribute.");
+    is(
+      docEl.getAttribute("lwthemetextcolor"),
+      expectDark ? "bright" : "dark",
+      "LWT text color attribute should be set."
+    );
+  } else {
+    ok(!docEl.hasAttribute("lwtheme"), "Window should not have LWT attribute.");
+    ok(
+      !docEl.hasAttribute("lwthemetextcolor"),
+      "LWT text color attribute should not be set."
+    );
+  }
+}
+
+add_task(async function setup() {
+  // Set system theme to light to ensure consistency across test machines.
+  await SpecialPowers.pushPrefEnv({
+    set: [
+      ["browser.theme.dark-private-windows", true],
+      ["ui.systemUsesDarkTheme", 0],
+    ],
+  });
+  // Ensure the built-in themes are initialized.
+  await BuiltInThemes.ensureBuiltInThemes();
+
+  // The previous test, browser_ext_themes_ntp_colors.js, has side effects.
+  // Switch to a theme, then switch back to the default theme to reach a
+  // consistent themeData state. Without this, themeData in
+  // LightweightThemeConsumer#_update does not contain darkTheme data and PBM
+  // windows don't get themed correctly.
+  let lightTheme = await AddonManager.getAddonByID(LIGHT_THEME_ID);
+  await lightTheme.enable();
+  await lightTheme.disable();
+});
+
+// For the default theme with light color scheme, private browsing windows
+// should be themed dark.
+// The PBM window's content should not be themed dark.
+add_task(async function test_default_theme_light() {
+  info("Normal browsing window should not be in dark mode.");
+  await testWindowColorScheme({
+    win: window,
+    expectDark: false,
+    expectLWTAttributes: false,
+    expectDefaultDarkAttribute: false,
+  });
+
+  let windowB = await BrowserTestUtils.openNewBrowserWindow();
+
+  info("Additional normal browsing window should not be in dark mode.");
+  await testWindowColorScheme({
+    win: windowB,
+    expectDark: false,
+    expectLWTAttributes: false,
+    expectDefaultDarkAttribute: false,
+  });
+
+  let pbmWindowA = await BrowserTestUtils.openNewBrowserWindow({
+    private: true,
+  });
+
+  info("Private browsing window should be in dark mode.");
+  await testWindowColorScheme({
+    win: pbmWindowA,
+    expectDark: true,
+    expectLWTAttributes: true,
+    expectDefaultDarkAttribute: true,
+  });
+
+  let prefersColorScheme = await SpecialPowers.spawn(
+    pbmWindowA.gBrowser.selectedBrowser,
+    [],
+    async () => {
+      // LookAndFeel updates are async.
+      await new Promise(resolve => {
+        content.requestAnimationFrame(() =>
+          content.requestAnimationFrame(resolve)
+        );
+      });
+      return {
+        light: content.matchMedia("(prefers-color-scheme: light)").matches,
+        dark: content.matchMedia("(prefers-color-scheme: dark)").matches,
+      };
+    }
+  );
+  ok(
+    prefersColorScheme.light && !prefersColorScheme.dark,
+    "Content of dark themed PBM window should still be themed light"
+  );
+
+  let pbmWindowB = await BrowserTestUtils.openNewBrowserWindow({
+    private: true,
+  });
+  info("Additional private browsing window should be in dark mode.");
+  await testWindowColorScheme({
+    win: pbmWindowB,
+    expectDark: true,
+    expectLWTAttributes: true,
+    expectDefaultDarkAttribute: true,
+  });
+
+  await BrowserTestUtils.closeWindow(windowB);
+  await BrowserTestUtils.closeWindow(pbmWindowA);
+  await BrowserTestUtils.closeWindow(pbmWindowB);
+});
+
+// For the default theme with dark color scheme, normal and private browsing
+// windows should be themed dark.
+add_task(async function test_default_theme_dark() {
+  // Set the system theme to dark. The default theme will follow this color
+  // scheme.
+  await SpecialPowers.pushPrefEnv({ set: [["ui.systemUsesDarkTheme", 1]] });
+
+  info("Normal browsing window should be in dark mode.");
+  await testWindowColorScheme({
+    win: window,
+    expectDark: true,
+    expectLWTAttributes: !IS_LINUX,
+    expectDefaultDarkAttribute: !IS_LINUX,
+  });
+
+  let pbmWindow = await BrowserTestUtils.openNewBrowserWindow({
+    private: true,
+  });
+
+  info("Private browsing window should be in dark mode.");
+  await testWindowColorScheme({
+    win: pbmWindow,
+    expectDark: true,
+    expectLWTAttributes: !IS_LINUX,
+    expectDefaultDarkAttribute: !IS_LINUX,
+  });
+
+  await BrowserTestUtils.closeWindow(pbmWindow);
+
+  await SpecialPowers.popPrefEnv();
+});
+
+// For the light theme, both normal and private browsing windows should have a
+// bright color scheme applied.
+add_task(async function test_light_theme_builtin() {
+  let lightTheme = await AddonManager.getAddonByID(LIGHT_THEME_ID);
+  await lightTheme.enable();
+
+  info("Normal browsing window should not be in dark mode.");
+  await testWindowColorScheme({
+    win: window,
+    expectDark: false,
+    expectLWTAttributes: true,
+    expectDefaultDarkAttribute: false,
+  });
+
+  let pbmWindow = await BrowserTestUt