Merge autoland to mozilla-central. a=merge
authorBrindusan Cristian <cbrindusan@mozilla.com>
Sat, 14 Aug 2021 12:40:42 +0300
changeset 588875 e67bca14d669f6994356264cd15ed8a803a97e8b
parent 588856 d6e2bf1d80b0de62e93d3206fbfe048e1ed4caab (current diff)
parent 588874 c0b40ec24ca1ecf4ab1db3e7cad9d51914c55353 (diff)
child 588973 7545f9c667372d159f91ddcd0ec1a7fdf42d9500
push id38706
push usercbrindusan@mozilla.com
push dateSat, 14 Aug 2021 09:42:00 +0000
treeherdermozilla-central@e67bca14d669 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersmerge
milestone93.0a1
first release with
nightly linux32
e67bca14d669 / 93.0a1 / 20210814094200 / files
nightly linux64
e67bca14d669 / 93.0a1 / 20210814094200 / files
nightly mac
e67bca14d669 / 93.0a1 / 20210814094200 / files
nightly win32
e67bca14d669 / 93.0a1 / 20210814094200 / files
nightly win64
e67bca14d669 / 93.0a1 / 20210814094200 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge autoland to mozilla-central. a=merge
gfx/2d/CGTextDrawing.h
--- a/browser/components/newtab/lib/ActivityStream.jsm
+++ b/browser/components/newtab/lib/ActivityStream.jsm
@@ -198,23 +198,16 @@ const PREFS_CONFIG = new Map([
         cta_button: "",
         cta_text: "",
         cta_url: "",
         use_cta: false,
       }),
     },
   ],
   [
-    "filterAdult",
-    {
-      title: "Remove adult pages from sites, highlights, etc.",
-      value: true,
-    },
-  ],
-  [
     "showSearch",
     {
       title: "Show the Search bar",
       value: true,
     },
   ],
   [
     "feeds.snippets",
--- a/browser/components/newtab/lib/FilterAdult.jsm
+++ b/browser/components/newtab/lib/FilterAdult.jsm
@@ -1,19 +1,30 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
+const { XPCOMUtils } = ChromeUtils.import(
+  "resource://gre/modules/XPCOMUtils.jsm"
+);
+
 ChromeUtils.defineModuleGetter(
   this,
   "Services",
   "resource://gre/modules/Services.jsm"
 );
 
+XPCOMUtils.defineLazyPreferenceGetter(
+  this,
+  "gFilterAdultEnabled",
+  "browser.newtabpage.activity-stream.filterAdult",
+  true
+);
+
 // Keep a Set of adult base domains for lookup (initialized at end of file)
 let gAdultSet;
 
 // Keep a hasher for repeated hashings
 let gCryptoHash = null;
 
 /**
  * Run some text through md5 and return the base64 result.
@@ -33,31 +44,80 @@ function md5Hash(text) {
     text.split("").map(c => c.charCodeAt(0)),
     text.length
   );
 
   // Request the has result as ASCII base64
   return gCryptoHash.finish(true);
 }
 
-/**
- * Filter out any link objects that have a url with an adult base domain.
- */
-function filterAdult(links) {
-  return links.filter(({ url }) => {
+const FilterAdult = {
+  /**
+   * Filter out any link objects that have a url with an adult base domain.
+   *
+   * @param {string[]} links
+   *   An array of links to test.
+   * @returns {string[]}
+   *   A filtered array without adult links.
+   */
+  filter(links) {
+    if (!gFilterAdultEnabled) {
+      return links;
+    }
+
+    return links.filter(({ url }) => {
+      try {
+        const uri = Services.io.newURI(url);
+        return !gAdultSet.has(md5Hash(Services.eTLD.getBaseDomain(uri)));
+      } catch (ex) {
+        return true;
+      }
+    });
+  },
+
+  /**
+   * Determine if the supplied url is an adult url or not.
+   *
+   * @param {string} url
+   *   The url to test.
+   * @returns {boolean}
+   *   True if it is an adult url.
+   */
+  isAdultUrl(url) {
+    if (!gFilterAdultEnabled) {
+      return false;
+    }
     try {
       const uri = Services.io.newURI(url);
-      return !gAdultSet.has(md5Hash(Services.eTLD.getBaseDomain(uri)));
+      return gAdultSet.has(md5Hash(Services.eTLD.getBaseDomain(uri)));
     } catch (ex) {
-      return true;
+      return false;
     }
-  });
-}
+  },
+
+  /**
+   * For tests, adds a domain to the adult list.
+   */
+  addDomainToList(url) {
+    gAdultSet.add(
+      md5Hash(Services.eTLD.getBaseDomain(Services.io.newURI(url)))
+    );
+  },
 
-const EXPORTED_SYMBOLS = ["filterAdult"];
+  /**
+   * For tests, removes a domain from the adult list.
+   */
+  removeDomainFromList(url) {
+    gAdultSet.delete(
+      md5Hash(Services.eTLD.getBaseDomain(Services.io.newURI(url)))
+    );
+  },
+};
+
+const EXPORTED_SYMBOLS = ["FilterAdult"];
 
 // These are md5 hashes of base domains to be filtered out. Originally from:
 // https://hg.mozilla.org/mozilla-central/log/default/browser/base/content/newtab/newTab.inadjacent.json
 gAdultSet = new Set([
   "+/UCpAhZhz368iGioEO8aQ==",
   "+1e7jvUo8f2/2l0TFrQqfA==",
   "+1gcqAqaRZwCj5BGiZp3CA==",
   "+25t/2lo0FUEtWYK8LdQZQ==",
--- a/browser/components/newtab/lib/HighlightsFeed.jsm
+++ b/browser/components/newtab/lib/HighlightsFeed.jsm
@@ -20,17 +20,17 @@ const {
   TOP_SITES_MAX_SITES_PER_ROW,
 } = ChromeUtils.import("resource://activity-stream/common/Reducers.jsm");
 const { Dedupe } = ChromeUtils.import(
   "resource://activity-stream/common/Dedupe.jsm"
 );
 
 ChromeUtils.defineModuleGetter(
   this,
-  "filterAdult",
+  "FilterAdult",
   "resource://activity-stream/lib/FilterAdult.jsm"
 );
 ChromeUtils.defineModuleGetter(
   this,
   "LinksCache",
   "resource://activity-stream/lib/LinksCache.jsm"
 );
 ChromeUtils.defineModuleGetter(
@@ -212,19 +212,17 @@ this.HighlightsFeed = class HighlightsFe
           type: "download",
         });
       }
     }
 
     const orderedPages = this._orderHighlights(manyPages);
 
     // Remove adult highlights if we need to
-    const checkedAdult = this.store.getState().Prefs.values.filterAdult
-      ? filterAdult(orderedPages)
-      : orderedPages;
+    const checkedAdult = FilterAdult.filter(orderedPages);
 
     // Remove any Highlights that are in Top Sites already
     const [, deduped] = this.dedupe.group(
       this.store.getState().TopSites.rows,
       checkedAdult
     );
 
     // Keep all "bookmark"s and at most one (most recent) "history" per host
--- a/browser/components/newtab/lib/TopSitesFeed.jsm
+++ b/browser/components/newtab/lib/TopSitesFeed.jsm
@@ -33,17 +33,17 @@ const {
   SEARCH_SHORTCUTS_HAVE_PINNED_PREF,
   checkHasSearchEngine,
   getSearchProvider,
   getSearchFormURL,
 } = ChromeUtils.import("resource://activity-stream/lib/SearchShortcuts.jsm");
 
 ChromeUtils.defineModuleGetter(
   this,
-  "filterAdult",
+  "FilterAdult",
   "resource://activity-stream/lib/FilterAdult.jsm"
 );
 ChromeUtils.defineModuleGetter(
   this,
   "LinksCache",
   "resource://activity-stream/lib/LinksCache.jsm"
 );
 ChromeUtils.defineModuleGetter(
@@ -805,19 +805,17 @@ this.TopSitesFeed = class TopSitesFeed {
       ,
       dedupedSponsored,
       dedupedFrecent,
       dedupedDefaults,
     ] = this.dedupe.group(pinned, sponsored, frecent, notBlockedDefaultSites);
     const dedupedUnpinned = [...dedupedFrecent, ...dedupedDefaults];
 
     // Remove adult sites if we need to
-    const checkedAdult = prefValues.filterAdult
-      ? filterAdult(dedupedUnpinned)
-      : dedupedUnpinned;
+    const checkedAdult = FilterAdult.filter(dedupedUnpinned);
 
     // Insert the original pinned sites into the deduped frecent and defaults.
     let withPinned = insertPinned(checkedAdult, pinned);
     // Insert sponsored sites at their desired position.
     dedupedSponsored.forEach(link => {
       if (!link) {
         return;
       }
--- a/browser/components/newtab/test/unit/lib/FilterAdult.test.js
+++ b/browser/components/newtab/test/unit/lib/FilterAdult.test.js
@@ -1,12 +1,12 @@
-import { filterAdult } from "lib/FilterAdult.jsm";
+import { FilterAdult } from "lib/FilterAdult.jsm";
 import { GlobalOverrider } from "test/unit/utils";
 
-describe("filterAdult", () => {
+describe("FilterAdult", () => {
   let hashStub;
   let hashValue;
   let globals;
 
   beforeEach(() => {
     globals = new GlobalOverrider();
     hashStub = {
       finish: sinon.stub().callsFake(() => hashValue),
@@ -15,40 +15,98 @@ describe("filterAdult", () => {
     };
     globals.set("Cc", {
       "@mozilla.org/security/hash;1": {
         createInstance() {
           return hashStub;
         },
       },
     });
+    globals.set("gFilterAdultEnabled", true);
   });
 
   afterEach(() => {
+    hashValue = "";
     globals.restore();
   });
 
-  it("should default to include on unexpected urls", () => {
-    const empty = {};
+  describe("filter", () => {
+    it("should default to include on unexpected urls", () => {
+      const empty = {};
+
+      const result = FilterAdult.filter([empty]);
 
-    const result = filterAdult([empty]);
+      assert.equal(result.length, 1);
+      assert.equal(result[0], empty);
+    });
+    it("should not filter out non-adult urls", () => {
+      const link = { url: "https://mozilla.org/" };
+
+      const result = FilterAdult.filter([link]);
 
-    assert.equal(result.length, 1);
-    assert.equal(result[0], empty);
+      assert.equal(result.length, 1);
+      assert.equal(result[0], link);
+    });
+    it("should filter out adult urls", () => {
+      // Use a hash value that is in the adult set
+      hashValue = "+/UCpAhZhz368iGioEO8aQ==";
+      const link = { url: "https://some-adult-site/" };
+
+      const result = FilterAdult.filter([link]);
+
+      assert.equal(result.length, 0);
+    });
+    it("should not filter out adult urls if the preference is turned off", () => {
+      // Use a hash value that is in the adult set
+      hashValue = "+/UCpAhZhz368iGioEO8aQ==";
+      globals.set("gFilterAdultEnabled", false);
+      const link = { url: "https://some-adult-site/" };
+
+      const result = FilterAdult.filter([link]);
+
+      assert.equal(result.length, 1);
+      assert.equal(result[0], link);
+    });
   });
-  it("should not filter out non-adult urls", () => {
-    const link = { url: "https://mozilla.org/" };
 
-    const result = filterAdult([link]);
+  describe("isAdultUrl", () => {
+    it("should default to false on unexpected urls", () => {
+      const result = FilterAdult.isAdultUrl("");
+
+      assert.equal(result, false);
+    });
+    it("should return false for non-adult urls", () => {
+      const result = FilterAdult.isAdultUrl("https://mozilla.org/");
+
+      assert.equal(result, false);
+    });
+    it("should return true for adult urls", () => {
+      // Use a hash value that is in the adult set
+      hashValue = "+/UCpAhZhz368iGioEO8aQ==";
+      const result = FilterAdult.isAdultUrl("https://some-adult-site/");
 
-    assert.equal(result.length, 1);
-    assert.equal(result[0], link);
-  });
-  it("should filter out adult urls", () => {
-    // Use a hash value that is in the adult set
-    hashValue = "+/UCpAhZhz368iGioEO8aQ==";
-    const link = { url: "https://some-adult-site/" };
+      assert.equal(result, true);
+    });
+    it("should return false for adult urls when the preference is turned off", () => {
+      // Use a hash value that is in the adult set
+      hashValue = "+/UCpAhZhz368iGioEO8aQ==";
+      globals.set("gFilterAdultEnabled", false);
+      const result = FilterAdult.isAdultUrl("https://some-adult-site/");
+
+      assert.equal(result, false);
+    });
 
-    const result = filterAdult([link]);
+    describe("test functions", () => {
+      it("should add and remove a filter in the adult list", () => {
+        // Use a hash value that is in the adult set
+        FilterAdult.addDomainToList("https://some-adult-site/");
+        let result = FilterAdult.isAdultUrl("https://some-adult-site/");
 
-    assert.equal(result.length, 0);
+        assert.equal(result, true);
+
+        FilterAdult.removeDomainFromList("https://some-adult-site/");
+        result = FilterAdult.isAdultUrl("https://some-adult-site/");
+
+        assert.equal(result, false);
+      });
+    });
   });
 });
--- a/browser/components/newtab/test/unit/lib/HighlightsFeed.test.js
+++ b/browser/components/newtab/test/unit/lib/HighlightsFeed.test.js
@@ -55,35 +55,38 @@ describe("Highlights Feed", () => {
       onAction: sinon.spy(),
       init: sinon.spy(),
     });
     fakeScreenshot = {
       getScreenshotForURL: sandbox.spy(() => Promise.resolve(FAKE_IMAGE)),
       maybeCacheScreenshot: Screenshots.maybeCacheScreenshot,
       _shouldGetScreenshots: sinon.stub().returns(true),
     };
-    filterAdultStub = sinon.stub().returns([]);
+    filterAdultStub = {
+      filter: sinon.stub().returnsArg(0),
+    };
     shortURLStub = sinon
       .stub()
       .callsFake(site => site.url.match(/\/([^/]+)/)[1]);
     fakePageThumbs = {
       addExpirationFilter: sinon.stub(),
       removeExpirationFilter: sinon.stub(),
     };
 
     globals.set("NewTabUtils", fakeNewTabUtils);
     globals.set("PageThumbs", fakePageThumbs);
+    globals.set("gFilterAdultEnabled", false);
     ({
       HighlightsFeed,
       SECTION_ID,
       SYNC_BOOKMARKS_FINISHED_EVENT,
       BOOKMARKS_RESTORE_SUCCESS_EVENT,
       BOOKMARKS_RESTORE_FAILED_EVENT,
     } = injector({
-      "lib/FilterAdult.jsm": { filterAdult: filterAdultStub },
+      "lib/FilterAdult.jsm": { FilterAdult: filterAdultStub },
       "lib/ShortURL.jsm": { shortURL: shortURLStub },
       "lib/SectionsManager.jsm": { SectionsManager: sectionsManagerStub },
       "lib/Screenshots.jsm": { Screenshots: fakeScreenshot },
       "common/Dedupe.jsm": { Dedupe },
       "lib/DownloadsManager.jsm": { DownloadsManager: downloadsManagerStub },
     }));
     sandbox.spy(global.Services.obs, "addObserver");
     sandbox.spy(global.Services.obs, "removeObserver");
@@ -91,17 +94,16 @@ describe("Highlights Feed", () => {
     feed.store = {
       dispatch: sinon.spy(),
       getState() {
         return this.state;
       },
       state: {
         Prefs: {
           values: {
-            filterAdult: false,
             "section.highlights.includePocket": false,
             "section.highlights.includeDownloads": false,
           },
         },
         TopSites: {
           initialized: true,
           rows: Array(12)
             .fill(null)
@@ -565,28 +567,22 @@ describe("Highlights Feed", () => {
           bookmarkGuid: "1234567890",
         },
       ];
 
       const highlights = await fetchHighlights();
 
       assert.propertyVal(highlights[0], "type", "history");
     });
-    it("should not filter out adult pages when pref is false", async () => {
-      await feed.fetchHighlights();
-
-      assert.notCalled(filterAdultStub);
-    });
-    it("should filter out adult pages when pref is true", async () => {
-      feed.store.state.Prefs.values.filterAdult = true;
-
+    it("should filter out adult pages", async () => {
+      filterAdultStub.filter = sinon.stub().returns([]);
       const highlights = await fetchHighlights();
 
       // The stub filters out everything
-      assert.calledOnce(filterAdultStub);
+      assert.calledOnce(filterAdultStub.filter);
       assert.equal(highlights.length, 0);
     });
     it("should not expose internal link properties", async () => {
       const highlights = await fetchHighlights();
 
       const internal = Object.keys(highlights[0]).filter(key =>
         key.startsWith("__")
       );
--- a/browser/components/newtab/test/unit/lib/TopSitesFeed.test.js
+++ b/browser/components/newtab/test/unit/lib/TopSitesFeed.test.js
@@ -87,40 +87,43 @@ describe("Top Sites Feed", () => {
         unpin: sandbox.spy(),
       },
     };
     fakeScreenshot = {
       getScreenshotForURL: sandbox.spy(() => Promise.resolve(FAKE_SCREENSHOT)),
       maybeCacheScreenshot: sandbox.spy(Screenshots.maybeCacheScreenshot),
       _shouldGetScreenshots: sinon.stub().returns(true),
     };
-    filterAdultStub = sinon.stub().returns([]);
+    filterAdultStub = {
+      filter: sinon.stub().returnsArg(0),
+    };
     shortURLStub = sinon
       .stub()
       .callsFake(site =>
         site.url.replace(/(.com|.ca)/, "").replace("https://", "")
       );
     const fakeDedupe = function() {};
     fakePageThumbs = {
       addExpirationFilter: sinon.stub(),
       removeExpirationFilter: sinon.stub(),
     };
     globals.set("PageThumbs", fakePageThumbs);
     globals.set("NewTabUtils", fakeNewTabUtils);
+    globals.set("gFilterAdultEnabled", false);
     sandbox.spy(global.XPCOMUtils, "defineLazyGetter");
     FakePrefs.prototype.prefs["default.sites"] = "https://foo.com/";
     ({ TopSitesFeed, DEFAULT_TOP_SITES } = injector({
       "lib/ActivityStreamPrefs.jsm": { Prefs: FakePrefs },
       "common/Dedupe.jsm": { Dedupe: fakeDedupe },
       "common/Reducers.jsm": {
         insertPinned,
         TOP_SITES_DEFAULT_ROWS,
         TOP_SITES_MAX_SITES_PER_ROW,
       },
-      "lib/FilterAdult.jsm": { filterAdult: filterAdultStub },
+      "lib/FilterAdult.jsm": { FilterAdult: filterAdultStub },
       "lib/Screenshots.jsm": { Screenshots: fakeScreenshot },
       "lib/TippyTopProvider.jsm": { TippyTopProvider: FakeTippyTopProvider },
       "lib/ShortURL.jsm": { shortURL: shortURLStub },
       "lib/ActivityStreamStorage.jsm": {
         ActivityStreamStorage: function Fake() {},
         getDefaultOptions,
       },
     }));
@@ -133,17 +136,17 @@ describe("Top Sites Feed", () => {
     // Setup for tests that don't call `init` but require feed.storage
     feed._storage = storage;
     feed.store = {
       dispatch: sinon.spy(),
       getState() {
         return this.state;
       },
       state: {
-        Prefs: { values: { filterAdult: false, topSitesRows: 2 } },
+        Prefs: { values: { topSitesRows: 2 } },
         TopSites: { rows: Array(12).fill("site") },
       },
       dbStorage: { getDbTable: sandbox.stub().returns(storage) },
     };
     feed.dedupe.group = (...sites) => sites;
     links = FAKE_LINKS;
     // Turn off the search shortcuts experiment by default for other tests
     feed.store.state.Prefs.values[SEARCH_SHORTCUTS_EXPERIMENT_PREF] = false;
@@ -255,29 +258,24 @@ describe("Top Sites Feed", () => {
         assert.deepEqual(result, reference);
         assert.calledOnce(global.NewTabUtils.activityStreamLinks.getTopSites);
       });
       it("should indicate the links get typed bonus", async () => {
         const result = await feed.getLinksWithDefaults();
 
         assert.propertyVal(result[0], "typedBonus", true);
       });
-      it("should not filter out adult sites when pref is false", async () => {
-        await feed.getLinksWithDefaults();
-
-        assert.notCalled(filterAdultStub);
-      });
-      it("should filter out non-pinned adult sites when pref is true", async () => {
-        feed.store.state.Prefs.values.filterAdult = true;
+      it("should filter out non-pinned adult sites", async () => {
+        filterAdultStub.filter = sinon.stub().returns([]);
         fakeNewTabUtils.pinnedLinks.links = [{ url: "https://foo.com/" }];
 
         const result = await feed.getLinksWithDefaults();
 
         // The stub filters out everything
-        assert.calledOnce(filterAdultStub);
+        assert.calledOnce(filterAdultStub.filter);
         assert.equal(result.length, 1);
         assert.equal(result[0].url, fakeNewTabUtils.pinnedLinks.links[0].url);
       });
       it("should filter out the defaults that have been blocked", async () => {
         // make sure we only have one top site, and we block the only default site we have to show
         const url = "www.myonlytopsite.com";
         const topsite = {
           frecency: FAKE_FRECENCY,
--- a/browser/components/places/InteractionsBlocklist.jsm
+++ b/browser/components/places/InteractionsBlocklist.jsm
@@ -6,16 +6,17 @@
 
 var EXPORTED_SYMBOLS = ["InteractionsBlocklist"];
 
 const { XPCOMUtils } = ChromeUtils.import(
   "resource://gre/modules/XPCOMUtils.jsm"
 );
 
 XPCOMUtils.defineLazyModuleGetters(this, {
+  FilterAdult: "resource://activity-stream/lib/FilterAdult.jsm",
   Services: "resource://gre/modules/Services.jsm",
   UrlbarUtils: "resource:///modules/UrlbarUtils.jsm",
 });
 
 XPCOMUtils.defineLazyGetter(this, "logConsole", function() {
   return console.createInstance({
     prefix: "InteractionsBlocklist",
     maxLogLevel: Services.prefs.getBoolPref(
@@ -115,16 +116,20 @@ class _InteractionsBlocklist {
    * should not record an interaction.
    *
    * @param {string} urlToCheck
    *   The URL we are looking for on the blocklist.
    * @returns {boolean}
    *  True if `url` is on a blocklist. False otherwise.
    */
   isUrlBlocklisted(urlToCheck) {
+    if (FilterAdult.isAdultUrl(urlToCheck)) {
+      return true;
+    }
+
     // First, find the URL's base host: the hostname without any subdomains or a
     // public suffix.
     let url;
     try {
       url = new URL(urlToCheck);
       if (!url) {
         throw new Error();
       }
--- a/browser/components/places/tests/browser/interactions/browser_interactions_blocklist.js
+++ b/browser/components/places/tests/browser/interactions/browser_interactions_blocklist.js
@@ -1,19 +1,20 @@
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
 /**
  * Tests that interactions are not recorded for sites on the blocklist.
  */
 
-const ALLOWED_TEST_URL = "https://example.com/";
+const ALLOWED_TEST_URL = "http://mochi.test:8888/";
 const BLOCKED_TEST_URL = "https://example.com/browser";
 
 XPCOMUtils.defineLazyModuleGetters(this, {
+  FilterAdult: "resource://activity-stream/lib/FilterAdult.jsm",
   InteractionsBlocklist: "resource:///modules/InteractionsBlocklist.jsm",
 });
 
 add_task(async function setup() {
   let oldBlocklistValue = Services.prefs.getStringPref(
     "places.interactions.customBlocklist",
     "[]"
   );
@@ -70,17 +71,17 @@ async function loadBlockedUrl(expectReco
           url: ALLOWED_TEST_URL,
           totalViewTime: 10000,
         },
       ]);
     }
   });
 }
 
-add_task(async function test() {
+add_task(async function test_regexp() {
   info("Record BLOCKED_TEST_URL because it is not yet blocklisted.");
   await loadBlockedUrl(true);
 
   info("Add BLOCKED_TEST_URL to the blocklist and verify it is not recorded.");
   let blockedRegex = /^(https?:\/\/)?example\.com\/browser/i;
   InteractionsBlocklist.addRegexToBlocklist(blockedRegex);
   Assert.equal(
     Services.prefs.getStringPref("places.interactions.customBlocklist", "[]"),
@@ -91,8 +92,14 @@ add_task(async function test() {
   info("Remove BLOCKED_TEST_URL from the blocklist and verify it is recorded.");
   InteractionsBlocklist.removeRegexFromBlocklist(blockedRegex);
   Assert.equal(
     Services.prefs.getStringPref("places.interactions.customBlocklist", "[]"),
     JSON.stringify([])
   );
   await loadBlockedUrl(true);
 });
+
+add_task(async function test_adult() {
+  FilterAdult.addDomainToList("https://example.com/browser");
+  await loadBlockedUrl(false);
+  FilterAdult.removeDomainFromList("https://example.com/browser");
+});
--- a/browser/themes/shared/UITour.inc.css
+++ b/browser/themes/shared/UITour.inc.css
@@ -3,17 +3,22 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 %endif
 
 /* UI Tour */
 
 #UITourHighlightContainer {
   appearance: none;
+
+  /* Resets the native styles in windows and macOS */
+  border: none;
+  background-color: transparent;
   -moz-window-shadow: none;
+
   --panel-border-color: transparent;
   --panel-background: transparent;
   /* This is a buffer to compensate for the movement in the "wobble" effect,
      and for the box-shadow of #UITourHighlight. */
   --panel-padding: 4px;
   /* Compensate the displacement caused by padding. */
   margin: -4px;
 }
--- a/build/moz.configure/toolchain.configure
+++ b/build/moz.configure/toolchain.configure
@@ -776,37 +776,44 @@ def compiler_wrapper(wrapper, ccache):
 @depends_if(compiler_wrapper)
 def using_compiler_wrapper(compiler_wrapper):
     return True
 
 
 set_config("MOZ_USING_COMPILER_WRAPPER", using_compiler_wrapper)
 
 
+@dependable
+def wasm():
+    return split_triplet("wasm32-wasi", allow_wasi=True)
+
+
 @template
 def default_c_compilers(host_or_target, other_c_compiler=None):
     """Template defining the set of default C compilers for the host and
     target platforms.
     `host_or_target` is either `host` or `target` (the @depends functions
     from init.configure.
     `other_c_compiler` is the `target` C compiler when `host_or_target` is `host`.
     """
-    assert host_or_target in {host, target}
+    assert host_or_target in {host, target, wasm}
 
     other_c_compiler = () if other_c_compiler is None else (other_c_compiler,)
 
     @depends(host_or_target, target, toolchain_prefix, *other_c_compiler)
     def default_c_compilers(
         host_or_target, target, toolchain_prefix, *other_c_compiler
     ):
         if host_or_target.kernel == "WINNT":
             supported = types = ("clang-cl", "clang")
         elif host_or_target.kernel == "Darwin":
             types = ("clang",)
             supported = ("clang", "gcc")
+        elif host_or_target.kernel == "WASI":
+            supported = types = ("clang",)
         else:
             supported = types = ("clang", "gcc")
 
         info = other_c_compiler[0] if other_c_compiler else None
         if info and info.type in supported:
             # When getting default C compilers for the host, we prioritize the
             # same compiler as the target C compiler.
             prioritized = info.compiler
@@ -1044,42 +1051,47 @@ def compiler(
     When the language is 'C++', `c_compiler` is the result of the `compiler`
     template for the language 'C' for the same `host_or_target`.
     When `host_or_target` is `host`, `other_compiler` is the result of the
     `compiler` template for the same `language` for `target`.
     When `host_or_target` is `host` and the language is 'C++',
     `other_c_compiler` is the result of the `compiler` template for the
     language 'C' for `target`.
     """
-    assert host_or_target in {host, target}
+    assert host_or_target in {host, target, wasm}
     assert language in ("C", "C++")
     assert language == "C" or c_compiler is not None
     assert host_or_target is target or other_compiler is not None
     assert language == "C" or host_or_target is target or other_c_compiler is not None
 
     host_or_target_str = {
         host: "host",
         target: "target",
+        wasm: "wasm",
     }[host_or_target]
 
     sysroot = {
         host: host_sysroot,
         target: target_sysroot,
+        wasm: dependable(lambda: namespace(path=None)),
     }[host_or_target]
 
     multiarch_dir = {
         host: host_multiarch_dir,
         target: target_multiarch_dir,
+        wasm: never,
     }[host_or_target]
 
     var = {
         ("C", target): "CC",
         ("C++", target): "CXX",
         ("C", host): "HOST_CC",
         ("C++", host): "HOST_CXX",
+        ("C", wasm): "WASM_CC",
+        ("C++", wasm): "WASM_CXX",
     }[language, host_or_target]
 
     default_compilers = {
         "C": lambda: default_c_compilers(host_or_target, other_compiler),
         "C++": lambda: default_cxx_compilers(
             c_compiler, other_c_compiler, other_compiler
         ),
     }[language]()
@@ -1347,29 +1359,31 @@ def compiler(
 
         set_config(pp_var, preprocessor)
         add_old_configure_assignment(pp_var, preprocessor)
 
     if language == "C":
         linker_var = {
             target: "LD",
             host: "HOST_LD",
-        }[host_or_target]
-
-        @deprecated_option(env=linker_var, nargs=1)
-        def linker(value):
-            if value:
-                return value[0]
-
-        @depends(linker)
-        def unused_linker(linker):
-            if linker:
-                log.warning(
-                    "The value of %s is not used by this build system." % linker_var
-                )
+        }.get(host_or_target)
+
+        if linker_var:
+
+            @deprecated_option(env=linker_var, nargs=1)
+            def linker(value):
+                if value:
+                    return value[0]
+
+            @depends(linker)
+            def unused_linker(linker):
+                if linker:
+                    log.warning(
+                        "The value of %s is not used by this build system." % linker_var
+                    )
 
     return valid_compiler
 
 
 c_compiler = compiler("C", target)
 cxx_compiler = compiler("C++", target, c_compiler=c_compiler)
 host_c_compiler = compiler("C", host, other_compiler=c_compiler)
 host_cxx_compiler = compiler(
--- a/dom/webgpu/Buffer.cpp
+++ b/dom/webgpu/Buffer.cpp
@@ -53,21 +53,26 @@ Buffer::~Buffer() {
   mozilla::DropJSObjects(this);
 }
 
 void Buffer::Cleanup() {
   if (mValid && mParent) {
     mValid = false;
     auto bridge = mParent->GetBridge();
     if (bridge && bridge->IsOpen()) {
+      // Note: even if the buffer is considered mapped,
+      // the shmem may be empty before the mapAsync callback
+      // is resolved.
+      if (mMapped && mMapped->mShmem.IsReadable()) {
+        // Note: if the bridge is closed, all associated shmems are already
+        // deleted.
+        bridge->DeallocShmem(mMapped->mShmem);
+      }
       bridge->SendBufferDestroy(mId);
     }
-    if (bridge && mMapped) {
-      bridge->DeallocShmem(mMapped->mShmem);
-    }
   }
 }
 
 void Buffer::SetMapped(ipc::Shmem&& aShmem, bool aWritable) {
   MOZ_ASSERT(!mMapped);
   mMapped.emplace();
   mMapped->mShmem = std::move(aShmem);
   mMapped->mWritable = aWritable;
deleted file mode 100644
--- a/gfx/2d/CGTextDrawing.h
+++ /dev/null
@@ -1,135 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-#ifndef _MOZILLA_GFX_SKIACGPOPUPDRAWER_H
-#define _MOZILLA_GFX_SKIACGPOPUPDRAWER_H
-
-#include <ApplicationServices/ApplicationServices.h>
-#include "nsDebug.h"
-#include "mozilla/Vector.h"
-#include "ScaledFontMac.h"
-#include <dlfcn.h>
-
-// This is used when we explicitly need CG to draw text to support things such
-// as vibrancy and subpixel AA on transparent backgrounds. The current use cases
-// are really only to enable Skia to support drawing text in those situations.
-
-namespace mozilla {
-namespace gfx {
-
-typedef void (*CGContextSetFontSmoothingBackgroundColorFunc)(CGContextRef cgContext,
-                                                             CGColorRef color);
-
-static CGContextSetFontSmoothingBackgroundColorFunc
-GetCGContextSetFontSmoothingBackgroundColorFunc() {
-  static CGContextSetFontSmoothingBackgroundColorFunc func = nullptr;
-  static bool lookedUpFunc = false;
-  if (!lookedUpFunc) {
-    func = (CGContextSetFontSmoothingBackgroundColorFunc)dlsym(
-        RTLD_DEFAULT, "CGContextSetFontSmoothingBackgroundColor");
-    lookedUpFunc = true;
-  }
-  return func;
-}
-
-static CGColorRef ColorToCGColor(CGColorSpaceRef aColorSpace, const DeviceColor& aColor) {
-  CGFloat components[4] = {aColor.r, aColor.g, aColor.b, aColor.a};
-  return CGColorCreate(aColorSpace, components);
-}
-
-static bool SetFontSmoothingBackgroundColor(CGContextRef aCGContext, CGColorSpaceRef aColorSpace,
-                                            const DeviceColor& aFontSmoothingBackgroundColor) {
-  if (aFontSmoothingBackgroundColor.a > 0) {
-    CGContextSetFontSmoothingBackgroundColorFunc setFontSmoothingBGColorFunc =
-        GetCGContextSetFontSmoothingBackgroundColorFunc();
-    if (setFontSmoothingBGColorFunc) {
-      CGColorRef color = ColorToCGColor(aColorSpace, aFontSmoothingBackgroundColor);
-      setFontSmoothingBGColorFunc(aCGContext, color);
-      CGColorRelease(color);
-      return true;
-    }
-  }
-
-  return false;
-}
-
-// Font rendering with a non-transparent font smoothing background color
-// can leave pixels in our buffer where the rgb components exceed the alpha
-// component. When this happens we need to clean up the data afterwards.
-// The purpose of this is probably the following: Correct compositing of
-// subpixel anti-aliased fonts on transparent backgrounds requires
-// different alpha values per RGB component. Usually, premultiplied color
-// values are derived by multiplying all components with the same per-pixel
-// alpha value. However, if you multiply each component with a *different*
-// alpha, and set the alpha component of the pixel to, say, the average
-// of the alpha values that you used during the premultiplication of the
-// RGB components, you can trick OVER compositing into doing a simplified
-// form of component alpha compositing. (You just need to make sure to
-// clamp the components of the result pixel to [0,255] afterwards.)
-static void EnsureValidPremultipliedData(CGContextRef aContext,
-                                         CGRect aTextBounds = CGRectInfinite) {
-  if (CGBitmapContextGetBitsPerPixel(aContext) != 32 ||
-      CGBitmapContextGetAlphaInfo(aContext) != kCGImageAlphaPremultipliedFirst) {
-    return;
-  }
-
-  uint8_t* bitmapData = (uint8_t*)CGBitmapContextGetData(aContext);
-  CGRect bitmapBounds =
-      CGRectMake(0, 0, CGBitmapContextGetWidth(aContext), CGBitmapContextGetHeight(aContext));
-  int stride = CGBitmapContextGetBytesPerRow(aContext);
-
-  CGRect bounds = CGRectIntersection(bitmapBounds, aTextBounds);
-  if (CGRectIsEmpty(bounds)) {
-    return;
-  }
-
-  int startX = bounds.origin.x;
-  int endX = startX + bounds.size.width;
-  MOZ_ASSERT(endX <= bitmapBounds.size.width);
-
-  // CGRect assume that our origin is the bottom left.
-  // The data assumes that the origin is the top left.
-  // Have to switch the Y axis so that our coordinates are correct
-  int startY = bitmapBounds.size.height - (bounds.origin.y + bounds.size.height);
-  int endY = startY + bounds.size.height;
-  MOZ_ASSERT(endY <= (int)CGBitmapContextGetHeight(aContext));
-
-  for (int y = startY; y < endY; y++) {
-    for (int x = startX; x < endX; x++) {
-      int i = y * stride + x * 4;
-      uint8_t a = bitmapData[i + 3];
-
-      bitmapData[i + 0] = std::min(a, bitmapData[i + 0]);
-      bitmapData[i + 1] = std::min(a, bitmapData[i + 1]);
-      bitmapData[i + 2] = std::min(a, bitmapData[i + 2]);
-    }
-  }
-}
-
-static CGRect ComputeGlyphsExtents(CGRect* bboxes, CGPoint* positions, CFIndex count, float scale) {
-  CGFloat x1, x2, y1, y2;
-  if (count < 1) return CGRectZero;
-
-  x1 = bboxes[0].origin.x + positions[0].x;
-  x2 = bboxes[0].origin.x + positions[0].x + scale * bboxes[0].size.width;
-  y1 = bboxes[0].origin.y + positions[0].y;
-  y2 = bboxes[0].origin.y + positions[0].y + scale * bboxes[0].size.height;
-
-  // accumulate max and minimum coordinates
-  for (int i = 1; i < count; i++) {
-    x1 = std::min(x1, bboxes[i].origin.x + positions[i].x);
-    y1 = std::min(y1, bboxes[i].origin.y + positions[i].y);
-    x2 = std::max(x2, bboxes[i].origin.x + positions[i].x + scale * bboxes[i].size.width);
-    y2 = std::max(y2, bboxes[i].origin.y + positions[i].y + scale * bboxes[i].size.height);
-  }
-
-  CGRect extents = {{x1, y1}, {x2 - x1, y2 - y1}};
-  return extents;
-}
-
-}  // namespace gfx
-}  // namespace mozilla
-
-#endif
--- a/gfx/2d/DrawTargetSkia.cpp
+++ b/gfx/2d/DrawTargetSkia.cpp
@@ -29,18 +29,16 @@
 #include "PathHelpers.h"
 #include "SourceSurfaceCapture.h"
 #include "Swizzle.h"
 #include <algorithm>
 
 #ifdef MOZ_WIDGET_COCOA
 #  include "BorrowedContext.h"
 #  include <ApplicationServices/ApplicationServices.h>
-#  include "ScaledFontMac.h"
-#  include "CGTextDrawing.h"
 #endif
 
 #ifdef XP_WIN
 #  include "ScaledFontDWrite.h"
 #endif
 
 namespace mozilla::gfx {
 
@@ -1084,43 +1082,17 @@ static bool SetupCGContext(DrawTargetSki
       CGContextClipToRects(aCGContext, rects.begin(), rects.length());
     }
   }
 
   CGContextConcatCTM(aCGContext,
                      GfxMatrixToCGAffineTransform(aDT->GetTransform()));
   return true;
 }
-
-static bool SetupCGGlyphs(CGContextRef aCGContext, const GlyphBuffer& aBuffer,
-                          Vector<CGGlyph, 32>& aGlyphs,
-                          Vector<CGPoint, 32>& aPositions) {
-  // Flip again so we draw text in right side up. Transform (3) from the top
-  CGContextScaleCTM(aCGContext, 1, -1);
-
-  if (!aGlyphs.resizeUninitialized(aBuffer.mNumGlyphs) ||
-      !aPositions.resizeUninitialized(aBuffer.mNumGlyphs)) {
-    gfxDevCrash(LogReason::GlyphAllocFailedCG)
-        << "glyphs/positions allocation failed";
-    return false;
-  }
-
-  for (unsigned int i = 0; i < aBuffer.mNumGlyphs; i++) {
-    aGlyphs[i] = aBuffer.mGlyphs[i].mIndex;
-
-    // Flip the y coordinates so that text ends up in the right spot after the
-    // (3) flip Inversion from (4) in the comments.
-    aPositions[i] = CGPointMake(aBuffer.mGlyphs[i].mPosition.x,
-                                -aBuffer.mGlyphs[i].mPosition.y);
-  }
-
-  return true;
-}
-// End long comment about transforms. SetupCGContext and SetupCGGlyphs should
-// stay next to each other.
+// End long comment about transforms.
 
 // The context returned from this method will have the origin
 // in the top left and will have applied all the neccessary clips
 // and transforms to the CGContext. See the comment above
 // SetupCGContext.
 CGContextRef DrawTargetSkia::BorrowCGContext(const DrawOptions& aOptions) {
   // Since we can't replay Skia clips, we have to use a layer if we have a
   // complex clip. After saving a layer, the SkCanvas queries for needing a
@@ -1217,106 +1189,17 @@ CGContextRef BorrowedCGContext::BorrowCG
   DrawTargetSkia* skiaDT = static_cast<DrawTargetSkia*>(aDT);
   return skiaDT->BorrowCGContext(DrawOptions());
 }
 
 void BorrowedCGContext::ReturnCGContextToDrawTarget(DrawTarget* aDT,
                                                     CGContextRef cg) {
   DrawTargetSkia* skiaDT = static_cast<DrawTargetSkia*>(aDT);
   skiaDT->ReturnCGContext(cg);
-  return;
 }
-
-static void SetFontColor(CGContextRef aCGContext, CGColorSpaceRef aColorSpace,
-                         const Pattern& aPattern) {
-  const DeviceColor& color = static_cast<const ColorPattern&>(aPattern).mColor;
-  CGColorRef textColor = ColorToCGColor(aColorSpace, color);
-  CGContextSetFillColorWithColor(aCGContext, textColor);
-  CGColorRelease(textColor);
-}
-
-/***
- * We need this to support subpixel AA text on OS X in two cases:
- * text in DrawTargets that are not opaque and text over vibrant backgrounds.
- * Skia normally doesn't support subpixel AA text on transparent backgrounds.
- * To get around this, we have to wrap the Skia bytes with a CGContext and ask
- * CG to draw the text.
- * In vibrancy cases, we have to use a private API,
- * CGContextSetFontSmoothingBackgroundColor, which sets the expected
- * background color the text will draw onto so that CG can render the text
- * properly. After that, we have to go back and fixup the pixels
- * such that their alpha values are correct.
- */
-bool DrawTargetSkia::FillGlyphsWithCG(ScaledFont* aFont,
-                                      const GlyphBuffer& aBuffer,
-                                      const Pattern& aPattern,
-                                      const DrawOptions& aOptions) {
-  MOZ_ASSERT(aFont->GetType() == FontType::MAC);
-  MOZ_ASSERT(aPattern.GetType() == PatternType::COLOR);
-
-  CGContextRef cgContext = BorrowCGContext(aOptions);
-  if (!cgContext) {
-    return false;
-  }
-
-  Vector<CGGlyph, 32> glyphs;
-  Vector<CGPoint, 32> positions;
-  if (!SetupCGGlyphs(cgContext, aBuffer, glyphs, positions)) {
-    ReturnCGContext(cgContext);
-    return false;
-  }
-
-  ScaledFontMac* macFont = static_cast<ScaledFontMac*>(aFont);
-  SetFontSmoothingBackgroundColor(cgContext, mColorSpace,
-                                  macFont->FontSmoothingBackgroundColor());
-  SetFontColor(cgContext, mColorSpace, aPattern);
-
-  CTFontDrawGlyphs(macFont->mCTFont, glyphs.begin(), positions.begin(),
-                   aBuffer.mNumGlyphs, cgContext);
-
-  // Calculate the area of the text we just drew
-  auto* bboxes = new CGRect[aBuffer.mNumGlyphs];
-  CTFontGetBoundingRectsForGlyphs(macFont->mCTFont, kCTFontOrientationDefault,
-                                  glyphs.begin(), bboxes, aBuffer.mNumGlyphs);
-  CGRect extents =
-      ComputeGlyphsExtents(bboxes, positions.begin(), aBuffer.mNumGlyphs, 1.0f);
-  delete[] bboxes;
-
-  CGAffineTransform cgTransform = CGContextGetCTM(cgContext);
-  extents = CGRectApplyAffineTransform(extents, cgTransform);
-
-  // Have to round it out to ensure we fully cover all pixels
-  Rect rect(extents.origin.x, extents.origin.y, extents.size.width,
-            extents.size.height);
-  rect.RoundOut();
-  extents = CGRectMake(rect.x, rect.y, rect.width, rect.height);
-
-  EnsureValidPremultipliedData(cgContext, extents);
-
-  ReturnCGContext(cgContext);
-  return true;
-}
-
-static bool HasFontSmoothingBackgroundColor(ScaledFont* aFont) {
-  // This should generally only be true if we have a popup context menu
-  if (aFont && aFont->GetType() == FontType::MAC) {
-    DeviceColor fontSmoothingBackgroundColor =
-        static_cast<ScaledFontMac*>(aFont)->FontSmoothingBackgroundColor();
-    return fontSmoothingBackgroundColor.a > 0;
-  }
-
-  return false;
-}
-
-static bool ShouldUseCGToFillGlyphs(ScaledFont* aFont,
-                                    const Pattern& aPattern) {
-  return HasFontSmoothingBackgroundColor(aFont) &&
-         aPattern.GetType() == PatternType::COLOR;
-}
-
 #endif
 
 static bool CanDrawFont(ScaledFont* aFont) {
   switch (aFont->GetType()) {
     case FontType::FREETYPE:
     case FontType::FONTCONFIG:
     case FontType::MAC:
     case FontType::GDI:
@@ -1332,24 +1215,16 @@ void DrawTargetSkia::DrawGlyphs(ScaledFo
                                 const StrokeOptions* aStrokeOptions,
                                 const DrawOptions& aOptions) {
   if (!CanDrawFont(aFont)) {
     return;
   }
 
   MarkChanged();
 
-#ifdef MOZ_WIDGET_COCOA
-  if (!aStrokeOptions && ShouldUseCGToFillGlyphs(aFont, aPattern)) {
-    if (FillGlyphsWithCG(aFont, aBuffer, aPattern, aOptions)) {
-      return;
-    }
-  }
-#endif
-
   ScaledFontBase* skiaFont = static_cast<ScaledFontBase*>(aFont);
   SkTypeface* typeface = skiaFont->GetSkTypeface();
   if (!typeface) {
     return;
   }
 
   AutoPaintSetup paint(mCanvas, aOptions, aPattern);
   if (aStrokeOptions && !StrokeOptionsToPaint(paint.mPaint, *aStrokeOptions)) {
--- a/gfx/gl/GLContext.cpp
+++ b/gfx/gl/GLContext.cpp
@@ -157,17 +157,16 @@ static const char* const sExtensionNames
     "GL_EXT_multisampled_render_to_texture",
     "GL_EXT_occlusion_query_boolean",
     "GL_EXT_packed_depth_stencil",
     "GL_EXT_read_format_bgra",
     "GL_EXT_robustness",
     "GL_EXT_sRGB",
     "GL_EXT_sRGB_write_control",
     "GL_EXT_shader_texture_lod",
-    "GL_EXT_texture3D",
     "GL_EXT_texture_compression_bptc",
     "GL_EXT_texture_compression_dxt1",
     "GL_EXT_texture_compression_rgtc",
     "GL_EXT_texture_compression_s3tc",
     "GL_EXT_texture_compression_s3tc_srgb",
     "GL_EXT_texture_filter_anisotropic",
     "GL_EXT_texture_format_BGRA8888",
     "GL_EXT_texture_norm16",
@@ -1318,17 +1317,18 @@ void GLContext::LoadMoreSymbols(const Sy
 
     if (IsSupported(GLFeature::texture_3D)) {
         const SymLoadStruct coreSymbols[] = {
             { (PRFuncPtr*) &mSymbols.fTexImage3D, {{ "glTexImage3D" }} },
             { (PRFuncPtr*) &mSymbols.fTexSubImage3D, {{ "glTexSubImage3D" }} },
             END_SYMBOLS
         };
         const SymLoadStruct extSymbols[] = {
-            { (PRFuncPtr*) &mSymbols.fTexSubImage3D, {{ "glTexSubImage3DEXT", "glTexSubImage3DOES" }} },
+            { (PRFuncPtr*) &mSymbols.fTexImage3D, {{ "glTexImage3DOES" }} },
+            { (PRFuncPtr*) &mSymbols.fTexSubImage3D, {{ "glTexSubImage3DOES" }} },
             END_SYMBOLS
         };
         fnLoadFeatureByCore(coreSymbols, extSymbols, GLFeature::texture_3D);
     }
 
     if (IsSupported(GLFeature::texture_3D_compressed)) {
         const SymLoadStruct coreSymbols[] = {
             { (PRFuncPtr*) &mSymbols.fCompressedTexImage3D, {{ "glCompressedTexImage3D" }} },
--- a/gfx/gl/GLContext.h
+++ b/gfx/gl/GLContext.h
@@ -429,17 +429,16 @@ class GLContext : public GenericAtomicRe
     EXT_multisampled_render_to_texture,
     EXT_occlusion_query_boolean,
     EXT_packed_depth_stencil,
     EXT_read_format_bgra,
     EXT_robustness,
     EXT_sRGB,
     EXT_sRGB_write_control,
     EXT_shader_texture_lod,
-    EXT_texture3D,
     EXT_texture_compression_bptc,
     EXT_texture_compression_dxt1,
     EXT_texture_compression_rgtc,
     EXT_texture_compression_s3tc,
     EXT_texture_compression_s3tc_srgb,
     EXT_texture_filter_anisotropic,
     EXT_texture_format_BGRA8888,
     EXT_texture_norm16,
--- a/gfx/gl/GLContextFeatures.cpp
+++ b/gfx/gl/GLContextFeatures.cpp
@@ -396,18 +396,17 @@ static const FeatureInfo sFeatureInfoArr
      GLVersion::GL3_2,
      GLESVersion::ES3,
      GLContext::Extension_None,
      {GLContext::ARB_sync, GLContext::APPLE_sync, GLContext::Extensions_End}},
     {"texture_3D",
      GLVersion::GL1_2,
      GLESVersion::ES3,
      GLContext::Extension_None,
-     {GLContext::EXT_texture3D, GLContext::OES_texture_3D,
-      GLContext::Extensions_End}},
+     {GLContext::OES_texture_3D, GLContext::Extensions_End}},
     {"texture_3D_compressed",
      GLVersion::GL1_3,
      GLESVersion::ES3,
      GLContext::Extension_None,
      {GLContext::ARB_texture_compression, GLContext::OES_texture_3D,
       GLContext::Extensions_End}},
     {"texture_3D_copy",
      GLVersion::GL1_2,
--- a/js/src/jsdate.cpp
+++ b/js/src/jsdate.cpp
@@ -969,16 +969,23 @@ done_date:
     ++i;
   } else if (PEEK('+') || PEEK('-')) {
     if (PEEK('-')) {
       tzMul = -1;
     }
     ++i;
     NEED_NDIGITS(2, tzHour);
     /*
+     * Non-standard extension to the ISO date format:
+     * allow two digits for the time zone offset.
+     */
+    if (i >= length && !isStrict) {
+      goto done;
+    }
+    /*
      * Non-standard extension to the ISO date format (permitted by ES5):
      * allow "-0700" as a time zone offset, not just "-07:00".
      */
     if (PEEK(':')) {
       ++i;
     }
     NEED_NDIGITS(2, tzMin);
   } else {
--- a/js/src/tests/non262/Date/non-iso.js
+++ b/js/src/tests/non262/Date/non-iso.js
@@ -41,16 +41,20 @@ assertEq(new Date("1997-03-08 1:1:1").ge
 assertEq(new Date("1997-03-08 11").getTime(),
          new Date("1997-03-08T11").getTime()); // Date(NaN)
 assertEq(new Date("1997-03-08").getTime(),
          new Date("1997-03-08").getTime());
 assertEq(new Date("1997-03-8").getTime(),
          new Date("1997-03-08").getTime());
 assertEq(new Date("1997-3-8").getTime(),
          new Date("1997-03-08").getTime());
+assertEq(new Date("1997-03-08 11:19:10-07").getTime(),
+         new Date("1997-03-08 11:19:10-0700").getTime());
+assertEq(new Date("1997-03-08T11:19:10-07").getTime(),
+         new Date(NaN).getTime());
 assertEq(new Date("1997-3-8 ").getTime(),
          new Date("1997-03-08T").getTime()); // Date(NaN)
 assertEq(new Date("1997-3-8T11:19:20").getTime(),
          new Date(NaN).getTime());
 assertEq(new Date("1997-03-8T11:19:20").getTime(),
          new Date(NaN).getTime());
 assertEq(new Date("+001997-3-8T11:19:20").getTime(),
          new Date(NaN).getTime());
--- a/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py
+++ b/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py
@@ -8,17 +8,17 @@ from __future__ import absolute_import, 
 import os
 import tempfile
 import unittest
 
 
 from mozunit import main
 import mozbuild.vendor.rewrite_mozbuild as mu
 
-SAMPLE_MOZBUILD = """
+SAMPLE_PIXMAN_MOZBUILD = """
 if CONFIG['OS_ARCH'] != 'Darwin' and CONFIG['CC_TYPE'] in ('clang', 'gcc'):
     if CONFIG['HAVE_ARM_NEON']:
         SOURCES += [
             "pixman-arm-neon-asm-bilinear.S",
             "pixman-arm-neon-asm.S",
         ]
     if CONFIG['HAVE_ARM_SIMD']:
         SOURCES += [
@@ -37,34 +37,70 @@ if use_sse2:
     DEFINES['USE_SSE'] = True
     DEFINES['USE_SSE2'] = True
     SOURCES += ['pixman-sse2.c']
     SOURCES['pixman-sse2.c'].flags += CONFIG['SSE_FLAGS'] + CONFIG['SSE2_FLAGS']
     if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
         SOURCES['pixman-sse2.c'].flags += ['-Winline']
 """
 
+SAMPLE_DAV1D_MOZBUILD = """
+SOURCES += [
+    '../../third_party/dav1d/src/cdf.c',
+    '../../third_party/dav1d/src/cpu.c',
+    ]
+EXPORTS = [
+    '../../third_party/dav1d/src/header1.h',
+    '../../third_party/dav1d/src/header2.h',
+    ]
+"""
+
+
+SAMPLE_JPEGXL_MOZBUILD = """
+SOURCES += [
+    "/third_party/jpeg-xl/lib/jxl/ac_strategy.cc",
+    "/third_party/jpeg-xl/lib/jxl/alpha.cc",
+    "/third_party/jpeg-xl/lib/jxl/ans_common.cc",
+    "/third_party/jpeg-xl/lib/jxl/aux_out.cc",
+    ]
+EXPORTS.bob.carol = [
+    "/third_party/jpeg-xl/lib/jxl/header1.hpp",
+    "/third_party/jpeg-xl/lib/jxl/header2.h",
+]
+"""
+
+
+def _make_mozbuild_directory_structure(mozbuild_path, contents):
+    d = tempfile.TemporaryDirectory()
+    os.makedirs(os.path.join(d.name, os.path.split(mozbuild_path)[0]))
+
+    arcconfig = open(os.path.join(d.name, ".arcconfig"), mode="w")
+    arcconfig.close()
+
+    mozbuild = open(os.path.join(d.name, mozbuild_path), mode="w")
+    mozbuild.write(contents)
+    mozbuild.close()
+
+    return d
+
 
 class TestUtils(unittest.TestCase):
     def test_normalize_filename(self):
         self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/"), "/")
         self.assertEqual(
             mu.normalize_filename("foo/bar/moz.build", "a.c"), "foo/bar/a.c"
         )
         self.assertEqual(
             mu.normalize_filename("foo/bar/moz.build", "baz/a.c"), "foo/bar/baz/a.c"
         )
         self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/a.c"), "/a.c")
 
     def test_unnormalize_filename(self):
         test_vectors = [
-            (
-                "foo/bar/moz.build",
-                "/",
-            ),
+            ("foo/bar/moz.build", "/"),
             ("foo/bar/moz.build", "a.c"),
             ("foo/bar/moz.build", "baz/a.c"),
             ("foo/bar/moz.build", "/a.c"),
         ]
 
         for vector in test_vectors:
             mozbuild, file = vector
             self.assertEqual(
@@ -90,19 +126,17 @@ class TestUtils(unittest.TestCase):
                 {"> if conditional > SOURCES": ["root/dir/asm/blah.S"]},
             ),
             (
                 "root/dir/dostuff.c",
                 {
                     "> SOURCES": ["root/dir/main.c"],
                     "> if conditional > SOURCES": ["root/dir/asm/blah.S"],
                 },
-                {
-                    "> SOURCES": ["root/dir/main.c"],
-                },
+                {"> SOURCES": ["root/dir/main.c"]},
             ),
         ]
 
         for vector in test_vectors:
             target_filename_normalized, source_assignments, expected = vector
             actual = mu.find_all_posible_assignments_from_filename(
                 source_assignments, target_filename_normalized
             )
@@ -135,82 +169,239 @@ class TestUtils(unittest.TestCase):
             # )
             (
                 "foo/asm_arm.c",
                 {
                     "> SOURCES": ["foo/main.c", "foo/all_utility.c"],
                     "> if ASM > SOURCES": ["foo/asm_x86.c"],
                 },
                 "> if ASM > SOURCES",
-            ),
+            )
         ]
         for vector in test_vectors:
             normalized_filename, source_assignments, expected = vector
             actual, _ = mu.guess_best_assignment(
                 source_assignments, normalized_filename
             )
             self.assertEqual(actual, expected)
 
+    def test_mozbuild_removing(self):
+        test_vectors = [
+            (
+                "media/dav1d/moz.build",
+                SAMPLE_DAV1D_MOZBUILD,
+                "third_party/dav1d/src/cdf.c",
+                "media/dav1d/",
+                "third-party/dav1d/",
+                "    '../../third_party/dav1d/src/cdf.c',\n",
+            ),
+            (
+                "media/dav1d/moz.build",
+                SAMPLE_DAV1D_MOZBUILD,
+                "third_party/dav1d/src/header1.h",
+                "media/dav1d/",
+                "third-party/dav1d/",
+                "    '../../third_party/dav1d/src/header1.h',\n",
+            ),
+            (
+                "media/jxl/moz.build",
+                SAMPLE_JPEGXL_MOZBUILD,
+                "third_party/jpeg-xl/lib/jxl/alpha.cc",
+                "media/jxl/",
+                "third-party/jpeg-xl/",
+                '    "/third_party/jpeg-xl/lib/jxl/alpha.cc",\n',
+            ),
+            (
+                "media/jxl/moz.build",
+                SAMPLE_JPEGXL_MOZBUILD,
+                "third_party/jpeg-xl/lib/jxl/header1.hpp",
+                "media/jxl/",
+                "third-party/jpeg-xl/",
+                '    "/third_party/jpeg-xl/lib/jxl/header1.hpp",\n',
+            ),
+        ]
+
+        for vector in test_vectors:
+            (
+                mozbuild_path,
+                mozbuild_contents,
+                file_to_remove,
+                moz_yaml_dir,
+                vendoring_dir,
+                replace_str,
+            ) = vector
+
+            startdir = os.getcwd()
+            try:
+                mozbuild_dir = _make_mozbuild_directory_structure(
+                    mozbuild_path, mozbuild_contents
+                )
+                os.chdir(mozbuild_dir.name)
+
+                mu.remove_file_from_moz_build_file(
+                    file_to_remove,
+                    moz_yaml_dir=moz_yaml_dir,
+                    vendoring_dir=vendoring_dir,
+                )
+
+                with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file:
+                    contents = file.read()
+
+                expected_output = mozbuild_contents.replace(replace_str, "")
+                if contents != expected_output:
+                    print("File to remove:", file_to_remove)
+                    print("Contents:")
+                    print("-------------------")
+                    print(contents)
+                    print("-------------------")
+                    print("Expected:")
+                    print("-------------------")
+                    print(expected_output)
+                    print("-------------------")
+                self.assertEqual(contents, expected_output)
+            finally:
+                os.chdir(startdir)
+
     def test_mozbuild_adding(self):
         test_vectors = [
+            (
+                "media/dav1d/moz.build",
+                SAMPLE_DAV1D_MOZBUILD,
+                "third_party/dav1d/src/cdf2.c",
+                "media/dav1d/",
+                "third-party/dav1d/",
+                "cdf.c',\n",
+                "cdf.c',\n    '../../third_party/dav1d/src/cdf2.c',\n",
+            ),
+            (
+                "media/dav1d/moz.build",
+                SAMPLE_DAV1D_MOZBUILD,
+                "third_party/dav1d/src/header3.h",
+                "media/dav1d/",
+                "third-party/dav1d/",
+                "header2.h',\n",
+                "header2.h',\n    '../../third_party/dav1d/src/header3.h',\n",
+            ),
+            (
+                "media/jxl/moz.build",
+                SAMPLE_JPEGXL_MOZBUILD,
+                "third_party/jpeg-xl/lib/jxl/alpha2.cc",
+                "media/jxl/",
+                "third-party/jpeg-xl/",
+                'alpha.cc",\n',
+                'alpha.cc",\n    "/third_party/jpeg-xl/lib/jxl/alpha2.cc",\n',
+            ),
+            (
+                "media/jxl/moz.build",
+                SAMPLE_JPEGXL_MOZBUILD,
+                "third_party/jpeg-xl/lib/jxl/header3.hpp",
+                "media/jxl/",
+                "third-party/jpeg-xl/",
+                'header2.h",\n',
+                'header2.h",\n    "/third_party/jpeg-xl/lib/jxl/header3.hpp",\n',
+            ),
+        ]
+
+        for vector in test_vectors:
+            (
+                mozbuild_path,
+                mozbuild_contents,
+                file_to_add,
+                moz_yaml_dir,
+                vendoring_dir,
+                search_str,
+                replace_str,
+            ) = vector
+
+            startdir = os.getcwd()
+            try:
+                mozbuild_dir = _make_mozbuild_directory_structure(
+                    mozbuild_path, mozbuild_contents
+                )
+                os.chdir(mozbuild_dir.name)
+
+                mu.add_file_to_moz_build_file(
+                    file_to_add, moz_yaml_dir=moz_yaml_dir, vendoring_dir=vendoring_dir
+                )
+
+                with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file:
+                    contents = file.read()
+
+                expected_output = mozbuild_contents.replace(search_str, replace_str)
+                if contents != expected_output:
+                    print("File to add:", file_to_add)
+                    print("Contents:")
+                    print("-------------------")
+                    print(contents)
+                    print("-------------------")
+                    print("Expected:")
+                    print("-------------------")
+                    print(expected_output)
+                    print("-------------------")
+                self.assertEqual(contents, expected_output)
+            finally:
+                os.chdir(startdir)
+
+    # This test is legacy. I'm keeping it around, but new test vectors should be added to the
+    # non-internal test to exercise the public API.
+    def test_mozbuild_adding_internal(self):
+        test_vectors = [
             # (
             # mozbuild_contents
-            # unnormalized_filename_to_add
+            # unnormalized_filename_to_add,
+            # unnormalized_list_of_files
             # expected_output
             # )
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-sse2-more.c",
                 ["pixman-sse2.c"],
-                SAMPLE_MOZBUILD.replace(
+                SAMPLE_PIXMAN_MOZBUILD.replace(
                     "SOURCES += ['pixman-sse2.c']",
                     "SOURCES += ['pixman-sse2-more.c','pixman-sse2.c']",
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-trap-more.c",
                 [
                     "pixman-region32.c",
                     "pixman-solid-fill.c",
                     "pixman-trap.c",
                     "pixman-utils.c",
                     "pixman-x86.c",
                     "pixman.c",
                 ],
-                SAMPLE_MOZBUILD.replace(
+                SAMPLE_PIXMAN_MOZBUILD.replace(
                     "'pixman-trap.c',", "'pixman-trap-more.c',\n    'pixman-trap.c',"
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-arm-neon-asm-more.S",
-                [
-                    "pixman-arm-neon-asm-bilinear.S",
-                    "pixman-arm-neon-asm.S",
-                ],
-                SAMPLE_MOZBUILD.replace(
+                ["pixman-arm-neon-asm-bilinear.S", "pixman-arm-neon-asm.S"],
+                SAMPLE_PIXMAN_MOZBUILD.replace(
                     '"pixman-arm-neon-asm.S"',
                     '"pixman-arm-neon-asm-more.S",\n            "pixman-arm-neon-asm.S"',
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-arm-simd-asm-smore.S",
                 ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"],
-                SAMPLE_MOZBUILD.replace(
+                SAMPLE_PIXMAN_MOZBUILD.replace(
                     "'pixman-arm-simd-asm.S'",
                     "'pixman-arm-simd-asm-smore.S',\n            'pixman-arm-simd-asm.S'",
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-arm-simd-asn.S",
                 ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"],
-                SAMPLE_MOZBUILD.replace(
+                SAMPLE_PIXMAN_MOZBUILD.replace(
                     "'pixman-arm-simd-asm.S'",
                     "'pixman-arm-simd-asm.S',\n            'pixman-arm-simd-asn.S'",
                 ),
             ),
         ]
 
         for vector in test_vectors:
             (
@@ -241,59 +432,55 @@ class TestUtils(unittest.TestCase):
                 print(contents)
                 print("-------------------")
                 print("Expected:")
                 print("-------------------")
                 print(expected_output)
                 print("-------------------")
             self.assertEqual(contents, expected_output)
 
-    def test_mozbuild_removing(self):
+    # This test is legacy. I'm keeping it around, but new test vectors should be added to the
+    # non-internal test to exercise the public API.
+    def test_mozbuild_removing_internal(self):
         test_vectors = [
             # (
             # mozbuild_contents
             # unnormalized_filename_to_add
             # expected_output
             # )
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-sse2.c",
-                SAMPLE_MOZBUILD.replace(
-                    "SOURCES += ['pixman-sse2.c']",
-                    "SOURCES += []",
+                SAMPLE_PIXMAN_MOZBUILD.replace(
+                    "SOURCES += ['pixman-sse2.c']", "SOURCES += []"
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-trap.c",
-                SAMPLE_MOZBUILD.replace("    'pixman-trap.c',\n", ""),
+                SAMPLE_PIXMAN_MOZBUILD.replace("    'pixman-trap.c',\n", ""),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-arm-neon-asm.S",
-                SAMPLE_MOZBUILD.replace(
-                    '            "pixman-arm-neon-asm.S",\n',
-                    "",
+                SAMPLE_PIXMAN_MOZBUILD.replace(
+                    '            "pixman-arm-neon-asm.S",\n', ""
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-arm-simd-asm.S",
-                SAMPLE_MOZBUILD.replace(
-                    "            'pixman-arm-simd-asm.S'",
-                    "            ",
+                SAMPLE_PIXMAN_MOZBUILD.replace(
+                    "            'pixman-arm-simd-asm.S'", "            "
                 ),
             ),
             (
-                SAMPLE_MOZBUILD,
+                SAMPLE_PIXMAN_MOZBUILD,
                 "pixman-region32.c",
-                SAMPLE_MOZBUILD.replace(
-                    "'pixman-region32.c',",
-                    "",
-                ),
+                SAMPLE_PIXMAN_MOZBUILD.replace("'pixman-region32.c',", ""),
             ),
         ]
 
         for vector in test_vectors:
             (
                 mozbuild_contents,
                 unnormalized_filename_to_remove,
                 expected_output,
--- a/python/mozbuild/mozbuild/vendor/mach_commands.py
+++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
@@ -27,16 +27,22 @@ class Vendor(MachCommandBase):
     )
     @CommandArgument(
         "--check-for-update",
         action="store_true",
         help="For scripted use, prints the new commit to update to, or nothing if up to date.",
         default=False,
     )
     @CommandArgument(
+        "--add-to-exports",
+        action="store_true",
+        help="Will attempt to add new header files into any relevant EXPORTS block",
+        default=False,
+    )
+    @CommandArgument(
         "--ignore-modified",
         action="store_true",
         help="Ignore modified files in current checkout",
         default=False,
     )
     @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
     @CommandArgument(
         "--verify", "-v", action="store_true", help="(Only) verify the manifest"
@@ -46,16 +52,17 @@ class Vendor(MachCommandBase):
     )
     def vendor(
         self,
         command_context,
         library,
         revision,
         ignore_modified=False,
         check_for_update=False,
+        add_to_exports=False,
         verify=False,
     ):
         """
         Vendor third-party dependencies into the source repository.
 
         Vendoring rust and python can be done with ./mach vendor [rust/python].
         Vendoring other libraries can be done with ./mach vendor [arguments] path/to/file.yaml
         """
@@ -79,17 +86,19 @@ class Vendor(MachCommandBase):
         if not ignore_modified and not check_for_update:
             self.check_modified_files(command_context)
         if not revision:
             revision = "HEAD"
 
         from mozbuild.vendor.vendor_manifest import VendorManifest
 
         vendor_command = command_context._spawn(VendorManifest)
-        vendor_command.vendor(library, manifest, revision, check_for_update)
+        vendor_command.vendor(
+            library, manifest, revision, check_for_update, add_to_exports
+        )
 
         sys.exit(0)
 
     def check_modified_files(self, command_context):
         """
         Ensure that there aren't any uncommitted changes to files
         in the working copy, since we're going to change some state
         on the user.
--- a/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py
+++ b/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py
@@ -182,105 +182,92 @@ import os
 import re
 import ast
 import sys
 import copy
 import fileinput
 import subprocess
 
 from pprint import pprint
-from mozbuild.frontend.sandbox import alphabetical_sorted
+
+try:
+    from mozbuild.frontend.sandbox import alphabetical_sorted
+except Exception:
+
+    def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False):
+        return sorted(iterable, key=key, reverse=reverse)
+
+
+# This can be edited to enable better Python 3.8 behavior, but is set so that
+# everything is consistent by default so errors can be detected more easily.
+FORCE_DOWNGRADE_BEHAVIOR = True
 
 statistics = {
     "guess_candidates": {},
     "number_refinements": {},
     "needed_to_guess": 0,
     "length_logic": {},
 }
 
 
 def log(*args, **kwargs):
     # If is helpful to keep some logging statements around, but we don't want to print them
     #  unless we are debugging
     # print(*args, **kwargs)
     pass
 
 
-# < python 3.8 shims #########################
-# Taskcluster currently runs python 3.5 and it's difficult to move to a more recent one
-# Once Tskcl moves to 3.8 we could try to remove this. (Or keep it for developers who are < 3.8)
-# From https://github.com/python/cpython/blob/44f6b9aa49d562ab7c67952442b8348346b24141/Lib/ast.py
+##############################################
+
+import inspect
 
 
-def _splitlines_no_ff(source):
-    """Split a string into lines ignoring form feed and other chars.
-    This mimics how the Python parser splits source code.
-    """
-    idx = 0
-    lines = []
-    next_line = ""
-    while idx < len(source):
-        c = source[idx]
-        next_line += c
-        idx += 1
-        # Keep \r\n together
-        if c == "\r" and idx < len(source) and source[idx] == "\n":
-            next_line += "\n"
-            idx += 1
-        if c in "\r\n":
-            lines.append(next_line)
-            next_line = ""
+def node_to_name(code, node):
+    if (
+        not FORCE_DOWNGRADE_BEHAVIOR
+        and sys.version_info[0] >= 3
+        and sys.version_info[1] >= 8
+    ):
+        return ast.get_source_segment(code, node)
 
-    if next_line:
-        lines.append(next_line)
-    return lines
+    return node.__class__.__name__
 
 
-def _pad_whitespace(source):
-    r"""Replace all chars except '\f\t' in a line with spaces."""
-    result = ""
-    for c in source:
-        if c in "\f\t":
-            result += c
-        else:
-            result += " "
-    return result
+def get_attribute_label(node):
+    assert isinstance(node, ast.Attribute)
+
+    label = ""
+    subtarget = node
+    while isinstance(subtarget, ast.Attribute):
+        label = subtarget.attr + ("." if label else "") + label
+        subtarget = subtarget.value
+    assert isinstance(subtarget, ast.Name)
+    label = subtarget.id + "." + label
+
+    return label
 
 
-def ast_get_source_segment(source, node):
-    """Get source code segment of the *source* that generated *node*.
-    If some location information (`lineno`, `end_lineno`, `col_offset`,
-    or `end_col_offset`) is missing, return None.
-    If *padded* is `True`, the first line of a multi-line statement will
-    be padded with spaces to match its original position.
-    """
-    try:
-        lineno = node.lineno - 1
-        end_lineno = node.end_lineno - 1
-        col_offset = node.col_offset
-        end_col_offset = node.end_col_offset
-    except AttributeError:
-        return None
+def ast_get_source_segment(code, node):
+    if (
+        not FORCE_DOWNGRADE_BEHAVIOR
+        and sys.version_info[0] >= 3
+        and sys.version_info[1] >= 8
+    ):
+        return ast.get_source_segment(code, node)
 
-    lines = _splitlines_no_ff(source)
-    if end_lineno == lineno:
-        return lines[lineno].encode()[col_offset:end_col_offset].decode()
+    caller = inspect.stack()[1].function
+    if caller == "log":
+        return ""
 
-    if padded:
-        padding = _pad_whitespace(lines[lineno].encode()[:col_offset].decode())
-    else:
-        padding = ""
+    raise Exception("ast_get_source_segment is not available with this Python version.")
+
 
-    first = padding + lines[lineno].encode()[col_offset:].decode()
-    last = lines[end_lineno].encode()[:end_col_offset].decode()
-    lines = lines[lineno + 1 : end_lineno]
-
-    lines.insert(0, first)
-    lines.append(last)
-    return "".join(lines)
+# Overwrite it so we don't accidentally use it
+if sys.version_info[0] >= 3 and sys.version_info[1] >= 8:
+    ast.get_source_segment = ast_get_source_segment
 
 
 ##############################################
 
 
 def node_to_readable_file_location(code, node, child_node=None):
     location = ""
 
@@ -291,38 +278,38 @@ def node_to_readable_file_location(code,
         location += node_to_readable_file_location(code, node.parent, node)
 
     location += " > "
     if isinstance(node, ast.Module):
         raise Exception("We shouldn't see a Module")
     elif isinstance(node, ast.If):
         assert child_node
         if child_node in node.body:
-            location += "if " + ast_get_source_segment(code, node.test)
+            location += "if " + node_to_name(code, node.test)
         else:
-            location += "else-of-if " + ast_get_source_segment(code, node.test)
+            location += "else-of-if " + node_to_name(code, node.test)
     elif isinstance(node, ast.For):
         location += (
             "for "
-            + ast_get_source_segment(code, node.target)
+            + node_to_name(code, node.target)
             + " in "
-            + ast_get_source_segment(code, node.iter)
+            + node_to_name(code, node.iter)
         )
     elif isinstance(node, ast.AugAssign):
         if isinstance(node.target, ast.Name):
             location += node.target.id
         else:
-            location += ast_get_source_segment(code, node.target)
+            location += node_to_name(code, node.target)
     elif isinstance(node, ast.Assign):
         # This assert would fire if we did e.g. some_sources = all_sources = [ ... ]
         assert len(node.targets) == 1, "Assignment node contains more than one target"
         if isinstance(node.targets[0], ast.Name):
             location += node.targets[0].id
         else:
-            location += ast_get_source_segment(code, node.targets[0])
+            location += node_to_name(code, node.targets[0])
     else:
         raise Exception("Got a node type I don't know how to handle: " + str(node))
 
     return location
 
 
 def assignment_node_to_source_filename_list(code, node):
     """
@@ -333,23 +320,25 @@ def assignment_node_to_source_filename_l
     If this happens, we'll return an empty list. The consequence of this is that we
     won't be able to match a file against this list, so we may not be able to add it.
 
     (But if the file matches a generated list, perhaps it will be included in the
     Sources list automatically?)
     """
     if isinstance(node.value, ast.List) and "elts" in node.value._fields:
         for f in node.value.elts:
-            if not isinstance(f, ast.Constant):
+            if not isinstance(f, ast.Constant) and not isinstance(f, ast.Str):
                 log(
                     "Found non-constant source file name in list: ",
                     ast_get_source_segment(code, f),
                 )
                 return []
-        return [f.value for f in node.value.elts]
+        return [
+            f.value if isinstance(f, ast.Constant) else f.s for f in node.value.elts
+        ]
     elif isinstance(node.value, ast.ListComp):
         # SOURCES += [f for f in foo if blah]
         log("Could not find the files for " + ast_get_source_segment(code, node.value))
     elif isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript):
         # SOURCES += other_var
         # SOURCES += files['X64_SOURCES']
         log("Could not find the files for " + ast_get_source_segment(code, node))
     elif isinstance(node.value, ast.Call):
@@ -358,78 +347,115 @@ def assignment_node_to_source_filename_l
     else:
         raise Exception(
             "Unexpected node received in assignment_node_to_source_filename_list: "
             + str(node)
         )
     return []
 
 
-def mozbuild_file_to_source_assignments(normalized_mozbuild_filename):
+def mozbuild_file_to_source_assignments(normalized_mozbuild_filename, assignment_type):
     """
     Returns a dictionary of 'source-assignment-location' -> 'normalized source filename list'
     contained in the moz.build file specified
 
     normalized_mozbuild_filename: the moz.build file to read
     """
     source_assignments = {}
 
+    if assignment_type == "source-files":
+        targets = ["SOURCES", "UNIFIED_SOURCES"]
+    else:
+        targets = ["EXPORTS"]
+
     # Parse the AST of the moz.build file
     code = open(normalized_mozbuild_filename).read()
     root = ast.parse(code)
 
     # Populate node parents. This allows us to walk up from a node to the root.
     # (Really I think python's ast class should do this, but it doesn't, so we monkey-patch it)
     for node in ast.walk(root):
         for child in ast.iter_child_nodes(node):
             child.parent = node
 
     # Find all the assignments of SOURCES or UNIFIED_SOURCES
-    source_assignment_nodes = [
-        node
-        for node in ast.walk(root)
-        if isinstance(node, ast.AugAssign)
-        and isinstance(node.target, ast.Name)
-        and node.target.id in ["SOURCES", "UNIFIED_SOURCES"]
-    ]
-    assert (
-        len([n for n in source_assignment_nodes if not isinstance(n.op, ast.Add)]) == 0
-    ), "We got a Source assignment that wasn't +="
+    if assignment_type == "source-files":
+        source_assignment_nodes = [
+            node
+            for node in ast.walk(root)
+            if isinstance(node, ast.AugAssign)
+            and isinstance(node.target, ast.Name)
+            and node.target.id in targets
+        ]
+        assert (
+            len([n for n in source_assignment_nodes if not isinstance(n.op, ast.Add)])
+            == 0
+        ), "We got a Source assignment that wasn't +="
 
-    # Recurse and find nodes where we do SOURCES += other_var or SOURCES += FILES['foo']
-    recursive_assignment_nodes = [
-        node
-        for node in source_assignment_nodes
-        if isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript)
-    ]
+        # Recurse and find nodes where we do SOURCES += other_var or SOURCES += FILES['foo']
+        recursive_assignment_nodes = [
+            node
+            for node in source_assignment_nodes
+            if isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript)
+        ]
+
+        recursive_assignment_nodes_names = [
+            node.value.id
+            for node in recursive_assignment_nodes
+            if isinstance(node.value, ast.Name)
+        ]
+
+        # TODO: We do not dig into subscript variables. These are currently only used by two
+        #       libraries that use external sources.mozbuild files.
+        # recursive_assignment_nodes_names.extend([something<node> for node in
+        #    recursive_assignment_nodes if isinstance(node.value, ast.Subscript)]
 
-    recursive_assignment_nodes_names = [
-        node.value.id
-        for node in recursive_assignment_nodes
-        if isinstance(node.value, ast.Name)
-    ]
-
-    # TODO: We do not dig into subscript variables. These are currently only used by two libraries
-    #       that use external sources.mozbuild files.
-    # recursive_assignment_nodes_names.extend([something<node> for node in
-    #    recursive_assignment_nodes if isinstance(node.value, ast.Subscript)]
+        additional_assignment_nodes = [
+            node
+            for node in ast.walk(root)
+            if isinstance(node, ast.Assign)
+            and isinstance(node.targets[0], ast.Name)
+            and node.targets[0].id in recursive_assignment_nodes_names
+        ]
 
-    additional_assignment_nodes = [
-        node
-        for node in ast.walk(root)
-        if isinstance(node, ast.Assign)
-        and isinstance(node.targets[0], ast.Name)
-        and node.targets[0].id in recursive_assignment_nodes_names
-    ]
-
-    # Remove the original, useless assignment node (the SOURCES += other_var)
-    for node in recursive_assignment_nodes:
-        source_assignment_nodes.remove(node)
-    # Add the other_var += [''] source-assignment
-    source_assignment_nodes.extend(additional_assignment_nodes)
+        # Remove the original, useless assignment node (the SOURCES += other_var)
+        for node in recursive_assignment_nodes:
+            source_assignment_nodes.remove(node)
+        # Add the other_var += [''] source-assignment
+        source_assignment_nodes.extend(additional_assignment_nodes)
+    else:
+        source_assignment_nodes = [
+            node
+            for node in ast.walk(root)
+            if isinstance(node, ast.AugAssign)
+            and (
+                (isinstance(node.target, ast.Name) and node.target.id == "EXPORTS")
+                or (
+                    isinstance(node.target, ast.Attribute)
+                    and get_attribute_label(node.target).startswith("EXPORTS")
+                )
+            )
+        ]
+        source_assignment_nodes.extend(
+            [
+                node
+                for node in ast.walk(root)
+                if isinstance(node, ast.Assign)
+                and (
+                    (
+                        isinstance(node.targets[0], ast.Name)
+                        and node.targets[0].id == "EXPORTS"
+                    )
+                    or (
+                        isinstance(node.targets[0], ast.Attribute)
+                        and get_attribute_label(node.targets[0]).startswith("EXPORTS")
+                    )
+                )
+            ]
+        )
 
     # Get the source-assignment-location for the node:
     assignment_index = 1
     for a in source_assignment_nodes:
         source_assignment_location = (
             node_to_readable_file_location(code, a) + " " + str(assignment_index)
         )
         source_filename_list = assignment_node_to_source_filename_list(code, a)
@@ -753,18 +779,17 @@ def edit_moz_build_file_to_add_file(
 
             line = line.replace(find_str, replace_str)
 
         print(line, end="")  # line has its own newline on it, don't add a second
     file.close()
 
 
 def edit_moz_build_file_to_remove_file(
-    normalized_mozbuild_filename,
-    unnormalized_filename_to_remove,
+    normalized_mozbuild_filename, unnormalized_filename_to_remove
 ):
     """
     This function edits the moz.build file in-place
     """
 
     simple_file_line = re.compile(
         "^\s*['\"]" + unnormalized_filename_to_remove + "['\"],*$"
     )
@@ -812,21 +837,85 @@ def validate_directory_parameters(moz_ya
 
     # Ensure they are provided with trailing slashes
     moz_yaml_dir += "/" if moz_yaml_dir[-1] != "/" else ""
     vendoring_dir += "/" if vendoring_dir[-1] != "/" else ""
 
     return (moz_yaml_dir, vendoring_dir)
 
 
+HAS_ABSOLUTE = 1
+HAS_TRAVERSE_CHILD = 2
+HAS_RELATIVE_CHILD = 2  # behaves the same as above
+
+
+def get_file_reference_modes(source_assignments):
+    """
+    Given a set of source assignments, this function traverses through the
+    files references in those assignments to see if the files are referenced
+    using absolute paths (relative to gecko root) or relative paths.
+
+    It will return all the modes that are seen.
+    """
+    modes = set()
+
+    for key, list_of_normalized_filenames in source_assignments.items():
+        if not list_of_normalized_filenames:
+            continue
+        for file in list_of_normalized_filenames:
+            if file[0] == "/":
+                modes.add(HAS_ABSOLUTE)
+            elif file[0:2] == "../":
+                modes.add(HAS_TRAVERSE_CHILD)
+            else:
+                modes.add(HAS_RELATIVE_CHILD)
+    return modes
+
+
+def renormalize_filename(
+    mode,
+    moz_yaml_dir,
+    vendoring_dir,
+    normalized_mozbuild_filename,
+    normalized_filename_to_act_on,
+):
+    """
+    Edit the normalized_filename_to_act_on to either
+     - Make it an absolute path from gecko root (if we're in that mode)
+     - Get a relative path from the vendoring directory to the yaml directory where the
+       moz.build file is (If they are in separate directories)
+    """
+    if mode == HAS_ABSOLUTE:
+        # If the moz.build file uses absolute paths from the gecko root, this is easy,
+        # all we need to do is prepend a '/' to indicate that
+        normalized_filename_to_act_on = "/" + normalized_filename_to_act_on
+    elif moz_yaml_dir and vendoring_dir:
+        # To re-normalize it in this case, we:
+        #   (a) get the path from gecko_root to the moz.build file we are considering
+        #   (b) compute a relative path from that directory to the file we want
+        #   (c) because (b) started at the moz.build file's directory, it is not
+        #       normalized to the gecko_root. Therefore we need to normalize it by
+        #       prepending (a)
+        a = os.path.dirname(normalized_mozbuild_filename)
+        b = os.path.relpath(normalized_filename_to_act_on, start=a)
+        c = os.path.join(a, b)
+        normalized_filename_to_act_on = c
+
+    return normalized_filename_to_act_on
+
+
 #########################################################
 # PUBLIC API
 #########################################################
 
 
+class MozBuildRewriteException(Exception):
+    pass
+
+
 def remove_file_from_moz_build_file(
     normalized_filename_to_remove, moz_yaml_dir=None, vendoring_dir=None
 ):
     """
     Given a filename, relative to the gecko root (aka normalized), we look for the nearest
     moz.build file, look in that file for the file, and then edit that moz.build file in-place.
     """
     moz_yaml_dir, vendoring_dir = validate_directory_parameters(
@@ -838,48 +927,53 @@ def remove_file_from_moz_build_file(
     )
 
     # normalized_filename_to_remove is the path from gecko_root to the file. However, if we vendor
     #    separate from moz.yaml; then 'normalization' gets more complicated as explained above.
     # We will need to re-normalize the filename for each moz.build file we want to test, so we
     #    save the original normalized filename for this purpose
     original_normalized_filename_to_remove = normalized_filename_to_remove
 
+    # These are the two header file types specified in vendor_manifest.py > source_suffixes
+    if normalized_filename_to_remove.endswith(
+        ".h"
+    ) or normalized_filename_to_remove.endswith(".hpp"):
+        assignment_type = "header-files"
+    else:
+        assignment_type = "source-files"
+
     for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames:
-        if moz_yaml_dir and vendoring_dir:
-            # Here is where we re-normalize the filename. For the rest of the algorithm, we
-            #    will be using this re-normalized filename.
-            # To re-normalize it, we:
-            #   (a) get the path from gecko_root to the moz.build file we are considering
-            #   (b) compute a relative path from that directory to the file we want
-            #   (c) because (b) started at the moz.build file's directory, it is not
-            #       normalized to the gecko_root. Therefore we need to normalize it by
-            #       prepending (a)
-            a = os.path.dirname(normalized_mozbuild_filename)
-            b = os.path.relpath(normalized_filename_to_remove, start=a)
-            c = os.path.join(a, b)
-            normalized_filename_to_remove = c
-
         source_assignments, root, code = mozbuild_file_to_source_assignments(
-            normalized_mozbuild_filename
+            normalized_mozbuild_filename, assignment_type
         )
 
-        for key in source_assignments:
-            normalized_source_filename_list = source_assignments[key]
-            if normalized_filename_to_remove in normalized_source_filename_list:
-                unnormalized_filename_to_remove = unnormalize_filename(
-                    normalized_mozbuild_filename, normalized_filename_to_remove
-                )
-                edit_moz_build_file_to_remove_file(
-                    normalized_mozbuild_filename, unnormalized_filename_to_remove
-                )
-                return
+        modes = get_file_reference_modes(source_assignments)
+
+        for mode in modes:
+            normalized_filename_to_remove = renormalize_filename(
+                mode,
+                moz_yaml_dir,
+                vendoring_dir,
+                normalized_mozbuild_filename,
+                normalized_filename_to_remove,
+            )
+
+            for key in source_assignments:
+                normalized_source_filename_list = source_assignments[key]
+                if normalized_filename_to_remove in normalized_source_filename_list:
+                    unnormalized_filename_to_remove = unnormalize_filename(
+                        normalized_mozbuild_filename, normalized_filename_to_remove
+                    )
+                    edit_moz_build_file_to_remove_file(
+                        normalized_mozbuild_filename, unnormalized_filename_to_remove
+                    )
+                    return
 
         normalized_filename_to_remove = original_normalized_filename_to_remove
-    raise Exception("Could not remove file")
+    raise MozBuildRewriteException("Could not remove " + normalized_filename_to_remove)
 
 
 def add_file_to_moz_build_file(
     normalized_filename_to_add, moz_yaml_dir=None, vendoring_dir=None
 ):
     """
     This is the overall function. Given a filename, relative to the gecko root (aka normalized),
     we look for a moz.build file to add it to, look for the place in the moz.build file to add it,
@@ -898,74 +992,90 @@ def add_file_to_moz_build_file(
     )
 
     # normalized_filename_to_add is the path from gecko_root to the file. However, if we vendor
     #    separate from moz.yaml; then 'normalization' gets more complicated as explained above.
     # We will need to re-normalize the filename for each moz.build file we want to test, so we
     #    save the original normalized filename for this purpose
     original_normalized_filename_to_add = normalized_filename_to_add
 
+    if normalized_filename_to_add.endswith(".h") or normalized_filename_to_add.endswith(
+        ".hpp"
+    ):
+        assignment_type = "header-files"
+    else:
+        assignment_type = "source-files"
+
     for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames:
-        if moz_yaml_dir and vendoring_dir:
-            # Here is where we re-normalize the filename. For the rest of the algorithm, we
-            #    will be using this re-normalized filename.
-            # To re-normalize it, we:
-            #   (a) get the path from gecko_root to the moz.build file we are considering
-            #   (b) compute a relative path from that directory to the file we want
-            #   (c) because (b) started at the moz.build file's directory, it is not
-            #       normalized to the gecko_root. Therefore we need to normalize it by
-            #       prepending (a)
-            a = os.path.dirname(normalized_mozbuild_filename)
-            b = os.path.relpath(normalized_filename_to_add, start=a)
-            c = os.path.join(a, b)
-            normalized_filename_to_add = c
-
         source_assignments, root, code = mozbuild_file_to_source_assignments(
-            normalized_mozbuild_filename
-        )
-
-        possible_assignments = find_all_posible_assignments_from_filename(
-            source_assignments, normalized_filename_to_add
+            normalized_mozbuild_filename, assignment_type
         )
 
-        if len(possible_assignments) == 0:
-            normalized_filename_to_add = original_normalized_filename_to_add
-            continue
+        modes = get_file_reference_modes(source_assignments)
+
+        for mode in modes:
+            normalized_filename_to_add = renormalize_filename(
+                mode,
+                moz_yaml_dir,
+                vendoring_dir,
+                normalized_mozbuild_filename,
+                normalized_filename_to_add,
+            )
+
+            possible_assignments = find_all_posible_assignments_from_filename(
+                source_assignments, normalized_filename_to_add
+            )
 
-        assert (
-            len(possible_assignments) > 0
-        ), "Could not find a single possible source assignment"
-        if len(possible_assignments) > 1:
-            best_guess, _ = guess_best_assignment(
-                possible_assignments, normalized_filename_to_add
-            )
-            chosen_source_assignment_location = best_guess
-        else:
-            chosen_source_assignment_location = list(possible_assignments.keys())[0]
+            if len(possible_assignments) == 0:
+                normalized_filename_to_add = original_normalized_filename_to_add
+                continue
+
+            assert (
+                len(possible_assignments) > 0
+            ), "Could not find a single possible source assignment"
+            if len(possible_assignments) > 1:
+                best_guess, _ = guess_best_assignment(
+                    possible_assignments, normalized_filename_to_add
+                )
+                chosen_source_assignment_location = best_guess
+            else:
+                chosen_source_assignment_location = list(possible_assignments.keys())[0]
 
-        guessed_list_containing_normalized_filenames = possible_assignments[
-            chosen_source_assignment_location
-        ]
+            guessed_list_containing_normalized_filenames = possible_assignments[
+                chosen_source_assignment_location
+            ]
+
+            # unnormalize filenames so we can edit the moz.build file. They rarely use full paths.
+            unnormalized_filename_to_add = unnormalize_filename(
+                normalized_mozbuild_filename, normalized_filename_to_add
+            )
+            unnormalized_list_of_files = [
+                unnormalize_filename(normalized_mozbuild_filename, f)
+                for f in guessed_list_containing_normalized_filenames
+            ]
 
-        # unnormalize filenames so we can edit the moz.build file. They rarely use full paths.
-        unnormalized_filename_to_add = unnormalize_filename(
-            normalized_mozbuild_filename, normalized_filename_to_add
-        )
-        unnormalized_list_of_files = [
-            unnormalize_filename(normalized_mozbuild_filename, f)
-            for f in guessed_list_containing_normalized_filenames
-        ]
+            # unnormalize filenames so we can edit the moz.build file. They rarely use full paths.
+            unnormalized_filename_to_add = unnormalize_filename(
+                normalized_mozbuild_filename, normalized_filename_to_add
+            )
+            unnormalized_list_of_files = [
+                unnormalize_filename(normalized_mozbuild_filename, f)
+                for f in guessed_list_containing_normalized_filenames
+            ]
 
-        edit_moz_build_file_to_add_file(
-            normalized_mozbuild_filename,
-            unnormalized_filename_to_add,
-            unnormalized_list_of_files,
-        )
-        return
-    assert False, "Could not find a single moz.build file to edit"
+            edit_moz_build_file_to_add_file(
+                normalized_mozbuild_filename,
+                unnormalized_filename_to_add,
+                unnormalized_list_of_files,
+            )
+            return
+
+    raise MozBuildRewriteException(
+        "Could not find a single moz.build file to add " + normalized_filename_to_add
+    )
 
 
 #########################################################
 # TESTING CODE
 #########################################################
 
 
 def get_all_target_filenames_normalized(all_mozbuild_filenames_normalized):
@@ -1135,16 +1245,18 @@ def test_all_third_party_files(gecko_roo
         for f in failed_matched:
             print("\t", f[0], f[1])
     print("Statistics:")
     pprint(statistics)
 
 
 if __name__ == "__main__":
     gecko_root = get_gecko_root()
+    os.chdir(gecko_root)
 
-    os.chdir(gecko_root)
     add_file_to_moz_build_file(
-        "third_party/dav1d/src/arm/32/ipred16.S", "media/libdav1d", "third_party/dav1d/"
+        "third_party/jpeg-xl/lib/include/jxl/resizable_parallel_runner.h",
+        "media/libjxl",
+        "third_party/jpeg-xl",
     )
 
     # all_mozbuild_filenames_normalized = get_all_mozbuild_filenames(gecko_root)
     # test_all_third_party_files(gecko_root, all_mozbuild_filenames_normalized)
--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
@@ -14,23 +14,24 @@ import requests
 
 import mozfile
 import mozpack.path as mozpath
 
 from mozbuild.base import MozbuildObject
 from mozbuild.vendor.rewrite_mozbuild import (
     add_file_to_moz_build_file,
     remove_file_from_moz_build_file,
+    MozBuildRewriteException,
 )
 
 DEFAULT_EXCLUDE_FILES = [".git*"]
 
 
 class VendorManifest(MozbuildObject):
-    def vendor(self, yaml_file, manifest, revision, check_for_update):
+    def vendor(self, yaml_file, manifest, revision, check_for_update, add_to_exports):
         self.manifest = manifest
         if "vendor-directory" not in self.manifest["vendoring"]:
             self.manifest["vendoring"]["vendor-directory"] = os.path.dirname(yaml_file)
 
         self.source_host = self.get_source_host()
 
         # Check that updatebot key is available for libraries with existing
         # moz.yaml files but missing updatebot information
@@ -78,17 +79,19 @@ class VendorManifest(MozbuildObject):
             logging.INFO, "vendor", {}, "Registering changes with version control."
         )
         self.repository.add_remove_files(
             self.manifest["vendoring"]["vendor-directory"], os.path.dirname(yaml_file)
         )
 
         self.log(logging.INFO, "vendor", {}, "Updating moz.build files")
         self.update_moz_build(
-            self.manifest["vendoring"]["vendor-directory"], os.path.dirname(yaml_file)
+            self.manifest["vendoring"]["vendor-directory"],
+            os.path.dirname(yaml_file),
+            add_to_exports,
         )
 
         self.log(
             logging.INFO,
             "done",
             {"revision": revision},
             "Update to version '{revision}' ready to commit.",
         )
@@ -272,54 +275,73 @@ class VendorManifest(MozbuildObject):
                     "vendor",
                     {"script": script, "run_dir": run_dir},
                     "Performing run-script action script: {script} working dir: {run_dir}",
                 )
                 self.run_process(args=[script], cwd=run_dir, log_name=script)
             else:
                 assert False, "Unknown action supplied (how did this pass validation?)"
 
-    def update_moz_build(self, vendoring_dir, moz_yaml_dir):
+    def update_moz_build(self, vendoring_dir, moz_yaml_dir, add_to_exports):
         if vendoring_dir == moz_yaml_dir:
             vendoring_dir = moz_yaml_dir = None
 
-        source_suffixes = [".cc", ".c", ".cpp", ".h", ".hpp", ".S", ".asm"]
+        # If you edit this (especially for header files) you should double check
+        # rewrite_mozbuild.py around 'assignment_type'
+        source_suffixes = [".cc", ".c", ".cpp", ".S", ".asm"]
+        header_suffixes = [".h", ".hpp"]
 
         files_removed = self.repository.get_changed_files(diff_filter="D")
         files_added = self.repository.get_changed_files(diff_filter="A")
 
         # Filter the files added to just source files we track in moz.build files.
         files_added = [
             f for f in files_added if any([f.endswith(s) for s in source_suffixes])
         ]
+        header_files_to_add = [
+            f for f in files_added if any([f.endswith(s) for s in header_suffixes])
+        ]
+        if add_to_exports:
+            files_added += header_files_to_add
+        elif header_files_to_add:
+            self.log(
+                logging.WARNING,
+                "header_files_warning",
+                {},
+                (
+                    "We found %s header files in the update, pass --add-to-exports if you want"
+                    + " to attempt to include them in EXPORTS blocks: %s"
+                )
+                % (len(header_files_to_add), header_files_to_add),
+            )
 
         self.log(
             logging.DEBUG,
             "vendor",
             {"added": len(files_added), "removed": len(files_removed)},
             "Found {added} files added and {removed} files removed.",
         )
 
         should_abort = False
         for f in files_added:
             try:
                 add_file_to_moz_build_file(f, moz_yaml_dir, vendoring_dir)
-            except Exception:
+            except MozBuildRewriteException:
                 self.log(
                     logging.ERROR,
                     "vendor",
                     {},
                     "Could not add %s to the appropriate moz.build file" % f,
                 )
                 should_abort = True
 
         for f in files_removed:
             try:
                 remove_file_from_moz_build_file(f, moz_yaml_dir, vendoring_dir)
-            except Exception:
+            except MozBuildRewriteException:
                 self.log(
                     logging.ERROR,
                     "vendor",
                     {},
                     "Could not remove %s from the appropriate moz.build file" % f,
                 )
                 should_abort = True
 
--- a/security/certverifier/CertVerifier.cpp
+++ b/security/certverifier/CertVerifier.cpp
@@ -126,30 +126,28 @@ CertVerifier::CertVerifier(OcspDownloadC
         Unused << mThirdPartyIntermediateInputs.append(input);
       }
     }
   }
 }
 
 CertVerifier::~CertVerifier() = default;
 
-Result IsCertChainRootBuiltInRoot(const UniqueCERTCertList& chain,
+Result IsCertChainRootBuiltInRoot(const nsTArray<nsTArray<uint8_t>>& chain,
                                   bool& result) {
-  if (!chain || CERT_LIST_EMPTY(chain)) {
+  if (chain.IsEmpty()) {
     return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
-  CERTCertListNode* rootNode = CERT_LIST_TAIL(chain);
-  if (!rootNode) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  const nsTArray<uint8_t>& rootBytes = chain.LastElement();
+  Input rootInput;
+  Result rv = rootInput.Init(rootBytes.Elements(), rootBytes.Length());
+  if (rv != Result::Success) {
+    return rv;
   }
-  CERTCertificate* root = rootNode->cert;
-  if (!root) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
-  }
-  return IsCertBuiltInRoot(root, result);
+  return IsCertBuiltInRoot(rootInput, result);
 }
 
 Result IsDelegatedCredentialAcceptable(const DelegatedCredentialInfo& dcInfo) {
   bool isEcdsa = dcInfo.scheme == ssl_sig_ecdsa_secp256r1_sha256 ||
                  dcInfo.scheme == ssl_sig_ecdsa_secp384r1_sha384 ||
                  dcInfo.scheme == ssl_sig_ecdsa_secp521r1_sha512;
 
   // Firefox currently does not advertise any RSA schemes for use
@@ -162,28 +160,36 @@ Result IsDelegatedCredentialAcceptable(c
 
   return Result::Success;
 }
 
 // The term "builtin root" traditionally refers to a root CA certificate that
 // has been added to the NSS trust store, because it has been approved
 // for inclusion according to the Mozilla CA policy, and might be accepted
 // by Mozilla applications as an issuer for certificates seen on the public web.
-Result IsCertBuiltInRoot(CERTCertificate* cert, bool& result) {
+Result IsCertBuiltInRoot(Input certInput, bool& result) {
   if (NS_FAILED(BlockUntilLoadableCertsLoaded())) {
     return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
 
+  CERTCertDBHandle* certDB(CERT_GetDefaultCertDB());
+  SECItem certDER(UnsafeMapInputToSECItem(certInput));
+  UniqueCERTCertificate cert(
+      CERT_NewTempCertificate(certDB, &certDER, nullptr, false, true));
+  if (!cert) {
+    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  }
+
   result = false;
 #ifdef DEBUG
   nsCOMPtr<nsINSSComponent> component(do_GetService(PSM_COMPONENT_CONTRACTID));
   if (!component) {
     return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
-  nsresult rv = component->IsCertTestBuiltInRoot(cert, &result);
+  nsresult rv = component->IsCertTestBuiltInRoot(cert.get(), &result);
   if (NS_FAILED(rv)) {
     return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
   if (result) {
     return Success;
   }
 #endif  // DEBUG
   AutoSECMODListReadLock lock;
@@ -200,17 +206,18 @@ Result IsCertBuiltInRoot(CERTCertificate
       // the builtin roots, but which also contains additional CA certificates,
       // such as CAs trusted in a local deployment.
       // We want to be able to distinguish between these two categories,
       // because a CA, which may issue certificates for the public web,
       // is expected to comply with additional requirements.
       // If the certificate has attribute CKA_NSS_MOZILLA_CA_POLICY set to true,
       // then we treat it as a "builtin root".
       if (PK11_IsPresent(slot) && PK11_HasRootCerts(slot)) {
-        CK_OBJECT_HANDLE handle = PK11_FindCertInSlot(slot, cert, nullptr);
+        CK_OBJECT_HANDLE handle =
+            PK11_FindCertInSlot(slot, cert.get(), nullptr);
         if (handle != CK_INVALID_HANDLE &&
             PK11_HasAttributeSet(slot, handle, CKA_NSS_MOZILLA_CA_POLICY,
                                  false)) {
           // Attribute was found, and is set to true
           result = true;
           break;
         }
       }
@@ -276,30 +283,30 @@ void CertVerifier::LoadKnownCTLogs() {
     mCTVerifier->AddLog(std::move(logVerifier));
   }
   // TBD: Initialize mCTDiversityPolicy with the CA dependency map
   // of the known CT logs operators.
   mCTDiversityPolicy = MakeUnique<CTDiversityPolicy>();
 }
 
 Result CertVerifier::VerifyCertificateTransparencyPolicy(
-    NSSCertDBTrustDomain& trustDomain, const UniqueCERTCertList& builtChain,
-    Input sctsFromTLS, Time time,
+    NSSCertDBTrustDomain& trustDomain,
+    const nsTArray<nsTArray<uint8_t>>& builtChain, Input sctsFromTLS, Time time,
     /*optional out*/ CertificateTransparencyInfo* ctInfo) {
   if (ctInfo) {
     ctInfo->Reset();
   }
   if (mCTMode == CertificateTransparencyMode::Disabled) {
     return Success;
   }
   if (ctInfo) {
     ctInfo->enabled = true;
   }
 
-  if (!builtChain || CERT_LIST_EMPTY(builtChain)) {
+  if (builtChain.IsEmpty()) {
     return Result::FATAL_ERROR_INVALID_ARGS;
   }
 
   Input embeddedSCTs = trustDomain.GetSCTListFromCertificate();
   if (embeddedSCTs.GetLength() > 0) {
     MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
             ("Got embedded SCT data of length %zu\n",
              static_cast<size_t>(embeddedSCTs.GetLength())));
@@ -311,66 +318,68 @@ Result CertVerifier::VerifyCertificateTr
              static_cast<size_t>(sctsFromOCSP.GetLength())));
   }
   if (sctsFromTLS.GetLength() > 0) {
     MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
             ("Got TLS SCT data of length %zu\n",
              static_cast<size_t>(sctsFromTLS.GetLength())));
   }
 
-  CERTCertListNode* endEntityNode = CERT_LIST_HEAD(builtChain);
-  if (!endEntityNode || CERT_LIST_END(endEntityNode, builtChain)) {
-    return Result::FATAL_ERROR_INVALID_ARGS;
-  }
-  CERTCertListNode* issuerNode = CERT_LIST_NEXT(endEntityNode);
-  if (!issuerNode || CERT_LIST_END(issuerNode, builtChain)) {
+  if (builtChain.Length() == 1) {
     // Issuer certificate is required for SCT verification.
     // If we've arrived here, we probably have a "trust chain" with only one
     // certificate (i.e. a self-signed end-entity that has been set as a trust
     // anchor either by a third party modifying our trust DB or via the
     // enterprise roots feature). If this is the case, certificate transparency
     // information will probably not be present, and it certainly won't verify
     // correctly. To simplify things, we return an empty CTVerifyResult and a
     // "not enough SCTs" CTPolicyCompliance result.
     if (ctInfo) {
       CTVerifyResult emptyResult;
       ctInfo->verifyResult = std::move(emptyResult);
       ctInfo->policyCompliance = CTPolicyCompliance::NotEnoughScts;
     }
     return Success;
   }
 
-  CERTCertificate* endEntity = endEntityNode->cert;
-  CERTCertificate* issuer = issuerNode->cert;
-  if (!endEntity || !issuer) {
-    return Result::FATAL_ERROR_INVALID_ARGS;
-  }
-
-  if (endEntity->subjectName) {
-    MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
-            ("Verifying CT Policy compliance of subject %s\n",
-             endEntity->subjectName));
-  }
-
-  Input endEntityDER;
+  const nsTArray<uint8_t>& endEntityBytes = builtChain.ElementAt(0);
+  Input endEntityInput;
   Result rv =
-      endEntityDER.Init(endEntity->derCert.data, endEntity->derCert.len);
+      endEntityInput.Init(endEntityBytes.Elements(), endEntityBytes.Length());
   if (rv != Success) {
     return rv;
   }
 
-  Input issuerPublicKeyDER;
-  rv = issuerPublicKeyDER.Init(issuer->derPublicKey.data,
-                               issuer->derPublicKey.len);
+  const nsTArray<uint8_t>& issuerBytes = builtChain.ElementAt(1);
+  Input issuerInput;
+  rv = issuerInput.Init(issuerBytes.Elements(), issuerBytes.Length());
+  if (rv != Success) {
+    return rv;
+  }
+  BackCert issuerBackCert(issuerInput, EndEntityOrCA::MustBeCA, nullptr);
+  rv = issuerBackCert.Init();
   if (rv != Success) {
     return rv;
   }
+  Input issuerPublicKeyInput = issuerBackCert.GetSubjectPublicKeyInfo();
+
+  SECItem endEntityDERItem = UnsafeMapInputToSECItem(endEntityInput);
+  UniqueCERTCertificate endEntityCert(CERT_NewTempCertificate(
+      CERT_GetDefaultCertDB(), &endEntityDERItem, nullptr, false, true));
+  if (!endEntityCert) {
+    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  }
+  if (endEntityCert->subjectName) {
+    MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
+            ("Verifying CT Policy compliance of subject %s\n",
+             endEntityCert->subjectName));
+  }
 
   CTVerifyResult result;
-  rv = mCTVerifier->Verify(endEntityDER, issuerPublicKeyDER, embeddedSCTs,
+  rv = mCTVerifier->Verify(endEntityInput, issuerPublicKeyInput, embeddedSCTs,
                            sctsFromOCSP, sctsFromTLS, time, result);
   if (rv != Success) {
     MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
             ("SCT verification failed with fatal error %" PRId32 "\n",
              static_cast<uint32_t>(rv)));
     return rv;
   }
 
@@ -409,30 +418,31 @@ Result CertVerifier::VerifyCertificateTr
          "invalidSignature=%zu invalidTimestamp=%zu "
          "decodingErrors=%zu\n",
          validCount, unknownLogCount, disqualifiedLogCount,
          invalidSignatureCount, invalidTimestampCount, result.decodingErrors));
   }
 
   PRTime notBefore;
   PRTime notAfter;
-  if (CERT_GetCertTimes(endEntity, &notBefore, &notAfter) != SECSuccess) {
+  if (CERT_GetCertTimes(endEntityCert.get(), &notBefore, &notAfter) !=
+      SECSuccess) {
     return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
   size_t lifetimeInMonths;
   rv = GetCertLifetimeInFullMonths(notBefore, notAfter, lifetimeInMonths);
   if (rv != Success) {
     return rv;
   }
 
   CTLogOperatorList allOperators;
   GetCTLogOperatorsFromVerifiedSCTList(result.verifiedScts, allOperators);
 
   CTLogOperatorList dependentOperators;
-  rv = mCTDiversityPolicy->GetDependentOperators(builtChain.get(), allOperators,
+  rv = mCTDiversityPolicy->GetDependentOperators(builtChain, allOperators,
                                                  dependentOperators);
   if (rv != Success) {
     return rv;
   }
 
   CTPolicyEnforcer ctPolicyEnforcer;
   CTPolicyCompliance ctPolicyCompliance;
   ctPolicyEnforcer.CheckCompliance(result.verifiedScts, lifetimeInMonths,
@@ -461,17 +471,17 @@ bool CertVerifier::SHA1ModeMoreRestricti
       MOZ_ASSERT(false, "unexpected SHA1Mode type");
       return true;
   }
 }
 
 Result CertVerifier::VerifyCert(
     CERTCertificate* cert, SECCertificateUsage usage, Time time, void* pinArg,
     const char* hostname,
-    /*out*/ UniqueCERTCertList& builtChain,
+    /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain,
     /*optional*/ const Flags flags,
     /*optional*/ const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates,
     /*optional*/ const Maybe<nsTArray<uint8_t>>& stapledOCSPResponseArg,
     /*optional*/ const Maybe<nsTArray<uint8_t>>& sctsFromTLS,
     /*optional*/ const OriginAttributes& originAttributes,
     /*optional out*/ EVStatus* evStatus,
     /*optional out*/ OCSPStaplingStatus* ocspStaplingStatus,
     /*optional out*/ KeySizeStatus* keySizeStatus,
@@ -883,17 +893,17 @@ static bool CertIsSelfSigned(const Uniqu
   rv = VerifySignedData(trustDomain, backCert.GetSignedData(),
                         backCert.GetSubjectPublicKeyInfo());
   return rv == Success;
 }
 
 Result CertVerifier::VerifySSLServerCert(
     const UniqueCERTCertificate& peerCert, Time time,
     /*optional*/ void* pinarg, const nsACString& hostname,
-    /*out*/ UniqueCERTCertList& builtChain,
+    /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain,
     /*optional*/ Flags flags,
     /*optional*/ const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates,
     /*optional*/ const Maybe<nsTArray<uint8_t>>& stapledOCSPResponse,
     /*optional*/ const Maybe<nsTArray<uint8_t>>& sctsFromTLS,
     /*optional*/ const Maybe<DelegatedCredentialInfo>& dcInfo,
     /*optional*/ const OriginAttributes& originAttributes,
     /*optional out*/ EVStatus* evStatus,
     /*optional out*/ OCSPStaplingStatus* ocspStaplingStatus,
--- a/security/certverifier/CertVerifier.h
+++ b/security/certverifier/CertVerifier.h
@@ -154,17 +154,17 @@ class CertVerifier {
     OCSP_STAPLING_INVALID = 4,
   };
 
   // *evOidPolicy == SEC_OID_UNKNOWN means the cert is NOT EV
   // Only one usage per verification is supported.
   mozilla::pkix::Result VerifyCert(
       CERTCertificate* cert, SECCertificateUsage usage,
       mozilla::pkix::Time time, void* pinArg, const char* hostname,
-      /*out*/ UniqueCERTCertList& builtChain, Flags flags = 0,
+      /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain, Flags flags = 0,
       /*optional in*/
       const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates = Nothing(),
       /*optional in*/ const Maybe<nsTArray<uint8_t>>& stapledOCSPResponseArg =
           Nothing(),
       /*optional in*/ const Maybe<nsTArray<uint8_t>>& sctsFromTLS = Nothing(),
       /*optional in*/ const OriginAttributes& originAttributes =
           OriginAttributes(),
       /*optional out*/ EVStatus* evStatus = nullptr,
@@ -172,17 +172,17 @@ class CertVerifier {
       /*optional out*/ KeySizeStatus* keySizeStatus = nullptr,
       /*optional out*/ SHA1ModeResult* sha1ModeResult = nullptr,
       /*optional out*/ PinningTelemetryInfo* pinningTelemetryInfo = nullptr,
       /*optional out*/ CertificateTransparencyInfo* ctInfo = nullptr);
 
   mozilla::pkix::Result VerifySSLServerCert(
       const UniqueCERTCertificate& peerCert, mozilla::pkix::Time time,
       void* pinarg, const nsACString& hostname,
-      /*out*/ UniqueCERTCertList& builtChain,
+      /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain,
       /*optional*/ Flags flags = 0,
       /*optional*/ const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates =
           Nothing(),
       /*optional*/ const Maybe<nsTArray<uint8_t>>& stapledOCSPResponse =
           Nothing(),
       /*optional*/ const Maybe<nsTArray<uint8_t>>& sctsFromTLS = Nothing(),
       /*optional*/ const Maybe<DelegatedCredentialInfo>& dcInfo = Nothing(),
       /*optional*/ const OriginAttributes& originAttributes =
@@ -251,28 +251,29 @@ class CertVerifier {
 
   // We only have a forward declarations of these classes (see above)
   // so we must allocate dynamically.
   UniquePtr<mozilla::ct::MultiLogCTVerifier> mCTVerifier;
   UniquePtr<mozilla::ct::CTDiversityPolicy> mCTDiversityPolicy;
 
   void LoadKnownCTLogs();
   mozilla::pkix::Result VerifyCertificateTransparencyPolicy(
-      NSSCertDBTrustDomain& trustDomain, const UniqueCERTCertList& builtChain,
+      NSSCertDBTrustDomain& trustDomain,
+      const nsTArray<nsTArray<uint8_t>>& builtChain,
       mozilla::pkix::Input sctsFromTLS, mozilla::pkix::Time time,
       /*optional out*/ CertificateTransparencyInfo* ctInfo);
 
   // Returns true if the configured SHA1 mode is more restrictive than the given
   // mode. SHA1Mode::Forbidden is more restrictive than any other mode except
   // Forbidden. Next is ImportedRoot, then ImportedRootOrBefore2016, then
   // Allowed. (A mode is never more restrictive than itself.)
   bool SHA1ModeMoreRestrictiveThanGivenMode(SHA1Mode mode);
 };
 
-mozilla::pkix::Result IsCertBuiltInRoot(CERTCertificate* cert, bool& result);
+mozilla::pkix::Result IsCertBuiltInRoot(pkix::Input certInput, bool& result);
 mozilla::pkix::Result CertListContainsExpectedKeys(const CERTCertList* certList,
                                                    const char* hostname,
                                                    mozilla::pkix::Time time);
 
 }  // namespace psm
 }  // namespace mozilla
 
 #endif  // CertVerifier_h
--- a/security/certverifier/NSSCertDBTrustDomain.cpp
+++ b/security/certverifier/NSSCertDBTrustDomain.cpp
@@ -17,24 +17,26 @@
 #include "cert.h"
 #include "cert_storage/src/cert_storage.h"
 #include "certdb.h"
 #include "mozilla/AppShutdown.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Casting.h"
 #include "mozilla/PodOperations.h"
 #include "mozilla/Services.h"
+#include "mozilla/SyncRunnable.h"
 #include "mozilla/TimeStamp.h"
 #include "mozilla/Unused.h"
 #include "mozpkix/Result.h"
 #include "mozpkix/pkix.h"
 #include "mozpkix/pkixnss.h"
 #include "mozpkix/pkixutil.h"
 #include "nsCRTGlue.h"
 #include "nsIObserverService.h"
+#include "nsNetCID.h"
 #include "nsNSSCertHelper.h"
 #include "nsNSSCertificate.h"
 #include "nsNSSCertificateDB.h"
 #include "nsPrintfCString.h"
 #include "nsServiceManagerUtils.h"
 #include "nsThreadUtils.h"
 #include "nss.h"
 #include "pk11pub.h"
@@ -68,17 +70,17 @@ NSSCertDBTrustDomain::NSSCertDBTrustDoma
     TimeDuration ocspTimeoutHard, uint32_t certShortLifetimeInDays,
     unsigned int minRSABits, ValidityCheckingMode validityCheckingMode,
     CertVerifier::SHA1Mode sha1Mode, NetscapeStepUpPolicy netscapeStepUpPolicy,
     CRLiteMode crliteMode, uint64_t crliteCTMergeDelaySeconds,
     const OriginAttributes& originAttributes,
     const Vector<Input>& thirdPartyRootInputs,
     const Vector<Input>& thirdPartyIntermediateInputs,
     const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates,
-    /*out*/ UniqueCERTCertList& builtChain,
+    /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain,
     /*optional*/ PinningTelemetryInfo* pinningTelemetryInfo,
     /*optional*/ const char* hostname)
     : mCertDBTrustType(certDBTrustType),
       mOCSPFetching(ocspFetching),
       mOCSPCache(ocspCache),
       mPinArg(pinArg),
       mOCSPTimeoutSoft(ocspTimeoutSoft),
       mOCSPTimeoutHard(ocspTimeoutHard),
@@ -1086,120 +1088,147 @@ SECStatus GetCertDistrustAfterValue(cons
   return DER_DecodeTimeChoice(&distrustTime, distrustItem);
 }
 
 SECStatus GetCertNotBeforeValue(const CERTCertificate* cert,
                                 PRTime& distrustTime) {
   return DER_DecodeTimeChoice(&distrustTime, &cert->validity.notBefore);
 }
 
-nsresult isDistrustedCertificateChain(const UniqueCERTCertList& certList,
-                                      const SECTrustType certDBTrustType,
-                                      bool& isDistrusted) {
+nsresult isDistrustedCertificateChain(
+    const nsTArray<nsTArray<uint8_t>>& certArray,
+    const SECTrustType certDBTrustType, bool& isDistrusted) {
+  if (certArray.Length() == 0) {
+    return NS_ERROR_FAILURE;
+  }
+
   // Set the default result to be distrusted.
   isDistrusted = true;
 
   // There is no distrust to set if the certDBTrustType is not SSL or Email.
   if (certDBTrustType != trustSSL && certDBTrustType != trustEmail) {
     isDistrusted = false;
     return NS_OK;
   }
 
-  // Allocate objects and retreive the root and end-entity certificates.
-  const CERTCertificate* certRoot = CERT_LIST_TAIL(certList)->cert;
-  const CERTCertificate* certLeaf = CERT_LIST_HEAD(certList)->cert;
+  SECStatus runnableRV = SECFailure;
 
-  // Set isDistrusted to false if there is no distrust for the root.
-  if (!certRoot->distrust) {
-    isDistrusted = false;
-    return NS_OK;
-  }
+  RefPtr<Runnable> isDistrustedChainTask =
+      NS_NewRunnableFunction("isDistrustedCertificateChain", [&]() {
+        // Allocate objects and retrieve the root and end-entity certificates.
+        CERTCertDBHandle* certDB(CERT_GetDefaultCertDB());
+        const nsTArray<uint8_t>& certRootDER = certArray.LastElement();
+        SECItem certRootDERItem = {
+            siBuffer, const_cast<unsigned char*>(certRootDER.Elements()),
+            AssertedCast<unsigned int>(certRootDER.Length())};
+        UniqueCERTCertificate certRoot(CERT_NewTempCertificate(
+            certDB, &certRootDERItem, nullptr, false, true));
+        if (!certRoot) {
+          runnableRV = SECFailure;
+          return;
+        }
+        const nsTArray<uint8_t>& certLeafDER = certArray.ElementAt(0);
+        SECItem certLeafDERItem = {
+            siBuffer, const_cast<unsigned char*>(certLeafDER.Elements()),
+            AssertedCast<unsigned int>(certLeafDER.Length())};
+        UniqueCERTCertificate certLeaf(CERT_NewTempCertificate(
+            certDB, &certLeafDERItem, nullptr, false, true));
+        if (!certLeaf) {
+          runnableRV = SECFailure;
+          return;
+        }
+
+        // Set isDistrusted to false if there is no distrust for the root.
+        if (!certRoot->distrust) {
+          isDistrusted = false;
+          runnableRV = SECSuccess;
+          return;
+        }
 
-  // Create a pointer to refer to the selected distrust struct.
-  SECItem* distrustPtr = nullptr;
-  if (certDBTrustType == trustSSL) {
-    distrustPtr = &certRoot->distrust->serverDistrustAfter;
-  }
-  if (certDBTrustType == trustEmail) {
-    distrustPtr = &certRoot->distrust->emailDistrustAfter;
-  }
+        // Create a pointer to refer to the selected distrust struct.
+        SECItem* distrustPtr = nullptr;
+        if (certDBTrustType == trustSSL) {
+          distrustPtr = &certRoot->distrust->serverDistrustAfter;
+        }
+        if (certDBTrustType == trustEmail) {
+          distrustPtr = &certRoot->distrust->emailDistrustAfter;
+        }
+
+        // Get validity for the current end-entity certificate
+        // and get the distrust field for the root certificate.
+        PRTime certRootDistrustAfter;
+        PRTime certLeafNotBefore;
 
-  // Get validity for the current end-entity certificate
-  // and get the distrust field for the root certificate.
-  PRTime certRootDistrustAfter;
-  PRTime certLeafNotBefore;
+        runnableRV =
+            GetCertDistrustAfterValue(distrustPtr, certRootDistrustAfter);
+        if (runnableRV != SECSuccess) {
+          return;
+        }
+
+        runnableRV = GetCertNotBeforeValue(certLeaf.get(), certLeafNotBefore);
+        if (runnableRV != SECSuccess) {
+          return;
+        }
 
-  SECStatus rv = GetCertDistrustAfterValue(distrustPtr, certRootDistrustAfter);
-  if (rv != SECSuccess) {
+        // Compare the validity of the end-entity certificate with
+        // the distrust value of the root.
+        if (certLeafNotBefore <= certRootDistrustAfter) {
+          isDistrusted = false;
+        }
+
+        runnableRV = SECSuccess;
+      });
+  nsCOMPtr<nsIEventTarget> socketThread(
+      do_GetService(NS_SOCKETTRANSPORTSERVICE_CONTRACTID));
+  if (!socketThread) {
     return NS_ERROR_FAILURE;
   }
-
-  rv = GetCertNotBeforeValue(certLeaf, certLeafNotBefore);
-  if (rv != SECSuccess) {
+  nsresult rv =
+      SyncRunnable::DispatchToThread(socketThread, isDistrustedChainTask);
+  if (NS_FAILED(rv) || runnableRV != SECSuccess) {
     return NS_ERROR_FAILURE;
   }
-
-  // Compare the validity of the end-entity certificate with
-  // the distrust value of the root.
-  if (certLeafNotBefore <= certRootDistrustAfter) {
-    isDistrusted = false;
-  }
-
   return NS_OK;
 }
 
-Result NSSCertDBTrustDomain::IsChainValid(const DERArray& certArray, Time time,
+Result NSSCertDBTrustDomain::IsChainValid(const DERArray& reversedDERArray,
+                                          Time time,
                                           const CertPolicyId& requiredPolicy) {
   MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
           ("NSSCertDBTrustDomain: IsChainValid"));
 
-  UniqueCERTCertList certList;
-  SECStatus srv =
-      ConstructCERTCertListFromReversedDERArray(certArray, certList);
-  if (srv != SECSuccess) {
-    return MapPRErrorCodeToResult(PR_GetError());
+  size_t numCerts = reversedDERArray.GetLength();
+  if (numCerts < 1) {
+    return Result::FATAL_ERROR_LIBRARY_FAILURE;
   }
-  if (CERT_LIST_EMPTY(certList)) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  nsTArray<nsTArray<uint8_t>> certArray;
+  for (size_t i = numCerts; i > 0; --i) {
+    const Input* derInput = reversedDERArray.GetDER(i - 1);
+    certArray.EmplaceBack(derInput->UnsafeGetData(), derInput->GetLength());
   }
 
-  // Modernization in-progress: Keep certList as a CERTCertList for storage into
-  // the mBuiltChain variable at the end.
-  nsTArray<RefPtr<nsIX509Cert>> nssCertList;
-  nsresult nsrv = nsNSSCertificateDB::ConstructCertArrayFromUniqueCertList(
-      certList, nssCertList);
+  bool isBuiltInRoot = false;
 
-  if (NS_FAILED(nsrv)) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  const nsTArray<uint8_t>& rootBytes = certArray.LastElement();
+  Input rootInput;
+  Result rv = rootInput.Init(rootBytes.Elements(), rootBytes.Length());
+  if (rv != Success) {
+    return rv;
   }
-  nsCOMPtr<nsIX509Cert> rootCert;
-  nsrv = nsNSSCertificate::GetRootCertificate(nssCertList, rootCert);
-  if (NS_FAILED(nsrv)) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
+  rv = IsCertBuiltInRoot(rootInput, isBuiltInRoot);
+  if (rv != Result::Success) {
+    return rv;
   }
-  UniqueCERTCertificate root(rootCert->GetCert());
-  if (!root) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
-  }
-  bool isBuiltInRoot = false;
-  nsrv = rootCert->GetIsBuiltInRoot(&isBuiltInRoot);
-  if (NS_FAILED(nsrv)) {
-    return Result::FATAL_ERROR_LIBRARY_FAILURE;
-  }
+  nsresult nsrv;
   // If mHostname isn't set, we're not verifying in the context of a TLS
   // handshake, so don't verify key pinning in those cases.
   if (mHostname) {
     nsTArray<Span<const uint8_t>> derCertSpanList;
-    size_t numCerts = certArray.GetLength();
-    for (size_t i = numCerts; i > 0; --i) {
-      const Input* der = certArray.GetDER(i - 1);
-      if (!der) {
-        return Result::FATAL_ERROR_LIBRARY_FAILURE;
-      }
-      derCertSpanList.EmplaceBack(der->UnsafeGetData(), der->GetLength());
+    for (const auto& certDER : certArray) {
+      derCertSpanList.EmplaceBack(certDER.Elements(), certDER.Length());
     }
 
     bool chainHasValidPins;
     nsrv = PublicKeyPinningService::ChainHasValidPins(
         derCertSpanList, mHostname, time, isBuiltInRoot, chainHasValidPins,
         mPinningTelemetryInfo);
     if (NS_FAILED(nsrv)) {
       return Result::FATAL_ERROR_LIBRARY_FAILURE;
@@ -1209,56 +1238,64 @@ Result NSSCertDBTrustDomain::IsChainVali
     }
   }
 
   // Check that the childs' certificate NotBefore date is anterior to
   // the NotAfter value of the parent when the root is a builtin.
   if (isBuiltInRoot) {
     bool isDistrusted;
     nsrv =
-        isDistrustedCertificateChain(certList, mCertDBTrustType, isDistrusted);
+        isDistrustedCertificateChain(certArray, mCertDBTrustType, isDistrusted);
     if (NS_FAILED(nsrv)) {
       return Result::FATAL_ERROR_LIBRARY_FAILURE;
     }
     if (isDistrusted) {
       return Result::ERROR_UNTRUSTED_ISSUER;
     }
   }
 
   // See bug 1434300. If the root is a Symantec root, see if we distrust this
   // path. Since we already have the root available, we can check that cheaply
   // here before proceeding with the rest of the algorithm.
 
   // This algorithm only applies if we are verifying in the context of a TLS
   // handshake. To determine this, we check mHostname: If it isn't set, this is
   // not TLS, so don't run the algorithm.
-  nsTArray<uint8_t> rootCertDER(root.get()->derCert.data,
-                                root.get()->derCert.len);
+  const nsTArray<uint8_t>& rootCertDER = certArray.LastElement();
   if (mHostname && CertDNIsInList(rootCertDER, RootSymantecDNs)) {
-    nsTArray<nsTArray<uint8_t>> intCerts;
-
-    nsrv = nsNSSCertificate::GetIntermediatesAsDER(nssCertList, intCerts);
-    if (NS_FAILED(nsrv)) {
+    if (numCerts <= 1) {
       // This chain is supposed to be complete, so this is an error.
       return Result::ERROR_ADDITIONAL_POLICY_CONSTRAINT_FAILED;
     }
+    nsTArray<Input> intCerts;
+
+    for (size_t i = 1; i < certArray.Length() - 1; ++i) {
+      const nsTArray<uint8_t>& certBytes = certArray.ElementAt(i);
+      Input certInput;
+      rv = certInput.Init(certBytes.Elements(), certBytes.Length());
+      if (rv != Success) {
+        return Result::FATAL_ERROR_LIBRARY_FAILURE;
+      }
+
+      intCerts.EmplaceBack(certInput);
+    }
 
     bool isDistrusted = false;
     nsrv = CheckForSymantecDistrust(intCerts, RootAppleAndGoogleSPKIs,
                                     isDistrusted);
     if (NS_FAILED(nsrv)) {
       return Result::FATAL_ERROR_LIBRARY_FAILURE;
     }
     if (isDistrusted) {
       mSawDistrustedCAByPolicyError = true;
       return Result::ERROR_ADDITIONAL_POLICY_CONSTRAINT_FAILED;
     }
   }
 
-  mBuiltChain = std::move(certList);
+  mBuiltChain = std::move(certArray);
 
   return Success;
 }
 
 Result NSSCertDBTrustDomain::CheckSignatureDigestAlgorithm(
     DigestAlgorithm aAlg, EndEntityOrCA endEntityOrCA, Time notBefore) {
   // (new Date("2016-01-01T00:00:00Z")).getTime() / 1000
   static const Time JANUARY_FIRST_2016 = TimeFromEpochInSeconds(1451606400);
@@ -1694,17 +1731,16 @@ void SaveIntermediateCerts(const nsTArra
     // enterprise root temporarily imported via the child mode or enterprise
     // root features. We don't want to import these because they're intended to
     // be temporary (and because importing them happens to reset their trust
     // settings, which breaks these features).
     index++;
     if (index == 1 || index == certList.Length()) {
       continue;
     }
-
     SECItem certDERItem = {siBuffer,
                            const_cast<unsigned char*>(certDER.Elements()),
                            AssertedCast<unsigned int>(certDER.Length())};
     UniqueCERTCertificate certHandle(CERT_NewTempCertificate(
         CERT_GetDefaultCertDB(), &certDERItem, nullptr, false, true));
     if (!certHandle) {
       continue;
     }
--- a/security/certverifier/NSSCertDBTrustDomain.h
+++ b/security/certverifier/NSSCertDBTrustDomain.h
@@ -131,17 +131,17 @@ class NSSCertDBTrustDomain : public mozi
       unsigned int minRSABits, ValidityCheckingMode validityCheckingMode,
       CertVerifier::SHA1Mode sha1Mode,
       NetscapeStepUpPolicy netscapeStepUpPolicy, CRLiteMode crliteMode,
       uint64_t crliteCTMergeDelaySeconds,
       const OriginAttributes& originAttributes,
       const Vector<mozilla::pkix::Input>& thirdPartyRootInputs,
       const Vector<mozilla::pkix::Input>& thirdPartyIntermediateInputs,
       const Maybe<nsTArray<nsTArray<uint8_t>>>& extraCertificates,
-      /*out*/ UniqueCERTCertList& builtChain,
+      /*out*/ nsTArray<nsTArray<uint8_t>>& builtChain,
       /*optional*/ PinningTelemetryInfo* pinningTelemetryInfo = nullptr,
       /*optional*/ const char* hostname = nullptr);
 
   virtual Result FindIssuer(mozilla::pkix::Input encodedIssuerName,
                             IssuerChecker& checker,
                             mozilla::pkix::Time time) override;
 
   virtual Result GetCertTrust(
@@ -253,17 +253,17 @@ class NSSCertDBTrustDomain : public mozi
   CRLiteMode mCRLiteMode;
   uint64_t mCRLiteCTMergeDelaySeconds;
   bool mSawDistrustedCAByPolicyError;
   const OriginAttributes& mOriginAttributes;
   const Vector<mozilla::pkix::Input>& mThirdPartyRootInputs;  // non-owning
   const Vector<mozilla::pkix::Input>&
       mThirdPartyIntermediateInputs;                             // non-owning
   const Maybe<nsTArray<nsTArray<uint8_t>>>& mExtraCertificates;  // non-owning
-  UniqueCERTCertList& mBuiltChain;                               // non-owning
+  nsTArray<nsTArray<uint8_t>>& mBuiltChain;                      // non-owning
   PinningTelemetryInfo* mPinningTelemetryInfo;
   const char* mHostname;  // non-owning - only used for pinning checks
   nsCOMPtr<nsICertStorage> mCertStorage;
   CertVerifier::OCSPStaplingStatus mOCSPStaplingStatus;
   // Certificate Transparency data extracted during certificate verification
   UniqueSECItem mSCTListFromCertificate;
   UniqueSECItem mSCTListFromOCSPStapling;
 
--- a/security/certverifier/TrustOverrideUtils.h
+++ b/security/certverifier/TrustOverrideUtils.h
@@ -50,29 +50,23 @@ static bool CertDNIsInList(const nsTArra
     if (InputsAreEqual(subject, dnInput)) {
       return true;
     }
   }
   return false;
 }
 
 template <size_t T>
-static bool CertSPKIIsInList(const nsTArray<uint8_t>& aCert,
+static bool CertSPKIIsInList(Input aCertInput,
                              const DataAndLength (&aSpkiList)[T]) {
-  Input certInput;
-  mozilla::pkix::Result rv = certInput.Init(aCert.Elements(), aCert.Length());
-  if (rv != Success) {
-    return false;
-  }
-
   // we don't use the certificate for path building, so this parameter doesn't
   // matter
   EndEntityOrCA notUsedForPaths = EndEntityOrCA::MustBeEndEntity;
-  BackCert cert(certInput, notUsedForPaths, nullptr);
-  rv = cert.Init();
+  BackCert cert(aCertInput, notUsedForPaths, nullptr);
+  mozilla::pkix::Result rv = cert.Init();
   if (rv != Success) {
     return false;
   }
 
   Input publicKey(cert.GetSubjectPublicKeyInfo());
 
   for (auto& spki : aSpkiList) {
     Input spkiInput;
@@ -130,20 +124,19 @@ static bool CertMatchesStaticData(const 
 // This accepts a pre-segmented certificate chain (e.g. SegmentCertificateChain)
 // as |intCerts|, and pre-assumes that the root has been identified
 // as being affected (this is to avoid duplicate Segment operations in the
 // NSSCertDBTrustDomain). Each of the |intCerts| is evaluated against a
 // |allowlist| of SPKI entries, and if a match is found, then this returns
 // "not distrusted." Otherwise, due to the precondition holding, the chain is
 // "distrusted."
 template <size_t T>
-static nsresult CheckForSymantecDistrust(
-    const nsTArray<nsTArray<uint8_t>>& intCerts,
-    const DataAndLength (&allowlist)[T],
-    /* out */ bool& isDistrusted) {
+static nsresult CheckForSymantecDistrust(const nsTArray<Input>& intCerts,
+                                         const DataAndLength (&allowlist)[T],
+                                         /* out */ bool& isDistrusted) {
   // PRECONDITION: The rootCert is already verified as being one of the
   // affected Symantec roots
 
   isDistrusted = true;
 
   for (const auto& cert : intCerts) {
     if (CertSPKIIsInList(cert, allowlist)) {
       isDistrusted = false;
--- a/security/certverifier/tests/gtest/TrustOverrideTest.cpp
+++ b/security/certverifier/tests/gtest/TrustOverrideTest.cpp
@@ -210,17 +210,23 @@ TEST_F(psm_TrustOverrideTest, CheckCertD
 
   EXPECT_TRUE(CertDNIsInList(caArray, OverrideCaDNs))
       << "CA should be in the DN list";
   EXPECT_FALSE(CertDNIsInList(intermediateArray, OverrideCaDNs))
       << "Int should not be in the DN list";
 }
 
 TEST_F(psm_TrustOverrideTest, CheckCertSPKIIsInList) {
-  nsTArray<uint8_t> caArray(kOverrideCaDer, sizeof(kOverrideCaDer));
-  nsTArray<uint8_t> intermediateArray(kOverrideCaIntermediateDer,
-                                      sizeof(kOverrideCaIntermediateDer));
+  mozilla::pkix::Input caInput;
+  mozilla::pkix::Result rv =
+      caInput.Init(kOverrideCaDer, sizeof(kOverrideCaDer));
+  ASSERT_TRUE(rv == Success);
 
-  EXPECT_TRUE(CertSPKIIsInList(caArray, OverrideCaSPKIs))
+  mozilla::pkix::Input intermediateInput;
+  rv = intermediateInput.Init(kOverrideCaIntermediateDer,
+                              sizeof(kOverrideCaIntermediateDer));
+  ASSERT_TRUE(rv == Success);
+
+  EXPECT_TRUE(CertSPKIIsInList(caInput, OverrideCaSPKIs))
       << "CA should be in the SPKI list";
-  EXPECT_FALSE(CertSPKIIsInList(intermediateArray, OverrideCaSPKIs))
+  EXPECT_FALSE(CertSPKIIsInList(intermediateInput, OverrideCaSPKIs))
       << "Int should not be in the SPKI list";
 }
--- a/security/ct/CTDiversityPolicy.cpp
+++ b/security/ct/CTDiversityPolicy.cpp
@@ -25,15 +25,15 @@ void GetCTLogOperatorsFromVerifiedSCTLis
     }
     if (!alreadyAdded) {
       operators.push_back(sctLogOperatorId);
     }
   }
 }
 
 Result CTDiversityPolicy::GetDependentOperators(
-    const CERTCertList* builtChain, const CTLogOperatorList& operators,
-    CTLogOperatorList& dependentOperators) {
+    const nsTArray<nsTArray<uint8_t>>& builtChain,
+    const CTLogOperatorList& operators, CTLogOperatorList& dependentOperators) {
   return Success;
 }
 
 }  // namespace ct
 }  // namespace mozilla
--- a/security/ct/CTDiversityPolicy.h
+++ b/security/ct/CTDiversityPolicy.h
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef CTDiversityPolicy_h
 #define CTDiversityPolicy_h
 
 #include "CTLog.h"
 #include "CTVerifyResult.h"
 #include "certt.h"
+#include "nsTArray.h"
 #include "mozpkix/Result.h"
 
 namespace mozilla {
 namespace ct {
 
 // Retuns the list of unique CT log operator IDs appearing in the provided
 // list of verified SCTs.
 void GetCTLogOperatorsFromVerifiedSCTList(const VerifiedSCTList& list,
@@ -25,17 +26,18 @@ void GetCTLogOperatorsFromVerifiedSCTLis
 // See CTPolicyEnforcer.h for more details.
 class CTDiversityPolicy {
  public:
   // Given a certificate chain and a set of CT log operators,
   // returns the subset of log operators that are dependent on the CA
   // issuing the certificate (as defined by the CT Policy).
   //
   // NOTE: TBD, PENDING FINALIZATION OF MOZILLA CT POLICY.
-  pkix::Result GetDependentOperators(const CERTCertList* builtChain,
-                                     const CTLogOperatorList& operators,
-                                     CTLogOperatorList& dependentOperators);
+  pkix::Result GetDependentOperators(
+      const nsTArray<nsTArray<uint8_t>>& builtChain,
+      const CTLogOperatorList& operators,
+      CTLogOperatorList& dependentOperators);
 };
 
 }  // namespace ct
 }  // namespace mozilla
 
 #endif  // CTDiversityPolicy_h
--- a/security/manager/ssl/SSLServerCertVerification.cpp
+++ b/security/manager/ssl/SSLServerCertVerification.cpp
@@ -538,17 +538,22 @@ void GatherEKUTelemetry(const UniqueCERT
 
   // Only log telemetry if the root CA is built-in
   CERTCertificate* rootCert = rootNode->cert;
   MOZ_ASSERT(rootCert);
   if (!rootCert) {
     return;
   }
   bool isBuiltIn = false;
-  Result rv = IsCertBuiltInRoot(rootCert, isBuiltIn);
+  Input rootInput;
+  Result rv = rootInput.Init(rootCert->derCert.data, rootCert->derCert.len);
+  if (rv != Result::Success) {
+    return;
+  }
+  rv = IsCertBuiltInRoot(rootInput, isBuiltIn);
   if (rv != Success || !isBuiltIn) {
     return;
   }
 
   // Find the EKU extension, if present
   bool foundEKU = false;
   SECOidTag oidTag;
   CERTCertExtension* ekuExtension = nullptr;
@@ -754,18 +759,38 @@ void GatherCertificateTransparencyTeleme
 // This function collects telemetry about certs. It will be called on one of
 // CertVerificationThread. When the socket process is used this will be called
 // on the parent process.
 static void CollectCertTelemetry(
     mozilla::pkix::Result aCertVerificationResult, EVStatus aEVStatus,
     CertVerifier::OCSPStaplingStatus aOcspStaplingStatus,
     KeySizeStatus aKeySizeStatus, SHA1ModeResult aSha1ModeResult,
     const PinningTelemetryInfo& aPinningTelemetryInfo,
-    const UniqueCERTCertList& aBuiltCertChain,
+    const nsTArray<nsTArray<uint8_t>>& aBuiltCertChain,
     const CertificateTransparencyInfo& aCertificateTransparencyInfo) {
+  UniqueCERTCertList builtCertChainList(CERT_NewCertList());
+  if (!builtCertChainList) {
+    return;
+  }
+  CERTCertDBHandle* certDB(CERT_GetDefaultCertDB());
+  for (const auto& certBytes : aBuiltCertChain) {
+    SECItem certDERItem = {siBuffer, const_cast<uint8_t*>(certBytes.Elements()),
+                           AssertedCast<unsigned int>(certBytes.Length())};
+    UniqueCERTCertificate cert(
+        CERT_NewTempCertificate(certDB, &certDERItem, nullptr, false, true));
+    if (!cert) {
+      return;
+    }
+    if (CERT_AddCertToListTail(builtCertChainList.get(), cert.get()) !=
+        SECSuccess) {
+      return;
+    }
+    Unused << cert.release();  // cert is now owned by certList.
+  }
+
   uint32_t evStatus = (aCertVerificationResult != Success) ? 0  // 0 = Failure
                       : (aEVStatus != EVStatus::EV)        ? 1  // 1 = DV
                                                            : 2;        // 2 = EV
   Telemetry::Accumulate(Telemetry::CERT_EV_STATUS, evStatus);
 
   if (aOcspStaplingStatus != CertVerifier::OCSP_STAPLING_NEVER_CHECKED) {
     Telemetry::Accumulate(Telemetry::SSL_OCSP_STAPLING, aOcspStaplingStatus);
   }
@@ -788,18 +813,18 @@ static void CollectCertTelemetry(
   if (aPinningTelemetryInfo.accumulateResult) {
     MOZ_ASSERT(aPinningTelemetryInfo.certPinningResultHistogram.isSome());
     Telemetry::Accumulate(
         aPinningTelemetryInfo.certPinningResultHistogram.value(),
         aPinningTelemetryInfo.certPinningResultBucket);
   }
 
   if (aCertVerificationResult == Success) {
-    GatherSuccessfulValidationTelemetry(aBuiltCertChain);
-    GatherCertificateTransparencyTelemetry(aBuiltCertChain,
+    GatherSuccessfulValidationTelemetry(builtCertChainList);
+    GatherCertificateTransparencyTelemetry(builtCertChainList,
                                            aEVStatus == EVStatus::EV,
                                            aCertificateTransparencyInfo);
   }
 }
 
 static void AuthCertificateSetResults(
     TransportSecurityInfo* aInfoObject, nsNSSCertificate* aCert,
     nsTArray<nsTArray<uint8_t>>&& aBuiltCertChain,
@@ -834,17 +859,17 @@ Result AuthCertificate(
     CertVerifier& certVerifier, void* aPinArg,
     const UniqueCERTCertificate& cert,
     const nsTArray<nsTArray<uint8_t>>& peerCertChain,
     const nsACString& aHostName, const OriginAttributes& aOriginAttributes,
     const Maybe<nsTArray<uint8_t>>& stapledOCSPResponse,
     const Maybe<nsTArray<uint8_t>>& sctsFromTLSExtension,
     const Maybe<DelegatedCredentialInfo>& dcInfo, uint32_t providerFlags,
     Time time, uint32_t certVerifierFlags,
-    /*out*/ UniqueCERTCertList& builtCertChain,
+    /*out*/ nsTArray<nsTArray<uint8_t>>& builtCertChain,
     /*out*/ EVStatus& evStatus,
     /*out*/ CertificateTransparencyInfo& certificateTransparencyInfo,
     /*out*/ bool& aIsCertChainRootBuiltInRoot) {
   MOZ_ASSERT(cert);
 
   CertVerifier::OCSPStaplingStatus ocspStaplingStatus =
       CertVerifier::OCSP_STAPLING_NEVER_CHECKED;
   KeySizeStatus keySizeStatus = KeySizeStatus::NeverChecked;
@@ -986,16 +1011,28 @@ PRErrorCode AuthCertificateParseResults(
   return errorCodeTrust      ? errorCodeTrust
          : errorCodeMismatch ? errorCodeMismatch
          : errorCodeTime     ? errorCodeTime
                              : aDefaultErrorCodeToReport;
 }
 
 }  // unnamed namespace
 
+static nsTArray<nsTArray<uint8_t>> CreateCertBytesArray(
+    const UniqueCERTCertList& aCertChain) {
+  nsTArray<nsTArray<uint8_t>> certsBytes;
+  for (CERTCertListNode* n = CERT_LIST_HEAD(aCertChain);
+       !CERT_LIST_END(n, aCertChain); n = CERT_LIST_NEXT(n)) {
+    nsTArray<uint8_t> certBytes;
+    certBytes.AppendElements(n->cert->derCert.data, n->cert->derCert.len);
+    certsBytes.AppendElement(std::move(certBytes));
+  }
+  return certsBytes;
+}
+
 /*static*/
 SECStatus SSLServerCertVerificationJob::Dispatch(
     uint64_t addrForLogging, void* aPinArg,
     const UniqueCERTCertificate& serverCert,
     nsTArray<nsTArray<uint8_t>>&& peerCertChain, const nsACString& aHostName,
     int32_t aPort, const OriginAttributes& aOriginAttributes,
     Maybe<nsTArray<uint8_t>>& stapledOCSPResponse,
     Maybe<nsTArray<uint8_t>>& sctsFromTLSExtension,
@@ -1048,36 +1085,33 @@ SSLServerCertVerificationJob::Run() {
 
   RefPtr<SharedCertVerifier> certVerifier(GetDefaultCertVerifier());
   if (!certVerifier) {
     PR_SetError(SEC_ERROR_NOT_INITIALIZED, 0);
     return NS_OK;
   }
 
   TimeStamp jobStartTime = TimeStamp::Now();
-  UniqueCERTCertList builtCertChain;
   EVStatus evStatus;
   CertificateTransparencyInfo certificateTransparencyInfo;
   bool isCertChainRootBuiltInRoot = false;
+  nsTArray<nsTArray<uint8_t>> certBytesArray;
   Result rv = AuthCertificate(
       *certVerifier, mPinArg, mCert, mPeerCertChain, mHostName,
       mOriginAttributes, mStapledOCSPResponse, mSCTsFromTLSExtension, mDCInfo,
-      mProviderFlags, mTime, mCertVerifierFlags, builtCertChain, evStatus,
+      mProviderFlags, mTime, mCertVerifierFlags, certBytesArray, evStatus,
       certificateTransparencyInfo, isCertChainRootBuiltInRoot);
 
   RefPtr<nsNSSCertificate> nsc = nsNSSCertificate::Create(mCert.get());
-  nsTArray<nsTArray<uint8_t>> certBytesArray;
   if (rv == Success) {
     Telemetry::AccumulateTimeDelta(
         Telemetry::SSL_SUCCESFUL_CERT_VALIDATION_TIME_MOZILLAPKIX, jobStartTime,
         TimeStamp::Now());
     Telemetry::Accumulate(Telemetry::SSL_CERT_ERROR_OVERRIDES, 1);
 
-    certBytesArray =
-        TransportSecurityInfo::CreateCertBytesArray(builtCertChain);
     mResultTask->Dispatch(
         nsc, std::move(certBytesArray), std::move(mPeerCertChain),
         TransportSecurityInfo::ConvertCertificateTransparencyInfoToStatus(
             certificateTransparencyInfo),
         evStatus, true, 0, 0, isCertChainRootBuiltInRoot, mProviderFlags);
     return NS_OK;
   }
 
@@ -1197,17 +1231,17 @@ SECStatus AuthCertificateHook(void* arg,
 
   UniqueCERTCertList peerCertChain(SSL_PeerCertificateChain(fd));
   if (!peerCertChain) {
     PR_SetError(PR_INVALID_STATE_ERROR, 0);
     return SECFailure;
   }
 
   nsTArray<nsTArray<uint8_t>> peerCertsBytes =
-      TransportSecurityInfo::CreateCertBytesArray(peerCertChain);
+      CreateCertBytesArray(peerCertChain);
 
   // SSL_PeerStapledOCSPResponses will never return a non-empty response if
   // OCSP stapling wasn't enabled because libssl wouldn't have let the server
   // return a stapled OCSP response.
   // We don't own these pointers.
   const SECItemArray* csa = SSL_PeerStapledOCSPResponses(fd);
   Maybe<nsTArray<uint8_t>> stapledOCSPResponse;
   // we currently only support single stapled responses
--- a/security/manager/ssl/TransportSecurityInfo.cpp
+++ b/security/manager/ssl/TransportSecurityInfo.cpp
@@ -1148,29 +1148,16 @@ uint16_t TransportSecurityInfo::ConvertC
     case CTPolicyCompliance::Unknown:
     default:
       MOZ_ASSERT_UNREACHABLE("Unexpected CTPolicyCompliance type");
   }
 
   return nsITransportSecurityInfo::CERTIFICATE_TRANSPARENCY_NOT_APPLICABLE;
 }
 
-// static
-nsTArray<nsTArray<uint8_t>> TransportSecurityInfo::CreateCertBytesArray(
-    const UniqueCERTCertList& aCertChain) {
-  nsTArray<nsTArray<uint8_t>> certsBytes;
-  for (CERTCertListNode* n = CERT_LIST_HEAD(aCertChain);
-       !CERT_LIST_END(n, aCertChain); n = CERT_LIST_NEXT(n)) {
-    nsTArray<uint8_t> certBytes;
-    certBytes.AppendElements(n->cert->derCert.data, n->cert->derCert.len);
-    certsBytes.AppendElement(std::move(certBytes));
-  }
-  return certsBytes;
-}
-
 NS_IMETHODIMP
 TransportSecurityInfo::GetIsDomainMismatch(bool* aIsDomainMismatch) {
   NS_ENSURE_ARG_POINTER(aIsDomainMismatch);
   *aIsDomainMismatch = mHaveCertErrorBits && mIsDomainMismatch;
   return NS_OK;
 }
 
 NS_IMETHODIMP
--- a/security/manager/ssl/TransportSecurityInfo.h
+++ b/security/manager/ssl/TransportSecurityInfo.h
@@ -86,19 +86,16 @@ class TransportSecurityInfo : public nsI
   bool HasServerCert() {
     MutexAutoLock lock(mMutex);
     return mServerCert != nullptr;
   }
 
   static uint16_t ConvertCertificateTransparencyInfoToStatus(
       const mozilla::psm::CertificateTransparencyInfo& info);
 
-  static nsTArray<nsTArray<uint8_t>> CreateCertBytesArray(
-      const UniqueCERTCertList& aCertChain);
-
   // Use errorCode == 0 to indicate success;
   virtual void SetCertVerificationResult(PRErrorCode errorCode){};
 
   void SetCertificateTransparencyStatus(
       uint16_t aCertificateTransparencyStatus) {
     MutexAutoLock lock(mMutex);
     mCertificateTransparencyStatus = aCertificateTransparencyStatus;
   }
--- a/security/manager/ssl/nsNSSCallbacks.cpp
+++ b/security/manager/ssl/nsNSSCallbacks.cpp
@@ -1093,21 +1093,21 @@ static void RebuildVerifiedCertificateIn
   int flags = mozilla::psm::CertVerifier::FLAG_LOCAL_ONLY;
   if (!infoObject->SharedState().IsOCSPStaplingEnabled() ||
       !infoObject->SharedState().IsOCSPMustStapleEnabled()) {
     flags |= CertVerifier::FLAG_TLS_IGNORE_STATUS_REQUEST;
   }
 
   EVStatus evStatus;
   CertificateTransparencyInfo certificateTransparencyInfo;
-  UniqueCERTCertList builtChain;
+  nsTArray<nsTArray<uint8_t>> certBytesArray;
   bool isBuiltCertChainRootBuiltInRoot = false;
   mozilla::pkix::Result rv = certVerifier->VerifySSLServerCert(
       cert, mozilla::pkix::Now(), infoObject, infoObject->GetHostName(),
-      builtChain, flags, maybePeerCertsBytes, stapledOCSPResponse,
+      certBytesArray, flags, maybePeerCertsBytes, stapledOCSPResponse,
       sctsFromTLSExtension, Nothing(), infoObject->GetOriginAttributes(),
       &evStatus,
       nullptr,  // OCSP stapling telemetry
       nullptr,  // key size telemetry
       nullptr,  // SHA-1 telemetry
       nullptr,  // pinning telemetry
       &certificateTransparencyInfo, &isBuiltCertChainRootBuiltInRoot);
 
@@ -1127,18 +1127,16 @@ static void RebuildVerifiedCertificateIn
     infoObject->SetServerCert(nssc, EVStatus::NotEV);
   }
 
   if (rv == Success) {
     uint16_t status =
         TransportSecurityInfo::ConvertCertificateTransparencyInfoToStatus(
             certificateTransparencyInfo);
     infoObject->SetCertificateTransparencyStatus(status);
-    nsTArray<nsTArray<uint8_t>> certBytesArray =
-        TransportSecurityInfo::CreateCertBytesArray(builtChain);
     infoObject->SetSucceededCertChain(std::move(certBytesArray));
     infoObject->SetIsBuiltCertChainRootBuiltInRoot(
         isBuiltCertChainRootBuiltInRoot);
   }
 }
 
 void HandshakeCallback(PRFileDesc* fd, void* client_data) {
   SECStatus rv;
--- a/security/manager/ssl/nsNSSCertificate.cpp
+++ b/security/manager/ssl/nsNSSCertificate.cpp
@@ -146,17 +146,22 @@ nsresult nsNSSCertificate::GetCertType(u
   *aCertType = mCertType;
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsNSSCertificate::GetIsBuiltInRoot(bool* aIsBuiltInRoot) {
   NS_ENSURE_ARG(aIsBuiltInRoot);
 
-  pkix::Result rv = IsCertBuiltInRoot(mCert.get(), *aIsBuiltInRoot);
+  pkix::Input certInput;
+  pkix::Result rv = certInput.Init(mCert->derCert.data, mCert->derCert.len);
+  if (rv != pkix::Result::Success) {
+    return NS_ERROR_FAILURE;
+  }
+  rv = IsCertBuiltInRoot(certInput, *aIsBuiltInRoot);
   if (rv != pkix::Result::Success) {
     return NS_ERROR_FAILURE;
   }
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsNSSCertificate::GetDbKey(nsACString& aDbKey) {
--- a/security/manager/ssl/nsNSSCertificateDB.cpp
+++ b/security/manager/ssl/nsNSSCertificateDB.cpp
@@ -1257,17 +1257,17 @@ nsresult VerifyCertAtTime(nsIX509Cert* a
   UniqueCERTCertificate nssCert(aCert->GetCert());
   if (!nssCert) {
     return NS_ERROR_INVALID_ARG;
   }
 
   RefPtr<SharedCertVerifier> certVerifier(GetDefaultCertVerifier());
   NS_ENSURE_TRUE(certVerifier, NS_ERROR_FAILURE);
 
-  UniqueCERTCertList resultChain;
+  nsTArray<nsTArray<uint8_t>> resultChain;
   EVStatus evStatus;
   mozilla::pkix::Result result;
 
   if (!aHostname.IsVoid() && aUsage == certificateUsageSSLServer) {
     result =
         certVerifier->VerifySSLServerCert(nssCert, aTime,
                                           nullptr,  // Assume no context
                                           aHostname, resultChain, aFlags,
@@ -1284,21 +1284,24 @@ nsresult VerifyCertAtTime(nsIX509Cert* a
         aHostname.IsVoid() ? nullptr : flatHostname.get(), resultChain, aFlags,
         Nothing(),  // extraCertificates
         Nothing(),  // stapledOCSPResponse
         Nothing(),  // sctsFromTLSExtension
         OriginAttributes(), &evStatus);
   }
 
   if (result == mozilla::pkix::Success) {
-    nsresult rv = nsNSSCertificateDB::ConstructCertArrayFromUniqueCertList(
-        resultChain, aVerifiedChain);
-
-    if (NS_FAILED(rv)) {
-      return rv;
+    for (const auto& certDER : resultChain) {
+      RefPtr<nsIX509Cert> cert = nsNSSCertificate::ConstructFromDER(
+          const_cast<char*>(reinterpret_cast<const char*>(certDER.Elements())),
+          static_cast<int>(certDER.Length()));
+      if (!cert) {
+        return NS_ERROR_FAILURE;
+      }
+      aVerifiedChain.AppendElement(cert);
     }
 
     if (evStatus == EVStatus::EV) {
       *aHasEVPolicy = true;
     }
   }
 
   *_retval = mozilla::pkix::MapResultToPRErrorCode(result);
--- a/taskcluster/scripts/builder/build-haz-linux.sh
+++ b/taskcluster/scripts/builder/build-haz-linux.sh
@@ -172,18 +172,16 @@ function check_hazards () {
 }
 
 trap grab_artifacts EXIT
 
 # Directory to hold the (useless) object files generated by the analysis.
 export HAZ_OBJDIR="$WORKSPACE/obj-analyzed-$PROJECT"
 mkdir -p "$HAZ_OBJDIR"
 
-export LD_LIBRARY_PATH="$MOZ_FETCHES_DIR/gcc/lib64:$LD_LIBRARY_PATH"
-
 # Gather the information from the source tree by compiling it.
 $GECKO_PATH/mach hazards gather --application=$PROJECT --haz-objdir="$HAZ_OBJDIR" --work-dir="$ANALYSIS_DIR"
 
 # Analyze the collected information.
 $GECKO_PATH/mach hazards analyze --application=$PROJECT --shell-objdir="$HAZARD_SHELL_OBJDIR" --work-dir="$ANALYSIS_DIR"
 
 check_hazards "$ANALYSIS_DIR"
 
--- a/taskcluster/scripts/builder/build-l10n.sh
+++ b/taskcluster/scripts/builder/build-l10n.sh
@@ -36,19 +36,16 @@ fail() {
     echo # make sure error message is on a new line
     echo "[build-l10n.sh:error]" "${@}"
     exit 1
 }
 
 export MOZ_CRASHREPORTER_NO_REPORT=1
 export TINDERBOX_OUTPUT=1
 
-# Ensure that in tree libraries can be found
-export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/obj-build:$WORKSPACE/src/gcc/lib64
-
 # test required parameters are supplied
 if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
 if [[ -z "${MOZHARNESS_CONFIG}" && -z "${EXTRA_MOZHARNESS_CONFIG}" ]]; then fail "MOZHARNESS_CONFIG or EXTRA_MOZHARNESS_CONFIG is not set"; fi
 
 cleanup() {
     local rv=$?
     cleanup_xvfb
     exit $rv
--- a/taskcluster/scripts/builder/build-linux.sh
+++ b/taskcluster/scripts/builder/build-linux.sh
@@ -39,19 +39,16 @@ fail() {
 
 export MOZ_CRASHREPORTER_NO_REPORT=1
 export TINDERBOX_OUTPUT=1
 
 # use "simple" package names so that they can be hard-coded in the task's
 # extras.locations
 export MOZ_SIMPLE_PACKAGE_NAME=target
 
-# Ensure that in tree libraries can be found
-export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/obj-build:$WORKSPACE/src/gcc/lib64
-
 # test required parameters are supplied
 if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
 if [[ -z "${MOZHARNESS_CONFIG}" && -z "${EXTRA_MOZHARNESS_CONFIG}" ]]; then fail "MOZHARNESS_CONFIG or EXTRA_MOZHARNESS_CONFIG is not set"; fi
 
 # run XVfb in the background, if necessary
 if $NEED_XVFB; then
     . /builds/worker/scripts/xvfb.sh
 
--- a/taskcluster/scripts/misc/build-llvm-dsymutil.sh
+++ b/taskcluster/scripts/misc/build-llvm-dsymutil.sh
@@ -12,16 +12,14 @@ cd build
 
 cmake \
   -GNinja \
   -DCMAKE_BUILD_TYPE=Release \
   -DLLVM_TARGETS_TO_BUILD="X86;AArch64" \
   -DCMAKE_C_COMPILER=$MOZ_FETCHES_DIR/gcc/bin/gcc \
   ..
 
-export LD_LIBRARY_PATH=$MOZ_FETCHES_DIR/gcc/lib64
-
 ninja dsymutil llvm-symbolizer
 
 tar --xform='s,^,llvm-dsymutil/,' -Jcf llvm-dsymutil.tar.xz bin/dsymutil bin/llvm-symbolizer
 
 mkdir -p $UPLOAD_DIR
 cp llvm-dsymutil.tar.xz $UPLOAD_DIR
--- a/toolkit/moz.configure
+++ b/toolkit/moz.configure
@@ -2116,73 +2116,46 @@ with only_when(requires_wasm_sandboxing 
             die("Argument to --with-wasi-sysroot must be a directory")
         if not os.path.isabs(wasi_sysroot):
             die("Argument to --with-wasi-sysroot must be an absolute path")
 
         return wasi_sysroot
 
     set_config("WASI_SYSROOT", wasi_sysroot)
 
-    def wasm_compiler_with_flags(
-        wasm_compiler, provided_wasm_compiler, sysroot, compiler_wrapper
-    ):
+    def wasm_compiler_with_flags(compiler, sysroot):
         if not sysroot:
             return
-        if provided_wasm_compiler:
-            return " ".join(
-                list(compiler_wrapper or [])
-                + provided_wasm_compiler.wrapper
-                + [provided_wasm_compiler.program]
-                + provided_wasm_compiler.flags
+        elif compiler:
+            return (
+                compiler.wrapper
+                + [compiler.compiler]
+                + compiler.flags
                 + ["--sysroot=%s" % sysroot]
             )
-        elif wasm_compiler:
-            return " ".join(
-                list(compiler_wrapper or [])
-                + [wasm_compiler]
-                + ["--target=wasm32-wasi", "--sysroot=%s" % sysroot]
-            )
-
-    option(env="WASM_CC", nargs=1, help="Path to the C->WASM compiler")
-    provided_wasm_cc = provided_program("WASM_CC")
-    wasm_cc = check_prog(
-        "_WASM_CC",
-        ["clang"],
-        input=provided_wasm_cc.program,
-        paths=clang_search_path,
-        allow_missing=True,
-        what="the C->WASM compiler",
-    )
-
-    @depends(wasm_cc, provided_wasm_cc, wasi_sysroot, compiler_wrapper)
-    def wasm_cc_with_flags(wasm_cc, provided_wasm_cc, wasi_sysroot, compiler_wrapper):
-        return wasm_compiler_with_flags(
-            wasm_cc, provided_wasm_cc, wasi_sysroot, compiler_wrapper
-        )
+
+    wasm_cc = compiler("C", wasm, other_compiler=c_compiler)
+
+    @depends(wasm_cc, wasi_sysroot)
+    def wasm_cc_with_flags(wasm_cc, wasi_sysroot):
+        return wasm_compiler_with_flags(wasm_cc, wasi_sysroot)
 
     set_config("WASM_CC", wasm_cc_with_flags)
 
-    option(env="WASM_CXX", nargs=1, help="Path to the C++->WASM compiler")
-    provided_wasm_cxx = provided_program("WASM_CXX")
-    wasm_cxx = check_prog(
-        "_WASM_CXX",
-        ["clang++"],
-        input=provided_wasm_cxx.program,
-        paths=clang_search_path,
-        allow_missing=True,
-        what="the C++->WASM compiler",
+    wasm_cxx = compiler(
+        "C++",
+        wasm,
+        c_compiler=wasm_cc,
+        other_compiler=cxx_compiler,
+        other_c_compiler=c_compiler,
     )
 
-    @depends(wasm_cxx, provided_wasm_cxx, wasi_sysroot, compiler_wrapper)
-    def wasm_cxx_with_flags(
-        wasm_cxx, provided_wasm_cxx, wasi_sysroot, compiler_wrapper
-    ):
-        return wasm_compiler_with_flags(
-            wasm_cxx, provided_wasm_cxx, wasi_sysroot, compiler_wrapper
-        )
+    @depends(wasm_cxx, wasi_sysroot)
+    def wasm_cxx_with_flags(wasm_cxx, wasi_sysroot):
+        return wasm_compiler_with_flags(wasm_cxx, wasi_sysroot)
 
     set_config("WASM_CXX", wasm_cxx_with_flags)
 
     wasm_compile_flags = dependable(
         ["-fno-exceptions", "-fno-strict-aliasing", "-Qunused-arguments"]
     )
     option(env="WASM_CFLAGS", nargs=1, help="Options to pass to WASM_CC")