Merge mozilla-inbound to mozilla-central. a=merge
author: Cosmin Sabou <csabou@mozilla.com>
date: Sat, 15 Dec 2018 04:47:12 +0200
changeset: 450810 d86d184dc7d6aa246ada69063e07dbb503c33092
parent: 450604 d8a944c012a7a814ee5a5fea4c9075969451a48b (current diff)
parent: 450809 b37f33f2be2d164e3de71d8f17d89c967135a14c (diff)
child: 450826 70a854d2ee1ebc18f9438af38731527738b2ed24
child: 450829 9016c136594b12fb155a6a2917d48c82ab5fa695
push id: 35208
push user: csabou@mozilla.com
push date: Sat, 15 Dec 2018 02:48:07 +0000
treeherder: mozilla-central@d86d184dc7d6
reviewers: merge
milestone: 66.0a1
files:
js/src/builtin/TestingFunctions.cpp
js/src/gc/GC.cpp
js/src/gc/Verifier.cpp
js/src/jit-test/tests/wasm/gc/block-addmarkobservers.js
js/src/jit-test/tests/wasm/gc/block-debugger-findscripts.js
js/src/jit-test/tests/wasm/gc/block-debuggermemory-takecensus.js
js/src/jit-test/tests/wasm/gc/block-gczeal.js
js/src/jit-test/tests/wasm/gc/block-selectforgc.js
js/src/jit-test/tests/wasm/gc/block-verifyprebarriers-2.js
js/src/jit-test/tests/wasm/gc/block-verifyprebarriers.js
js/src/jsfriendapi.h
old-configure.in
testing/web-platform/meta/audio-output/idlharness.window.js.ini
testing/web-platform/meta/content-security-policy/inheritance/blob-url-self-navigate-inherits.sub.html.ini
testing/web-platform/meta/content-security-policy/plugin-types/plugintypes-notype-data.sub.html.ini
testing/web-platform/meta/content-security-policy/plugin-types/plugintypes-notype-url.sub.html.ini
testing/web-platform/meta/content-security-policy/plugin-types/plugintypes-nourl-blocked.sub.html.ini
testing/web-platform/meta/css/css-text/line-break/line-break-normal-021.xht.ini
testing/web-platform/meta/css/css-text/line-break/line-break-normal-024a.xht.ini
testing/web-platform/meta/css/css-text/line-break/line-break-normal-024b.xht.ini
testing/web-platform/meta/css/css-text/overflow-wrap/overflow-wrap-break-word-004.html.ini
testing/web-platform/meta/css/css-text/overflow-wrap/overflow-wrap-break-word-005.html.ini
testing/web-platform/meta/css/css-text/white-space/white-space-collapsing-discard-001.xht.ini
testing/web-platform/meta/css/css-text/white-space/white-space-collapsing-preserve-breaks-001.xht.ini
testing/web-platform/meta/css/css-text/word-break/word-break-break-all-010.html.ini
testing/web-platform/meta/css/css-text/word-break/word-break-break-all-011.html.ini
testing/web-platform/meta/css/filter-effects/backdrop-filter-basic-opacity.html.ini
testing/web-platform/meta/css/filter-effects/backdrop-filter-clip-rect.html.ini
testing/web-platform/tests/audio-output/idlharness.window.js
testing/web-platform/tests/content-security-policy/inheritance/blob-url-self-navigate-inherits.sub.html
testing/web-platform/tests/content-security-policy/inheritance/iframe.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-mismatched-data.sub.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-mismatched-url.sub.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-notype-data.sub.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-notype-url.sub.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-nourl-allowed.sub.html
testing/web-platform/tests/content-security-policy/plugin-types/plugintypes-nourl-blocked.sub.html
testing/web-platform/tests/css/css-text/line-break/line-break-normal-021.xht
testing/web-platform/tests/css/css-text/line-break/line-break-normal-022.xht
testing/web-platform/tests/css/css-text/line-break/line-break-normal-023a.xht
testing/web-platform/tests/css/css-text/line-break/line-break-normal-023b.xht
testing/web-platform/tests/css/css-text/line-break/line-break-normal-024a.xht
testing/web-platform/tests/css/css-text/line-break/line-break-normal-024b.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-021-ref.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-022-ref.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-023a-ref.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-023b-ref.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-024a-ref.xht
testing/web-platform/tests/css/css-text/line-break/reference/line-break-normal-024b-ref.xht
testing/web-platform/tests/css/css-text/white-space/reference/white-space-collapsing-discard-001-ref.xht
testing/web-platform/tests/css/css-text/white-space/reference/white-space-collapsing-preserve-breaks-001-ref.xht
testing/web-platform/tests/css/css-text/white-space/reference/white-space-collapsing-trim-inner-001-ref.xht
testing/web-platform/tests/css/css-text/white-space/white-space-collapsing-discard-001.xht
testing/web-platform/tests/css/css-text/white-space/white-space-collapsing-preserve-breaks-001.xht
testing/web-platform/tests/css/css-text/white-space/white-space-collapsing-trim-inner-001.xht
testing/web-platform/tests/css/cssom-view/overscrollBehavior-manual.html
testing/web-platform/tests/resources/chromium/mojo_layouttest_test.mojom.js
testing/web-platform/tests/resources/chromium/mojo_layouttest_test.mojom.js.headers
testing/web-platform/tests/resources/testharness.js
testing/web-platform/tests/svg/import/painting-marker-01-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-02-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-03-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-04-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-05-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-06-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-07-f-manual.svg
testing/web-platform/tests/svg/import/painting-marker-properties-01-f-manual.svg
--- a/browser/components/downloads/content/downloads.js
+++ b/browser/components/downloads/content/downloads.js
@@ -819,40 +819,45 @@ var DownloadsView = {
     DownloadsCommon.log("Context menu has hidden.");
     this.contextMenuOpen = false;
   },
 
   /**
    * Mouse listeners to handle selection on hover.
    */
   onDownloadMouseOver(aEvent) {
-    if (aEvent.originalTarget.classList.contains("downloadButton")) {
-      aEvent.target.classList.add("downloadHoveringButton");
+    let item = aEvent.target.closest("richlistitem,richlistbox");
+    if (item.localName != "richlistitem") {
+      return;
     }
-    if (!(this.contextMenuOpen || this.subViewOpen) &&
-        aEvent.target.parentNode == this.richListBox) {
-      this.richListBox.selectedItem = aEvent.target;
+
+    if (aEvent.target.classList.contains("downloadButton")) {
+      item.classList.add("downloadHoveringButton");
+    }
+
+    if (!this.contextMenuOpen && !this.subViewOpen) {
+      this.richListBox.selectedItem = item;
     }
   },
 
   onDownloadMouseOut(aEvent) {
-    if (aEvent.originalTarget.classList.contains("downloadButton")) {
-      aEvent.target.classList.remove("downloadHoveringButton");
+    let item = aEvent.target.closest("richlistitem,richlistbox");
+    if (item.localName != "richlistitem") {
+      return;
     }
-    if (!(this.contextMenuOpen || this.subViewOpen) &&
-        aEvent.target.parentNode == this.richListBox) {
-      // If the destination element is outside of the richlistitem, clear the
-      // selection.
-      let element = aEvent.relatedTarget;
-      while (element && element != aEvent.target) {
-        element = element.parentNode;
-      }
-      if (!element) {
-        this.richListBox.selectedIndex = -1;
-      }
+
+    if (aEvent.target.classList.contains("downloadButton")) {
+      item.classList.remove("downloadHoveringButton");
+    }
+
+    // If the destination element is outside of the richlistitem, clear the
+    // selection.
+    if (!this.contextMenuOpen && !this.subViewOpen &&
+        !item.contains(aEvent.relatedTarget)) {
+      this.richListBox.selectedIndex = -1;
     }
   },
 
   onDownloadContextMenu(aEvent) {
     let element = this.richListBox.selectedItem;
     if (!element) {
       return;
     }
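
The rewritten hover handlers replace manual parentNode walking with Element.closest() to resolve the hovered node to its enclosing richlistitem, and Node.contains() to test whether the pointer actually left that item. A minimal standalone sketch of the same pattern (the querySelector wiring and handler placement are illustrative; the patch installs these as DownloadsView methods):

    // Hypothetical wiring; selectors and listener setup are not the patch's code.
    const list = document.querySelector("richlistbox");
    list.addEventListener("mouseout", event => {
      const item = event.target.closest("richlistitem,richlistbox");
      if (!item || item.localName !== "richlistitem") {
        return; // pointer was over the list itself, not an item
      }
      // relatedTarget is the node the pointer moved to; contains() returns
      // false for null (pointer left the window), so the selection clears.
      if (!item.contains(event.relatedTarget)) {
        list.selectedIndex = -1;
      }
    });
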
--- a/devtools/client/debugger/new/README.mozilla
+++ b/devtools/client/debugger/new/README.mozilla
@@ -1,13 +1,13 @@
 This is the debugger.html project output.
 See https://github.com/devtools-html/debugger.html
 
-Version 107
+Version 108
 
-Comparison: https://github.com/devtools-html/debugger.html/compare/release-106...release-107
+Comparison: https://github.com/devtools-html/debugger.html/compare/release-107...release-108
 
 Packages:
 - babel-plugin-transform-es2015-modules-commonjs @6.26.2
 - babel-preset-react @6.24.1
 - react @16.4.1
 - react-dom @16.4.1
 - webpack @3.12.0
--- a/devtools/client/debugger/new/dist/debugger.css
+++ b/devtools/client/debugger/new/dist/debugger.css
@@ -497,16 +497,17 @@ html[dir="rtl"] .tree-node button.arrow 
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
 .shortcuts-content {
   padding: 15px;
   column-width: 250px;
   cursor: default;
+  user-select: none;
 }
 
 .shortcuts-content h2 {
   margin-top: 2px;
   margin-bottom: 2px;
   color: var(--theme-content-color1);
 }
 
@@ -1905,22 +1906,16 @@ html .toggle-button.end.vertical svg {
 .theme-dark .sources-list .managed-tree .tree .node img.blackBox {
   background-color: var(--theme-body-color);
 }
 
 .theme-dark .sources-list .managed-tree .tree .node.focused img.blackBox {
   background-color: white;
 }
 
-.tree:not(.object-inspector)
-  .tree-node[data-expandable="false"]
-  .tree-indent:last-of-type {
-  margin-inline-end: 4px;
-}
-
 /*
   Custom root styles
 */
 .sources-pane.sources-list-custom-root {
   display: block;
   position: relative;
 }
 
@@ -2242,16 +2237,17 @@ menuseparator {
   left: 0;
   right: 0;
   opacity: 1;
   z-index: 1;
   -moz-user-select: none;
   user-select: none;
   height: var(--editor-footer-height);
   box-sizing: border-box;
+  justify-content: space-between;
 }
 
 .source-footer .commands {
   display: flex;
   align-items: center;
 }
 
 .source-footer .commands * {
@@ -2322,16 +2318,17 @@ menuseparator {
 .source-footer .mapped-source {
   white-space: nowrap;
   overflow: hidden;
   text-overflow: ellipsis;
 }
 
 .source-footer .cursor-position {
   padding: 5px;
+  white-space: nowrap;
 }
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
 .search-bar {
   display: flex;
   border: 1px solid transparent;
@@ -2685,45 +2682,50 @@ menuseparator {
 .add-to-expression-bar .expression-to-save-button {
   font-size: 14px;
   color: var(--theme-comment);
 }
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
-.call-site {
+.column-breakpoint {
   display: inline;
 }
 
-.call-site svg {
+.column-breakpoint svg {
   display: inline;
   cursor: pointer;
   height: 12px;
   width: 9px;
   vertical-align: top;
 }
 
-.call-site.active svg {
+.column-breakpoint.active svg {
   fill: var(--blue-50);
   stroke: var(--blue-60);
 }
 
-.call-site.disabled svg {
+.column-breakpoint.disabled svg {
   fill: var(--blue-50);
   stroke: var(--blue-40);
   fill-opacity: 0.5;
 }
 
-.theme-dark .call-site.active svg {
+.column-breakpoint.has-condition svg {
+  fill: var(--theme-graphs-yellow);
+  stroke: var(--theme-graphs-orange);
+}
+
+.theme-dark .column-breakpoint.active svg {
   fill: var(--blue-55);
   stroke: var(--blue-40);
 }
 
-.theme-dark .call-site.disabled svg {
+.theme-dark .column-breakpoint.disabled svg {
   fill: var(--blue-50);
   stroke: var(--blue-60);
   fill-opacity: 0.5;
 }
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
@@ -2824,16 +2826,24 @@ html[dir="rtl"] .editor-mount {
 .theme-dark {
   --gutter-hover-background-color: #414141;
   --breakpoint-fill: var(--blue-55);
   --breakpoint-stroke: var(--blue-40);
   --breakpoint-fill-disabled: var(--blue-50);
   --breakpoint-stroke-disabled: var(--blue-60);
 }
 
+.empty-line .CodeMirror-linenumber {
+  opacity: 0.5;
+}
+
+.theme-dark .empty-line .CodeMirror-linenumber {
+  opacity: 0.6;
+}
+
 :not(.empty-line):not(.new-breakpoint)
   > .CodeMirror-gutter-wrapper:hover
   > .CodeMirror-linenumber {
   height: 13px;
   color: var(--theme-body-color);
   /* Add 1px offset to the background to match it
     with the actual breakpoint image dimensions */
   background: linear-gradient(
@@ -2968,17 +2978,17 @@ html[dir="rtl"] .editor-mount {
   font-size: 11px;
 }
 
 .theme-dark .editor-wrapper .CodeMirror-line .cm-comment {
   color: var(--theme-comment);
 }
 
 .debug-expression,
-.new-debug-line .call-site {
+.new-debug-line .column-breakpoint {
   background-color: var(--debug-expression-background);
 }
 
 debug-expression-error {
   background-color: var(--debug-expression-error-background);
 }
 
 .new-debug-line .CodeMirror-line {
@@ -3118,16 +3128,17 @@ debug-expression-error {
 .breakpoints-exceptions,
 .breakpoints-exceptions-caught {
   padding: 0.25em 1em;
 }
 
 .breakpoints-exceptions {
   padding-bottom: 0.5em;
   padding-top: 0.5em;
+  user-select: none;
 }
 
 .breakpoints-list .breakpoint {
   min-height: var(--breakpoint-expression-height);
 }
 
 .breakpoints-exceptions-caught {
   padding: 0 1em 0.5em 3em;
@@ -3499,16 +3510,17 @@ html[dir="rtl"] .breakpoints-list .break
   color: var(--theme-body-color);
   opacity: 0.6;
   font-size: 12px;
   text-align: center;
   font-style: italic;
   font-weight: 300;
   cursor: default;
   min-height: 24px;
+  white-space: normal;
 }
 
 .why-paused > div {
   margin: auto;
 }
 
 .theme-dark .secondary-panes .why-paused {
   color: white;
--- a/devtools/client/debugger/new/src/components/Editor/ColumnBreakpoint.js
+++ b/devtools/client/debugger/new/src/components/Editor/ColumnBreakpoint.js
@@ -12,71 +12,91 @@ import Svg from "../shared/Svg";
 // eslint-disable-next-line max-len
 import type { ColumnBreakpoint as ColumnBreakpointType } from "../../selectors/visibleColumnBreakpoints";
 
 type Bookmark = {
   clear: Function
 };
 
 type Props = {
-  callSite: Object,
   editor: Object,
   source: Object,
   enabled: boolean,
   toggleBreakpoint: (number, number) => void,
   columnBreakpoint: ColumnBreakpointType
 };
 
 const breakpointImg = document.createElement("div");
 ReactDOM.render(<Svg name={"column-marker"} />, breakpointImg);
-function makeBookmark(isActive, { onClick }) {
+function makeBookmark(isActive, condition, { onClick }) {
   const bp = breakpointImg.cloneNode(true);
   const className = isActive ? "active" : "disabled";
-  bp.className = classnames("call-site", className);
+
+  bp.className = classnames(
+    "column-breakpoint",
+    {
+      "has-condition": condition
+    },
+    className
+  );
+  if (condition) {
+    bp.setAttribute("title", condition);
+  }
   bp.onclick = onClick;
+
   return bp;
 }
 
-export default class CallSite extends PureComponent<Props> {
-  addCallSite: Function;
+export default class ColumnBreakpoint extends PureComponent<Props> {
+  addColumnBreakpoint: Function;
   bookmark: ?Bookmark;
 
-  addCallSite = (nextProps: ?Props) => {
+  addColumnBreakpoint = (nextProps: ?Props) => {
     const { columnBreakpoint, source } = nextProps || this.props;
+
     const sourceId = source.id;
+    const doc = getDocument(sourceId);
+    if (!doc) {
+      return;
+    }
+
     const { line, column } = columnBreakpoint.location;
-    const widget = makeBookmark(columnBreakpoint.enabled, {
-      onClick: this.toggleBreakpoint
-    });
-    const doc = getDocument(sourceId);
+    const widget = makeBookmark(
+      columnBreakpoint.enabled,
+      columnBreakpoint.condition,
+      {
+        onClick: this.toggleBreakpoint
+      }
+    );
+
     this.bookmark = doc.setBookmark({ line: line - 1, ch: column }, { widget });
   };
 
-  clearCallSite = () => {
+  clearColumnBreakpoint = () => {
     if (this.bookmark) {
       this.bookmark.clear();
       this.bookmark = null;
     }
   };
 
   toggleBreakpoint = () => {
     const { columnBreakpoint, toggleBreakpoint } = this.props;
     const { line, column } = columnBreakpoint.location;
     toggleBreakpoint(line, column);
   };
 
   componentDidMount() {
-    this.addCallSite();
+    this.addColumnBreakpoint();
   }
 
   componentWillUnmount() {
-    this.clearCallSite();
+    this.clearColumnBreakpoint();
   }
 
   componentDidUpdate() {
-    this.clearCallSite();
-    this.addCallSite();
+    this.clearColumnBreakpoint();
+    this.addColumnBreakpoint();
   }
 
   render() {
     return null;
   }
 }
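
The component never renders React output; instead it injects a DOM widget into CodeMirror with Doc.setBookmark() and keeps the returned marker so it can be cleared on update and unmount. A minimal sketch of that CodeMirror API, assuming an existing editor instance cm and a hypothetical toggleBreakpoint callback:

    // Sketch only: attach a clickable inline widget at line 12, column 4.
    // CodeMirror lines are 0-based, hence the line - 1 in the patch.
    const widget = document.createElement("span");
    widget.className = "column-breakpoint";
    widget.onclick = () => toggleBreakpoint(12, 4); // hypothetical callback
    const marker = cm.getDoc().setBookmark({ line: 11, ch: 4 }, { widget });

    // What clearColumnBreakpoint() does later:
    marker.clear();
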
--- a/devtools/client/debugger/new/src/components/Editor/Footer.js
+++ b/devtools/client/debugger/new/src/components/Editor/Footer.js
@@ -69,17 +69,17 @@ class SourceFooter extends PureComponent
     editor.codeMirror.off("cursorActivity", this.onCursorChange);
   }
 
   prettyPrintButton() {
     const { selectedSource, togglePrettyPrint } = this.props;
 
     if (isLoading(selectedSource) && selectedSource.isPrettyPrinted) {
       return (
-        <div className="loader">
+        <div className="loader" key="pretty-loader">
           <Svg name="loader" />
         </div>
       );
     }
 
     if (!shouldShowPrettyPrint(selectedSource)) {
       return;
     }
@@ -136,45 +136,46 @@ class SourceFooter extends PureComponent
   blackBoxSummary() {
     const { selectedSource } = this.props;
 
     if (!selectedSource || !selectedSource.isBlackBoxed) {
       return;
     }
 
     return (
-      <span className="blackbox-summary">
+      <span className="blackbox-summary" key="blackbox-summary">
         {L10N.getStr("sourceFooter.blackboxed")}
       </span>
     );
   }
 
   renderToggleButton() {
     if (this.props.horizontal) {
       return;
     }
 
     return (
       <PaneToggleButton
         position="end"
+        key="toggle"
         collapsed={!this.props.endPanelCollapsed}
         horizontal={this.props.horizontal}
         handleClick={this.props.togglePaneCollapse}
       />
     );
   }
 
   renderCommands() {
-    return (
-      <div className="commands">
-        {this.prettyPrintButton()}
-        {this.blackBoxButton()}
-        {this.blackBoxSummary()}
-      </div>
-    );
+    const commands = [
+      this.prettyPrintButton(),
+      this.blackBoxButton(),
+      this.blackBoxSummary()
+    ].filter(Boolean);
+
+    return commands.length ? <div className="commands">{commands}</div> : null;
   }
 
   renderSourceSummary() {
     const { mappedSource, jumpToMappedLocation, selectedSource } = this.props;
 
     if (!mappedSource || !isOriginal(selectedSource)) {
       return null;
     }
@@ -222,19 +223,19 @@ class SourceFooter extends PureComponent
 
     if (!shouldShowFooter(selectedSource, horizontal)) {
       return null;
     }
 
     return (
       <div className="source-footer">
         {this.renderCommands()}
-        {this.renderCursorPosition()}
         {this.renderSourceSummary()}
         {this.renderToggleButton()}
+        {this.renderCursorPosition()}
       </div>
     );
   }
 }
 
 const mapStateToProps = state => {
   const selectedSource = getSelectedSource(state);
   const selectedId = selectedSource.id;
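
renderCommands() now assembles its children as a filtered array instead of fixed JSX slots, which is why the loader, blackbox summary, and toggle button each gained an explicit key prop: React needs stable keys to reconcile children delivered in an array. A small illustration with placeholder components (Loader and Summary stand in for the patch's real elements):

    import React from "react";

    // Placeholders for the patch's real elements.
    const Loader = () => <span>loading…</span>;
    const Summary = () => <span>blackboxed</span>;

    function renderParts(showLoader, showSummary) {
      const parts = [
        showLoader ? <Loader key="pretty-loader" /> : null,
        showSummary ? <Summary key="blackbox-summary" /> : null
      ].filter(Boolean);
      // Returning null when empty drops the wrapper <div> entirely.
      return parts.length ? <div className="commands">{parts}</div> : null;
    }
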
--- a/devtools/client/debugger/new/src/components/Editor/Tab.js
+++ b/devtools/client/debugger/new/src/components/Editor/Tab.js
@@ -63,16 +63,17 @@ class Tab extends PureComponent<Props> {
       closeTab,
       closeTabs,
       tabSources,
       showSource,
       togglePrettyPrint,
       selectedSource
     } = this.props;
 
+    const tabCount = tabSources.length;
     const otherTabs = tabSources.filter(t => t.id !== tab);
     const sourceTab = tabSources.find(t => t.id == tab);
     const tabURLs = tabSources.map(t => t.url);
     const otherTabURLs = otherTabs.map(t => t.url);
 
     if (!sourceTab) {
       return;
     }
@@ -84,31 +85,31 @@ class Tab extends PureComponent<Props> {
         item: {
           ...tabMenuItems.closeTab,
           click: () => closeTab(sourceTab)
         }
       },
       {
         item: {
           ...tabMenuItems.closeOtherTabs,
-          click: () => closeTabs(otherTabURLs)
-        },
-        hidden: () => tabSources.size === 1
+          click: () => closeTabs(otherTabURLs),
+          disabled: () => tabCount === 1
+        }
       },
       {
         item: {
           ...tabMenuItems.closeTabsToEnd,
           click: () => {
             const tabIndex = tabSources.findIndex(t => t.id == tab);
             closeTabs(tabURLs.filter((t, i) => i > tabIndex));
-          }
-        },
-        hidden: () =>
-          tabSources.size === 1 ||
-          tabSources.some((t, i) => t === tab && tabSources.size - 1 === i)
+          },
+          disabled: () =>
+            tabCount === 1 ||
+            tabSources.some((t, i) => t === tab && tabCount - 1 === i)
+        }
       },
       {
         item: { ...tabMenuItems.closeAllTabs, click: () => closeTabs(tabURLs) }
       },
       { item: { type: "separator" } },
       {
         item: {
           ...tabMenuItems.copyToClipboard,
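
The single-tab special cases move from a hidden() predicate on the menu entry to a disabled() predicate on the item itself, so "Close other tabs" and "Close tabs to end" stay visible but greyed out when they cannot apply. A sketch of the item shape the context-menu builder consumes (field names follow the patch; the label and builder are assumed):

    const items = [
      {
        item: {
          label: "Close other tabs",           // illustrative label
          click: () => closeTabs(otherTabURLs),
          disabled: () => tabCount === 1       // grey out rather than hide
        }
      },
      { item: { type: "separator" } }
    ];
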
--- a/devtools/client/debugger/new/src/components/Editor/index.js
+++ b/devtools/client/debugger/new/src/components/Editor/index.js
@@ -89,17 +89,18 @@ export type Props = {
   closeConditionalPanel: void => void,
   setContextMenu: (string, any) => void,
   continueToHere: number => void,
   toggleBreakpoint: number => void,
   toggleBreakpointsAtLine: number => void,
   addOrToggleDisabledBreakpoint: number => void,
   jumpToMappedLocation: any => void,
   traverseResults: (boolean, Object) => void,
-  updateViewport: void => void
+  updateViewport: void => void,
+  closeTab: Source => void
 };
 
 type State = {
   editor: SourceEditor
 };
 
 class Editor extends PureComponent<Props, State> {
   $editorWrapper: ?HTMLDivElement;
@@ -155,17 +156,16 @@ class Editor extends PureComponent<Props
 
     codeMirror.on("gutterClick", this.onGutterClick);
 
     // Set code editor wrapper to be focusable
     codeMirrorWrapper.tabIndex = 0;
     codeMirrorWrapper.addEventListener("keydown", e => this.onKeyDown(e));
     codeMirrorWrapper.addEventListener("click", e => this.onClick(e));
     codeMirrorWrapper.addEventListener("mouseover", onMouseOver(codeMirror));
-    codeMirror.on("scroll", this.onEditorScroll);
 
     const toggleFoldMarkerVisibility = e => {
       if (node instanceof HTMLElement) {
         node
           .querySelectorAll(".CodeMirror-guttermarker-subtle")
           .forEach(elem => {
             elem.classList.toggle("visible");
           });
@@ -182,16 +182,19 @@ class Editor extends PureComponent<Props
       );
       codeMirror.on("contextmenu", (cm, event) => this.openMenu(event));
     } else {
       codeMirrorWrapper.addEventListener("contextmenu", event =>
         this.openMenu(event)
       );
     }
 
+    codeMirror.on("scroll", this.onEditorScroll);
+    this.onEditorScroll();
+
     this.setState({ editor });
     return editor;
   }
 
   componentDidMount() {
     const { shortcuts } = this.context;
 
     const searchAgainKey = L10N.getStr("sourceSearch.search.again.key2");
@@ -199,32 +202,44 @@ class Editor extends PureComponent<Props
       "sourceSearch.search.againPrev.key2"
     );
 
     shortcuts.on(L10N.getStr("toggleBreakpoint.key"), this.onToggleBreakpoint);
     shortcuts.on(
       L10N.getStr("toggleCondPanel.key"),
       this.onToggleConditionalPanel
     );
+    shortcuts.on(L10N.getStr("sourceTabs.closeTab.key"), this.onClosePress);
     shortcuts.on("Esc", this.onEscape);
     shortcuts.on(searchAgainPrevKey, this.onSearchAgain);
     shortcuts.on(searchAgainKey, this.onSearchAgain);
   }
 
+  onClosePress = (key, e: KeyboardEvent) => {
+    const { selectedSource } = this.props;
+    if (selectedSource) {
+      e.preventDefault();
+      e.stopPropagation();
+      this.props.closeTab(selectedSource);
+    }
+  };
+
   componentWillUnmount() {
     if (this.state.editor) {
       this.state.editor.destroy();
+      this.state.editor.codeMirror.off("scroll", this.onEditorScroll);
       this.setState({ editor: null });
     }
 
     const searchAgainKey = L10N.getStr("sourceSearch.search.again.key2");
     const searchAgainPrevKey = L10N.getStr(
       "sourceSearch.search.againPrev.key2"
     );
     const shortcuts = this.context.shortcuts;
+    shortcuts.off(L10N.getStr("sourceTabs.closeTab.key"));
     shortcuts.off(L10N.getStr("toggleBreakpoint.key"));
     shortcuts.off(L10N.getStr("toggleCondPanel.key"));
     shortcuts.off(searchAgainPrevKey);
     shortcuts.off(searchAgainKey);
   }
 
   componentDidUpdate(prevProps, prevState) {
     const { selectedSource } = this.props;
@@ -621,11 +636,12 @@ export default connect(
     closeConditionalPanel: actions.closeConditionalPanel,
     setContextMenu: actions.setContextMenu,
     continueToHere: actions.continueToHere,
     toggleBreakpoint: actions.toggleBreakpoint,
     toggleBreakpointsAtLine: actions.toggleBreakpointsAtLine,
     addOrToggleDisabledBreakpoint: actions.addOrToggleDisabledBreakpoint,
     jumpToMappedLocation: actions.jumpToMappedLocation,
     traverseResults: actions.traverseResults,
-    updateViewport: actions.updateViewport
+    updateViewport: actions.updateViewport,
+    closeTab: actions.closeTab
   }
 )(Editor);
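
The new CmdOrCtrl+W binding (defined as sourceTabs.closeTab.key in debugger.properties below) follows the same lifecycle as the existing shortcuts: register in componentDidMount, unregister in componentWillUnmount, and stop the event so the platform's own close-tab handler does not also fire. A condensed sketch of that lifecycle, mirroring the patch:

    componentDidMount() {
      this.context.shortcuts.on("CmdOrCtrl+W", this.onClosePress);
    }

    componentWillUnmount() {
      this.context.shortcuts.off("CmdOrCtrl+W");
    }

    onClosePress = (key, e) => {
      if (this.props.selectedSource) {
        e.preventDefault();   // keep the browser shortcut from firing too
        e.stopPropagation();
        this.props.closeTab(this.props.selectedSource);
      }
    };
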
--- a/devtools/client/debugger/new/src/components/PrimaryPanes/SourcesTreeItem.js
+++ b/devtools/client/debugger/new/src/components/PrimaryPanes/SourcesTreeItem.js
@@ -178,17 +178,21 @@ class SourceTreeItem extends Component<P
       hasMatchingGeneratedSource,
       hasSiblingOfSameName
     } = this.props;
 
     const suffix = hasMatchingGeneratedSource ? (
       <span className="suffix">{L10N.getStr("sourceFooter.mappedSuffix")}</span>
     ) : null;
 
-    const querystring = getSourceQueryString(source);
+    let querystring;
+    if (hasSiblingOfSameName) {
+      querystring = getSourceQueryString(source);
+    }
+
     const query =
       hasSiblingOfSameName && querystring ? (
         <span className="query">{querystring}</span>
       ) : null;
 
     return (
       <div
         className={classnames("node", { focused })}
--- a/devtools/client/debugger/new/src/selectors/visibleColumnBreakpoints.js
+++ b/devtools/client/debugger/new/src/selectors/visibleColumnBreakpoints.js
@@ -1,25 +1,26 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
 
-import { groupBy, hasIn, sortedUniqBy } from "lodash";
+import { groupBy, get, sortedUniqBy } from "lodash";
 import { createSelector } from "reselect";
 
 import { getViewport } from "../selectors";
 import { getVisibleBreakpoints } from "./visibleBreakpoints";
 import { getVisiblePausePoints } from "./visiblePausePoints";
 import { makeLocationId } from "../utils/breakpoint";
 
 import type { SourceLocation } from "../types";
 
 export type ColumnBreakpoint = {|
   +location: SourceLocation,
-  +enabled: boolean
+  +enabled: boolean,
+  +condition: ?string
 |};
 
 function contains(location, range) {
   return (
     location.line >= range.start.line &&
     location.line <= range.end.line &&
     location.column >= range.start.column &&
     location.column <= range.end.column
@@ -30,19 +31,23 @@ function groupBreakpoints(breakpoints) {
   const map = groupBy(breakpoints, ({ location }) => location.line);
   for (const line in map) {
     map[line] = groupBy(map[line], ({ location }) => location.column);
   }
 
   return map;
 }
 
-function isEnabled(location, breakpointMap) {
+function findBreakpoint(location, breakpointMap) {
   const { line, column } = location;
-  return hasIn(breakpointMap, [line, column]);
+  const breakpoints = get(breakpointMap, [line, column]);
+
+  if (breakpoints) {
+    return breakpoints[0];
+  }
 }
 
 function getLineCount(columnBreakpoints) {
   const lineCount = {};
   columnBreakpoints.forEach(({ location: { line } }) => {
     if (!lineCount[line]) {
       lineCount[line] = 0;
     }
@@ -96,20 +101,26 @@ export function getColumnBreakpoints(pau
   );
 
  // 5. Check that there is at least one other possible breakpoint on the line
   const lineCount = getLineCount(columnBreakpoints);
   columnBreakpoints = columnBreakpoints.filter(
     ({ location: { line } }) => lineCount[line] > 1
   );
 
-  return columnBreakpoints.map(({ location }) => ({
-    location,
-    enabled: isEnabled(location, breakpointMap)
-  }));
+  return columnBreakpoints.map(({ location }) => {
+    // Find the breakpoint so we know whether it's enabled and has a condition
+    const foundBreakpoint = findBreakpoint(location, breakpointMap);
+
+    return {
+      location,
+      enabled: !!foundBreakpoint,
+      condition: foundBreakpoint ? foundBreakpoint.condition : null
+    };
+  });
 }
 
 export const visibleColumnBreakpoints = createSelector(
   getVisiblePausePoints,
   getVisibleBreakpoints,
   getViewport,
   getColumnBreakpoints
 );
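
groupBreakpoints() produces a two-level map keyed first by line and then by column, which lets findBreakpoint() probe a location with lodash's get() and an array path. A small illustration with made-up breakpoint data:

    import { groupBy, get } from "lodash";

    const breakpoints = [
      { location: { line: 12, column: 4 }, condition: "x > 3" },
      { location: { line: 12, column: 9 }, condition: null }
    ];

    let map = groupBy(breakpoints, ({ location }) => location.line);
    for (const line in map) {
      map[line] = groupBy(map[line], ({ location }) => location.column);
    }
    // map is now { "12": { "4": [bp1], "9": [bp2] } }

    const matches = get(map, [12, 4]);    // [bp1], or undefined if absent
    const found = matches && matches[0];  // what findBreakpoint() returns
    // getColumnBreakpoints then derives enabled: !!found and found.condition.
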
--- a/devtools/client/debugger/new/src/utils/quick-open.js
+++ b/devtools/client/debugger/new/src/utils/quick-open.js
@@ -58,17 +58,17 @@ export function parseLineColumn(query: s
     };
   }
 }
 
 export function formatSourcesForList(source: Source, tabs: TabList) {
   const title = getFilename(source);
   const relativeUrlWithQuery = `${source.relativeUrl}${getSourceQueryString(
     source
-  )}`;
+  ) || ""}`;
   const subtitle = endTruncateStr(relativeUrlWithQuery, 100);
   const value = relativeUrlWithQuery;
   return {
     value,
     title,
     subtitle,
     icon: tabs.some(tab => tab.url == source.url)
       ? "tab result-item-icon"
--- a/devtools/client/debugger/new/src/utils/source.js
+++ b/devtools/client/debugger/new/src/utils/source.js
@@ -472,10 +472,14 @@ export function isOriginal(source: Sourc
   return isOriginalId(source.id);
 }
 
 export function isGenerated(source: Source) {
   return isGeneratedId(source.id);
 }
 
 export function getSourceQueryString(source: ?Source) {
-  return source ? parseURL(source.url).search : "";
+  if (!source) {
+    return;
+  }
+
+  return parseURL(getRawSourceURL(source.url)).search;
 }
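
getSourceQueryString() now returns undefined instead of "" when there is no source, and parses the raw URL (getRawSourceURL strips the ":formatted" pretty-print suffix) so pretty-printed tabs keep their query string. That is why formatSourcesForList() above guards the interpolation with || "". A sketch of the failure the guard avoids, using the platform URL parser as a stand-in for the internal parseURL helper:

    // Stand-in only: the real code uses a bundled parseURL helper.
    const search = url => new URL(url, "https://example.com").search;

    function getSourceQueryString(source) {
      if (!source) {
        return; // undefined, not ""
      }
      return search(source.url); // e.g. "?x=1", or "" when no query
    }

    const qs = getSourceQueryString(undefined);
    const label = `simple1.js${qs || ""}`; // without the guard: "simple1.jsundefined"
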
--- a/devtools/client/debugger/new/test/mochitest/browser.ini
+++ b/devtools/client/debugger/new/test/mochitest/browser.ini
@@ -586,16 +586,17 @@ support-files =
   examples/react/build/main.js.map
   examples/doc-react.html
   examples/wasm-sourcemaps/fib.c
   examples/wasm-sourcemaps/fib.wasm
   examples/wasm-sourcemaps/fib.wasm.map
   examples/wasm-sourcemaps/utils.js
   examples/fetch.js
   examples/doc-xhr.html
+  examples/doc-xhr-run-to-completion.html
   examples/sum/sum.js
   examples/sum/sum.min.js
   examples/sum/sum.min.js.map
   examples/big-sourcemap_files/bundle.js
   examples/big-sourcemap_files/bundle.js.map
   examples/reload/code_reload_1.js
   examples/reload/code_reload_2.js
   examples/reload/doc-reload.html
@@ -654,16 +655,17 @@ support-files =
   examples/doc_rr_recovery.html
   examples/doc_rr_error.html
 
 [browser_dbg-asm.js]
 [browser_dbg-async-stepping.js]
 [browser_dbg-sourcemapped-breakpoint-console.js]
 skip-if = (os == "win" && ccov) # Bug 1453549
 [browser_dbg-xhr-breakpoints.js]
+[browser_dbg-xhr-run-to-completion.js]
 [browser_dbg-sourcemapped-scopes.js]
 skip-if = ccov || (verify && debug && (os == 'linux')) # Bug 1441545
 [browser_dbg-sourcemapped-stepping.js]
 skip-if = (os == 'win' && os_version == '10.0' && ccov) # Bug 1480680
 [browser_dbg-sourcemapped-preview.js]
 skip-if = os == "win" # Bug 1448523, Bug 1448450
 [browser_dbg-breaking.js]
 [browser_dbg-breaking-from-console.js]
@@ -742,16 +744,18 @@ skip-if = os == 'linux' && !asan # bug 1
 [browser_dbg-sources.js]
 [browser_dbg-sources-arrow-keys.js]
 [browser_dbg-sources-named-eval.js]
 [browser_dbg-sources-querystring.js]
 skip-if = true
 [browser_dbg-stepping.js]
 skip-if = debug || (verify && (os == 'win')) || (os == "win" && os_version == "6.1")
 [browser_dbg-tabs.js]
+[browser_dbg-tabs-keyboard.js]
+skip-if = os == "win"
 [browser_dbg-tabs-pretty-print.js]
 [browser_dbg-tabs-without-urls.js]
 [browser_dbg-toggling-tools.js]
 [browser_dbg-react-app.js]
 skip-if = os == "win"
 [browser_dbg-wasm-sourcemaps.js]
 skip-if = true
 [browser_dbg_rr_breakpoints-01.js]
--- a/devtools/client/debugger/new/test/mochitest/browser_dbg-sources-querystring.js
+++ b/devtools/client/debugger/new/test/mochitest/browser_dbg-sources-querystring.js
@@ -4,16 +4,21 @@
 // Tests that the source tree works.
 
 function getLabel(dbg, index) {
   return findElement(dbg, "sourceNode", index)
     .textContent.trim()
     .replace(/^[\s\u200b]*/g, "");
 }
 
+function assertBreakpointHeading(dbg, label, index) {
+  const breakpointHeading = findElement(dbg, "breakpointItem", index).innerText;
+  is(breakpointHeading, label, `Breakpoint heading is ${label}`);
+}
+
 add_task(async function() {
   const dbg = await initDebugger("doc-sources-querystring.html", "simple1.js?x=1", "simple1.js?x=2");
   const {
     selectors: { getSelectedSource },
     getState
   } = dbg;
 
   // Expand nodes and make sure more sources appear.
@@ -27,19 +32,24 @@ add_task(async function() {
     "simple1.js?x=1 and simple2.jsx=2 exist"
   );
 
   const source = findSource(dbg, "simple1.js?x=1");
   await selectSource(dbg, source);
   const tab = findElement(dbg, "activeTab");
   is(tab.innerText, "simple1.js?x=1", "Tab label is simple1.js?x=1");
   await addBreakpoint(dbg, "simple1.js?x=1", 6);
-  const breakpointHeading = findElement(dbg, "breakpointItem", 2).innerText;
-  is(
-    breakpointHeading,
-    "simple1.js?x=1",
-    "Breakpoint heading is simple1.js?x=1"
-  );
+  assertBreakpointHeading(dbg, "simple1.js?x=1", 2);
+
+  // pretty print the source and check the tab text
+  clickElement(dbg, "prettyPrintButton");
+  await waitForSource(dbg, "simple1.js?x=1:formatted");
 
+  const prettyTab = findElement(dbg, "activeTab");
+  is(prettyTab.innerText, "simple1.js?x=1", "Tab label is simple1.js?x=1");
+  ok(prettyTab.querySelector("img.prettyPrint"));
+  assertBreakpointHeading(dbg, "simple1.js?x=1", 2);
+
+  // assert quick open works with queries
   pressKey(dbg, "quickOpen");
   type(dbg, "simple1.js?x");
  ok(findElement(dbg, "resultItems")[0].innerText.includes("simple1.js?x=1"));
 });
new file mode 100644
--- /dev/null
+++ b/devtools/client/debugger/new/test/mochitest/browser_dbg-tabs-keyboard.js
@@ -0,0 +1,20 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// Tests removing tabs with keyboard shortcuts
+
+add_task(async function() {
+  const dbg = await initDebugger("doc-scripts.html", "simple1", "simple2");
+
+  await selectSource(dbg, "simple1");
+  await selectSource(dbg, "simple2");
+  is(countTabs(dbg), 2);
+  
+  pressKey(dbg, "close");
+  await waitForDispatch(dbg, "CLOSE_TAB");
+  is(countTabs(dbg), 1);
+
+  pressKey(dbg, "close");
+  await waitForDispatch(dbg, "CLOSE_TAB");
+  is(countTabs(dbg), 0);
+});
--- a/devtools/client/debugger/new/test/mochitest/browser_dbg-tabs-without-urls.js
+++ b/devtools/client/debugger/new/test/mochitest/browser_dbg-tabs-without-urls.js
@@ -1,15 +1,11 @@
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
-function countTabs(dbg) {
-  return findElement(dbg, "sourceTabs").children.length;
-}
-
 // Test that URL-less sources have tabs added to the UI but 
 // do not persist upon reload
 add_task(async function() {
   const dbg = await initDebugger("doc-scripts.html", "simple1", "simple2");
 
   await selectSource(dbg, "simple1");
   await selectSource(dbg, "simple2");
 
--- a/devtools/client/debugger/new/test/mochitest/browser_dbg-tabs.js
+++ b/devtools/client/debugger/new/test/mochitest/browser_dbg-tabs.js
@@ -1,17 +1,13 @@
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
 // Tests adding and removing tabs
 
-function countTabs(dbg) {
-  return findElement(dbg, "sourceTabs").children.length;
-}
-
 add_task(async function() {
   const dbg = await initDebugger("doc-scripts.html", "simple1", "simple2");
 
   await selectSource(dbg, "simple1");
   await selectSource(dbg, "simple2");
   is(countTabs(dbg), 2);
 
   // Test reloading the debugger
new file mode 100644
--- /dev/null
+++ b/devtools/client/debugger/new/test/mochitest/browser_dbg-xhr-run-to-completion.js
@@ -0,0 +1,31 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
+
+// Test that XHR handlers are not called when pausing in the debugger.
+add_task(async function() {
+  const dbg = await initDebugger("doc-xhr-run-to-completion.html");
+  invokeInTab("singleRequest", "doc-xhr-run-to-completion.html");
+  await waitForPaused(dbg);
+  assertPausedLocation(dbg);
+  resume(dbg);
+  await once(Services.ppmm, "test passed");
+});
+
+// Test that XHR handlers are not called when pausing in the debugger,
+// including when there are multiple XHRs and multiple times we pause before
+// they can be processed.
+add_task(async function() {
+  const dbg = await initDebugger("doc-xhr-run-to-completion.html");
+  invokeInTab("multipleRequests", "doc-xhr-run-to-completion.html");
+  await waitForPaused(dbg);
+  assertPausedLocation(dbg);
+  resume(dbg);
+  await waitForPaused(dbg);
+  assertPausedLocation(dbg);
+  resume(dbg);
+  await waitForPaused(dbg);
+  assertPausedLocation(dbg);
+  resume(dbg);
+  await once(Services.ppmm, "test passed");
+});
new file mode 100644
--- /dev/null
+++ b/devtools/client/debugger/new/test/mochitest/examples/doc-xhr-run-to-completion.html
@@ -0,0 +1,41 @@
+ <!-- This Source Code Form is subject to the terms of the Mozilla Public
+    - License, v. 2.0. If a copy of the MPL was not distributed with this
+    - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<!DOCTYPE html>
+<meta charset=UTF-8>
+<script>
+const cpmm = SpecialPowers.Services.cpmm;
+var result;
+
+function newXHR() {
+    var xhr = new XMLHttpRequest();
+    xhr.open('GET', 'doc-xhr-run-to-completion.html', true);
+    xhr.onload = done;
+    xhr.onerror = done;
+    xhr.onreadystatechange = done;
+    xhr.ontimeout = done;
+    xhr.send();
+}
+
+function singleRequest() {
+    result = "test failed";
+    newXHR();
+    debugger;
+    result = "test passed";
+}
+
+function multipleRequests() {
+    result = "test failed";
+    newXHR();
+    newXHR();
+    debugger;
+    newXHR();
+    debugger;
+    debugger;
+    result = "test passed";
+}
+
+function done() {
+    cpmm.sendAsyncMessage(result);
+}
+</script>
--- a/devtools/client/debugger/new/test/mochitest/helpers.js
+++ b/devtools/client/debugger/new/test/mochitest/helpers.js
@@ -596,16 +596,20 @@ async function selectSource(dbg, url, li
   return waitForSelectedSource(dbg, url);
 }
 
 
 async function closeTab(dbg, url) {
   await dbg.actions.closeTab(findSource(dbg, url));
 }
 
+function countTabs(dbg) {
+  return findElement(dbg, "sourceTabs").children.length;
+}
+
 /**
  * Steps over.
  *
  * @memberof mochitest/actions
  * @param {Object} dbg
  * @return {Promise}
  * @static
  */
@@ -913,16 +917,17 @@ const cmdShift = isMac
 const endKey = isMac
   ? { code: "VK_RIGHT", modifiers: cmdOrCtrl }
   : { code: "VK_END" };
 const startKey = isMac
   ? { code: "VK_LEFT", modifiers: cmdOrCtrl }
   : { code: "VK_HOME" };
 
 const keyMappings = {
+  close: { code: "w", modifiers: cmdOrCtrl },
   debugger: { code: "s", modifiers: shiftOrAlt },
   inspector: { code: "c", modifiers: shiftOrAlt },
   quickOpen: { code: "p", modifiers: cmdOrCtrl },
   quickOpenFunc: { code: "o", modifiers: cmdShift },
   quickOpenLine: { code: ":", modifiers: cmdOrCtrl },
   fileSearch: { code: "f", modifiers: cmdOrCtrl },
   Enter: { code: "VK_RETURN" },
   ShiftEnter: { code: "VK_RETURN", modifiers: shiftOrAlt },
--- a/devtools/client/locales/en-US/debugger.properties
+++ b/devtools/client/locales/en-US/debugger.properties
@@ -537,16 +537,17 @@ xhrBreakpoints.label=Add XHR breakpoint
 # LOCALIZATION NOTE (pauseOnAnyXHR): The pause on any XHR checkbox description
 # when the debugger will pause on any XHR requests.
 pauseOnAnyXHR=Pause on any URL
 
 # LOCALIZATION NOTE (sourceTabs.closeTab): Editor source tab context menu item
 # for closing the selected tab below the mouse.
 sourceTabs.closeTab=Close tab
 sourceTabs.closeTab.accesskey=c
+sourceTabs.closeTab.key=CmdOrCtrl+W
 
 # LOCALIZATION NOTE (sourceTabs.closeOtherTabs): Editor source tab context menu item
 # for closing the other tabs.
 sourceTabs.closeOtherTabs=Close other tabs
 sourceTabs.closeOtherTabs.accesskey=o
 
 # LOCALIZATION NOTE (sourceTabs.closeTabsToEnd): Editor source tab context menu item
 # for closing the tabs to the end (the right for LTR languages) of the selected tab.
--- a/dom/base/Selection.cpp
+++ b/dom/base/Selection.cpp
@@ -643,17 +643,19 @@ Selection::Selection(nsFrameSelection* a
       mDirection(eDirNext),
       mSelectionType(SelectionType::eNormal),
       mCustomColors(nullptr),
       mSelectionChangeBlockerCount(0),
       mUserInitiated(false),
       mCalledByJS(false),
       mNotifyAutoCopy(false) {}
 
-Selection::~Selection() {
+Selection::~Selection() { Disconnect(); }
+
+void Selection::Disconnect() {
   SetAnchorFocusRange(-1);
 
   uint32_t count = mRanges.Length();
   for (uint32_t i = 0; i < count; ++i) {
     mRanges[i].mRange->SetSelection(nullptr);
   }
 
   if (mAutoScrollTimer) {
@@ -720,17 +722,18 @@ NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCA
 // QueryInterface implementation for Selection
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(Selection)
   NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
   NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
   NS_INTERFACE_MAP_ENTRY(nsISupports)
 NS_INTERFACE_MAP_END
 
 NS_IMPL_MAIN_THREAD_ONLY_CYCLE_COLLECTING_ADDREF(Selection)
-NS_IMPL_MAIN_THREAD_ONLY_CYCLE_COLLECTING_RELEASE(Selection)
+NS_IMPL_MAIN_THREAD_ONLY_CYCLE_COLLECTING_RELEASE_WITH_LAST_RELEASE(
+    Selection, Disconnect())
 
 const RangeBoundary& Selection::AnchorRef() const {
   if (!mAnchorFocusRange) {
     static RangeBoundary sEmpty;
     return sEmpty;
   }
 
   if (GetDirection() == eDirNext) {
--- a/dom/base/Selection.h
+++ b/dom/base/Selection.h
@@ -665,16 +665,18 @@ class Selection final : public nsSupport
    *      [non]-editable
    *      <div id="c" contenteditable>
    *        de[sc]endant
    *  in this case, this returns nullptr because the second range is in
    *  non-editable area.
    */
   Element* GetCommonEditingHostForAllRanges();
 
+  void Disconnect();
+
   // These are the ranges inside this selection. They are kept sorted in order
   // of DOM start position.
   //
   // This data structure is sorted by the range beginnings. As the ranges are
   // disjoint, it is also implicitly sorted by the range endings. This allows
   // us to perform binary searches when searching for existence of a range,
   // giving us O(log n) search time.
   //
--- a/dom/base/nsDocument.cpp
+++ b/dom/base/nsDocument.cpp
@@ -281,16 +281,17 @@
 #include "nsIURIMutator.h"
 #include "mozilla/DocumentStyleRootIterator.h"
 #include "mozilla/PendingFullscreenEvent.h"
 #include "mozilla/RestyleManager.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "nsHTMLTags.h"
 #include "NodeUbiReporting.h"
 #include "nsICookieService.h"
+#include "mozilla/net/ChannelEventQueue.h"
 #include "mozilla/net/RequestContextService.h"
 #include "StorageAccessPermissionRequest.h"
 
 using namespace mozilla;
 using namespace mozilla::dom;
 
 typedef nsTArray<Link*> LinkArray;
 
@@ -8575,16 +8576,32 @@ void nsIDocument::UnsuppressEventHandlin
 
   if (aFireEvents) {
     MOZ_RELEASE_ASSERT(NS_IsMainThread());
     nsCOMPtr<nsIRunnable> ded = new nsDelayedEventDispatcher(documents);
     Dispatch(TaskCategory::Other, ded.forget());
   } else {
     FireOrClearDelayedEvents(documents, false);
   }
+
+  if (!EventHandlingSuppressed()) {
+    MOZ_ASSERT(NS_IsMainThread());
+    nsTArray<RefPtr<net::ChannelEventQueue>> queues;
+    mSuspendedQueues.SwapElements(queues);
+    for (net::ChannelEventQueue* queue : queues) {
+      queue->Resume();
+    }
+  }
+}
+
+void nsIDocument::AddSuspendedChannelEventQueue(
+    net::ChannelEventQueue* aQueue) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(EventHandlingSuppressed());
+  mSuspendedQueues.AppendElement(aQueue);
 }
 
 nsISupports* nsIDocument::GetCurrentContentSink() {
   return mParser ? mParser->GetContentSink() : nullptr;
 }
 
 nsIDocument* nsIDocument::GetTemplateContentsOwner() {
   if (!mTemplateContentsOwner) {
--- a/dom/base/nsDocumentEncoder.cpp
+++ b/dom/base/nsDocumentEncoder.cpp
@@ -179,17 +179,17 @@ class nsDocumentEncoder : public nsIDocu
   bool mDisableContextSerialize;
   bool mIsCopying;  // Set to true only while copying
   bool mNodeIsContainer;
   bool mIsPlainText;
   nsStringBuffer* mCachedBuffer;
 };
 
 NS_IMPL_CYCLE_COLLECTING_ADDREF(nsDocumentEncoder)
-NS_IMPL_CYCLE_COLLECTING_RELEASE(nsDocumentEncoder)
+NS_IMPL_CYCLE_COLLECTING_RELEASE_WITH_LAST_RELEASE(nsDocumentEncoder, Clear())
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsDocumentEncoder)
   NS_INTERFACE_MAP_ENTRY(nsIDocumentEncoder)
   NS_INTERFACE_MAP_ENTRY(nsISupports)
 NS_INTERFACE_MAP_END
 
 NS_IMPL_CYCLE_COLLECTION(nsDocumentEncoder, mDocument, mSelection, mRange,
                          mNode, mSerializer, mCommonParent)
--- a/dom/base/nsIDocument.h
+++ b/dom/base/nsIDocument.h
@@ -205,16 +205,22 @@ class Sequence;
 template <typename, typename>
 class CallbackObjectHolder;
 
 enum class CallerType : uint32_t;
 
 }  // namespace dom
 }  // namespace mozilla
 
+namespace mozilla {
+namespace net {
+class ChannelEventQueue;
+} // namespace net
+} // namespace mozilla
+
 // Must be kept in sync with xpcom/rust/xpcom/src/interfaces/nonidl.rs
 #define NS_IDOCUMENT_IID                             \
   {                                                  \
     0xce1f7627, 0x7109, 0x4977, {                    \
       0xba, 0x77, 0x49, 0x0f, 0xfd, 0xe0, 0x7a, 0xaa \
     }                                                \
   }
 
@@ -2438,16 +2444,24 @@ class nsIDocument : public nsINode,
 
   void DecreaseEventSuppression() {
     MOZ_ASSERT(mEventsSuppressed);
     --mEventsSuppressed;
     UpdateFrameRequestCallbackSchedulingState();
   }
 
   /**
+   * Note a ChannelEventQueue which has been suspended on the document's behalf
+   * to prevent XHRs from running content scripts while event handling is
+   * suppressed. The document is responsible for resuming the queue after
+   * event handling is unsuppressed.
+   */
+  void AddSuspendedChannelEventQueue(mozilla::net::ChannelEventQueue* aQueue);
+
+  /**
    * Increment https://html.spec.whatwg.org/#ignore-destructive-writes-counter
    */
   void IncrementIgnoreDestructiveWritesCounter() {
     ++mIgnoreDestructiveWritesCounter;
   }
 
   /**
    * Decrement https://html.spec.whatwg.org/#ignore-destructive-writes-counter
@@ -4020,16 +4034,20 @@ class nsIDocument : public nsINode,
 
   // If we're an external resource document, this will be non-null and will
   // point to our "display document": the one that all resource lookups should
   // go to.
   nsCOMPtr<nsIDocument> mDisplayDocument;
 
   uint32_t mEventsSuppressed;
 
+  // Any XHR ChannelEventQueues that were suspended on this document while
+  // events were suppressed.
+  nsTArray<RefPtr<mozilla::net::ChannelEventQueue>> mSuspendedQueues;
+
   /**
    * https://html.spec.whatwg.org/#ignore-destructive-writes-counter
    */
   uint32_t mIgnoreDestructiveWritesCounter;
 
   /**
    * The current frame request callback handle
    */
--- a/dom/base/nsRange.cpp
+++ b/dom/base/nsRange.cpp
@@ -339,29 +339,25 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(ns
   // but it wouldn't know we're calling it from Unlink and so would do
   // more work than it really needs to.
   if (tmp->mRegisteredCommonAncestor) {
     tmp->UnregisterCommonAncestor(tmp->mRegisteredCommonAncestor, true);
   }
 
   tmp->Reset();
 
-  // This needs to be unlinked after Reset() is called, as it controls
-  // the result of IsInSelection() which is used by tmp->Reset().
   MOZ_DIAGNOSTIC_ASSERT(!tmp->isInList(),
                         "Shouldn't be registered now that we're unlinking");
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSelection);
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsRange)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwner)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mStart)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mEnd)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mRoot)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSelection)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(nsRange)
   NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
 NS_IMPL_CYCLE_COLLECTION_TRACE_END
 
 static void MarkDescendants(nsINode* aNode) {
   // Set NodeIsDescendantOfCommonAncestorForRangeInSelection on aNode's
@@ -891,17 +887,17 @@ void nsRange::NotifySelectionListenersAf
     // Our internal code should not move focus with using this instance while
     // it's calling Selection::NotifySelectionListeners() which may move focus
     // or calls selection listeners.  So, let's set mCalledByJS to false here
     // since non-*JS() methods don't set it to false.
     AutoCalledByJSRestore calledByJSRestorer(*this);
     mCalledByJS = false;
     // Be aware, this range may be modified or stop being a range for selection
     // after this call.  Additionally, the selection instance may have gone.
-    RefPtr<Selection> selection = mSelection;
+    RefPtr<Selection> selection = mSelection.get();
     selection->NotifySelectionListeners(calledByJSRestorer.SavedValue());
   }
 }
 
 /******************************************************
  * Private helper routines
  ******************************************************/
 
--- a/dom/base/nsRange.h
+++ b/dom/base/nsRange.h
@@ -485,17 +485,17 @@ class nsRange final : public nsStubMutat
   };
 
   nsCOMPtr<nsIDocument> mOwner;
   nsCOMPtr<nsINode> mRoot;
   // mRegisteredCommonAncestor is only non-null when the range
   // IsInSelection().  It's kept alive via mStartContainer/mEndContainer,
   // because we update it any time those could become disconnected from it.
   nsINode* MOZ_NON_OWNING_REF mRegisteredCommonAncestor;
-  RefPtr<mozilla::dom::Selection> mSelection;
+  mozilla::WeakPtr<mozilla::dom::Selection> mSelection;
 
   // These raw pointers are used to remember a child that is about
   // to be inserted between a CharacterData call and a subsequent
   // ContentInserted or ContentAppended call. It is safe to store
   // these refs because the caller is guaranteed to trigger both
   // notifications while holding a strong reference to the new child.
   nsIContent* MOZ_NON_OWNING_REF mNextStartRef;
   nsIContent* MOZ_NON_OWNING_REF mNextEndRef;
--- a/dom/bindings/BindingUtils.cpp
+++ b/dom/bindings/BindingUtils.cpp
@@ -3953,17 +3953,17 @@ JS::Handle<JSObject*> GetPerInterfaceObj
    * changed after they have been set.
    *
    * Calling address() avoids the read barrier that does gray unmarking, but
    * it's not possible for the object to be gray here.
    */
 
   const JS::Heap<JSObject*>& entrySlot =
       protoAndIfaceCache.EntrySlotMustExist(aSlotId);
-  MOZ_ASSERT(JS::ObjectIsNotGray(entrySlot));
+  JS::AssertObjectIsNotGray(entrySlot);
   return JS::Handle<JSObject*>::fromMarkedLocation(entrySlot.address());
 }
 
 namespace binding_detail {
 bool IsGetterEnabled(JSContext* aCx, JS::Handle<JSObject*> aObj,
                      JSJitGetterOp aGetter,
                      const Prefable<const JSPropertySpec>* aAttributes) {
   MOZ_ASSERT(aAttributes);
--- a/dom/bindings/BindingUtils.h
+++ b/dom/bindings/BindingUtils.h
@@ -1444,23 +1444,23 @@ template <typename T, bool hasWrapObject
 struct WrapNativeHelper {
   static inline JSObject* Wrap(JSContext* cx, T* parent,
                                nsWrapperCache* cache) {
     MOZ_ASSERT(cache);
 
     JSObject* obj;
     if ((obj = cache->GetWrapper())) {
       // GetWrapper always unmarks gray.
-      MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+      JS::AssertObjectIsNotGray(obj);
       return obj;
     }
 
     // WrapObject never returns a gray thing.
     obj = parent->WrapObject(cx, nullptr);
-    MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+    JS::AssertObjectIsNotGray(obj);
 
     return obj;
   }
 };
 
 // Wrapping of our native parent, for cases when it's not a WebIDL object.  In
 // this case it must be nsISupports.
 template <typename T>
@@ -1470,22 +1470,22 @@ struct WrapNativeHelper<T, false> {
     JSObject* obj;
     if (cache && (obj = cache->GetWrapper())) {
 #ifdef DEBUG
       JS::Rooted<JSObject*> rootedObj(cx, obj);
       NS_ASSERTION(WrapNativeISupports(cx, parent, cache) == rootedObj,
                    "Unexpected object in nsWrapperCache");
       obj = rootedObj;
 #endif
-      MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+      JS::AssertObjectIsNotGray(obj);
       return obj;
     }
 
     obj = WrapNativeISupports(cx, parent, cache);
-    MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+    JS::AssertObjectIsNotGray(obj);
     return obj;
   }
 };
 
 // Finding the associated global for an object.
 template <typename T>
 static inline JSObject* FindAssociatedGlobal(
     JSContext* cx, T* p, nsWrapperCache* cache,
@@ -1494,47 +1494,47 @@ static inline JSObject* FindAssociatedGl
   if (!p) {
     return JS::CurrentGlobalOrNull(cx);
   }
 
   JSObject* obj = WrapNativeHelper<T>::Wrap(cx, p, cache);
   if (!obj) {
     return nullptr;
   }
-  MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+  JS::AssertObjectIsNotGray(obj);
 
   // The object is never a CCW but it may not be in the current compartment of
   // the JSContext.
   obj = JS::GetNonCCWObjectGlobal(obj);
 
   switch (scope) {
     case mozilla::dom::ReflectionScope::XBL: {
       // If scope is set to XBLScope, it means that the canonical reflector for
       // this native object should live in the content XBL scope. Note that we
       // never put anonymous content inside an add-on scope.
       if (xpc::IsInContentXBLScope(obj)) {
         return obj;
       }
       JS::Rooted<JSObject*> rootedObj(cx, obj);
       JSObject* xblScope = xpc::GetXBLScope(cx, rootedObj);
       MOZ_ASSERT_IF(xblScope, JS_IsGlobalObject(xblScope));
-      MOZ_ASSERT(JS::ObjectIsNotGray(xblScope));
+      JS::AssertObjectIsNotGray(xblScope);
       return xblScope;
     }
 
     case mozilla::dom::ReflectionScope::UAWidget: {
       // If scope is set to UAWidgetScope, it means that the canonical reflector
       // for this native object should live in the UA widget scope.
       if (xpc::IsInUAWidgetScope(obj)) {
         return obj;
       }
       JS::Rooted<JSObject*> rootedObj(cx, obj);
       JSObject* uaWidgetScope = xpc::GetUAWidgetScope(cx, rootedObj);
       MOZ_ASSERT_IF(uaWidgetScope, JS_IsGlobalObject(uaWidgetScope));
-      MOZ_ASSERT(JS::ObjectIsNotGray(uaWidgetScope));
+      JS::AssertObjectIsNotGray(uaWidgetScope);
       return uaWidgetScope;
     }
 
     case ReflectionScope::Content:
       return obj;
   }
 
   MOZ_CRASH("Unknown ReflectionScope variant");
--- a/dom/bindings/CallbackObject.h
+++ b/dom/bindings/CallbackObject.h
@@ -133,17 +133,17 @@ class CallbackObject : public nsISupport
   }
 
   /*
    * If the callback is known to be non-gray, then this method can be
    * used instead of CallbackOrNull() to avoid the overhead of
    * ExposeObjectToActiveJS().
    */
   JS::Handle<JSObject*> CallbackKnownNotGray() const {
-    MOZ_ASSERT(JS::ObjectIsNotGray(mCallback));
+    JS::AssertObjectIsNotGray(mCallback);
     return CallbackPreserveColor();
   }
 
   nsIGlobalObject* IncumbentGlobalOrNull() const { return mIncumbentGlobal; }
 
   enum ExceptionHandling {
     // Report any exception and don't throw it to the caller code.
     eReportExceptions,
--- a/dom/bindings/Codegen.py
+++ b/dom/bindings/Codegen.py
@@ -503,17 +503,16 @@ class CGDOMJSClass(CGThing):
               ${finalize}, /* finalize */
               ${call}, /* call */
               nullptr,               /* hasInstance */
               nullptr,               /* construct */
               ${trace}, /* trace */
             };
 
             static const js::ClassExtension sClassExtension = {
-              nullptr, /* weakmapKeyDelegateOp */
               ${objectMoved} /* objectMovedOp */
             };
 
             static const DOMJSClass sClass = {
               { "${name}",
                 ${flags},
                 &sClassOps,
                 JS_NULL_CLASS_SPEC,
@@ -3766,17 +3765,17 @@ class CGWrapWithCacheMethod(CGAbstractMe
             MOZ_ASSERT(ToSupportsIsOnPrimaryInheritanceChain(aObject, aCache),
                        "nsISupports must be on our primary inheritance chain");
 
             JS::Rooted<JSObject*> global(aCx, FindAssociatedGlobal(aCx, aObject->GetParentObject()));
             if (!global) {
               return false;
             }
             MOZ_ASSERT(JS_IsGlobalObject(global));
-            MOZ_ASSERT(JS::ObjectIsNotGray(global));
+            JS::AssertObjectIsNotGray(global);
 
             // That might have ended up wrapping us already, due to the wonders
             // of XBL.  Check for that, and bail out as needed.
             aReflector.set(aCache->GetWrapper());
             if (aReflector) {
             #ifdef DEBUG
               AssertReflectorHasGivenProto(aCx, aReflector, aGivenProto);
             #endif // DEBUG
--- a/dom/bindings/SimpleGlobalObject.cpp
+++ b/dom/bindings/SimpleGlobalObject.cpp
@@ -69,17 +69,17 @@ static const js::ClassOps SimpleGlobalCl
     SimpleGlobal_finalize,
     nullptr,
     nullptr,
     nullptr,
     JS_GlobalObjectTraceHook,
 };
 
 static const js::ClassExtension SimpleGlobalClassExtension = {
-    nullptr, SimpleGlobal_moved};
+    SimpleGlobal_moved};
 
 const js::Class SimpleGlobalClass = {
     "",
     JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE |
         JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_FOREGROUND_FINALIZE,
     &SimpleGlobalClassOps,
     JS_NULL_CLASS_SPEC,
     &SimpleGlobalClassExtension,
--- a/dom/payments/PaymentRequest.cpp
+++ b/dom/payments/PaymentRequest.cpp
@@ -640,16 +640,21 @@ PaymentRequest::PaymentRequest(nsPIDOMWi
       mUpdateError(NS_OK),
       mState(eCreated),
       mIPC(nullptr) {
   MOZ_ASSERT(aWindow);
   RegisterActivityObserver();
 }
 
 already_AddRefed<Promise> PaymentRequest::CanMakePayment(ErrorResult& aRv) {
+  if (!InFullyActiveDocument()) {
+    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
+    return nullptr;
+  }
+
   if (mState != eCreated) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
 
   if (mResultPromise) {
     // XXX This doesn't match the spec but does match Chromium.
     aRv.Throw(NS_ERROR_DOM_NOT_ALLOWED_ERR);
@@ -678,37 +683,36 @@ already_AddRefed<Promise> PaymentRequest
 void PaymentRequest::RespondCanMakePayment(bool aResult) {
   MOZ_ASSERT(mResultPromise);
   mResultPromise->MaybeResolve(aResult);
   mResultPromise = nullptr;
 }
 
 already_AddRefed<Promise> PaymentRequest::Show(
     const Optional<OwningNonNull<Promise>>& aDetailsPromise, ErrorResult& aRv) {
+  if (!InFullyActiveDocument()) {
+    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
+    return nullptr;
+  }
+
   nsIGlobalObject* global = GetOwnerGlobal();
   nsCOMPtr<nsPIDOMWindowInner> win = do_QueryInterface(global);
-  MOZ_ASSERT(win);
   nsIDocument* doc = win->GetExtantDoc();
 
   if (!EventStateManager::IsHandlingUserInput()) {
     nsString msg = NS_LITERAL_STRING(
         "User activation is now required to call PaymentRequest.show()");
     nsContentUtils::ReportToConsoleNonLocalized(
         msg, nsIScriptError::warningFlag, NS_LITERAL_CSTRING("Security"), doc);
     if (StaticPrefs::dom_payments_request_user_interaction_required()) {
       aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
       return nullptr;
     }
   }
 
-  if (!doc || !doc->IsCurrentActiveDocument()) {
-    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
-    return nullptr;
-  }
-
   if (mState != eCreated) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
 
   ErrorResult result;
   RefPtr<Promise> promise = Promise::Create(global, result);
   if (result.Failed()) {
@@ -787,16 +791,21 @@ void PaymentRequest::RespondShowPayment(
 }
 
 void PaymentRequest::RespondComplete() {
   MOZ_ASSERT(mResponse);
   mResponse->RespondComplete();
 }
 
 already_AddRefed<Promise> PaymentRequest::Abort(ErrorResult& aRv) {
+  if (!InFullyActiveDocument()) {
+    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
+    return nullptr;
+  }
+
   if (mState != eInteractive) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
 
   if (mAbortPromise) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
@@ -867,16 +876,21 @@ nsresult PaymentRequest::UpdatePayment(J
   nsresult rv = manager->UpdatePayment(aCx, this, aDetails, mRequestShipping);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return rv;
   }
   return NS_OK;
 }
 
 void PaymentRequest::AbortUpdate(nsresult aRv) {
+  // Silently ignore the call when the document is not fully active.
+  if (!InFullyActiveDocument()) {
+    return;
+  }
+
   MOZ_ASSERT(NS_FAILED(aRv));
 
   if (mState != eInteractive) {
     return;
   }
   // Close down any remaining user interface.
   RefPtr<PaymentRequestManager> manager = PaymentRequestManager::GetSingleton();
   MOZ_ASSERT(manager);
@@ -1050,16 +1064,20 @@ void PaymentRequest::GetOptions(PaymentO
 }
 
 void PaymentRequest::SetOptions(const PaymentOptions& aOptions) {
   mOptions = aOptions;
 }
 
 void PaymentRequest::ResolvedCallback(JSContext* aCx,
                                       JS::Handle<JS::Value> aValue) {
+  if (!InFullyActiveDocument()) {
+    return;
+  }
+
   MOZ_ASSERT(aCx);
   mUpdating = false;
   if (NS_WARN_IF(!aValue.isObject())) {
     return;
   }
 
   // Converting value to a PaymentDetailsUpdate dictionary
   PaymentDetailsUpdate details;
@@ -1079,20 +1097,48 @@ void PaymentRequest::ResolvedCallback(JS
   if (NS_FAILED(UpdatePayment(aCx, details))) {
     AbortUpdate(NS_ERROR_DOM_ABORT_ERR);
     return;
   }
 }
 
 void PaymentRequest::RejectedCallback(JSContext* aCx,
                                       JS::Handle<JS::Value> aValue) {
+  if (!InFullyActiveDocument()) {
+    return;
+  }
+
   mUpdating = false;
   AbortUpdate(NS_ERROR_DOM_ABORT_ERR);
 }
 
+bool PaymentRequest::InFullyActiveDocument() {
+  nsIGlobalObject* global = GetOwnerGlobal();
+  if (!global) {
+    return false;
+  }
+
+  nsCOMPtr<nsPIDOMWindowInner> win = do_QueryInterface(global);
+  nsIDocument* doc = win->GetExtantDoc();
+  if (!doc || !doc->IsCurrentActiveDocument()) {
+    return false;
+  }
+
+  // Per the definition of a fully active document, every ancestor document
+  // must also be a current active document; check them recursively.
+  nsIDocument* parentDoc = doc->GetParentDocument();
+  while (parentDoc) {
+    if (!parentDoc->IsCurrentActiveDocument()) {
+      return false;
+    }
+    parentDoc = parentDoc->GetParentDocument();
+  }
+  return true;
+}
+
 void PaymentRequest::RegisterActivityObserver() {
   if (nsPIDOMWindowInner* window = GetOwner()) {
     nsCOMPtr<nsIDocument> doc = window->GetExtantDoc();
     if (doc) {
       doc->RegisterActivityObserver(
           NS_ISUPPORTS_CAST(nsIDocumentActivity*, this));
     }
   }
@@ -1109,17 +1155,36 @@ void PaymentRequest::UnregisterActivityO
 }
 
 void PaymentRequest::NotifyOwnerDocumentActivityChanged() {
   nsPIDOMWindowInner* window = GetOwner();
   NS_ENSURE_TRUE_VOID(window);
   nsIDocument* doc = window->GetExtantDoc();
   NS_ENSURE_TRUE_VOID(doc);
 
-  if (!doc->IsCurrentActiveDocument()) {
+  if (!InFullyActiveDocument()) {
+    if (mState == eInteractive) {
+      if (mAcceptPromise) {
+        mAcceptPromise->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
+        mAcceptPromise = nullptr;
+      }
+      if (mResponse) {
+        mResponse->RejectRetry(NS_ERROR_DOM_ABORT_ERR);
+      }
+      if (mAbortPromise) {
+        mAbortPromise->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
+        mAbortPromise = nullptr;
+      }
+    }
+    if (mState == eCreated) {
+      if (mResultPromise) {
+        mResultPromise->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
+        mResultPromise = nullptr;
+      }
+    }
     RefPtr<PaymentRequestManager> mgr = PaymentRequestManager::GetSingleton();
     mgr->ClosePayment(this);
   }
 }
 
 PaymentRequest::~PaymentRequest() {
   if (mIPC) {
     // If we're being destroyed, the PaymentRequestManager isn't holding any
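
The new InFullyActiveDocument() helper implements the spec's "fully active" notion: the document itself and every ancestor document must be the current active document of its browsing context. A minimal self-contained sketch of the walk; Document is a stand-in type, not the Gecko nsIDocument:

struct Document {
  Document* parent = nullptr;   // nullptr for a top-level document
  bool isCurrentActive = true;  // models IsCurrentActiveDocument()
};

static bool InFullyActiveDocument(const Document* doc) {
  if (!doc) {
    return false;  // no document at all, e.g. the window was torn down
  }
  // The document and all of its ancestors must be current active documents.
  for (; doc; doc = doc->parent) {
    if (!doc->isCurrentActive) {
      return false;
    }
  }
  return true;
}
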
--- a/dom/payments/PaymentRequest.h
+++ b/dom/payments/PaymentRequest.h
@@ -198,16 +198,18 @@ class PaymentRequest final : public DOME
   inline void ShippingWasRequested() { mRequestShipping = true; }
 
   nsresult UpdatePaymentMethod(const nsAString& aMethodName,
                                const ChangeDetails& aMethodDetails);
 
   void ResolvedCallback(JSContext* aCx, JS::Handle<JS::Value> aValue) override;
   void RejectedCallback(JSContext* aCx, JS::Handle<JS::Value> aValue) override;
 
+  bool InFullyActiveDocument();
+
   IMPL_EVENT_HANDLER(merchantvalidation);
   IMPL_EVENT_HANDLER(shippingaddresschange);
   IMPL_EVENT_HANDLER(shippingoptionchange);
   IMPL_EVENT_HANDLER(paymentmethodchange);
 
   void SetIPC(PaymentRequestChild* aChild) { mIPC = aChild; }
 
   PaymentRequestChild* GetIPC() const { return mIPC; }
--- a/dom/payments/PaymentRequestUpdateEvent.cpp
+++ b/dom/payments/PaymentRequestUpdateEvent.cpp
@@ -47,16 +47,19 @@ PaymentRequestUpdateEvent::PaymentReques
       mRequest(nullptr) {
   MOZ_ASSERT(aOwner);
 }
 
 void PaymentRequestUpdateEvent::ResolvedCallback(JSContext* aCx,
                                                  JS::Handle<JS::Value> aValue) {
   MOZ_ASSERT(aCx);
   MOZ_ASSERT(mRequest);
+  if (!mRequest->InFullyActiveDocument()) {
+    return;
+  }
 
   if (NS_WARN_IF(!aValue.isObject()) || !mWaitForUpdate) {
     return;
   }
 
   // Converting value to a PaymentDetailsUpdate dictionary
   PaymentDetailsUpdate details;
   if (!details.Init(aCx, aValue)) {
@@ -84,30 +87,36 @@ void PaymentRequestUpdateEvent::Resolved
   }
   mWaitForUpdate = false;
   mRequest->SetUpdating(false);
 }
 
 void PaymentRequestUpdateEvent::RejectedCallback(JSContext* aCx,
                                                  JS::Handle<JS::Value> aValue) {
   MOZ_ASSERT(mRequest);
+  if (!mRequest->InFullyActiveDocument()) {
+    return;
+  }
 
   mRequest->AbortUpdate(NS_ERROR_DOM_ABORT_ERR);
   mWaitForUpdate = false;
   mRequest->SetUpdating(false);
 }
 
 void PaymentRequestUpdateEvent::UpdateWith(Promise& aPromise,
                                            ErrorResult& aRv) {
   if (!IsTrusted()) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
 
   MOZ_ASSERT(mRequest);
+  if (!mRequest->InFullyActiveDocument()) {
+    return;
+  }
 
   if (mWaitForUpdate || !mRequest->ReadyForUpdate()) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
 
   aPromise.AppendNativeHandler(this);
 
--- a/dom/payments/PaymentResponse.cpp
+++ b/dom/payments/PaymentResponse.cpp
@@ -171,16 +171,22 @@ void PaymentResponse::GetPayerPhone(nsSt
 // (the object should be kept alive by the callee).
 already_AddRefed<PaymentAddress> PaymentResponse::GetShippingAddress() const {
   RefPtr<PaymentAddress> address = mShippingAddress;
   return address.forget();
 }
 
 already_AddRefed<Promise> PaymentResponse::Complete(PaymentComplete result,
                                                     ErrorResult& aRv) {
+  MOZ_ASSERT(mRequest);
+  if (!mRequest->InFullyActiveDocument()) {
+    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
+    return nullptr;
+  }
+
   if (mCompleteCalled) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
 
   mCompleteCalled = true;
 
   if (mTimer) {
@@ -221,40 +227,35 @@ void PaymentResponse::RespondComplete() 
   if (mPromise) {
     mPromise->MaybeResolve(JS::UndefinedHandleValue);
     mPromise = nullptr;
   }
 }
 
 already_AddRefed<Promise> PaymentResponse::Retry(
     JSContext* aCx, const PaymentValidationErrors& aErrors, ErrorResult& aRv) {
+  MOZ_ASSERT(mRequest);
+  if (!mRequest->InFullyActiveDocument()) {
+    aRv.Throw(NS_ERROR_DOM_ABORT_ERR);
+    return nullptr;
+  }
+
   nsIGlobalObject* global = GetOwner()->AsGlobal();
   ErrorResult errResult;
   RefPtr<Promise> promise = Promise::Create(global, errResult);
   if (errResult.Failed()) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   if (mTimer) {
     mTimer->Cancel();
     mTimer = nullptr;
   }
 
-  if (NS_WARN_IF(!GetOwner())) {
-    aRv.Throw(NS_ERROR_FAILURE);
-    return nullptr;
-  }
-
-  nsIDocument* doc = GetOwner()->GetExtantDoc();
-  if (!doc || !doc->IsCurrentActiveDocument()) {
-    promise->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
-    return promise.forget();
-  }
-
   if (mCompleteCalled || mRetryPromise) {
     promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
     return promise.forget();
   }
 
   nsresult rv = ValidatePaymentValidationErrors(aErrors);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     promise->MaybeReject(rv);
@@ -408,16 +409,21 @@ nsresult PaymentResponse::ValidatePaymen
     }
   }
   return NS_ERROR_DOM_ABORT_ERR;
 }
 
 NS_IMETHODIMP
 PaymentResponse::Notify(nsITimer* timer) {
   mTimer = nullptr;
+
+  if (!mRequest->InFullyActiveDocument()) {
+    return NS_OK;
+  }
+
   if (mCompleteCalled) {
     return NS_OK;
   }
 
   mCompleteCalled = true;
 
   RefPtr<PaymentRequestManager> manager = PaymentRequestManager::GetSingleton();
   if (NS_WARN_IF(!manager)) {
--- a/dom/payments/test/ClosePaymentChromeScript.js
+++ b/dom/payments/test/ClosePaymentChromeScript.js
@@ -3,20 +3,20 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 "use strict";
 
 const { XPCOMUtils } = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
 
 const paymentSrv = Cc["@mozilla.org/dom/payments/payment-request-service;1"].getService(Ci.nsIPaymentRequestService);
 
 function emitTestFail(message) {
-  sendAsyncMessage("test-fail", message);
+  sendAsyncMessage("test-fail", `${DummyUIService.testName}: ${message}`);
 }
 function emitTestPass(message) {
-  sendAsyncMessage("test-pass", message);
+  sendAsyncMessage("test-pass", `${DummyUIService.testName}: ${message}`);
 }
 
 addMessageListener("close-check", function() {
   const paymentEnum = paymentSrv.enumerate();
   if (paymentEnum.hasMoreElements()) {
     emitTestFail("Non-empty PaymentRequest queue in PaymentRequestService.");
   } else {
     emitTestPass("Got empty PaymentRequest queue in PaymentRequestService.");
@@ -51,26 +51,29 @@ addMessageListener("payment-num-check", 
     emitTestPass("Got expected '" + numPayments +
                  "' PaymentRequests in PaymentRequestService.");
   }
  // Force cleanup of PaymentRequests to leave a clean environment for the next testcase.
   paymentSrv.cleanup();
   sendAsyncMessage("payment-num-check-complete");
 });
 
-var respondRequestId;
+addMessageListener("test-setup", (testName) => {
+  DummyUIService.testName = testName;
+  sendAsyncMessage("test-setup-complete");
+});
 
 addMessageListener("reject-payment", (expectedError) => {
   try {
     const responseData = Cc["@mozilla.org/dom/payments/general-response-data;1"].
                             createInstance(Ci.nsIGeneralResponseData);
     responseData.initData({});
     const showResponse = Cc["@mozilla.org/dom/payments/payment-show-action-response;1"].
                             createInstance(Ci.nsIPaymentShowActionResponse);
-    showResponse.init(respondRequestId,
+    showResponse.init(DummyUIService.respondRequestId,
                       Ci.nsIPaymentActionResponse.PAYMENT_REJECTED,
                       "",                 // payment method
                       responseData,       // payment method data
                       "",                 // payer name
                       "",                 // payer email
                       "");                // payer phone
     paymentSrv.respondPayment(showResponse.QueryInterface(Ci.nsIPaymentActionResponse));
     emitTestPass("Reject PaymentRequest successfully");
@@ -85,33 +88,34 @@ addMessageListener("reject-payment", (ex
     emitTestFail("Unexpected error '" + error.name +
                  "' when reponding a closed PaymentRequest");
   }
   sendAsyncMessage("reject-payment-complete");
 });
 
 addMessageListener("update-payment", () => {
   try {
-    paymentSrv.changeShippingOption(respondRequestId, "");
+    paymentSrv.changeShippingOption(DummyUIService.respondRequestId, "");
     emitTestPass("Change shippingOption succefully");
   } catch (error) {
     emitTestFail("Unexpected error '" + error.name +
                  "' when changing the shipping option");
   }
   sendAsyncMessage("update-payment-complete");
 });
 
 const DummyUIService = {
-  showPayment: (requestId => {respondRequestId = requestId}),
-  abortPayment: (requestId) => {respondRequestId = requestId},
-  completePayment: (requestId) => {respondRequestId = requestId},
-  updatePayment: (requestId) => {respondRequestId = requestId},
-  closePayment: (requestId) => {respondRequestId = requestId},
+  testName: "",
+  respondRequestId: "",
+  showPayment: (requestId) => {DummyUIService.respondRequestId = requestId},
+  abortPayment: (requestId) => {DummyUIService.respondRequestId = requestId},
+  completePayment: (requestId) => {DummyUIService.respondRequestId = requestId},
+  updatePayment: (requestId) => {DummyUIService.respondRequestId = requestId},
+  closePayment: (requestId) => {DummyUIService.respondRequestId = requestId},
   QueryInterface: ChromeUtils.generateQI([Ci.nsIPaymentUIService]),
 };
 
 paymentSrv.setTestingUIService(DummyUIService.QueryInterface(Ci.nsIPaymentUIService));
 
-
 addMessageListener("teardown", function() {
   paymentSrv.setTestingUIService(null);
   sendAsyncMessage('teardown-complete');
 });
--- a/dom/payments/test/test_closePayment.html
+++ b/dom/payments/test/test_closePayment.html
@@ -3,114 +3,109 @@
 <!--
 https://bugzilla.mozilla.org/show_bug.cgi?id=1408234
 -->
 <head>
   <meta charset="utf-8">
   <title>Test for closing PaymentRequest</title>
   <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
   <script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="application/javascript" src="DefaultData.js"></script>
   <script type="application/javascript">
 
   "use strict";
   SimpleTest.waitForExplicitFinish();
 
   var gUrl = SimpleTest.getTestFileURL('ClosePaymentChromeScript.js');
   var gScript = SpecialPowers.loadChromeScript(gUrl);
-  var testName = "";
 
   function testFailHandler(message) {
-    ok(false, testName + ": " + message);
+    ok(false, message);
   }
   function testPassHandler(message) {
-    ok(true, testName + ": " + message);
+    ok(true, message);
   }
   gScript.addMessageListener("test-fail", testFailHandler);
   gScript.addMessageListener("test-pass", testPassHandler);
 
   async function requestChromeAction(action, params) {
     gScript.sendAsyncMessage(action, params);
     await new Promise(resolve => {
       gScript.addMessageListener(`${action}-complete`, function completeListener() {
         gScript.removeMessageListener(`${action}-complete`, completeListener);
         resolve();
       });
     });
   }
 
-  function testCloseByReloading() {
-    return new Promise(async (resolve, reject) => {
-      testName = "testCloseByReloading";
-      let expectedResults = ["successful", "successful"];
+  async function testCloseByReloading() {
+    const testName = "testCloseByReloading";
+    await requestChromeAction("test-setup", testName);
+    info(testName);
+    return new Promise(async (resolve) => {
       let nextStatus = ["creating", "reloading"];
       let currStatus = nextStatus.shift();
       let ifr = document.createElement('iframe');
       await requestChromeAction("payment-num-set");
       let listener = async function(event) {
-        let expected = expectedResults.shift();
-        is(event.data, expected,
-          testName + ": Expected '" + expected + "' when " + currStatus +
-          ", but got '" + event.data + "'");
+        is(event.data, "successful",
+           `${testName}: Expected 'successful' when ${currStatus}, but got '${event.data}'.`);
         if (currStatus === "creating") {
           ifr.contentWindow.location.reload();
         } else if (currStatus === "reloading") {
           window.removeEventListener("message", listener);
           await requestChromeAction("payment-num-check", 1);
           document.body.removeChild(ifr);
           resolve();
         }
         currStatus = nextStatus.shift();
       }
       window.addEventListener("message", listener);
       ifr.src = "simple_payment_request.html";
       document.body.appendChild(ifr);
     });
   }
 
-  function testCloseByRedirecting() {
-    return new Promise((resolve, reject) => {
-      testName = "testCloseByRedirecting";
-      let expectedResults = ["successful", "successful"];
+  async function testCloseByRedirecting() {
+    const testName = "testCloseByRedirecting";
+    await requestChromeAction("test-setup", testName);
+    return new Promise((resolve) => {
       let nextStatus = ["creating", "redirecting"];
       let currStatus = nextStatus.shift();
       let ifr = document.createElement('iframe');
       let listener = async function(event) {
-        let expected = expectedResults.shift();
-        is(event.data, expected,
-          testName + ": Expected '" + expected + "' when " + currStatus +
-          ", but got '" + event.data + "'");
+        is(event.data, "successful",
+           `${testName}: Expected 'successful' when ${currStatus}, but got '${event.data}'.`);
         if (currStatus === "creating") {
           ifr.src = "blank_page.html";
         } else if (currStatus === "redirecting"){
           window.removeEventListener("message", listener);
           await requestChromeAction("close-check");
           document.body.removeChild(ifr);
           resolve();
         }
         currStatus = nextStatus.shift();
       };
       window.addEventListener("message", listener);
       ifr.src = "simple_payment_request.html";
       document.body.appendChild(ifr);
     });
   }
 
-  function testCloseByRedirectingAfterShow() {
-    return new Promise((resolve, reject) => {
-      testName = "testCloseByRedirectingAfterShow";
+  async function testCloseByRedirectingAfterShow() {
+    const testName = "testCloseByRedirectingAfterShow";
+    await requestChromeAction("test-setup", testName);
+    return new Promise((resolve) => {
       let nextStatus = ["creating", "showing", "redirecting"];
       let currStatus = nextStatus.shift();
-      let expectedResults = ["successful", "successful", "successful"];
       let ifr = document.createElement('iframe');
       let handler = undefined;
       let listener = async (event) => {
-        let expected = expectedResults.shift();
-        is(event.data, expected,
-          testName + ": Expected '" + expected + "' when " + currStatus +
-          ", but got '" + event.data + "'");
+        is(event.data, "successful",
+           `${testName}: Expected 'successful' when ${currStatus}, but got '${event.data}'.`);
         if (currStatus === "creating") {
           handler = SpecialPowers.getDOMWindowUtils(ifr.contentWindow).setHandlingUserInput(true);
           ifr.contentWindow.postMessage("show PaymentRequest", "*");
         } else if (currStatus === "showing") {
           handler.destruct();
           ifr.src = "blank_page.html";
         } else if (currStatus === "redirecting") {
           window.removeEventListener("message", listener);
@@ -122,50 +117,48 @@ https://bugzilla.mozilla.org/show_bug.cg
         currStatus = nextStatus.shift();
       }
       window.addEventListener("message", listener);
       ifr.src = "simple_payment_request.html";
       document.body.appendChild(ifr);
     });
   }
 
-  function testCloseByRemovingIframe() {
-    return new Promise((resolve, reject) => {
-      testName = "testCloseByRemovingIframe";
-      let expectedResults = ["successful"];
+  async function testCloseByRemovingIframe() {
+    const testName = "testCloseByRemovingIframe";
+    await requestChromeAction("test-setup", testName);
+    return new Promise((resolve) => {
       let nextStatus = ["creating"];
       let currStatus = nextStatus.shift();
       let ifr = document.createElement('iframe');
       let listener = async function(event) {
-        let expected = expectedResults.shift();
-        is(event.data, expected,
-          testName + ": Expected '" + expected + "' when " + currStatus +
-          ", but got '" + event.data + "'");
+        is(event.data, "successful",
+           `${testName}: Expected 'successful' when ${currStatus}, but got '${event.data}'.`);
         document.body.removeChild(ifr);
         window.removeEventListener("message", listener);
         await requestChromeAction("close-check");
         resolve();
       };
       window.addEventListener("message", listener);
       ifr.src = "simple_payment_request.html";
       document.body.appendChild(ifr);
     });
   }
 
-  function testUpdateWithRespondedPayment() {
+  async function testUpdateWithRespondedPayment() {
+    const testName = "testUpdateWithRespondedPayment";
+    await requestChromeAction("test-setup", testName);
     return new Promise(resolve => {
-      testName = "testUpdateWithRespondedPayment";
       let nextStatus = ["creating", "showing", "closing", "updating", "finishing"];
       let currStatus = nextStatus.shift();
       let ifr = document.createElement('iframe');
       let handler = undefined;
       let listener = async function(event) {
         is(event.data, "successful",
-          testName + ": Expected 'successful' when " + currStatus +
-          ", but got '" + event.data + "'");
+           `${testName}: Expected 'successful' when ${currStatus}, but got '${event.data}'.`);
         switch (currStatus) {
           case "creating":
             handler = SpecialPowers.getDOMWindowUtils(ifr.contentWindow).setHandlingUserInput(true);
             ifr.contentWindow.postMessage("show PaymentRequest", "*");
             break;
           case "showing":
             await requestChromeAction("update-payment");
             break;
@@ -189,41 +182,94 @@ https://bugzilla.mozilla.org/show_bug.cg
         currStatus = nextStatus.shift();
       }
       window.addEventListener("message", listener);
       ifr.src = "simple_payment_request.html";
       document.body.appendChild(ifr);
     });
   }
 
-  function teardown() {
-    return new Promise((resolve, reject) => {
+  function getLoadedPaymentRequest(iframe, url) {
+    return new Promise(resolve => {
+      iframe.addEventListener(
+        "load",
+        () => {
+          const { PaymentRequest } = iframe.contentWindow;
+          const request = new PaymentRequest(defaultMethods, defaultDetails);
+          resolve(request);
+        },
+        { once: true }
+      );
+      iframe.src = url;
+    });
+  }
+
+  async function testNonfullyActivePayment() {
+    const testName = "testNonfullyActivePayment";
+    await requestChromeAction("test-setup", testName);
+    return new Promise(async (resolve) => {
+      const outer = document.createElement("iframe");
+      outer.allowPaymentRequest = true;
+      document.body.appendChild(outer);
+      await getLoadedPaymentRequest(outer, "blank_page.html");
+
+      const inner = outer.contentDocument.createElement("iframe");
+      inner.allowPaymentRequest = true;
+      outer.contentDocument.body.appendChild(inner);
+
+      const request = await getLoadedPaymentRequest(inner, "blank_page.html");
+      ok(request, `${testName}: PaymentRequest in inner iframe should exist.`);
+
+      await new Promise(res => {
+        outer.addEventListener("load", res);
+        outer.src = "simple_payment_request.html";
+      });
+
+      let handler = SpecialPowers.getDOMWindowUtils(inner.contentWindow).setHandlingUserInput(true);
+      try {
+        await request.show();
+        ok(false, `${testName}: expected show() to reject with 'AbortError', but it resolved.`);
+      } catch (error) {
+        is(error.name, "AbortError",
+           `${testName}: expected 'AbortError'.`);
+      }
+      await handler.destruct();
+      inner.remove();
+      outer.remove();
+      resolve();
+    });
+  }
+
+  async function teardown() {
+    return new Promise((resolve) => {
       gScript.addMessageListener("teardown-complete", function teardownCompleteHandler() {
         gScript.removeMessageListener("teardown-complete", teardownCompleteHandler);
         gScript.removeMessageListener("test-fail", testFailHandler);
         gScript.removeMessageListener("test-pass", testPassHandler);
         gScript.destroy();
         SimpleTest.finish();
         resolve();
       });
       gScript.sendAsyncMessage("teardown");
     });
   }
 
-  function runTests() {
-    testCloseByReloading()
-    .then(testCloseByRedirecting)
-    .then(testCloseByRedirectingAfterShow)
-    .then(testCloseByRemovingIframe)
-    .then(testUpdateWithRespondedPayment)
-    .then(teardown)
-    .catch( e => {
-      ok(false, "Unexpected error: " + e.name);
+  async function runTests() {
+    try {
+      await testCloseByReloading();
+      await testCloseByRedirecting();
+      await testCloseByRedirectingAfterShow();
+      await testCloseByRemovingIframe();
+      await testUpdateWithRespondedPayment();
+      await testNonfullyActivePayment();
+      await teardown();
+    } catch(e) {
+      ok(false, "test_closePayment.html: Unexpected error: " + e.name);
       SimpleTest.finish();
-    });
+    }
   }
 
   window.addEventListener('load', function() {
     SpecialPowers.pushPrefEnv({
       'set': [
         ['dom.payments.request.enabled', true],
       ]
     }, runTests);
--- a/js/public/CallArgs.h
+++ b/js/public/CallArgs.h
@@ -302,20 +302,20 @@ class MOZ_STACK_CLASS CallArgs
                          bool ignoresReturnValue = false) {
     CallArgs args;
     args.clearUsedRval();
     args.argv_ = argv;
     args.argc_ = argc;
     args.constructing_ = constructing;
     args.ignoresReturnValue_ = ignoresReturnValue;
 #ifdef DEBUG
-    MOZ_ASSERT(ValueIsNotGray(args.thisv()));
-    MOZ_ASSERT(ValueIsNotGray(args.calleev()));
+    AssertValueIsNotGray(args.thisv());
+    AssertValueIsNotGray(args.calleev());
     for (unsigned i = 0; i < argc; ++i) {
-      MOZ_ASSERT(ValueIsNotGray(argv[i]));
+      AssertValueIsNotGray(argv[i]);
     }
 #endif
     return args;
   }
 
  public:
   /*
    * Helper for requireAtLeast to report the actual exception.  Public
--- a/js/public/Class.h
+++ b/js/public/Class.h
@@ -488,18 +488,16 @@ typedef bool (*JSHasInstanceOp)(JSContex
  * structures. The only exception for this rule is the case when the embedding
  * needs a tight integration with GC. In that case the embedding can check if
  * the traversal is a part of the marking phase through calling
  * JS_IsGCMarkingTracer and apply a special code like emptying caches or
  * marking its native structures.
  */
 typedef void (*JSTraceOp)(JSTracer* trc, JSObject* obj);
 
-typedef JSObject* (*JSWeakmapKeyDelegateOp)(JSObject* obj);
-
 typedef size_t (*JSObjectMovedOp)(JSObject* obj, JSObject* old);
 
 /* js::Class operation signatures. */
 
 namespace js {
 
 typedef bool (*LookupPropertyOp)(JSContext* cx, JS::HandleObject obj,
                                  JS::HandleId id, JS::MutableHandleObject objp,
@@ -683,29 +681,16 @@ struct MOZ_STATIC_CLASS ClassSpec {
   bool shouldDefineConstructor() const {
     MOZ_ASSERT(defined());
     return !(flags & DontDefineConstructor);
   }
 };
 
 struct MOZ_STATIC_CLASS ClassExtension {
   /**
-   * If an object is used as a key in a weakmap, it may be desirable for the
-   * garbage collector to keep that object around longer than it otherwise
-   * would. A common case is when the key is a wrapper around an object in
-   * another compartment, and we want to avoid collecting the wrapper (and
-   * removing the weakmap entry) as long as the wrapped object is alive. In
-   * that case, the wrapped object is returned by the wrapper's
-   * weakmapKeyDelegateOp hook. As long as the wrapper is used as a weakmap
-   * key, it will not be collected (and remain in the weakmap) until the
-   * wrapped object is collected.
-   */
-  JSWeakmapKeyDelegateOp weakmapKeyDelegateOp;
-
-  /**
    * Optional hook called when an object is moved by generational or
    * compacting GC.
    *
    * There may exist weak pointers to an object that are not traced through
    * when the normal trace APIs are used, for example objects in the wrapper
    * cache. This hook allows these pointers to be updated.
    *
    * Note that this hook can be called before JS_NewObject() returns if a GC
@@ -947,19 +932,16 @@ struct MOZ_STATIC_CLASS Class {
   }
   const JSPropertySpec* specPrototypeProperties() const {
     return spec ? spec->prototypeProperties : nullptr;
   }
   FinishClassInitOp specFinishInitHook() const {
     return spec ? spec->finishInit : nullptr;
   }
 
-  JSWeakmapKeyDelegateOp extWeakmapKeyDelegateOp() const {
-    return ext ? ext->weakmapKeyDelegateOp : nullptr;
-  }
   JSObjectMovedOp extObjectMovedOp() const {
     return ext ? ext->objectMovedOp : nullptr;
   }
 
   LookupPropertyOp getOpsLookupProperty() const {
     return oOps ? oOps->lookupProperty : nullptr;
   }
   DefinePropertyOp getOpsDefineProperty() const {
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -125,17 +125,24 @@ enum StackKind {
 const uint32_t DefaultNurseryBytes = 16 * js::gc::ChunkSize;
 
 /* Default maximum heap size in bytes to pass to JS_NewContext(). */
 const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;
 
 namespace shadow {
 
 struct Zone {
-  enum GCState : uint8_t { NoGC, Mark, MarkGray, Sweep, Finished, Compact };
+  enum GCState : uint8_t {
+    NoGC,
+    MarkBlackOnly,
+    MarkBlackAndGray,
+    Sweep,
+    Finished,
+    Compact
+  };
 
  protected:
   JSRuntime* const runtime_;
   JSTracer* const barrierTracer_;  // A pointer to the JSRuntime's |gcMarker|.
   uint32_t needsIncrementalBarrier_;
   GCState gcState_;
 
   Zone(JSRuntime* runtime, JSTracer* barrierTracerArg)
@@ -159,22 +166,24 @@ struct Zone {
   }
 
   // Note: Unrestricted access to the zone's runtime from an arbitrary
   // thread can easily lead to races. Use this method very carefully.
   JSRuntime* runtimeFromAnyThread() const { return runtime_; }
 
   GCState gcState() const { return gcState_; }
   bool wasGCStarted() const { return gcState_ != NoGC; }
-  bool isGCMarkingBlack() const { return gcState_ == Mark; }
-  bool isGCMarkingGray() const { return gcState_ == MarkGray; }
+  bool isGCMarkingBlackOnly() const { return gcState_ == MarkBlackOnly; }
+  bool isGCMarkingBlackAndGray() const { return gcState_ == MarkBlackAndGray; }
   bool isGCSweeping() const { return gcState_ == Sweep; }
   bool isGCFinished() const { return gcState_ == Finished; }
   bool isGCCompacting() const { return gcState_ == Compact; }
-  bool isGCMarking() const { return gcState_ == Mark || gcState_ == MarkGray; }
+  bool isGCMarking() const {
+    return isGCMarkingBlackOnly() || isGCMarkingBlackAndGray();
+  }
   bool isGCSweepingOrCompacting() const {
     return gcState_ == Sweep || gcState_ == Compact;
   }
 
   static MOZ_ALWAYS_INLINE JS::shadow::Zone* asShadowZone(JS::Zone* zone) {
     return reinterpret_cast<JS::shadow::Zone*>(zone);
   }
 };
@@ -423,17 +432,17 @@ static MOZ_ALWAYS_INLINE bool CellIsMark
     return false;
   }
   return TenuredCellIsMarkedGray(cell);
 }
 
 extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const Cell* cell);
 
 #ifdef DEBUG
-extern JS_PUBLIC_API bool CellIsNotGray(const Cell* cell);
+extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);
 
 extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
 #endif
 
 MOZ_ALWAYS_INLINE ChunkLocation GetCellLocation(const void* cell) {
   uintptr_t addr = uintptr_t(cell);
   addr &= ~js::gc::ChunkMask;
   addr |= js::gc::ChunkLocationOffset;
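
The rename from Mark/MarkGray to MarkBlackOnly/MarkBlackAndGray reflects that black marking continues during the gray phase; only the second phase additionally marks gray. A self-contained mirror of the new predicate (enumerators as in this patch; the free function is illustrative):

#include <cstdint>

enum GCState : std::uint8_t {
  NoGC, MarkBlackOnly, MarkBlackAndGray, Sweep, Finished, Compact
};

inline bool IsGCMarking(GCState s) {
  // Both phases mark black; the second additionally marks gray, hence the
  // names MarkBlackOnly and MarkBlackAndGray (formerly Mark and MarkGray).
  return s == MarkBlackOnly || s == MarkBlackAndGray;
}
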
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -166,22 +166,20 @@ struct GCPolicy<jsid> {
   }
   static bool isValid(jsid id) {
     return !JSID_IS_GCTHING(id) ||
            js::gc::IsCellPointerValid(JSID_TO_GCTHING(id).asCell());
   }
 };
 
 #ifdef DEBUG
-MOZ_ALWAYS_INLINE bool IdIsNotGray(jsid id) {
-  if (!JSID_IS_GCTHING(id)) {
-    return true;
+MOZ_ALWAYS_INLINE void AssertIdIsNotGray(jsid id) {
+  if (JSID_IS_GCTHING(id)) {
+    AssertCellIsNotGray(JSID_TO_GCTHING(id).asCell());
   }
-
-  return CellIsNotGray(JSID_TO_GCTHING(id).asCell());
 }
 #endif
 
 }  // namespace JS
 
 namespace js {
 
 template <>
--- a/js/public/Proxy.h
+++ b/js/public/Proxy.h
@@ -368,18 +368,16 @@ class JS_FRIEND_API BaseProxyHandler {
   //       We are not prepared to do this, as there's little const correctness
   //       in the external APIs that handle proxies.
   virtual bool isCallable(JSObject* obj) const;
   virtual bool isConstructor(JSObject* obj) const;
 
   virtual bool getElements(JSContext* cx, HandleObject proxy, uint32_t begin,
                            uint32_t end, ElementAdder* adder) const;
 
-  /* See comment for weakmapKeyDelegateOp in js/Class.h. */
-  virtual JSObject* weakmapKeyDelegate(JSObject* proxy) const;
   virtual bool isScripted() const { return false; }
 };
 
 extern JS_FRIEND_DATA const js::Class ProxyClass;
 
 inline bool IsProxy(const JSObject* obj) {
   return GetObjectClass(obj)->isProxy();
 }
@@ -519,24 +517,31 @@ inline const Value& GetProxyReservedSlot
   return detail::GetProxyDataLayout(obj)->reservedSlots->slots[n];
 }
 
 inline void SetProxyHandler(JSObject* obj, const BaseProxyHandler* handler) {
   detail::GetProxyDataLayout(obj)->handler = handler;
 }
 
 inline void SetProxyReservedSlot(JSObject* obj, size_t n, const Value& extra) {
-  MOZ_ASSERT_IF(gc::detail::ObjectIsMarkedBlack(obj),
-                JS::ValueIsNotGray(extra));
+#ifdef DEBUG
+  if (gc::detail::ObjectIsMarkedBlack(obj)) {
+    JS::AssertValueIsNotGray(extra);
+  }
+#endif
+
   detail::SetProxyReservedSlotUnchecked(obj, n, extra);
 }
 
 inline void SetProxyPrivate(JSObject* obj, const Value& value) {
-  MOZ_ASSERT_IF(gc::detail::ObjectIsMarkedBlack(obj),
-                JS::ValueIsNotGray(value));
+#ifdef DEBUG
+  if (gc::detail::ObjectIsMarkedBlack(obj)) {
+    JS::AssertValueIsNotGray(value);
+  }
+#endif
 
   Value* vp = &detail::GetProxyDataLayout(obj)->values()->privateSlot;
 
   // Trigger a barrier before writing the slot.
   if (vp->isGCThing() || value.isGCThing()) {
     detail::SetValueInProxy(vp, value);
   } else {
     *vp = value;
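
Both setters above show the mechanical rewrite used throughout this patch: a boolean predicate consumed by MOZ_ASSERT_IF becomes a void assertion function, so the condition moves into an explicit #ifdef DEBUG guard. A self-contained model of the resulting shape, with Value and the black flag as stand-ins rather than the SpiderMonkey types; the invariant being checked is that a black-marked object must never end up pointing at a gray value:

#include <cassert>

struct Value {
  bool gray = false;
};

inline void AssertValueIsNotGray(const Value& v) { assert(!v.gray); }

inline void SetSlot(bool objectIsMarkedBlack, Value* slot, const Value& v) {
#ifdef DEBUG
  // A black object must never point at a gray value (no black-to-gray edges).
  if (objectIsMarkedBlack) {
    AssertValueIsNotGray(v);
  }
#endif
  *slot = v;
}
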
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -359,34 +359,42 @@ static MOZ_ALWAYS_INLINE bool ObjectIsMa
   return js::gc::detail::CellIsMarkedGrayIfKnown(cell);
 }
 
 static MOZ_ALWAYS_INLINE bool ObjectIsMarkedGray(
     const JS::Heap<JSObject*>& obj) {
   return ObjectIsMarkedGray(obj.unbarrieredGet());
 }
 
-// The following *IsNotGray functions are for use in assertions and take account
-// of the eventual gray marking state at the end of any ongoing incremental GC.
+// The following *IsNotGray functions take account of the eventual
+// gray marking state at the end of any ongoing incremental GC by
+// delaying the checks if necessary.
+
 #ifdef DEBUG
-inline bool CellIsNotGray(js::gc::Cell* maybeCell) {
-  if (!maybeCell) {
-    return true;
+
+inline void AssertCellIsNotGray(const js::gc::Cell* maybeCell) {
+  if (maybeCell) {
+    js::gc::detail::AssertCellIsNotGray(maybeCell);
   }
-
-  return js::gc::detail::CellIsNotGray(maybeCell);
 }
 
-inline bool ObjectIsNotGray(JSObject* maybeObj) {
-  return CellIsNotGray(reinterpret_cast<js::gc::Cell*>(maybeObj));
+inline void AssertObjectIsNotGray(JSObject* maybeObj) {
+  AssertCellIsNotGray(reinterpret_cast<js::gc::Cell*>(maybeObj));
 }
 
-inline bool ObjectIsNotGray(const JS::Heap<JSObject*>& obj) {
-  return ObjectIsNotGray(obj.unbarrieredGet());
+inline void AssertObjectIsNotGray(const JS::Heap<JSObject*>& obj) {
+  AssertObjectIsNotGray(obj.unbarrieredGet());
 }
+
+#else
+
+inline void AssertCellIsNotGray(const js::gc::Cell* maybeCell) {}
+inline void AssertObjectIsNotGray(JSObject* maybeObj) {}
+inline void AssertObjectIsNotGray(const JS::Heap<JSObject*>& obj) {}
+
 #endif
 
 /**
  * The TenuredHeap<T> class is similar to the Heap<T> class above in that it
  * encapsulates the GC concerns of an on-heap reference to a JS object. However,
  * it has two important differences:
  *
  *  1) Pointers which are statically known to only reference "tenured" objects
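
Note how RootingAPI.h now defines the Assert* entry points in every build, as empty inline functions outside DEBUG, so call sites need no #ifdef of their own. A self-contained illustration of that pattern; Cell and the assert body stand in for the real deferred gray check:

#include <cassert>

struct Cell {
  bool gray = false;
};

#ifdef DEBUG
inline void AssertCellIsNotGray(const Cell* maybeCell) {
  if (maybeCell) {
    assert(!maybeCell->gray);
  }
}
#else
inline void AssertCellIsNotGray(const Cell*) {}
#endif
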
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1365,26 +1365,24 @@ static inline JS::Value PoisonedObjectVa
   return v;
 }
 
 }  // namespace js
 
 #ifdef DEBUG
 namespace JS {
 
-MOZ_ALWAYS_INLINE bool ValueIsNotGray(const Value& value) {
-  if (!value.isGCThing()) {
-    return true;
+MOZ_ALWAYS_INLINE void AssertValueIsNotGray(const Value& value) {
+  if (value.isGCThing()) {
+    AssertCellIsNotGray(value.toGCThing());
   }
-
-  return CellIsNotGray(value.toGCThing());
 }
 
-MOZ_ALWAYS_INLINE bool ValueIsNotGray(const Heap<Value>& value) {
-  return ValueIsNotGray(value.unbarrieredGet());
+MOZ_ALWAYS_INLINE void AssertValueIsNotGray(const Heap<Value>& value) {
+  AssertValueIsNotGray(value.unbarrieredGet());
 }
 
 }  // namespace JS
 #endif
 
 /************************************************************************/
 
 namespace JS {
--- a/js/public/Wrapper.h
+++ b/js/public/Wrapper.h
@@ -128,17 +128,16 @@ class JS_FRIEND_API Wrapper : public For
 
  public:
   explicit constexpr Wrapper(unsigned aFlags, bool aHasPrototype = false,
                              bool aHasSecurityPolicy = false)
       : ForwardingProxyHandler(&family, aHasPrototype, aHasSecurityPolicy),
         mFlags(aFlags) {}
 
   virtual bool finalizeInBackground(const Value& priv) const override;
-  virtual JSObject* weakmapKeyDelegate(JSObject* proxy) const override;
 
   using BaseProxyHandler::Action;
 
   enum Flags { CROSS_COMPARTMENT = 1 << 0, LAST_USED_FLAG = CROSS_COMPARTMENT };
 
   static JSObject* New(JSContext* cx, JSObject* obj, const Wrapper* handler,
                        const WrapperOptions& options = WrapperOptions());
 
--- a/js/rust/src/jsglue.cpp
+++ b/js/rust/src/jsglue.cpp
@@ -89,17 +89,16 @@ struct ProxyTraps {
   void (*finalize)(JSFreeOp* fop, JSObject* proxy);
   size_t (*objectMoved)(JSObject* proxy, JSObject* old);
 
   bool (*isCallable)(JSObject* obj);
   bool (*isConstructor)(JSObject* obj);
 
   // getElements
 
-  // weakmapKeyDelegate
   // isScripted
 };
 
 static int HandlerFamily;
 
 #define DEFER_TO_TRAP_OR_BASE_CLASS(_base)                                    \
                                                                               \
   /* Standard internal methods. */                                            \
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -136,17 +136,16 @@ static const ClassOps MapIteratorObjectC
                                                    nullptr, /* delProperty */
                                                    nullptr, /* enumerate */
                                                    nullptr, /* newEnumerate */
                                                    nullptr, /* resolve */
                                                    nullptr, /* mayResolve */
                                                    MapIteratorObject::finalize};
 
 static const ClassExtension MapIteratorObjectClassExtension = {
-    nullptr, /* weakmapKeyDelegateOp */
     MapIteratorObject::objectMoved};
 
 const Class MapIteratorObject::class_ = {
     "Map Iterator",
     JSCLASS_HAS_RESERVED_SLOTS(MapIteratorObject::SlotCount) |
         JSCLASS_FOREGROUND_FINALIZE | JSCLASS_SKIP_NURSERY_FINALIZE,
     &MapIteratorObjectClassOps, JS_NULL_CLASS_SPEC,
     &MapIteratorObjectClassExtension};
@@ -907,17 +906,16 @@ static const ClassOps SetIteratorObjectC
                                                    nullptr, /* delProperty */
                                                    nullptr, /* enumerate */
                                                    nullptr, /* newEnumerate */
                                                    nullptr, /* resolve */
                                                    nullptr, /* mayResolve */
                                                    SetIteratorObject::finalize};
 
 static const ClassExtension SetIteratorObjectClassExtension = {
-    nullptr, /* weakmapKeyDelegateOp */
     SetIteratorObject::objectMoved};
 
 const Class SetIteratorObject::class_ = {
     "Set Iterator",
     JSCLASS_HAS_RESERVED_SLOTS(SetIteratorObject::SlotCount) |
         JSCLASS_FOREGROUND_FINALIZE | JSCLASS_SKIP_NURSERY_FINALIZE,
     &SetIteratorObjectClassOps, JS_NULL_CLASS_SPEC,
     &SetIteratorObjectClassExtension};
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -1059,21 +1059,16 @@ static bool ScheduleGC(JSContext* cx, un
   JS_GetGCZealBits(cx, &zealBits, &freq, &next);
   args.rval().setInt32(next);
   return true;
 }
 
 static bool SelectForGC(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
 
-  if (gc::GCRuntime::temporaryAbortIfWasmGc(cx)) {
-    JS_ReportErrorASCII(cx, "API temporarily unavailable under wasm gc");
-    return false;
-  }
-
   /*
    * The selectedForMarking set is intended to be manually marked at slice
    * start to detect missing pre-barriers. It is invalid for nursery things
    * to be in the set, so evict the nursery before adding items.
    */
   cx->runtime()->gc.evictNursery();
 
   for (unsigned i = 0; i < args.length(); i++) {
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -2296,18 +2296,17 @@ const ObjectOps TypedObject::objectOps_ 
       nullptr, /* mayResolve  */                                           \
       nullptr, /* finalize    */                                           \
       nullptr, /* call        */                                           \
       nullptr, /* hasInstance */                                           \
       nullptr, /* construct   */                                           \
       Trace,                                                               \
   };                                                                       \
   static const ClassExtension Name##ClassExt = {                           \
-      nullptr, /* weakmapKeyDelegateOp */                                  \
-      Moved    /* objectMovedOp */                                         \
+      Moved /* objectMovedOp */                                            \
   };                                                                       \
   const Class Name::class_ = {                                             \
       #Name,           Class::NON_NATIVE | JSCLASS_DELAY_METADATA_BUILDER, \
       &Name##ClassOps, JS_NULL_CLASS_SPEC,                                 \
       &Name##ClassExt, &TypedObject::objectOps_}
 
 DEFINE_TYPEDOBJ_CLASS(OutlineTransparentTypedObject,
                       OutlineTypedObject::obj_trace, nullptr);
--- a/js/src/builtin/WeakMapObject-inl.h
+++ b/js/src/builtin/WeakMapObject-inl.h
@@ -42,21 +42,19 @@ static MOZ_ALWAYS_INLINE bool WeakCollec
     obj->setPrivate(map);
   }
 
   // Preserve wrapped native keys to prevent wrapper optimization.
   if (!TryPreserveReflector(cx, key)) {
     return false;
   }
 
-  if (JSWeakmapKeyDelegateOp op = key->getClass()->extWeakmapKeyDelegateOp()) {
-    RootedObject delegate(cx, op(key));
-    if (delegate && !TryPreserveReflector(cx, delegate)) {
-      return false;
-    }
+  RootedObject delegate(cx, UncheckedUnwrapWithoutExpose(key));
+  if (delegate && !TryPreserveReflector(cx, delegate)) {
+    return false;
   }
 
   MOZ_ASSERT(key->compartment() == obj->compartment());
   MOZ_ASSERT_IF(value.isObject(),
                 value.toObject().compartment() == obj->compartment());
   if (!map->put(key, value)) {
     JS_ReportOutOfMemory(cx);
     return false;
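
With the weakmapKeyDelegateOp hook gone, the delegate of a weakmap key is obtained directly by unwrapping any cross-compartment wrapper, which is what UncheckedUnwrapWithoutExpose() does above. A self-contained model of that lookup; Obj and wrapped are stand-ins for JSObject and its wrapper target:

struct Obj {
  Obj* wrapped = nullptr;  // non-null when this object is a wrapper
};

static Obj* WeakMapKeyDelegate(Obj* key) {
  // Models UncheckedUnwrapWithoutExpose(): peel wrappers without triggering
  // read barriers; a non-wrapper object is its own delegate.
  while (key->wrapped) {
    key = key->wrapped;
  }
  return key;
}
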
--- a/js/src/devtools/rootAnalysis/annotations.js
+++ b/js/src/devtools/rootAnalysis/annotations.js
@@ -26,19 +26,16 @@ function indirectCallCannotGC(fullCaller
         return true;
 
     if (name == "mapper" && caller == "ptio.c:pt_MapError")
         return true;
 
     if (name == "params" && caller == "PR_ExplodeTime")
         return true;
 
-    if (name == "op" && /GetWeakmapKeyDelegate/.test(caller))
-        return true;
-
     // hook called during script finalization which cannot GC.
     if (/CallDestroyScriptHook/.test(caller))
         return true;
 
     // template method called during marking and hence cannot GC
     if (name == "op" && caller.includes("bool js::WeakMap<Key, Value, HashPolicy>::keyNeedsMark(JSObject*)"))
     {
         return true;
@@ -163,17 +160,16 @@ function isSuppressedVirtualMethod(csu, 
 var ignoreFunctions = {
     "ptio.c:pt_MapError" : true,
     "je_malloc_printf" : true,
     "malloc_usable_size" : true,
     "vprintf_stderr" : true,
     "PR_ExplodeTime" : true,
     "PR_ErrorInstallTable" : true,
     "PR_SetThreadPrivate" : true,
-    "JSObject* js::GetWeakmapKeyDelegate(JSObject*)" : true, // FIXME: mark with AutoSuppressGCAnalysis instead
     "uint8 NS_IsMainThread()" : true,
 
     // Has an indirect call under it by the name "__f", which seemed too
     // generic to ignore by itself.
     "void* std::_Locale_impl::~_Locale_impl(int32)" : true,
 
     // Bug 1056410 - devirtualization prevents the standard nsISupports::Release heuristic from working
     "uint32 nsXPConnect::Release()" : true,
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -50,17 +50,17 @@ void HeapSlot::assertPreconditionForWrit
   } else {
     uint32_t numShifted = obj->getElementsHeader()->numShiftedElements();
     MOZ_ASSERT(slot >= numShifted);
     MOZ_ASSERT(
         static_cast<HeapSlot*>(obj->getDenseElements() + (slot - numShifted))
             ->get() == target);
   }
 
-  CheckTargetIsNotGray(obj);
+  AssertTargetIsNotGray(obj);
 }
 
 bool CurrentThreadIsIonCompiling() { return TlsContext.get()->ionCompiling; }
 
 bool CurrentThreadIsIonCompilingSafeForMinorGC() {
   return TlsContext.get()->ionCompilingSafeForMinorGC;
 }
 
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -261,17 +261,17 @@ struct InternalBarrierMethods<T*> {
 
   static void postBarrier(T** vp, T* prev, T* next) {
     T::writeBarrierPost(vp, prev, next);
   }
 
   static void readBarrier(T* v) { T::readBarrier(v); }
 
 #ifdef DEBUG
-  static bool thingIsNotGray(T* v) { return T::thingIsNotGray(v); }
+  static void assertThingIsNotGray(T* v) { return T::assertThingIsNotGray(v); }
 #endif
 };
 
 template <typename S>
 struct PreBarrierFunctor : public VoidDefaultAdaptor<S> {
   template <typename T>
   void operator()(T* t);
 };
@@ -317,36 +317,41 @@ struct InternalBarrierMethods<Value> {
     }
   }
 
   static void readBarrier(const Value& v) {
     DispatchTyped(ReadBarrierFunctor<Value>(), v);
   }
 
 #ifdef DEBUG
-  static bool thingIsNotGray(const Value& v) { return JS::ValueIsNotGray(v); }
+  static void assertThingIsNotGray(const Value& v) {
+    JS::AssertValueIsNotGray(v);
+  }
 #endif
 };
 
 template <>
 struct InternalBarrierMethods<jsid> {
   static bool isMarkable(jsid id) { return JSID_IS_GCTHING(id); }
   static void preBarrier(jsid id) {
     DispatchTyped(PreBarrierFunctor<jsid>(), id);
   }
   static void postBarrier(jsid* idp, jsid prev, jsid next) {}
 #ifdef DEBUG
-  static bool thingIsNotGray(jsid id) { return JS::IdIsNotGray(id); }
+  static void assertThingIsNotGray(jsid id) { JS::AssertIdIsNotGray(id); }
 #endif
 };
 
 template <typename T>
-static inline void CheckTargetIsNotGray(const T& v) {
-  MOZ_ASSERT(InternalBarrierMethods<T>::thingIsNotGray(v) ||
-             CurrentThreadIsTouchingGrayThings());
+static inline void AssertTargetIsNotGray(const T& v) {
+#ifdef DEBUG
+  if (!CurrentThreadIsTouchingGrayThings()) {
+    InternalBarrierMethods<T>::assertThingIsNotGray(v);
+  }
+#endif
 }
 
 // Base class of all barrier types.
 //
 // This is marked non-memmovable since post barriers added by derived classes
 // can add pointers to class instances to the store buffer.
 template <typename T>
 class MOZ_NON_MEMMOVABLE BarrieredBase {
@@ -432,17 +437,17 @@ class PreBarriered : public WriteBarrier
     this->pre();
     this->value = nullptr;
   }
 
   DECLARE_POINTER_ASSIGN_OPS(PreBarriered, T);
 
  private:
   void set(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     this->pre();
     this->value = v;
   }
 };
 
 /*
  * A pre- and post-barriered heap pointer, for use inside the JS engine.
  *
@@ -478,26 +483,26 @@ class GCPtr : public WriteBarrieredBase<
     MOZ_ASSERT(CurrentThreadIsGCSweeping() ||
                this->value == JS::SafelyInitialized<T>());
     Poison(this, JS_FREED_HEAP_PTR_PATTERN, sizeof(*this),
            MemCheckKind::MakeNoAccess);
   }
 #endif
 
   void init(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     this->value = v;
     this->post(JS::SafelyInitialized<T>(), v);
   }
 
   DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);
 
  private:
   void set(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     this->pre();
     T tmp = this->value;
     this->value = v;
     this->post(tmp, this->value);
   }
 
   /*
    * Unlike HeapPtr<T>, GCPtr<T> must be managed with GC lifetimes.
@@ -552,37 +557,37 @@ class HeapPtr : public WriteBarrieredBas
   }
 
   ~HeapPtr() {
     this->pre();
     this->post(this->value, JS::SafelyInitialized<T>());
   }
 
   void init(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     this->value = v;
     this->post(JS::SafelyInitialized<T>(), this->value);
   }
 
   DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
 
   /* Make this friend so it can access pre() and post(). */
   template <class T1, class T2>
   friend inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
                                       HeapPtr<T2*>& v2, T2* val2);
 
  protected:
   void set(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     this->pre();
     postBarrieredSet(v);
   }
 
   void postBarrieredSet(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     T tmp = this->value;
     this->value = v;
     this->post(tmp, this->value);
   }
 };
 
 // Base class for barriered pointer types that intercept reads and writes.
 template <typename T>
@@ -629,17 +634,17 @@ class ReadBarriered : public ReadBarrier
   // the read barrier of the defunct edge.
   ReadBarriered(ReadBarriered&& v) : ReadBarrieredBase<T>(std::move(v)) {
     this->post(JS::SafelyInitialized<T>(), v.value);
   }
 
   ~ReadBarriered() { this->post(this->value, JS::SafelyInitialized<T>()); }
 
   ReadBarriered& operator=(const ReadBarriered& v) {
-    CheckTargetIsNotGray(v.value);
+    AssertTargetIsNotGray(v.value);
     T prior = this->value;
     this->value = v.value;
     this->post(prior, v.value);
     return *this;
   }
 
   const T& get() const {
     if (InternalBarrierMethods<T>::isMarkable(this->value)) {
@@ -655,17 +660,17 @@ class ReadBarriered : public ReadBarrier
   operator const T&() const { return get(); }
 
   const T& operator->() const { return get(); }
 
   T* unsafeGet() { return &this->value; }
   T const* unsafeGet() const { return &this->value; }
 
   void set(const T& v) {
-    CheckTargetIsNotGray(v);
+    AssertTargetIsNotGray(v);
     T tmp = this->value;
     this->value = v;
     this->post(tmp, v);
   }
 };
 
 // A WeakRef pointer does not hold its target live and is automatically nulled
 // out when the GC discovers that it is not reachable from any other path.
@@ -796,17 +801,17 @@ class ImmutableTenuredPtr {
  public:
   operator T() const { return value; }
   T operator->() const { return value; }
 
   operator Handle<T>() const { return Handle<T>::fromMarkedLocation(&value); }
 
   void init(T ptr) {
     MOZ_ASSERT(ptr->isTenured());
-    CheckTargetIsNotGray(ptr);
+    AssertTargetIsNotGray(ptr);
     value = ptr;
   }
 
   T get() const { return value; }
   const T* address() { return &value; }
 };
 
 template <typename T>
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -129,17 +129,17 @@ struct alignas(gc::CellAlignBytes) Cell 
   inline const T* as() const {
     // |this|-qualify the |is| call below to avoid compile errors with even
     // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
     MOZ_ASSERT(this->is<T>());
     return static_cast<const T*>(this);
   }
 
 #ifdef DEBUG
-  static inline bool thingIsNotGray(Cell* cell);
+  static inline void assertThingIsNotGray(Cell* cell);
   inline bool isAligned() const;
   void dump(GenericPrinter& out) const;
   void dump() const;
 #endif
 
  protected:
   uintptr_t address() const;
   inline Chunk* chunk() const;
@@ -425,18 +425,18 @@ static MOZ_ALWAYS_INLINE void AssertVali
 
 /* static */ MOZ_ALWAYS_INLINE void TenuredCell::writeBarrierPost(
     void* cellp, TenuredCell* prior, TenuredCell* next) {
   AssertValidToSkipBarrier(next);
 }
 
 #ifdef DEBUG
 
-/* static */ bool Cell::thingIsNotGray(Cell* cell) {
-  return JS::CellIsNotGray(cell);
+/* static */ void Cell::assertThingIsNotGray(Cell* cell) {
+  JS::AssertCellIsNotGray(cell);
 }
 
 bool Cell::isAligned() const {
   if (!isTenured()) {
     return true;
   }
   return asTenured().isAligned();
 }
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -924,16 +924,17 @@ GCRuntime::GCRuntime(JSRuntime* rt)
       safeToYield(true),
       sweepOnBackgroundThread(false),
       blocksToFreeAfterSweeping(
           (size_t)JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
       sweepGroupIndex(0),
       sweepGroups(nullptr),
       currentSweepGroup(nullptr),
       sweepZone(nullptr),
+      hasMarkedGrayRoots(false),
       abortSweepAfterCurrentGroup(false),
       startedCompacting(false),
       relocatedArenasToRelease(nullptr),
 #ifdef JS_GC_ZEAL
       markingValidator(nullptr),
 #endif
       defaultTimeBudget_(TuningDefaults::DefaultTimeBudget),
       incrementalAllowed(true),
@@ -1015,17 +1016,19 @@ const char gc::ZealModeHelpText[] =
     "        before sweeping foreground finalized objects\n"
     "    22: (YieldBeforeSweepingNonObjects) Incremental GC in two slices that "
     "yields\n"
     "        before sweeping non-object GC things\n"
     "    23: (YieldBeforeSweepingShapeTrees) Incremental GC in two slices that "
     "yields\n"
     "        before sweeping shape trees\n"
     "    24: (CheckWeakMapMarking) Check weak map marking invariants after "
-    "every GC\n";
+    "every GC\n"
+    "    25: (YieldWhileGrayMarking) Incremental GC in two slices that yields\n"
+    "        during gray marking\n";
 
 // The set of zeal modes that control incremental slices. These modes are
 // mutually exclusive.
 static const mozilla::EnumSet<ZealMode> IncrementalSliceZealModes = {
     ZealMode::YieldBeforeMarking,
     ZealMode::YieldBeforeSweeping,
     ZealMode::IncrementalMultipleSlices,
     ZealMode::YieldBeforeSweepingAtoms,
@@ -1033,20 +1036,16 @@ static const mozilla::EnumSet<ZealMode> 
     ZealMode::YieldBeforeSweepingTypes,
     ZealMode::YieldBeforeSweepingObjects,
     ZealMode::YieldBeforeSweepingNonObjects,
     ZealMode::YieldBeforeSweepingShapeTrees};
 
 void GCRuntime::setZeal(uint8_t zeal, uint32_t frequency) {
   MOZ_ASSERT(zeal <= unsigned(ZealMode::Limit));
 
-  if (temporaryAbortIfWasmGc(rt->mainContextFromOwnThread())) {
-    return;
-  }
-
   if (verifyPreData) {
     VerifyBarriers(rt, PreBarrierVerifier);
   }
 
   if (zeal == 0) {
     if (hasZealMode(ZealMode::GenerationalGC)) {
       evictNursery(JS::gcreason::DEBUG_GC);
       nursery().leaveZealMode();
@@ -1079,20 +1078,16 @@ void GCRuntime::setZeal(uint8_t zeal, ui
   zealFrequency = frequency;
   nextScheduled = schedule ? frequency : 0;
 }
 
 void GCRuntime::unsetZeal(uint8_t zeal) {
   MOZ_ASSERT(zeal <= unsigned(ZealMode::Limit));
   ZealMode zealMode = ZealMode(zeal);
 
-  if (temporaryAbortIfWasmGc(rt->mainContextFromOwnThread())) {
-    return;
-  }
-
   if (!hasZealMode(zealMode)) {
     return;
   }
 
   if (verifyPreData) {
     VerifyBarriers(rt, PreBarrierVerifier);
   }
 
@@ -4130,17 +4125,17 @@ bool GCRuntime::prepareZonesForCollectio
 
   auto currentTime = ReallyNow();
 
   for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
     /* Set up which zones will be collected. */
     if (ShouldCollectZone(zone, reason)) {
       MOZ_ASSERT(zone->canCollect());
       any = true;
-      zone->changeGCState(Zone::NoGC, Zone::Mark);
+      zone->changeGCState(Zone::NoGC, Zone::MarkBlackOnly);
     } else {
       *isFullOut = false;
     }
 
     zone->setPreservingCode(false);
   }
 
   // Discard JIT code more aggressively if the process is approaching its
@@ -4179,17 +4174,17 @@ bool GCRuntime::prepareZonesForCollectio
   MOZ_ASSERT_IF(reason == JS::gcreason::DELAYED_ATOMS_GC,
                 atomsZone->isGCMarking());
 
   /* Check that at least one zone is scheduled for collection. */
   return any;
 }
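
Under the new naming, a collected zone's states form a simple progression; a
hypothetical mirror of the transitions (ToyGCState is illustrative, the real
states live on JS::Zone):

// NoGC -> MarkBlackOnly            prepareZonesForCollection (above)
// MarkBlackOnly -> MarkBlackAndGray
//                                  when its sweep group starts gray marking
// MarkBlackAndGray -> Sweep        beginSweepingSweepGroup
// MarkBlackOnly -> NoGC            on incremental GC reset/abort
enum class ToyGCState { NoGC, MarkBlackOnly, MarkBlackAndGray, Sweep };
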
 
 static void DiscardJITCodeForGC(JSRuntime* rt) {
-  js::CancelOffThreadIonCompile(rt, JS::Zone::Mark);
+  js::CancelOffThreadIonCompile(rt, JS::Zone::MarkBlackOnly);
   for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
     gcstats::AutoPhase ap(rt->gc.stats(),
                           gcstats::PhaseKind::MARK_DISCARD_CODE);
     zone->discardJitCode(rt->defaultFreeOp(),
                          /* discardBaselineCode = */ true,
                          /* releaseTypes = */ true);
   }
 }
@@ -4258,16 +4253,17 @@ bool GCRuntime::beginMarkPhase(JS::gcrea
    * arenaAllocatedDuringGC().
    */
   for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
     zone->arenas.clearFreeLists();
   }
 
   marker.start();
   GCMarker* gcmarker = &marker;
+  gcmarker->clearMarkCount();
 
   {
     gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::PREPARE);
     AutoLockHelperThreadState helperLock;
 
     /*
      * Clear all mark state for the zones we are collecting. This is linear
      * in the size of the heap we are collecting and so can be slow. Do this
@@ -4455,41 +4451,39 @@ void GCRuntime::markWeakReferences(gcsta
   marker.leaveWeakMarkingMode();
 }
 
 void GCRuntime::markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase) {
   markWeakReferences<SweepGroupZonesIter>(phase);
 }
 
 template <class ZoneIterT>
-void GCRuntime::markGrayReferences(gcstats::PhaseKind phase) {
+void GCRuntime::markGrayRoots(gcstats::PhaseKind phase) {
+  MOZ_ASSERT(marker.markColor() == MarkColor::Gray);
+
   gcstats::AutoPhase ap(stats(), phase);
   if (hasValidGrayRootsBuffer()) {
     for (ZoneIterT zone(rt); !zone.done(); zone.next()) {
       markBufferedGrayRoots(zone);
     }
   } else {
     MOZ_ASSERT(!isIncremental);
     if (JSTraceDataOp op = grayRootTracer.op) {
       (*op)(&marker, grayRootTracer.data);
     }
   }
-  drainMarkStack();
-}
-
-void GCRuntime::markGrayReferencesInCurrentGroup(gcstats::PhaseKind phase) {
-  markGrayReferences<SweepGroupZonesIter>(phase);
 }
 
 void GCRuntime::markAllWeakReferences(gcstats::PhaseKind phase) {
   markWeakReferences<GCZonesIter>(phase);
 }
 
 void GCRuntime::markAllGrayReferences(gcstats::PhaseKind phase) {
-  markGrayReferences<GCZonesIter>(phase);
+  markGrayRoots<GCZonesIter>(phase);
+  drainMarkStack();
 }
 
 #ifdef JS_GC_ZEAL
 
 struct GCChunkHasher {
   typedef gc::Chunk* Lookup;
 
   /*
@@ -4642,27 +4636,27 @@ void js::gc::MarkingValidator::nonIncrem
   {
     gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
     gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::SWEEP_MARK);
 
     gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_WEAK);
 
     /* Update zone state for gray marking. */
     for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
-      zone->changeGCState(Zone::Mark, Zone::MarkGray);
+      zone->changeGCState(Zone::MarkBlackOnly, Zone::MarkBlackAndGray);
     }
 
     AutoSetMarkColor setColorGray(gc->marker, MarkColor::Gray);
 
     gc->markAllGrayReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY);
     gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
 
     /* Restore zone state. */
     for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
-      zone->changeGCState(Zone::MarkGray, Zone::Mark);
+      zone->changeGCState(Zone::MarkBlackAndGray, Zone::MarkBlackOnly);
     }
     MOZ_ASSERT(gc->marker.isDrained());
   }
 
   /* Take a copy of the non-incremental mark state and restore the original. */
   {
     AutoLockGC lock(runtime);
     for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
@@ -4814,52 +4808,48 @@ static void DropStringWrappers(JSRuntime
  * If we consider these dependencies as a graph, then all the compartments in
  * any strongly-connected component of this graph must be swept in the same
  * slice.
  *
  * Tarjan's algorithm is used to calculate the components.
  */
 namespace {
 struct AddOutgoingEdgeFunctor {
-  bool needsEdge_;
   ZoneComponentFinder& finder_;
 
-  AddOutgoingEdgeFunctor(bool needsEdge, ZoneComponentFinder& finder)
-      : needsEdge_(needsEdge), finder_(finder) {}
+  explicit AddOutgoingEdgeFunctor(ZoneComponentFinder& finder)
+      : finder_(finder) {}
 
   template <typename T>
   void operator()(T tp) {
-    TenuredCell& other = (*tp)->asTenured();
-
     /*
      * Add an edge to the wrapped object's compartment if the wrapped
      * object is not marked black, to indicate that the wrapper compartment
      * must not be swept after the wrapped compartment.
      */
-    if (needsEdge_) {
-      JS::Zone* zone = other.zone();
-      if (zone->isGCMarking()) {
-        finder_.addEdgeTo(zone);
-      }
+    JS::Zone* zone = (*tp)->asTenured().zone();
+    if (zone->isGCMarking()) {
+      finder_.addEdgeTo(zone);
     }
   }
 };
 }  // namespace
 
 void Compartment::findOutgoingEdges(ZoneComponentFinder& finder) {
   for (js::WrapperMap::Enum e(crossCompartmentWrappers); !e.empty();
        e.popFront()) {
     CrossCompartmentKey& key = e.front().mutableKey();
     MOZ_ASSERT(!key.is<JSString*>());
-    bool needsEdge = true;
-    if (key.is<JSObject*>()) {
-      TenuredCell& other = key.as<JSObject*>()->asTenured();
-      needsEdge = !other.isMarkedBlack();
-    }
-    key.applyToWrapped(AddOutgoingEdgeFunctor(needsEdge, finder));
+    if (key.is<JSObject*>() &&
+        key.as<JSObject*>()->asTenured().isMarkedBlack()) {
+      // CCW target is already marked, so we don't need to watch out for
+      // later marking of the CCW.
+      continue;
+    }
+    key.applyToWrapped(AddOutgoingEdgeFunctor(finder));
   }
 }
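
A toy model of the simplified edge rule above (names are illustrative): a
wrapper only contributes a sweep-group edge when its target could still be
marked later.

struct ToyTarget {
  bool markedBlack;  // already marked black by this GC
};

// A black target cannot change color later, so its wrapper needs no edge.
static bool NeedsSweepGroupEdge(const ToyTarget& target) {
  return !target.markedBlack;
}
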
 
 void Zone::findOutgoingEdges(ZoneComponentFinder& finder) {
   /*
    * Any compartment may have a pointer to an atom in the atoms
    * compartment, and these aren't in the cross compartment map.
    */
@@ -4924,17 +4914,17 @@ void GCRuntime::groupZonesForSweeping(JS
 #endif
 
   for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
     MOZ_ASSERT(zone->isGCMarking());
     finder.addNode(zone);
   }
   sweepGroups = finder.getResultsList();
   currentSweepGroup = sweepGroups;
-  sweepGroupIndex = 0;
+  sweepGroupIndex = 1;
 
   for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
     zone->gcSweepGroupEdges().clear();
   }
 
 #ifdef DEBUG
   for (Zone* head = currentSweepGroup; head; head = head->nextGroup()) {
     for (Zone* zone = head; zone; zone = zone->nextNodeInGroup()) {
@@ -4960,36 +4950,38 @@ void GCRuntime::getNextSweepGroup() {
   }
 
   MOZ_ASSERT_IF(abortSweepAfterCurrentGroup, !isIncremental);
   if (!isIncremental) {
     ZoneComponentFinder::mergeGroups(currentSweepGroup);
   }
 
   for (Zone* zone = currentSweepGroup; zone; zone = zone->nextNodeInGroup()) {
-    MOZ_ASSERT(zone->isGCMarking());
+    MOZ_ASSERT(zone->isGCMarkingBlackOnly());
     MOZ_ASSERT(!zone->isQueuedForBackgroundSweep());
   }
 
   if (abortSweepAfterCurrentGroup) {
     for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
       MOZ_ASSERT(!zone->gcNextGraphComponent);
       zone->setNeedsIncrementalBarrier(false);
-      zone->changeGCState(Zone::Mark, Zone::NoGC);
+      zone->changeGCState(Zone::MarkBlackOnly, Zone::NoGC);
       zone->arenas.unmarkPreMarkedFreeCells();
       zone->gcGrayRoots().clearAndFree();
     }
 
     for (SweepGroupCompartmentsIter comp(rt); !comp.done(); comp.next()) {
       ResetGrayList(comp);
     }
 
     abortSweepAfterCurrentGroup = false;
     currentSweepGroup = nullptr;
   }
+
+  hasMarkedGrayRoots = false;
 }
 
 /*
  * Gray marking:
  *
  * At the end of collection, anything reachable from a gray root that has not
  * otherwise been marked black must be marked gray.
  *
@@ -5103,18 +5095,19 @@ void GCRuntime::markIncomingCrossCompart
   static const gcstats::PhaseKind statsPhases[] = {
       gcstats::PhaseKind::SWEEP_MARK_INCOMING_BLACK,
       gcstats::PhaseKind::SWEEP_MARK_INCOMING_GRAY};
   gcstats::AutoPhase ap1(stats(), statsPhases[unsigned(color)]);
 
   bool unlinkList = color == MarkColor::Gray;
 
   for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
-    MOZ_ASSERT_IF(color == MarkColor::Gray, c->zone()->isGCMarkingGray());
-    MOZ_ASSERT_IF(color == MarkColor::Black, c->zone()->isGCMarkingBlack());
+    MOZ_ASSERT(c->zone()->isGCMarking());
+    MOZ_ASSERT_IF(color == MarkColor::Gray,
+                  c->zone()->isGCMarkingBlackAndGray());
     MOZ_ASSERT_IF(c->gcIncomingGrayPointers,
                   IsGrayListObject(c->gcIncomingGrayPointers));
 
     for (JSObject* src = c->gcIncomingGrayPointers; src;
          src = NextIncomingCrossCompartmentPointer(src, unlinkList)) {
       JSObject* dst = CrossCompartmentPointerReferent(src);
       MOZ_ASSERT(dst->compartment() == c);
 
@@ -5130,18 +5123,16 @@ void GCRuntime::markIncomingCrossCompart
         }
       }
     }
 
     if (unlinkList) {
       c->gcIncomingGrayPointers = nullptr;
     }
   }
-
-  drainMarkStack();
 }
 
 static bool RemoveFromGrayList(JSObject* wrapper) {
   AutoTouchingGrayThings tgt;
 
   if (!IsGrayListObject(wrapper)) {
     return false;
   }
@@ -5178,16 +5169,28 @@ static bool RemoveFromGrayList(JSObject*
 static void ResetGrayList(Compartment* comp) {
   JSObject* src = comp->gcIncomingGrayPointers;
   while (src) {
     src = NextIncomingCrossCompartmentPointer(src, true);
   }
   comp->gcIncomingGrayPointers = nullptr;
 }
 
+#ifdef DEBUG
+static bool HasIncomingCrossCompartmentPointers(JSRuntime* rt) {
+  for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
+    if (c->gcIncomingGrayPointers) {
+      return true;
+    }
+  }
+
+  return false;
+}
+#endif
+
 void js::NotifyGCNukeWrapper(JSObject* obj) {
   /*
    * References to target of wrapper are being removed, we no longer have to
    * remember to mark it.
    */
   RemoveFromGrayList(obj);
 }
 
@@ -5233,48 +5236,74 @@ static inline void MaybeCheckWeakMapMark
     for (SweepGroupZonesIter zone(gc->rt); !zone.done(); zone.next()) {
       MOZ_RELEASE_ASSERT(WeakMapBase::checkMarkingForZone(zone));
     }
   }
 
 #endif
 }
 
-IncrementalProgress GCRuntime::endMarkingSweepGroup(FreeOp* fop,
-                                                    SliceBudget& budget) {
+IncrementalProgress GCRuntime::markGrayReferencesInCurrentGroup(
+    FreeOp* fop, SliceBudget& budget) {
+  MOZ_ASSERT(marker.markColor() == MarkColor::Black);
+
+  if (hasMarkedGrayRoots) {
+    return Finished;
+  }
+
+  MOZ_ASSERT(cellsToAssertNotGray.ref().empty());
+
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
 
-  // Mark any incoming black pointers from previously swept compartments
-  // whose referents are not marked. This can occur when gray cells become
-  // black by the action of UnmarkGray.
+  // Mark any incoming gray pointers from previously swept compartments that
+  // have subsequently been marked black. This can occur when gray cells
+  // become black by the action of UnmarkGray.
   markIncomingCrossCompartmentPointers(MarkColor::Black);
-  markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK);
+  drainMarkStack();
 
   // Change the state of the current group to MarkBlackAndGray to restrict
   // gray marking to this group.  Note that there may be pointers to the
   // atoms zone, and these will be marked through, as they are not marked
   // with TraceCrossCompartmentEdge.
   for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
-    zone->changeGCState(Zone::Mark, Zone::MarkGray);
+    zone->changeGCState(Zone::MarkBlackOnly, Zone::MarkBlackAndGray);
   }
 
   AutoSetMarkColor setColorGray(marker, MarkColor::Gray);
 
   // Mark incoming gray pointers from previously swept compartments.
   markIncomingCrossCompartmentPointers(MarkColor::Gray);
 
-  // Mark gray roots and mark transitively inside the current compartment
-  // group.
-  markGrayReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY);
+  markGrayRoots<SweepGroupZonesIter>(gcstats::PhaseKind::SWEEP_MARK_GRAY);
+
+  hasMarkedGrayRoots = true;
+
+#ifdef JS_GC_ZEAL
+  if (shouldYieldForZeal(ZealMode::YieldWhileGrayMarking)) {
+    return NotFinished;
+  }
+#endif
+
+  return marker.markUntilBudgetExhausted(budget) ? Finished : NotFinished;
+}
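
markGrayReferencesInCurrentGroup is a resumable sweep action: roots are
marked exactly once per group (guarded by hasMarkedGrayRoots) and the mark
stack is then drained against the slice budget on each re-entry. A toy
sketch of this resumable-step pattern, with illustrative names:

#include <cstddef>

enum class Progress { NotFinished, Finished };

struct ToyStep {
  bool rootsMarked = false;  // mirrors hasMarkedGrayRoots
  size_t workLeft = 1000;

  Progress run(size_t budget) {
    if (!rootsMarked) {
      rootsMarked = true;  // one-time root marking
    }
    size_t n = budget < workLeft ? budget : workLeft;
    workLeft -= n;  // drain against the budget, then yield if work remains
    return workLeft == 0 ? Progress::Finished : Progress::NotFinished;
  }
};
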
+
+IncrementalProgress GCRuntime::endMarkingSweepGroup(FreeOp* fop,
+                                                    SliceBudget& budget) {
+  MOZ_ASSERT(marker.markColor() == MarkColor::Black);
+  MOZ_ASSERT(!HasIncomingCrossCompartmentPointers(rt));
+
+  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
+
+  markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK);
+
+  AutoSetMarkColor setColorGray(marker, MarkColor::Gray);
+
+  // Mark transitively inside the current compartment group.
   markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
 
-  // Restore marking state.
-  for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
-    zone->changeGCState(Zone::MarkGray, Zone::Mark);
-  }
   MOZ_ASSERT(marker.isDrained());
 
   // We must not yield after this point before we start sweeping the group.
   safeToYield = false;
 
   MaybeCheckWeakMapMarking(this);
 
   return Finished;
@@ -5569,33 +5598,40 @@ IncrementalProgress GCRuntime::beginSwee
 
   using namespace gcstats;
 
   AutoSCC scc(stats(), sweepGroupIndex);
 
   bool sweepingAtoms = false;
   for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
     /* Set the GC state to sweeping. */
-    zone->changeGCState(Zone::Mark, Zone::Sweep);
+    zone->changeGCState(Zone::MarkBlackAndGray, Zone::Sweep);
 
     /* Purge the ArenaLists before sweeping. */
     zone->arenas.unmarkPreMarkedFreeCells();
     zone->arenas.clearFreeLists();
 
     if (zone->isAtomsZone()) {
       sweepingAtoms = true;
     }
 
 #ifdef DEBUG
     zone->gcLastSweepGroupIndex = sweepGroupIndex;
 #endif
   }
 
   validateIncrementalMarking();
 
+#ifdef DEBUG
+  for (auto cell : cellsToAssertNotGray.ref()) {
+    JS::AssertCellIsNotGray(cell);
+  }
+  cellsToAssertNotGray.ref().clearAndFree();
+#endif
+
   {
     AutoPhase ap(stats(), PhaseKind::FINALIZE_START);
     callFinalizeCallbacks(fop, JSFINALIZE_GROUP_PREPARE);
     {
       AutoPhase ap2(stats(), PhaseKind::WEAK_ZONES_CALLBACK);
       callWeakPointerZonesCallbacks();
     }
     {
@@ -5754,26 +5790,24 @@ void GCRuntime::beginSweepPhase(JS::gcre
 
   computeNonIncrementalMarkingForValidation(session);
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
 
   sweepOnBackgroundThread = reason != JS::gcreason::DESTROY_RUNTIME &&
                             !gcTracer.traceEnabled() && CanUseExtraThreads();
 
+  hasMarkedGrayRoots = false;
+
   AssertNoWrappersInGrayList(rt);
   DropStringWrappers(rt);
 
   groupZonesForSweeping(reason);
 
   sweepActions->assertFinished();
-
-  // We must not yield after this point until we start sweeping the first sweep
-  // group.
-  safeToYield = false;
 }
 
 bool ArenaLists::foregroundFinalize(FreeOp* fop, AllocKind thingKind,
                                     SliceBudget& sliceBudget,
                                     SortedArenaList& sweepList) {
   if (!arenaListsToSweep(thingKind) && incrementalSweptArenas.ref().isEmpty()) {
     return true;
   }
@@ -6434,16 +6468,17 @@ ForEachAllocKind(AllocKinds kinds, Uniqu
 
 bool GCRuntime::initSweepActions() {
   using namespace sweepaction;
   using sweepaction::Call;
 
   sweepActions.ref() = RepeatForSweepGroup(
       rt,
       Sequence(
+          Call(&GCRuntime::markGrayReferencesInCurrentGroup),
           Call(&GCRuntime::endMarkingSweepGroup),
           Call(&GCRuntime::beginSweepingSweepGroup),
           MaybeYield(ZealMode::IncrementalMultipleSlices),
           MaybeYield(ZealMode::YieldBeforeSweepingAtoms),
           Call(&GCRuntime::sweepAtomsTable),
           MaybeYield(ZealMode::YieldBeforeSweepingCaches),
           Call(&GCRuntime::sweepWeakCaches),
           ForEachZoneInSweepGroup(
@@ -6664,16 +6699,17 @@ void GCRuntime::finishCollection() {
     }
 
     MOZ_ASSERT(!zone->wasGCStarted());
     MOZ_ASSERT(!zone->needsIncrementalBarrier());
     MOZ_ASSERT(!zone->isOnList());
   }
 
   MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
+  MOZ_ASSERT(cellsToAssertNotGray.ref().empty());
 
   lastGCTime = currentTime;
 }
 
 static const char* HeapStateToLabel(JS::HeapState heapState) {
   switch (heapState) {
     case JS::HeapState::MinorCollecting:
       return "js::Nursery::collect";
@@ -6737,17 +6773,17 @@ GCRuntime::IncrementalResult GCRuntime::
       clearBufferedGrayRoots();
 
       for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
         ResetGrayList(c);
       }
 
       for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
         zone->setNeedsIncrementalBarrier(false);
-        zone->changeGCState(Zone::Mark, Zone::NoGC);
+        zone->changeGCState(Zone::MarkBlackOnly, Zone::NoGC);
         zone->arenas.unmarkPreMarkedFreeCells();
       }
 
       {
         AutoLockHelperThreadState lock;
         blocksToFreeAfterSweeping.ref().freeAll();
       }
 
@@ -6756,18 +6792,16 @@ GCRuntime::IncrementalResult GCRuntime::
 
       MOZ_ASSERT(!marker.shouldCheckCompartments());
 
       break;
     }
 
     case State::Sweep: {
       // Finish sweeping the current sweep group, then abort.
-      marker.reset();
-
       for (CompartmentsIter c(rt); !c.done(); c.next()) {
         c->gcState.scheduledForDestruction = false;
       }
 
       abortSweepAfterCurrentGroup = true;
       isCompacting = false;
 
       break;
@@ -7089,16 +7123,17 @@ void GCRuntime::incrementalSlice(SliceBu
 
     case State::Finish:
       finishCollection();
       incrementalState = State::NotActive;
       break;
   }
 
   MOZ_ASSERT(safeToYield);
+  MOZ_ASSERT(marker.markColor() == MarkColor::Black);
 }
 
 gc::AbortReason gc::IsIncrementalGCUnsafe(JSRuntime* rt) {
   MOZ_ASSERT(!rt->mainContextFromOwnThread()->suppressGC);
 
   if (!rt->gc.isIncrementalGCAllowed()) {
     return gc::AbortReason::IncrementalDisabled;
   }
@@ -8147,22 +8182,16 @@ void GCRuntime::clearSelectedForMarking(
 }
 
 void GCRuntime::setDeterministic(bool enabled) {
   MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
   deterministicOnly = enabled;
 }
 #endif
 
-#ifdef ENABLE_WASM_GC
-/* static */ bool GCRuntime::temporaryAbortIfWasmGc(JSContext* cx) {
-  return cx->options().wasmGc() && cx->suppressGC;
-}
-#endif
-
 #ifdef DEBUG
 
 /* Should only be called manually under gdb */
 void PreventGCDuringInteractiveDebug() { TlsContext.get()->suppressGC++; }
 
 #endif
 
 void js::ReleaseAllJITCode(FreeOp* fop) {
@@ -8893,33 +8922,45 @@ JS_PUBLIC_API bool js::gc::detail::CellI
     return false;
   }
 
   return detail::CellIsMarkedGray(tc);
 }
 
 #ifdef DEBUG
 
-JS_PUBLIC_API bool js::gc::detail::CellIsNotGray(const Cell* cell) {
+JS_PUBLIC_API void js::gc::detail::AssertCellIsNotGray(const Cell* cell) {
   // Check that a cell is not marked gray.
   //
   // Since this is a debug-only check, take account of the eventual mark state
   // of cells that will be marked black by the next GC slice in an incremental
   // GC. For performance reasons we don't do this in CellIsMarkedGrayIfKnown.
 
   if (!CanCheckGrayBits(cell)) {
-    return true;
+    return;
   }
 
   // TODO: I'd like to AssertHeapIsIdle() here, but this ends up getting
   // called during GC and while iterating the heap for memory reporting.
   MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
 
   auto tc = &cell->asTenured();
-  return !detail::CellIsMarkedGray(tc);
+  if (tc->zone()->isGCMarkingBlackAndGray()) {
+    // We are doing gray marking in the cell's zone. Even if the cell is
+    // currently marked gray it may eventually be marked black. Delay the check
+    // until we finish gray marking.
+    JSRuntime* rt = tc->zone()->runtimeFromMainThread();
+    AutoEnterOOMUnsafeRegion oomUnsafe;
+    if (!rt->gc.cellsToAssertNotGray.ref().append(cell)) {
+      oomUnsafe.crash("Can't append to delayed gray checks list");
+    }
+    return;
+  }
+
+  MOZ_ASSERT(!detail::CellIsMarkedGray(tc));
 }
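
The deferred check converts a point-in-time assertion into one that is
replayed once gray marking is done (beginSweepingSweepGroup walks the queued
cells). A self-contained sketch of the defer-then-verify pattern; ToyChecker
and its members are illustrative:

#include <cassert>
#include <vector>

struct ToyChecker {
  bool grayMarkingInProgress = false;
  std::vector<const void*> deferred;  // mirrors cellsToAssertNotGray

  void assertNotGray(const void* cell, bool isGray) {
    if (grayMarkingInProgress) {
      deferred.push_back(cell);  // color may still change; check later
      return;
    }
    assert(!isGray);
  }

  void finishGrayMarking(bool (*cellIsGray)(const void*)) {
    grayMarkingInProgress = false;
    for (const void* cell : deferred) {
      assert(!cellIsGray(cell));  // the replayed checks
    }
    deferred.clear();
  }
};
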
 
 extern JS_PUBLIC_API bool js::gc::detail::ObjectIsMarkedBlack(
     const JSObject* obj) {
   return obj->isMarkedBlack();
 }
 
 #endif
--- a/js/src/gc/GCEnum.h
+++ b/js/src/gc/GCEnum.h
@@ -70,17 +70,18 @@ enum class AbortReason {
   D(CheckNursery, 16)                  \
   D(YieldBeforeSweepingAtoms, 17)      \
   D(CheckGrayMarking, 18)              \
   D(YieldBeforeSweepingCaches, 19)     \
   D(YieldBeforeSweepingTypes, 20)      \
   D(YieldBeforeSweepingObjects, 21)    \
   D(YieldBeforeSweepingNonObjects, 22) \
   D(YieldBeforeSweepingShapeTrees, 23) \
-  D(CheckWeakMapMarking, 24)
+  D(CheckWeakMapMarking, 24)           \
+  D(YieldWhileGrayMarking, 25)
 
 enum class ZealMode {
 #define ZEAL_MODE(name, value) name = value,
   JS_FOR_EACH_ZEAL_MODE(ZEAL_MODE)
 #undef ZEAL_MODE
       Count,
   Limit = Count - 1
 };
--- a/js/src/gc/GCMarker.h
+++ b/js/src/gc/GCMarker.h
@@ -44,26 +44,26 @@ struct WeakMarkable {
 
 using WeakEntryVector = Vector<WeakMarkable, 2, js::SystemAllocPolicy>;
 
 using WeakKeyTable =
     OrderedHashMap<JS::GCCellPtr, WeakEntryVector, WeakKeyTableHashPolicy,
                    js::SystemAllocPolicy>;
 
 /*
- * When the native stack is low, the GC does not call js::TraceChildren to mark
+ * When the mark stack is full, the GC does not call js::TraceChildren to mark
  * the reachable "children" of the thing. Rather the thing is put aside and
- * js::TraceChildren is called later with more space on the C stack.
+ * js::TraceChildren is called later when the mark stack is empty.
  *
  * To implement such delayed marking of the children with minimal overhead for
- * the normal case of sufficient native stack, the code adds a field per arena.
- * The field markingDelay->link links all arenas with delayed things into a
- * stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
- * GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
- * markDelayedChildren pops the arenas from the stack until it empties.
+ * the normal case where the mark stack has space, we link arenas into a list
+ * using Arena::setNextDelayedMarking(). The head of the list is stored in
+ * GCMarker::unmarkedArenaStackTop. GCMarker::delayMarkingChildren() adds
+ * arenas to the list as necessary while markAllDelayedChildren() pops arenas
+ * from the list until it is empty.
  */
 class MarkStack {
  public:
   /*
    * We use a common mark stack to mark GC things of different types and use
    * the explicit tags to distinguish them when it cannot be deduced from
    * the context of push or pop operation.
    */
@@ -266,43 +266,63 @@ class GCMarker : public JSTracer {
    * objects. If this invariant is violated, the cycle collector may free
    * objects that are still reachable.
    */
   void setMarkColorGray();
   void setMarkColorBlack();
   void setMarkColor(gc::MarkColor newColor);
   gc::MarkColor markColor() const { return color; }
 
+  // Return whether a cell is marked relative to the current marking color: a
+  // black cell always counts as marked, while a gray cell counts as marked
+  // only when the current mark color is gray.
+  template <typename T>
+  bool isMarked(T* thingp) {
+    return color == gc::MarkColor::Black ? gc::IsMarkedBlack(runtime(), thingp)
+                                         : gc::IsMarked(runtime(), thingp);
+  }
+  template <typename T>
+  bool isMarkedUnbarriered(T* thingp) {
+    return color == gc::MarkColor::Black
+               ? gc::IsMarkedBlackUnbarriered(runtime(), thingp)
+               : gc::IsMarkedUnbarriered(runtime(), thingp);
+  }
+
   void enterWeakMarkingMode();
   void leaveWeakMarkingMode();
   void abortLinearWeakMarking() {
     leaveWeakMarkingMode();
     linearWeakMarkingDisabled_ = true;
   }
 
   void delayMarkingArena(gc::Arena* arena);
   void delayMarkingChildren(const void* thing);
-  void markDelayedChildren(gc::Arena* arena);
-  MOZ_MUST_USE bool markDelayedChildren(SliceBudget& budget);
+  void markDelayedChildren(gc::Arena* arena, gc::MarkColor color);
+  MOZ_MUST_USE bool markAllDelayedChildren(SliceBudget& budget);
+  bool processDelayedMarkingList(gc::Arena** outputList, gc::MarkColor color,
+                                 bool shouldYield, SliceBudget& budget);
   bool hasDelayedChildren() const { return !!unmarkedArenaStackTop; }
 
   bool isDrained() { return isMarkStackEmpty() && !unmarkedArenaStackTop; }
 
   MOZ_MUST_USE bool markUntilBudgetExhausted(SliceBudget& budget);
 
   void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
 #ifdef DEBUG
   bool shouldCheckCompartments() { return strictCompartmentChecking; }
 #endif
 
   void markEphemeronValues(gc::Cell* markedCell, gc::WeakEntryVector& entry);
 
+  size_t getMarkCount() const { return markCount; }
+  void clearMarkCount() { markCount = 0; }
+
   static GCMarker* fromTracer(JSTracer* trc) {
     MOZ_ASSERT(trc->isMarkingTracer());
     return static_cast<GCMarker*>(trc);
   }
 
  private:
 #ifdef DEBUG
   void checkZone(void* p);
@@ -343,43 +363,53 @@ class GCMarker : public JSTracer {
 
   template <typename T>
   inline void pushTaggedPtr(T* ptr);
 
   inline void pushValueArray(JSObject* obj, HeapSlot* start, HeapSlot* end);
 
   bool isMarkStackEmpty() { return stack.isEmpty(); }
 
+  bool hasBlackEntries() const { return stack.position() > grayPosition; }
+
+  bool hasGrayEntries() const { return grayPosition > 0 && !stack.isEmpty(); }
+
   MOZ_MUST_USE bool restoreValueArray(
       const gc::MarkStack::SavedValueArray& array, HeapSlot** vpp,
       HeapSlot** endp);
   gc::MarkStack::ValueArray restoreValueArray(
       const gc::MarkStack::SavedValueArray& savedArray);
 
   void saveValueRanges();
   gc::MarkStack::SavedValueArray saveValueRange(
       const gc::MarkStack::ValueArray& array);
 
   inline void processMarkStackTop(SliceBudget& budget);
 
   /* The mark stack. Pointers in this stack are "gray" in the GC sense. */
   gc::MarkStack stack;
 
+  /* Stack entries at positions below this are considered gray. */
+  MainThreadData<size_t> grayPosition;
+
   /* The color is only applied to objects and functions. */
   MainThreadData<gc::MarkColor> color;
 
   /* Pointer to the top of the stack of arenas we are delaying marking on. */
   MainThreadData<js::gc::Arena*> unmarkedArenaStackTop;
 
   /*
    * If the weakKeys table OOMs, disable the linear algorithm and fall back
    * to iterating until the next GC.
    */
   MainThreadData<bool> linearWeakMarkingDisabled_;
 
+  /* The number of cells marked during the current GC. */
+  size_t markCount;
+
 #ifdef DEBUG
   /* Count of arenas that are currently in the stack. */
   MainThreadData<size_t> markLaterArenas;
 
   /* Assert that start and stop are called with correct ordering. */
   MainThreadData<bool> started;
 
   /*
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -293,26 +293,16 @@ class GCRuntime {
   void setNextScheduled(uint32_t count);
   void verifyPreBarriers();
   void maybeVerifyPreBarriers(bool always);
   bool selectForMarking(JSObject* object);
   void clearSelectedForMarking();
   void setDeterministic(bool enable);
 #endif
 
-#ifdef ENABLE_WASM_GC
-  // If we run with wasm-gc enabled and there's wasm frames on the stack,
-  // then GCs are suppressed and many APIs should not be available.
-  // TODO (bug 1456824) This is temporary and should be removed once proper
-  // GC support is implemented.
-  static bool temporaryAbortIfWasmGc(JSContext* cx);
-#else
-  static bool temporaryAbortIfWasmGc(JSContext* cx) { return false; }
-#endif
-
   uint64_t nextCellUniqueId() {
     MOZ_ASSERT(nextCellUniqueId_ > 0);
     uint64_t uid = ++nextCellUniqueId_;
     return uid;
   }
 
 #ifdef DEBUG
   bool shutdownCollectedEverything() const { return arenasEmptyAtShutdown; }
@@ -620,26 +610,27 @@ class GCRuntime {
   void markCompartments();
   IncrementalProgress markUntilBudgetExhausted(SliceBudget& sliceBudget,
                                                gcstats::PhaseKind phase);
   void drainMarkStack();
   template <class ZoneIterT>
   void markWeakReferences(gcstats::PhaseKind phase);
   void markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase);
   template <class ZoneIterT>
-  void markGrayReferences(gcstats::PhaseKind phase);
+  void markGrayRoots(gcstats::PhaseKind phase);
   void markBufferedGrayRoots(JS::Zone* zone);
-  void markGrayReferencesInCurrentGroup(gcstats::PhaseKind phase);
   void markAllWeakReferences(gcstats::PhaseKind phase);
   void markAllGrayReferences(gcstats::PhaseKind phase);
 
   void beginSweepPhase(JS::gcreason::Reason reason, AutoGCSession& session);
   void groupZonesForSweeping(JS::gcreason::Reason reason);
   MOZ_MUST_USE bool findInterZoneEdges();
   void getNextSweepGroup();
+  IncrementalProgress markGrayReferencesInCurrentGroup(FreeOp* fop,
+                                                       SliceBudget& budget);
   IncrementalProgress endMarkingSweepGroup(FreeOp* fop, SliceBudget& budget);
   void markIncomingCrossCompartmentPointers(MarkColor color);
   IncrementalProgress beginSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
   void sweepDebuggerOnMainThread(FreeOp* fop);
   void sweepJitDataOnMainThread(FreeOp* fop);
   IncrementalProgress endSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
   IncrementalProgress performSweepActions(SliceBudget& sliceBudget);
   IncrementalProgress sweepTypeInformation(FreeOp* fop, SliceBudget& budget,
@@ -887,18 +878,28 @@ class GCRuntime {
 
   MainThreadData<JS::Zone*> sweepGroups;
   MainThreadOrGCTaskData<JS::Zone*> currentSweepGroup;
   MainThreadData<UniquePtr<SweepAction<GCRuntime*, FreeOp*, SliceBudget&>>>
       sweepActions;
   MainThreadOrGCTaskData<JS::Zone*> sweepZone;
   MainThreadData<mozilla::Maybe<AtomsTable::SweepIterator>> maybeAtomsToSweep;
   MainThreadOrGCTaskData<JS::detail::WeakCacheBase*> sweepCache;
+  MainThreadData<bool> hasMarkedGrayRoots;
   MainThreadData<bool> abortSweepAfterCurrentGroup;
 
+#ifdef DEBUG
+  // During gray marking, delay AssertCellIsNotGray checks by
+  // recording the cell pointers here and checking after marking has
+  // finished.
+  MainThreadData<Vector<const Cell*, 0, SystemAllocPolicy>>
+      cellsToAssertNotGray;
+  friend void js::gc::detail::AssertCellIsNotGray(const Cell*);
+#endif
+
   friend class SweepGroupsIter;
   friend class WeakCacheSweepIterator;
 
   /*
    * Incremental compacting state.
    */
   MainThreadData<bool> startedCompacting;
   MainThreadData<ZoneList> zonesToMaybeCompact;
@@ -1098,17 +1099,18 @@ inline bool GCRuntime::needZealousGC() {
 inline bool GCRuntime::hasIncrementalTwoSliceZealMode() {
   return hasZealMode(ZealMode::YieldBeforeMarking) ||
          hasZealMode(ZealMode::YieldBeforeSweeping) ||
          hasZealMode(ZealMode::YieldBeforeSweepingAtoms) ||
          hasZealMode(ZealMode::YieldBeforeSweepingCaches) ||
          hasZealMode(ZealMode::YieldBeforeSweepingTypes) ||
          hasZealMode(ZealMode::YieldBeforeSweepingObjects) ||
          hasZealMode(ZealMode::YieldBeforeSweepingNonObjects) ||
-         hasZealMode(ZealMode::YieldBeforeSweepingShapeTrees);
+         hasZealMode(ZealMode::YieldBeforeSweepingShapeTrees) ||
+         hasZealMode(ZealMode::YieldWhileGrayMarking);
 }
 
 #else
 inline bool GCRuntime::hasZealMode(ZealMode mode) { return false; }
 inline void GCRuntime::clearZealMode(ZealMode mode) {}
 inline bool GCRuntime::upcomingZealousGC() { return false; }
 inline bool GCRuntime::needZealousGC() { return false; }
 inline bool GCRuntime::hasIncrementalTwoSliceZealMode() { return false; }
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -245,17 +245,17 @@ void js::CheckTracedThing(JSTracer* trc,
                     IsUnmarkGrayTracer(trc) || IsClearEdgesTracer(trc));
 
   if (isGcMarkingTracer) {
     GCMarker* gcMarker = GCMarker::fromTracer(trc);
     MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                   zone->isCollecting() || zone->isAtomsZone());
 
     MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
-                  !zone->isGCMarkingBlack() || zone->isAtomsZone());
+                  !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());
 
     MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
                  zone->isGCCompacting()));
   }
 
   /*
    * Try to assert that the thing is allocated.
    *
@@ -314,46 +314,57 @@ static inline bool ShouldMarkCrossCompar
   }
 
   if (color == MarkColor::Black) {
     // Check our sweep groups are correct: we should never have to
     // mark something in a zone that we have started sweeping.
     MOZ_ASSERT_IF(!dst.isMarkedBlack(), !dstZone->isGCSweeping());
 
     /*
-     * Having black->gray edges violates our promise to the cycle
-     * collector. This can happen if we're collecting a compartment and it
-     * has an edge to an uncollected compartment: it's possible that the
-     * source and destination of the cross-compartment edge should be gray,
-     * but the source was marked black by the write barrier.
+     * Having black->gray edges violates our promise to the cycle collector so
+     * we ensure that gray things we encounter when marking black end up getting
+     * marked black.
+     *
+     * This can happen for two reasons:
+     *
+     * 1) If we're collecting a compartment and it has an edge to an uncollected
+     * compartment it's possible that the source and destination of the
+     * cross-compartment edge should be gray, but the source was marked black by
+     * the write barrier.
+     *
+     * 2) If we yield during gray marking and the write barrier marks a gray
+     * thing black.
+     *
+     * We handle the first case before returning, whereas the second case
+     * happens as part of normal marking.
      */
-    if (dst.isMarkedGray()) {
-      MOZ_ASSERT(!dstZone->isCollecting());
+    if (dst.isMarkedGray() && !dstZone->isGCMarking()) {
       UnmarkGrayGCThing(marker->runtime(),
                         JS::GCCellPtr(&dst, dst.getTraceKind()));
+      return false;
     }
 
     return dstZone->isGCMarking();
   } else {
     // Check our sweep groups are correct as above.
     MOZ_ASSERT_IF(!dst.isMarkedAny(), !dstZone->isGCSweeping());
 
-    if (dstZone->isGCMarkingBlack()) {
+    if (dstZone->isGCMarkingBlackOnly()) {
       /*
       * The destination compartment is not being marked gray now,
        * but it will be later, so record the cell so it can be marked gray
        * at the appropriate time.
        */
       if (!dst.isMarkedAny()) {
         DelayCrossCompartmentGrayMarking(src);
       }
       return false;
     }
 
-    return dstZone->isGCMarkingGray();
+    return dstZone->isGCMarkingBlackAndGray();
   }
 }
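
A toy decision table for the black-marking branch above (illustrative
names): gray targets in zones that are still marking are left to normal
marking, and only targets in non-marking zones are eagerly unmarked.

struct ToyDst {
  bool markedGray;
  bool zoneIsMarking;  // the destination zone is still in a marking state
};

// Returns whether the destination should be marked now; sets unmarkGrayOut
// when the cell must be flipped black eagerly (UnmarkGrayGCThing above).
static bool ShouldMarkBlackEdge(ToyDst dst, bool& unmarkGrayOut) {
  unmarkGrayOut = false;
  if (dst.markedGray && !dst.zoneIsMarking) {
    unmarkGrayOut = true;
    return false;
  }
  return dst.zoneIsMarking;
}
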
 
 static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                         Cell* dstCell) {
   if (!trc->isMarkingTracer()) {
     return true;
   }
@@ -611,19 +622,25 @@ struct ImplicitEdgeHolderType<JSObject*>
 
 template <>
 struct ImplicitEdgeHolderType<JSScript*> {
   typedef JSScript* Type;
 };
 
 void GCMarker::markEphemeronValues(gc::Cell* markedCell,
                                    WeakEntryVector& values) {
-  size_t initialLen = values.length();
-  for (size_t i = 0; i < initialLen; i++) {
-    values[i].weakmap->markEntry(this, markedCell, values[i].key);
+  DebugOnly<size_t> initialLen = values.length();
+
+  for (const auto& markable : values) {
+    if (color == gc::MarkColor::Black &&
+        markable.weakmap->markColor == gc::MarkColor::Gray) {
+      continue;
+    }
+
+    markable.weakmap->markEntry(this, markedCell, markable.key);
   }
 
   // The vector should not be appended to during iteration because the key is
   // already marked, and even in cases where we have a multipart key, we
   // should only be inserting entries for the unmarked portions.
   MOZ_ASSERT(values.length() == initialLen);
 }
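
The new guard encodes a color-compatibility rule: a marker running black
must not propagate through a weakmap that was itself only marked gray. A toy
version of the check (illustrative names):

enum class ToyColor { Black, Gray };

// Skip the entry when a black marker meets a gray-marked map; every other
// combination may propagate marking into the entry.
static bool ShouldMarkEntry(ToyColor markerColor, ToyColor mapColor) {
  return !(markerColor == ToyColor::Black && mapColor == ToyColor::Gray);
}
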
 
@@ -949,21 +966,20 @@ static bool TraceKindParticipatesInCC(JS
 template <typename T>
 bool js::GCMarker::mark(T* thing) {
   if (IsInsideNursery(thing)) {
     return false;
   }
   AssertShouldMarkInZone(thing);
   TenuredCell* cell = TenuredCell::fromPointer(thing);
 
-  if (!TypeParticipatesInCC<T>::value) {
-    return cell->markIfUnmarked(MarkColor::Black);
-  }
-
-  return cell->markIfUnmarked(markColor());
+  MarkColor color = TypeParticipatesInCC<T>::value ? markColor()
+                                                   : MarkColor::Black;
+  markCount++;
+  return cell->markIfUnmarked(color);
 }
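
A minimal sketch of the color selection above: things whose trace kind does
not participate in cycle collection are always marked black, everything else
takes the marker's current color (toy names, illustrative only):

enum class ToyColor { Black, Gray };

static ToyColor ColorToMarkWith(bool participatesInCC, ToyColor current) {
  // Non-CC things ignore the current mark color.
  return participatesInCC ? current : ToyColor::Black;
}
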
 
 /*** Inline, Eager GC Marking ***********************************************/
 
 // Each of the eager, inline marking paths is directly preceded by the
 // out-of-line, generic tracing code for comparison. Both paths must end up
 // traversing equivalent subgraphs.
 
@@ -1577,36 +1593,52 @@ bool GCMarker::markUntilBudgetExhausted(
   strictCompartmentChecking = true;
   auto acc = mozilla::MakeScopeExit([&] { strictCompartmentChecking = false; });
 #endif
 
   if (budget.isOverBudget()) {
     return false;
   }
 
+  // This method leaves the mark color as it found it.
+  AutoSetMarkColor autoSetBlack(*this, MarkColor::Black);
+
+  // Change representation of value arrays on the stack while the mutator
+  // runs.
+  auto svr = mozilla::MakeScopeExit([&] { saveValueRanges(); });
+
   for (;;) {
-    while (!stack.isEmpty()) {
+    while (hasBlackEntries()) {
+      MOZ_ASSERT(markColor() == MarkColor::Black);
       processMarkStackTop(budget);
       if (budget.isOverBudget()) {
-        saveValueRanges();
         return false;
       }
     }
 
+    if (hasGrayEntries()) {
+      AutoSetMarkColor autoSetGray(*this, MarkColor::Gray);
+      do {
+        processMarkStackTop(budget);
+        if (budget.isOverBudget()) {
+          return false;
+        }
+      } while (hasGrayEntries());
+    }
+
     if (!hasDelayedChildren()) {
       break;
     }
 
     /*
      * Mark children of things that caused too deep recursion during the
      * above tracing. Don't do this until we're done with everything
      * else.
      */
-    if (!markDelayedChildren(budget)) {
-      saveValueRanges();
+    if (!markAllDelayedChildren(budget)) {
       return false;
     }
   }
 
   return true;
 }
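
The drain loop above has a fixed order: all black entries first, then any
gray entries under a temporary color switch, then delayed children. A sketch
of that ordering over a toy two-region stack (illustrative names):

#include <cstddef>

// Toy mark stack split at grayPosition: entries at index >= grayPosition
// are black, entries below it are gray.
struct ToyMarkStack {
  size_t position = 0;
  size_t grayPosition = 0;

  bool hasBlackEntries() const { return position > grayPosition; }
  bool hasGrayEntries() const { return grayPosition > 0 && position > 0; }
};

static void Drain(ToyMarkStack& s) {
  while (s.hasBlackEntries()) {
    s.position--;  // processMarkStackTop with the black color
  }
  while (s.hasGrayEntries()) {
    s.position--;  // processMarkStackTop with the gray color
  }
}
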
 
 inline static bool ObjectDenseElementsMayBeMarkable(NativeObject* nobj) {
@@ -2328,16 +2360,17 @@ void MarkStackIter::saveValueArray(
  * ExpandWeakMaps: the GC is recomputing the liveness of WeakMap entries by
  * expanding each live WeakMap into its constituent key->value edges, a table
  * of which will be consulted in a later phase whenever marking a potential
  * key.
  */
 GCMarker::GCMarker(JSRuntime* rt)
     : JSTracer(rt, JSTracer::TracerKindTag::Marking, ExpandWeakMaps),
       stack(),
+      grayPosition(0),
       color(MarkColor::Black),
       unmarkedArenaStackTop(nullptr)
 #ifdef DEBUG
       ,
       markLaterArenas(0),
       started(false),
       strictCompartmentChecking(false)
 #endif
@@ -2407,29 +2440,31 @@ void GCMarker::setMarkColor(gc::MarkColo
   if (newColor == gc::MarkColor::Black) {
     setMarkColorBlack();
   } else {
     setMarkColorGray();
   }
 }
 
 void GCMarker::setMarkColorGray() {
-  MOZ_ASSERT(isDrained());
+  MOZ_ASSERT(!hasBlackEntries());
   MOZ_ASSERT(color == gc::MarkColor::Black);
   MOZ_ASSERT(runtime()->gc.state() == State::Sweep);
 
   color = gc::MarkColor::Gray;
+  grayPosition = SIZE_MAX;
 }
 
 void GCMarker::setMarkColorBlack() {
-  MOZ_ASSERT(isDrained());
+  MOZ_ASSERT(!hasBlackEntries());
   MOZ_ASSERT(color == gc::MarkColor::Gray);
   MOZ_ASSERT(runtime()->gc.state() == State::Sweep);
 
   color = gc::MarkColor::Black;
+  grayPosition = stack.position();
 }
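
grayPosition encodes which region of the stack each color owns: switching to
gray sets it to SIZE_MAX so that every entry, old or new, counts as gray;
switching back to black pins it at the current position so only newer
entries count as black. A worked toy trace of the bookkeeping (illustrative,
not the real API):

#include <cassert>
#include <cstddef>
#include <cstdint>

int main() {
  size_t position = 0, grayPosition = 0;

  position += 3;  // push three entries while marking black
  assert(position > grayPosition);  // hasBlackEntries()

  grayPosition = SIZE_MAX;  // setMarkColorGray()
  position += 2;  // pushes now land in the gray region
  assert(!(position > grayPosition));  // no black entries

  grayPosition = position;  // setMarkColorBlack()
  position += 1;  // a new black entry above the gray region
  assert(position > grayPosition);
  return 0;
}
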
 
 template <typename T>
 void GCMarker::pushTaggedPtr(T* ptr) {
   checkZone(ptr);
   if (!stack.push(ptr)) {
     delayMarkingChildren(ptr);
   }
@@ -2444,17 +2479,17 @@ void GCMarker::pushValueArray(JSObject* 
 
   if (!stack.push(obj, start, end)) {
     delayMarkingChildren(obj);
   }
 }
 
 void GCMarker::repush(JSObject* obj) {
   MOZ_ASSERT_IF(markColor() == MarkColor::Gray,
-                gc::TenuredCell::fromPointer(obj)->isMarkedGray());
+                gc::TenuredCell::fromPointer(obj)->isMarkedAny());
   MOZ_ASSERT_IF(markColor() == MarkColor::Black,
                 gc::TenuredCell::fromPointer(obj)->isMarkedBlack());
   pushTaggedPtr(obj);
 }
 
 void GCMarker::enterWeakMarkingMode() {
   MOZ_ASSERT(tag_ == TracerKindTag::Marking);
   if (linearWeakMarkingDisabled_) {
@@ -2491,62 +2526,122 @@ void GCMarker::leaveWeakMarkingMode() {
   AutoEnterOOMUnsafeRegion oomUnsafe;
   for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) {
     if (!zone->gcWeakKeys().clear()) {
       oomUnsafe.crash("clearing weak keys in GCMarker::leaveWeakMarkingMode()");
     }
   }
 }
 
-void GCMarker::markDelayedChildren(Arena* arena) {
+void GCMarker::markDelayedChildren(Arena* arena, MarkColor color) {
   JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
-
-  // Whether we need to mark children of gray or black cells in the arena
-  // depends on which kind of marking we were doing when the arena as pushed
-  // onto the list.  We never change mark color without draining the mark
-  // stack though so this is the same as the current color.
-  bool markGrayCells =
-      markColor() == MarkColor::Gray && TraceKindParticipatesInCC(kind);
+  MOZ_ASSERT_IF(color == MarkColor::Gray, TraceKindParticipatesInCC(kind));
 
   for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
     TenuredCell* t = i.getCell();
-    if ((markGrayCells && t->isMarkedGray()) ||
-        (!markGrayCells && t->isMarkedBlack())) {
+    if ((color == MarkColor::Gray && t->isMarkedGray()) ||
+        (color == MarkColor::Black && t->isMarkedBlack())) {
       js::TraceChildren(this, t, kind);
     }
   }
 }
 
-bool GCMarker::markDelayedChildren(SliceBudget& budget) {
+static inline bool ArenaCanHaveGrayThings(Arena* arena) {
+  JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
+  return TraceKindParticipatesInCC(kind);
+}
+
+/*
+ * Process arenas from |unmarkedArenaStackTop|, moving them to |*outputList|
+ * (if non-null) and marking the unmarked children of marked cells of color
+ * |color|. If |shouldYield| is true, return early when |budget| is
+ * exceeded.
+ *
+ * This is called twice, first to mark gray children and then to mark
+ * black children.
+ */
+bool GCMarker::processDelayedMarkingList(Arena** outputList, MarkColor color,
+                                         bool shouldYield,
+                                         SliceBudget& budget) {
+  // If marking gets delayed at the same arena again, we must repeat marking
+  // of its things. Therefore we pop arena from the stack and clear its
+  // hasDelayedMarking flag before we begin the marking.
+
+  while (unmarkedArenaStackTop) {
+    Arena* arena = unmarkedArenaStackTop;
+    unmarkedArenaStackTop = arena->getNextDelayedMarking();
+
+    arena->unsetDelayedMarking();
+
+#ifdef DEBUG
+    MOZ_ASSERT(markLaterArenas);
+    if (!outputList) {
+      markLaterArenas--;
+    }
+#endif
+
+    if (color == MarkColor::Black ||
+        (color == MarkColor::Gray && ArenaCanHaveGrayThings(arena))) {
+      markDelayedChildren(arena, color);
+      budget.step(150);
+      if (shouldYield && budget.isOverBudget()) {
+        return false;
+      }
+    }
+
+    if (outputList) {
+      arena->setNextDelayedMarking(*outputList);
+      *outputList = arena;
+    }
+  }
+
+  return true;
+}
+
+bool GCMarker::markAllDelayedChildren(SliceBudget& budget) {
+  MOZ_ASSERT(!hasBlackEntries());
+  MOZ_ASSERT(markColor() == MarkColor::Black);
+
   GCRuntime& gc = runtime()->gc;
   gcstats::AutoPhase ap(gc.stats(), gc.state() == State::Mark,
                         gcstats::PhaseKind::MARK_DELAYED);
 
+  // We don't know which mark color we were using when an arena was
+  // pushed onto the list, so we mark the children of marked things of
+  // both colors in two passes over the list. Gray marking must be done
+  // first, as gray entries always sit before black entries on the
+  // mark stack.
+  //
+  // In order to guarantee progress here, the first pass (gray
+  // marking) is done non-incrementally. We can't remove anything
+  // from the list until the second pass, so if we yielded during the
+  // first pass we would have to restart and process all the arenas
+  // over again. If there are enough arenas we might never finish
+  // during our timeslice. Disallowing yield during the first pass
+  // ensures that the list shrinks by at least one arena every time.
+
   MOZ_ASSERT(unmarkedArenaStackTop);
-  do {
-    /*
-     * If marking gets delayed at the same arena again, we must repeat
-     * marking of its things. For that we pop arena from the stack and
-     * clear its hasDelayedMarking flag before we begin the marking.
-     */
-    Arena* arena = unmarkedArenaStackTop;
-    MOZ_ASSERT(arena->hasDelayedMarking);
-    MOZ_ASSERT(markLaterArenas);
-    unmarkedArenaStackTop = arena->getNextDelayedMarking();
-    arena->unsetDelayedMarking();
-#ifdef DEBUG
-    markLaterArenas--;
-#endif
-    markDelayedChildren(arena);
-
-    budget.step(150);
-    if (budget.isOverBudget()) {
-      return false;
-    }
-  } while (unmarkedArenaStackTop);
+
+  Arena* processedList = nullptr;
+  bool finished;
+  finished = processDelayedMarkingList(&processedList, MarkColor::Gray,
+                                       false, /* don't yield */
+                                       budget);
+  MOZ_ASSERT(finished);
+
+  unmarkedArenaStackTop = processedList;
+  finished = processDelayedMarkingList(nullptr, MarkColor::Black,
+                                       true, /* yield if over budget */
+                                       budget);
+  if (!finished) {
+    return false;
+  }
+
+  MOZ_ASSERT(!unmarkedArenaStackTop);
   MOZ_ASSERT(!markLaterArenas);
 
   return true;
 }
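
A compact model of the two passes above: the first (gray) pass must run to
completion and rebuilds the list, while the second (black) pass consumes it
and may yield. Toy list processing with illustrative names:

#include <vector>

struct ToyArena {
  int id;
};

static bool ProcessList(std::vector<ToyArena>& input,
                        std::vector<ToyArena>* output, bool shouldYield,
                        int& budget) {
  while (!input.empty()) {
    ToyArena arena = input.back();
    input.pop_back();
    // ... mark delayed children of |arena| in the current color ...
    if (output) {
      output->push_back(arena);  // keep the arena for the second pass
    }
    if (shouldYield && --budget <= 0) {
      return false;  // over budget: the caller restarts later
    }
  }
  return true;
}

static bool MarkAllDelayed(std::vector<ToyArena>& list, int budget) {
  std::vector<ToyArena> rebuilt;
  ProcessList(list, &rebuilt, /* shouldYield = */ false, budget);  // gray
  list = rebuilt;
  return ProcessList(list, nullptr, /* shouldYield = */ true, budget);  // black
}
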
 
 template <typename T>
 static void PushArenaTyped(GCMarker* gcmarker, Arena* arena) {
   for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
@@ -3156,32 +3251,32 @@ static inline void CheckIsMarkedThing(T*
       !ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
       CurrentThreadCanAccessRuntime(rt) ||
           CurrentThreadCanAccessZone((*thingp)->zoneFromAnyThread()) ||
           (JS::RuntimeHeapIsCollecting() && rt->gc.state() == State::Sweep));
 #endif
 }
 
 template <typename T>
-static bool IsMarkedInternalCommon(T* thingp) {
+static inline bool ShouldCheckMarkState(JSRuntime* rt, T** thingp) {
   CheckIsMarkedThing(thingp);
   MOZ_ASSERT(!IsInsideNursery(*thingp));
 
   TenuredCell& thing = (*thingp)->asTenured();
   Zone* zone = thing.zoneFromAnyThread();
   if (!zone->isCollectingFromAnyThread() || zone->isGCFinished()) {
-    return true;
+    return false;
   }
 
   if (zone->isGCCompacting() && IsForwarded(*thingp)) {
     *thingp = Forwarded(*thingp);
-    return true;
+    return false;
   }
 
-  return thing.isMarkedAny();
+  return true;
 }
 
 template <typename T>
 struct MightBeNurseryAllocated {
   static const bool value = mozilla::IsBaseOf<JSObject, T>::value ||
                             mozilla::IsBaseOf<JSString, T>::value;
 };
 
@@ -3192,17 +3287,40 @@ bool js::gc::IsMarkedInternal(JSRuntime*
   }
 
   if (MightBeNurseryAllocated<T>::value && IsInsideNursery(*thingp)) {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     Cell** cellp = reinterpret_cast<Cell**>(thingp);
     return Nursery::getForwardedPointer(cellp);
   }
 
-  return IsMarkedInternalCommon(thingp);
+  if (!ShouldCheckMarkState(rt, thingp)) {
+    return true;
+  }
+
+  return (*thingp)->asTenured().isMarkedAny();
+}
+
+template <typename T>
+bool js::gc::IsMarkedBlackInternal(JSRuntime* rt, T** thingp) {
+  if (IsOwnedByOtherRuntime(rt, *thingp)) {
+    return true;
+  }
+
+  if (MightBeNurseryAllocated<T>::value && IsInsideNursery(*thingp)) {
+    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
+    Cell** cellp = reinterpret_cast<Cell**>(thingp);
+    return Nursery::getForwardedPointer(cellp);
+  }
+
+  if (!ShouldCheckMarkState(rt, thingp)) {
+    return true;
+  }
+
+  return (*thingp)->asTenured().isMarkedBlack();
 }
 
 template <typename S>
 struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
   template <typename T>
   S operator()(T* t, JSRuntime* rt, bool* rv) {
     *rv = IsMarkedInternal(rt, &t);
     return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
@@ -3211,16 +3329,32 @@ struct IsMarkedFunctor : public Identity
 
 template <typename T>
 bool js::gc::IsMarkedInternal(JSRuntime* rt, T* thingp) {
   bool rv = true;
   *thingp = DispatchTyped(IsMarkedFunctor<T>(), *thingp, rt, &rv);
   return rv;
 }
 
+template <typename S>
+struct IsMarkedBlackFunctor : public IdentityDefaultAdaptor<S> {
+  template <typename T>
+  S operator()(T* t, JSRuntime* rt, bool* rv) {
+    *rv = IsMarkedBlackInternal(rt, &t);
+    return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
+  }
+};
+
+template <typename T>
+bool js::gc::IsMarkedBlackInternal(JSRuntime* rt, T* thingp) {
+  bool rv = true;
+  *thingp = DispatchTyped(IsMarkedBlackFunctor<T>(), *thingp, rt, &rv);
+  return rv;
+}
+
 bool js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured) {
   MOZ_ASSERT(!IsInsideNursery(&tenured));
   MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
   return !tenured.isMarkedAny();
 }
 
 template <typename T>
 bool js::gc::IsAboutToBeFinalizedInternal(T** thingp) {
@@ -3283,18 +3417,19 @@ JS_PUBLIC_API bool EdgeNeedsSweepUnbarri
 // Instantiate a copy of the Tracing templates for each public GC type.
 #define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type)             \
   template JS_PUBLIC_API bool EdgeNeedsSweep<type>(JS::Heap<type>*); \
   template JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow<type>(type*);
 FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
 FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
     INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
 
-#define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS(type)          \
-  template bool IsMarkedInternal(JSRuntime* rt, type* thing); \
+#define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS(type)               \
+  template bool IsMarkedInternal(JSRuntime* rt, type* thing);      \
+  template bool IsMarkedBlackInternal(JSRuntime* rt, type* thing); \
   template bool IsAboutToBeFinalizedInternal(type* thingp);
 
 #define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND(_1, type, _2) \
   INSTANTIATE_INTERNAL_MARKING_FUNCTIONS(type*)
 
 JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND)
 FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS)
 
@@ -3371,30 +3506,63 @@ class UnmarkGrayTracer : public JS::Call
 
   void onChild(const JS::GCCellPtr& thing) override;
 
 #ifdef DEBUG
   TracerKind getTracerKind() const override { return TracerKind::UnmarkGray; }
 #endif
 };
 
+static bool IsCCTraceKindInternal(JS::TraceKind kind) {
+  switch (kind) {
+#define EXPAND_IS_CC_TRACE_KIND(name, _, addToCCKind) \
+  case JS::TraceKind::name:                           \
+    return addToCCKind;
+    JS_FOR_EACH_TRACEKIND(EXPAND_IS_CC_TRACE_KIND)
+#undef EXPAND_IS_CC_TRACE_KIND
+    default:
+      MOZ_CRASH("Unexpected trace kind");
+  }
+}
+
 void UnmarkGrayTracer::onChild(const JS::GCCellPtr& thing) {
   Cell* cell = thing.asCell();
 
-  // Cells in the nursery cannot be gray, and therefore must necessarily point
-  // to only black edges.
-  if (!cell->isTenured()) {
+  // Cells in the nursery cannot be gray, nor can certain kinds of tenured
+  // cells. These must necessarily point only to black edges.
+  if (!cell->isTenured() ||
+      !IsCCTraceKindInternal(cell->asTenured().getTraceKind())) {
 #ifdef DEBUG
+    MOZ_ASSERT(!cell->isMarkedGray());
     AssertNonGrayTracer nongray(runtime());
     TraceChildren(&nongray, cell, thing.kind());
 #endif
     return;
   }
 
   TenuredCell& tenured = cell->asTenured();
+
+  // If the cell is in a zone that we're currently marking gray, then it's
+  // possible that it is currently white but will end up gray. To handle this
+  // case, push any cells in zones that are currently being marked onto the
+  // mark stack, where they will eventually be marked black.
+  Zone* zone = tenured.zone();
+  if (zone->needsIncrementalBarrier()) {
+    if (!cell->isMarkedBlack()) {
+      Cell* tmp = cell;
+      TraceManuallyBarrieredGenericPointerEdge(zone->barrierTracer(), &tmp,
+                                               "read barrier");
+      MOZ_ASSERT(tmp == cell);
+      unmarkedAny = true;
+    }
+    return;
+  }
+
+  MOZ_ASSERT(!zone->isGCMarkingBlackAndGray());
   if (!tenured.isMarkedGray()) {
     return;
   }
 
   tenured.markBlack();
   unmarkedAny = true;
 
   if (!stack.append(thing)) {
@@ -3425,16 +3593,17 @@ bool js::IsUnmarkGrayTracer(JSTracer* tr
   return trc->isCallbackTracer() &&
          trc->asCallbackTracer()->getTracerKind() ==
              JS::CallbackTracer::TracerKind::UnmarkGray;
 }
 #endif
 
 static bool UnmarkGrayGCThing(JSRuntime* rt, JS::GCCellPtr thing) {
   MOZ_ASSERT(thing);
+  MOZ_ASSERT(thing.asCell()->isMarkedGray());
 
   // Gray cell unmarking can occur at different points between recording and
   // replay, so disallow recorded events from occurring in the tracer.
   mozilla::recordreplay::AutoDisallowThreadEvents d;
 
   UnmarkGrayTracer unmarker(rt);
   gcstats::AutoPhase innerPhase(rt->gc.stats(),
                                 gcstats::PhaseKind::UNMARK_GRAY);
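
To make the transitive unmark-gray pass above easier to follow, here is a minimal self-contained sketch of the traversal that UnmarkGrayTracer::onChild and UnmarkGrayGCThing implement together. The ToyCell type, its color field and its edge list are illustrative assumptions rather than SpiderMonkey's real Cell representation, and the sketch deliberately ignores the nursery, non-CC trace kinds and mid-marking zones handled in the hunk above.

#include <vector>

enum class Color { White, Gray, Black };

struct ToyCell {
  Color color = Color::White;
  std::vector<ToyCell*> edges;  // outgoing references
};

// Turn everything gray reachable from |root| black, using an explicit stack
// just as the real tracer does.
static bool UnmarkGrayToy(ToyCell* root) {
  bool unmarkedAny = false;
  std::vector<ToyCell*> stack{root};
  while (!stack.empty()) {
    ToyCell* cell = stack.back();
    stack.pop_back();
    if (cell->color != Color::Gray) {
      continue;  // unmarked or already black: nothing to do
    }
    cell->color = Color::Black;  // cf. tenured.markBlack()
    unmarkedAny = true;
    for (ToyCell* child : cell->edges) {
      stack.push_back(child);  // cf. stack.append(thing)
    }
  }
  return unmarkedAny;
}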
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -63,37 +63,55 @@ void PushArena(GCMarker* gcmarker, Arena
 // separate implementations.
 
 template <typename T>
 bool IsMarkedInternal(JSRuntime* rt, T* thing);
 template <typename T>
 bool IsMarkedInternal(JSRuntime* rt, T** thing);
 
 template <typename T>
+bool IsMarkedBlackInternal(JSRuntime* rt, T* thing);
+template <typename T>
+bool IsMarkedBlackInternal(JSRuntime* rt, T** thing);
+
+template <typename T>
 bool IsAboutToBeFinalizedInternal(T* thingp);
 template <typename T>
 bool IsAboutToBeFinalizedInternal(T** thingp);
 
-// Report whether a thing has been marked.  Things which are in zones that are
-// not currently being collected or are owned by another runtime are always
-// reported as being marked.
+// Report whether a GC thing has been marked with any color. Things which are in
+// zones that are not currently being collected or are owned by another runtime
+// are always reported as being marked.
 template <typename T>
 inline bool IsMarkedUnbarriered(JSRuntime* rt, T* thingp) {
   return IsMarkedInternal(rt, ConvertToBase(thingp));
 }
 
-// Report whether a thing has been marked.  Things which are in zones that are
-// not currently being collected or are owned by another runtime are always
-// reported as being marked.
+// Report whether a GC thing has been marked with any color. Things which are in
+// zones that are not currently being collected or are owned by another runtime
+// are always reported as being marked.
 template <typename T>
 inline bool IsMarked(JSRuntime* rt, WriteBarrieredBase<T>* thingp) {
   return IsMarkedInternal(rt,
                           ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
+// Report whether a GC thing has been marked black.
+template <typename T>
+inline bool IsMarkedBlackUnbarriered(JSRuntime* rt, T* thingp) {
+  return IsMarkedBlackInternal(rt, ConvertToBase(thingp));
+}
+
+// Report whether a GC thing has been marked black.
+template <typename T>
+inline bool IsMarkedBlack(JSRuntime* rt, WriteBarrieredBase<T>* thingp) {
+  return IsMarkedBlackInternal(
+      rt, ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+}
+
 template <typename T>
 inline bool IsAboutToBeFinalizedUnbarriered(T* thingp) {
   return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
 }
 
 template <typename T>
 inline bool IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
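
A rough model of the distinction these declarations draw between "marked with any color" and "marked black", assuming the usual unmarked < gray < black ordering; the MarkState enum below is a stand-in, not the engine's actual mark-bit layout.

enum class MarkState { Unmarked, Gray, Black };

// IsMarked* is true for gray or black; IsMarkedBlack* only for black.
static bool IsMarkedAnyColor(MarkState s) { return s != MarkState::Unmarked; }
static bool IsMarkedBlackToy(MarkState s) { return s == MarkState::Black; }

Code that must not let gray things escape to active JS typically wants the stricter black predicate, while ordinary weak-reference sweeping only needs the any-color one.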
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -545,17 +545,17 @@ inline void BufferGrayRootsTracer::buffe
     if (!zone->gcGrayRoots().append(tenured)) {
       bufferingGrayRootsFailed = true;
     }
   }
 }
 
 void GCRuntime::markBufferedGrayRoots(JS::Zone* zone) {
   MOZ_ASSERT(grayBufferState == GrayBufferState::Okay);
-  MOZ_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting());
+  MOZ_ASSERT(zone->isGCMarkingBlackAndGray() || zone->isGCCompacting());
 
   auto& roots = zone->gcGrayRoots();
   if (roots.empty()) {
     return;
   }
 
   for (size_t i = 0; i < roots.length(); i++) {
     Cell* cell = roots[i];
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -976,17 +976,21 @@ void Statistics::beginGC(JSGCInvocationK
 void Statistics::endGC() {
   TimeDuration sccTotal, sccLongest;
   sccDurations(&sccTotal, &sccLongest);
 
   runtime->addTelemetry(JS_TELEMETRY_GC_IS_ZONE_GC,
                         !zoneStats.isFullCollection());
   TimeDuration markTotal = SumPhase(PhaseKind::MARK, phaseTimes);
   TimeDuration markRootsTotal = SumPhase(PhaseKind::MARK_ROOTS, phaseTimes);
-  runtime->addTelemetry(JS_TELEMETRY_GC_MARK_MS, t(markTotal));
+  double markTime = t(markTotal);
+  size_t markCount = runtime->gc.marker.getMarkCount();
+  // Guard against division by zero for a degenerate zero-length mark phase.
+  double markRate = markTime > 0.0 ? markCount / markTime : 0.0;
+  runtime->addTelemetry(JS_TELEMETRY_GC_MARK_MS, markTime);
+  runtime->addTelemetry(JS_TELEMETRY_GC_MARK_RATE, markRate);
   runtime->addTelemetry(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[Phase::SWEEP]));
   if (runtime->gc.isCompactingGc()) {
     runtime->addTelemetry(JS_TELEMETRY_GC_COMPACT_MS,
                           t(phaseTimes[Phase::COMPACT]));
   }
   runtime->addTelemetry(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(markRootsTotal));
   runtime->addTelemetry(JS_TELEMETRY_GC_MARK_GRAY_MS,
                         t(phaseTimes[Phase::SWEEP_MARK_GRAY]));
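
The mark-rate telemetry added above is simply cells marked per millisecond of mark time. A standalone restatement of that computation, with illustrative names and the division-by-zero guard made explicit:

#include <cstddef>

// Cells marked per millisecond; 0.0 for a degenerate zero-length mark phase.
static double MarkRate(size_t cellsMarked, double markMillis) {
  return markMillis > 0.0 ? double(cellsMarked) / markMillis : 0.0;
}

// For example, MarkRate(500000, 25.0) == 20000.0 cells per millisecond.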
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -175,19 +175,16 @@ static VerifyNode* NextNode(VerifyNode* 
 }
 
 void gc::GCRuntime::startVerifyPreBarriers() {
   if (verifyPreData || isIncrementalGCInProgress()) {
     return;
   }
 
   JSContext* cx = rt->mainContextFromOwnThread();
-  if (temporaryAbortIfWasmGc(cx)) {
-    return;
-  }
 
   if (IsIncrementalGCUnsafe(rt) != AbortReason::None ||
       rt->hasHelperThreadZones()) {
     return;
   }
 
   number++;
 
@@ -403,19 +400,16 @@ void gc::GCRuntime::verifyPreBarriers() 
   if (verifyPreData) {
     endVerifyPreBarriers();
   } else {
     startVerifyPreBarriers();
   }
 }
 
 void gc::VerifyBarriers(JSRuntime* rt, VerifierType type) {
-  if (GCRuntime::temporaryAbortIfWasmGc(rt->mainContextFromOwnThread())) {
-    return;
-  }
   if (type == PreBarrierVerifier) {
     rt->gc.verifyPreBarriers();
   }
 }
 
 void gc::GCRuntime::maybeVerifyPreBarriers(bool always) {
   if (!hasZealMode(ZealMode::VerifierPre)) {
     return;
@@ -704,18 +698,22 @@ void CheckGrayMarkingTracer::checkCell(C
     failures++;
 
     fprintf(stderr, "Found black to gray edge to ");
     dumpCellInfo(cell);
     fprintf(stderr, "\n");
     dumpCellPath();
 
 #ifdef DEBUG
+    if (parent->is<JSObject>()) {
+      fprintf(stderr, "\nSource: ");
+      DumpObject(parent->as<JSObject>(), stderr);
+    }
     if (cell->is<JSObject>()) {
-      fprintf(stderr, "\n");
+      fprintf(stderr, "\nTarget: ");
       DumpObject(cell->as<JSObject>(), stderr);
     }
 #endif
   }
 }
 
 bool CheckGrayMarkingTracer::check(AutoTraceSession& session) {
   if (!traceHeap(session)) {
@@ -748,44 +746,38 @@ static Zone* GetCellZone(Cell* cell) {
 }
 
 static JSObject* MaybeGetDelegate(Cell* cell) {
   if (!cell->is<JSObject>()) {
     return nullptr;
   }
 
   JSObject* object = cell->as<JSObject>();
-  JSWeakmapKeyDelegateOp op = object->getClass()->extWeakmapKeyDelegateOp();
-  if (!op) {
-    return nullptr;
-  }
-
-  JS::AutoSuppressGCAnalysis nogc;  // Calling the delegate op cannot GC.
-  return op(object);
+  return js::UncheckedUnwrapWithoutExpose(object);
 }
 
 bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key,
                                       Cell* value) {
   DebugOnly<Zone*> zone = map->zone();
   MOZ_ASSERT(zone->isGCMarking());
 
   JSObject* object = map->memberOf;
   MOZ_ASSERT_IF(object, object->zone() == zone);
 
   // Debugger weak maps can have keys in different zones.
   Zone* keyZone = GetCellZone(key);
   MOZ_ASSERT_IF(!map->allowKeysInOtherZones(),
                 keyZone == zone || keyZone->isAtomsZone());
 
-  DebugOnly<Zone*> valueZone = GetCellZone(value);
+  Zone* valueZone = GetCellZone(value);
   MOZ_ASSERT(valueZone == zone || valueZone->isAtomsZone());
 
-  // We may not know the color of the map, but we know that it's
-  // alive so it must at least be marked gray.
-  CellColor mapColor = object ? GetCellColor(object) : CellColor::Gray;
+  CellColor mapColor = map->markColor == MarkColor::Black ? CellColor::Black
+                                                          : CellColor::Gray;
+  MOZ_ASSERT_IF(object, GetCellColor(object) == mapColor);
 
   CellColor keyColor = GetCellColor(key);
   CellColor valueColor =
       valueZone->isGCMarking() ? GetCellColor(value) : CellColor::Black;
 
   if (valueColor < Min(mapColor, keyColor)) {
     fprintf(stderr, "WeakMap value is less marked than map and key\n");
     fprintf(stderr, "(map %p is %s, key %p is %s, value %p is %s)\n", map,
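
The failure test at the end of CheckWeakMapEntryMarking encodes an ordering invariant: a weakmap value must be marked at least as strongly as the lesser of the map's and the key's colors. A self-contained restatement, with a toy enum standing in for the real CellColor:

#include <algorithm>

enum CellColorToy { White = 0, Gray = 1, Black = 2 };  // ordered by markedness

static bool WeakMapEntryOk(CellColorToy map, CellColorToy key,
                           CellColorToy value) {
  // Mirrors the `valueColor < Min(mapColor, keyColor)` check above.
  return value >= std::min(map, key);
}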
--- a/js/src/gc/WeakMap-inl.h
+++ b/js/src/gc/WeakMap-inl.h
@@ -13,33 +13,49 @@
 #include "vm/JSContext.h"
 
 namespace js {
 
 template <typename T>
 static T extractUnbarriered(const WriteBarrieredBase<T>& v) {
   return v.get();
 }
+
 template <typename T>
 static T* extractUnbarriered(T* v) {
   return v;
 }
 
+inline /* static */ JSObject* WeakMapBase::getDelegate(JSObject* key) {
+  return UncheckedUnwrapWithoutExpose(key);
+}
+
+inline /* static */ JSObject* WeakMapBase::getDelegate(JSScript* script) {
+  return nullptr;
+}
+
+inline /* static */ JSObject* WeakMapBase::getDelegate(LazyScript* script) {
+  return nullptr;
+}
+
 template <class K, class V>
 WeakMap<K, V>::WeakMap(JSContext* cx, JSObject* memOf)
     : Base(cx->zone()), WeakMapBase(memOf, cx->zone()) {
   using ElemType = typename K::ElementType;
   using NonPtrType = typename mozilla::RemovePointer<ElemType>::Type;
   // The object's TraceKind needs to be added to CC graph if this object is
   // used as a WeakMap key. See the comments for IsCCTraceKind for details.
   static_assert(JS::IsCCTraceKind(NonPtrType::TraceKind),
                 "Object's TraceKind should be added to CC graph.");
 
   zone()->gcWeakMapList().insertFront(this);
-  marked = JS::IsIncrementalGCInProgress(TlsContext.get());
+  if (zone()->wasGCStarted()) {
+    marked = true;
+    markColor = gc::MarkColor::Black;
+  }
 }
 
 // Trace a WeakMap entry based on 'markedCell' getting marked, where 'origKey'
 // is the key in the weakmap. These will probably be the same, but can be
 // different, e.g. when markedCell is a delegate for origKey.
 //
 // This implementation does not use 'markedCell'; it looks up origKey and checks
 // the mark bits on everything it cares about, one of which will be
@@ -53,36 +69,38 @@ void WeakMap<K, V>::markEntry(GCMarker* 
   // Lookup that can't be constructed from a Cell*. The WeakKeyTable
   // mechanism is indexed with a GCCellPtr, so that won't work.
   Ptr p = Base::lookup(static_cast<Lookup>(origKey.asCell()));
   MOZ_ASSERT(p.found());
 
   K key(p->key());
   MOZ_ASSERT((markedCell == extractUnbarriered(key)) ||
              (markedCell == getDelegate(key)));
-  if (gc::IsMarked(marker->runtime(), &key)) {
+  if (marker->isMarked(&key)) {
     TraceEdge(marker, &p->value(), "ephemeron value");
-  } else if (keyNeedsMark(key)) {
+  } else if (keyNeedsMark(marker, key)) {
     TraceEdge(marker, &p->value(), "WeakMap ephemeron value");
     TraceEdge(marker, &key, "proxy-preserved WeakMap ephemeron key");
     MOZ_ASSERT(key == p->key());  // No moving
   }
   key.unsafeSet(nullptr);  // Prevent destructor from running barriers.
 }
 
 template <class K, class V>
 void WeakMap<K, V>::trace(JSTracer* trc) {
   MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), isInList());
 
   TraceNullableEdge(trc, &memberOf, "WeakMap owner");
 
   if (trc->isMarkingTracer()) {
     MOZ_ASSERT(trc->weakMapAction() == ExpandWeakMaps);
+    auto marker = GCMarker::fromTracer(trc);
     marked = true;
-    (void)markIteratively(GCMarker::fromTracer(trc));
+    markColor = marker->markColor();
+    (void)markIteratively(marker);
     return;
   }
 
   if (trc->weakMapAction() == DoNotTraceWeakMaps) {
     return;
   }
 
   // Trace keys only if weakMapAction() says to.
@@ -117,31 +135,35 @@ template <class K, class V>
       marker->abortLinearWeakMarking();
     }
   }
 }
 
 template <class K, class V>
 bool WeakMap<K, V>::markIteratively(GCMarker* marker) {
   MOZ_ASSERT(marked);
+  if (marker->markColor() == gc::MarkColor::Black &&
+      markColor == gc::MarkColor::Gray) {
+    return false;
+  }
 
   bool markedAny = false;
 
   for (Enum e(*this); !e.empty(); e.popFront()) {
     // If the entry is live, ensure its key and value are marked.
-    bool keyIsMarked = gc::IsMarked(marker->runtime(), &e.front().mutableKey());
-    if (!keyIsMarked && keyNeedsMark(e.front().key())) {
+    bool keyIsMarked = marker->isMarked(&e.front().mutableKey());
+    if (!keyIsMarked && keyNeedsMark(marker, e.front().key())) {
       TraceEdge(marker, &e.front().mutableKey(),
                 "proxy-preserved WeakMap entry key");
       keyIsMarked = true;
       markedAny = true;
     }
 
     if (keyIsMarked) {
-      if (!gc::IsMarked(marker->runtime(), &e.front().value())) {
+      if (!marker->isMarked(&e.front().value())) {
         TraceEdge(marker, &e.front().value(), "WeakMap entry value");
         markedAny = true;
       }
     } else if (marker->isWeakMarkingTracer()) {
       // Entry is not yet known to be live. Record this weakmap and
       // the lookup key in the list of weak keys. Also record the
       // delegate, if any, because marking the delegate also marks
       // the entry.
@@ -153,61 +175,34 @@ bool WeakMap<K, V>::markIteratively(GCMa
       }
     }
   }
 
   return markedAny;
 }
 
 template <class K, class V>
-inline JSObject* WeakMap<K, V>::getDelegate(JSObject* key) const {
-  JS::AutoSuppressGCAnalysis nogc;
-
-  JSWeakmapKeyDelegateOp op = key->getClass()->extWeakmapKeyDelegateOp();
-  if (!op) {
-    return nullptr;
-  }
-
-  JSObject* obj = op(key);
-  if (!obj) {
-    return nullptr;
-  }
-
-  MOZ_ASSERT(obj->runtimeFromMainThread() == zone()->runtimeFromMainThread());
-  return obj;
-}
-
-template <class K, class V>
-inline JSObject* WeakMap<K, V>::getDelegate(JSScript* script) const {
-  return nullptr;
-}
-
-template <class K, class V>
-inline JSObject* WeakMap<K, V>::getDelegate(LazyScript* script) const {
-  return nullptr;
-}
-
-template <class K, class V>
-inline bool WeakMap<K, V>::keyNeedsMark(JSObject* key) const {
+inline bool WeakMap<K, V>::keyNeedsMark(GCMarker* marker, JSObject* key) const {
   JSObject* delegate = getDelegate(key);
   /*
    * Check if the delegate is marked with any color to properly handle
    * gray marking when the key's delegate is black and the map is gray.
    */
-  return delegate &&
-         gc::IsMarkedUnbarriered(zone()->runtimeFromMainThread(), &delegate);
+  return delegate && marker->isMarkedUnbarriered(&delegate);
 }
 
 template <class K, class V>
-inline bool WeakMap<K, V>::keyNeedsMark(JSScript* script) const {
+inline bool WeakMap<K, V>::keyNeedsMark(GCMarker* marker,
+                                        JSScript* script) const {
   return false;
 }
 
 template <class K, class V>
-inline bool WeakMap<K, V>::keyNeedsMark(LazyScript* script) const {
+inline bool WeakMap<K, V>::keyNeedsMark(GCMarker* marker,
+                                        LazyScript* script) const {
   return false;
 }
 
 template <class K, class V>
 void WeakMap<K, V>::sweep() {
   /* Remove all entries whose keys remain unmarked. */
   for (Enum e(*this); !e.empty(); e.popFront()) {
     if (gc::IsAboutToBeFinalized(&e.front().mutableKey())) {
@@ -236,16 +231,21 @@ void WeakMap<K, V>::traceMappings(WeakMa
 }
 
 #if DEBUG
 template <class K, class V>
 void WeakMap<K, V>::assertEntriesNotAboutToBeFinalized() {
   for (Range r = Base::all(); !r.empty(); r.popFront()) {
     K k(r.front().key());
     MOZ_ASSERT(!gc::IsAboutToBeFinalized(&k));
+    JSObject* delegate = getDelegate(k);
+    if (delegate) {
+      MOZ_ASSERT(!gc::IsAboutToBeFinalizedUnbarriered(&delegate),
+                 "weakmap marking depends on a key tracing its delegate");
+    }
     MOZ_ASSERT(!gc::IsAboutToBeFinalized(&r.front().value()));
     MOZ_ASSERT(k == r.front().key());
   }
 }
 #endif
 
 #ifdef JS_GC_ZEAL
 template <class K, class V>
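
markIteratively() above is one step of an ephemeron fixpoint: an entry's value becomes reachable only once its key (or the key's delegate) is marked, so the collector keeps re-scanning weakmaps until a scan marks nothing new. A toy version of that single step, with every type a simplified stand-in:

#include <vector>

struct ToyEntry {
  bool keyMarked = false;
  bool valueMarked = false;
};

// Returns true if any value was newly marked, i.e. another pass is needed.
static bool MarkWeakMapIteratively(std::vector<ToyEntry>& entries) {
  bool markedAny = false;
  for (ToyEntry& e : entries) {
    if (e.keyMarked && !e.valueMarked) {
      e.valueMarked = true;  // cf. TraceEdge(marker, &value, ...)
      markedAny = true;
    }
  }
  return markedAny;
}

// The caller loops: while (MarkWeakMapIteratively(map)) { /* drain stack */ }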
--- a/js/src/gc/WeakMap.cpp
+++ b/js/src/gc/WeakMap.cpp
@@ -18,17 +18,17 @@
 #include "vm/JSObject.h"
 
 #include "vm/JSObject-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 WeakMapBase::WeakMapBase(JSObject* memOf, Zone* zone)
-    : memberOf(memOf), zone_(zone), marked(false) {
+    : memberOf(memOf), zone_(zone), marked(false), markColor(MarkColor::Black) {
   MOZ_ASSERT_IF(memberOf, memberOf->compartment()->zone() == zone);
 }
 
 WeakMapBase::~WeakMapBase() {
   MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadCanAccessZone(zone_));
 }
 
 void WeakMapBase::unmarkZone(JS::Zone* zone) {
--- a/js/src/gc/WeakMap.h
+++ b/js/src/gc/WeakMap.h
@@ -95,16 +95,20 @@ class WeakMapBase : public mozilla::Link
 
   // Restore information about which weak maps are marked for many zones.
   static void restoreMarkedWeakMaps(WeakMapSet& markedWeakMaps);
 
 #if defined(JS_GC_ZEAL) || defined(DEBUG)
   static bool checkMarkingForZone(JS::Zone* zone);
 #endif
 
+  static JSObject* getDelegate(JSObject* key);
+  static JSObject* getDelegate(JSScript* script);
+  static JSObject* getDelegate(LazyScript* script);
+
  protected:
   // Instance member functions called by the above. Instantiations of WeakMap
   // override these with definitions appropriate for their Key and Value types.
   virtual void trace(JSTracer* tracer) = 0;
   virtual bool findZoneEdges() = 0;
   virtual void sweep() = 0;
   virtual void traceMappings(WeakMapTracer* tracer) = 0;
   virtual void clearAndCompact() = 0;
@@ -124,18 +128,20 @@ class WeakMapBase : public mozilla::Link
 #endif
 
   // Object that this weak map is part of, if any.
   GCPtrObject memberOf;
 
   // Zone containing this weak map.
   JS::Zone* zone_;
 
-  // Whether this object has been traced during garbage collection.
+  // Whether this object has been marked during garbage collection, and if so
+  // which color it was marked with.
   bool marked;
+  gc::MarkColor markColor;
 };
 
 template <class Key, class Value>
 class WeakMap
     : public HashMap<Key, Value, MovableCellHasher<Key>, ZoneAllocPolicy>,
       public WeakMapBase {
  public:
   typedef HashMap<Key, Value, MovableCellHasher<Key>, ZoneAllocPolicy> Base;
@@ -176,31 +182,36 @@ class WeakMap
   void trace(JSTracer* trc) override;
 
  protected:
   static void addWeakEntry(GCMarker* marker, JS::GCCellPtr key,
                            const gc::WeakMarkable& markable);
 
   bool markIteratively(GCMarker* marker) override;
 
-  JSObject* getDelegate(JSObject* key) const;
-  JSObject* getDelegate(JSScript* script) const;
-  JSObject* getDelegate(LazyScript* script) const;
-
+  /**
+   * If a wrapper is used as a key in a weakmap, the garbage collector should
+   * keep that object around longer than it otherwise would. We want to avoid
+   * collecting the wrapper (and removing the weakmap entry) as long as the
+   * wrapped object is alive (because the object can be rewrapped and looked up
+   * again). As long as the wrapper is used as a weakmap key, it will not be
+   * collected (and will remain in the weakmap) until the wrapped object is
+   * collected.
+   */
  private:
   void exposeGCThingToActiveJS(const JS::Value& v) const {
     JS::ExposeValueToActiveJS(v);
   }
   void exposeGCThingToActiveJS(JSObject* obj) const {
     JS::ExposeObjectToActiveJS(obj);
   }
 
-  bool keyNeedsMark(JSObject* key) const;
-  bool keyNeedsMark(JSScript* script) const;
-  bool keyNeedsMark(LazyScript* script) const;
+  bool keyNeedsMark(GCMarker* marker, JSObject* key) const;
+  bool keyNeedsMark(GCMarker* marker, JSScript* script) const;
+  bool keyNeedsMark(GCMarker* marker, LazyScript* script) const;
 
   bool findZoneEdges() override {
     // This is overridden by ObjectValueMap.
     return true;
   }
 
   void sweep() override;
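
The delegate rule documented in the comment above boils down to: if the object behind a wrapper key is marked, the wrapper key, and hence its weakmap entry, must be kept alive too. A small sketch with purely illustrative names:

struct ToyKey {
  bool marked = false;
  ToyKey* delegate = nullptr;  // the wrapped object, if the key is a wrapper
};

// Mirrors keyNeedsMark(): a key whose delegate is already marked must itself
// be marked, keeping its weakmap entry alive.
static bool KeyNeedsMark(const ToyKey& key) {
  return key.delegate && key.delegate->marked;
}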
 
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-addmarkobservers.js
+++ /dev/null
@@ -1,23 +0,0 @@
-grayRoot().x = Object.create(null);
-
-try {
-    wasmEvalText(`
-        (module
-            (import "global" "func" (result i32))
-            (func (export "func_0") (result i32)
-             call 0 ;; calls the import, which is func #0
-            )
-        )
-    `, {
-        global: {
-            func: function() {
-                addMarkObservers([grayRoot().x]);
-                getMarks();
-            }
-        }
-    }).exports.func_0();
-} catch(e) {
-    // If there's an error, it must be that the addMarkObservers API is
-    // temporarily disabled because of wasm gc.
-    assertEq(/temporarily unavailable/.test(e.message), true);
-}
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-debugger-findscripts.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// |jit-test| skip-if: !wasmGcEnabled()
-
-wasmEvalText(`
-    (module
-        (import "global" "func" (result i32))
-        (func (export "func_0") (result i32)
-         call 0 ;; calls the import, which is func #0
-        )
-    )
-`, {
-    global: {
-        func() {
-          var g = newGlobal();
-          var dbg = new Debugger(g);
-          var caught = false;
-          try {
-            dbg.findScripts().filter(isWasm(g, isValidWasmURL, 2));
-          } catch(e) {
-              caught = true;
-              assertEq(/temporarily unavailable/.test(e.toString()), true);
-          }
-          // When this assertion fails, it's time to remove the restriction in
-          // Debugger.findScripts.
-          assertEq(caught, true);
-        }
-    }
-}).exports.func_0();;
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-debuggermemory-takecensus.js
+++ /dev/null
@@ -1,25 +0,0 @@
-let g = newGlobal();
-
-let { exports } = wasmEvalText(`
-    (module
-        (import "global" "func" (result i32))
-        (func (export "func_0") (result i32)
-         call 0 ;; calls the import, which is func #0
-        )
-    )
-`, {
-    global: {
-        func() {
-            let dbg = new Debugger(g);
-            dbg.memory.takeCensus({ breakdown: { by: 'objectClass' } });
-        }
-    }
-});
-
-try {
-    exports.func_0();
-} catch(e) {
-    // If there's an error, it must be that the addMarkObservers API is
-    // temporarily disabled because of wasm gc.
-    assertEq(/temporarily unavailable/.test(e.message), true);
-}
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-gczeal.js
+++ /dev/null
@@ -1,11 +0,0 @@
-wasmEvalText(`(module
-    (import "global" "func" (result i32))
-    (func (export "func_0") (result i32)
-     call 0 ;; calls the import, which is func #0
-    )
-)`, { global: {
-    func() {
-        gczeal(7,6);
-        gczeal();
-    }
-} }).exports.func_0();
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-selectforgc.js
+++ /dev/null
@@ -1,22 +0,0 @@
-let { exports } = wasmEvalText(`
-    (module
-        (import "global" "func" (result i32))
-        (func (export "func_0") (result i32)
-         call 0 ;; calls the import, which is func #0
-        )
-    )
-`, {
-    global: {
-        func() {
-            selectforgc();
-        }
-    }
-});
-
-try {
-    exports.func_0();
-} catch(e) {
-    // If there's an error, it must be that the addMarkObservers API is
-    // temporarily disabled because of wasm gc.
-    assertEq(/temporarily unavailable/.test(e.message), true);
-}
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-verifyprebarriers-2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-verifyprebarriers();
-
-wasmEvalText(`
-    (module
-        (import "global" "func" (result i32))
-        (func (export "func_0") (result i32)
-         call 0
-        )
-    )
-`, {
-    global: {
-        func() {
-            verifyprebarriers();
-        }
-    }
-}).exports.func_0();
deleted file mode 100644
--- a/js/src/jit-test/tests/wasm/gc/block-verifyprebarriers.js
+++ /dev/null
@@ -1,14 +0,0 @@
-wasmEvalText(`
-    (module
-        (import "global" "func" (result i32))
-        (func (export "func_0") (result i32)
-         call 0
-        )
-    )
-`, {
-    global: {
-        func() {
-            verifyprebarriers();
-        }
-    }
-}).exports.func_0();
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/gc/stackmaps1.js
@@ -0,0 +1,89 @@
+// |jit-test| skip-if: !wasmGcEnabled()
+
+// Tests wasm frame tracing.  Only tests direct and indirect call chains
+// in wasm that lead to JS allocation.  Does not test any timeout- or
+// interrupt-related aspects.  The structure is
+//
+//   test top level: call fn2
+//   fn2: call fn1
+//   fn1: do 60k times { call-direct fn0; call-indirect fn0; }
+//   fn0: call out to JS that does allocation
+//
+// Eventually fn0 will trigger GC and we expect the chain of resulting frames
+// to be traced correctly.  fn2, fn1 and fn0 have some ref-typed args, so
+// there will be traceable stack words to follow, in the sequence of frames.
+
+const {Module,Instance} = WebAssembly;
+
+let t =
+  `(module
+     (gc_feature_opt_in 2)
+     (import $check3 "" "check3" (func (param anyref) (param anyref) (param anyref)))
+     (type $typeOfFn0
+           (func (result i32) (param i32) (param anyref) (param i32)
+                              (param anyref) (param anyref) (param i32)))
+     (table 1 1 anyfunc)
+     (elem (i32.const 0) $fn0)
+
+     (import $alloc "" "alloc" (func (result anyref)))
+
+     ;; -- fn 0
+     (func $fn0 (export "fn0")
+                (result i32) (param $arg1 i32) (param $arg2 anyref) (param $arg3 i32)
+                             (param $arg4 anyref) (param $arg5 anyref) (param $arg6 i32)
+       (call $alloc)
+       drop
+       (i32.add (i32.add (get_local $arg1) (get_local $arg3)) (get_local $arg6))
+
+       ;; Poke the ref-typed arguments, to be sure that they got kept alive
+       ;; properly across any GC that the |alloc| call might have done.
+       (call $check3 (get_local $arg2) (get_local $arg4) (get_local $arg5))
+     )
+
+     ;; -- fn 1
+     (func $fn1 (export "fn1") (param $arg1 anyref) (result i32)
+       (local $i i32)
+
+       (loop i32
+         ;; call direct 0
+         (call $fn0 (i32.const 10) (get_local $arg1) (i32.const 12)
+                    (get_local $arg1) (get_local $arg1) (i32.const 15))
+
+         ;; call indirect 0
+         (call_indirect $typeOfFn0
+                    (i32.const 10) (get_local $arg1) (i32.const 12)
+                    (get_local $arg1) (get_local $arg1) (i32.const 15)
+                    (i32.const 0)) ;; table index
+
+         i32.add
+
+         ;; Do 60k iterations of this loop, to get a good amount of allocation
+         (set_local $i (i32.add (get_local $i) (i32.const 1)))
+         (br_if 0 (i32.lt_s (get_local $i) (i32.const 60000)))
+       )
+     )
+
+     ;; -- fn 2
+     (func $fn2 (export "fn2") (param $arg1 anyref) (result i32)
+       (call $fn1 (get_local $arg1))
+     )
+   )`;
+
+function Croissant(chocolate, number) {
+    this.chocolate = chocolate;
+    this.number = number;
+}
+
+function allocates() {
+    return new Croissant(true, 271828);
+}
+
+function check3(a1, a2, a3) {
+    assertEq(a1.number, 31415927);
+    assertEq(a2.number, 31415927);
+    assertEq(a3.number, 31415927);
+}
+
+let i = wasmEvalText(t, {"":{alloc: allocates, check3: check3}});
+
+print(i.exports.fn2( new Croissant(false, 31415927) ));
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/gc/stackmaps2.js
@@ -0,0 +1,134 @@
+// |jit-test| skip-if: !wasmGcEnabled()
+
+// Tests wasm frame tracing in the presence of interrupt handlers that perform
+// allocation.  The structure is
+//
+//   test top level: call fn2
+//   fn2: call fn1
+//   fn1: repeat { call-direct fn0; call-indirect fn0; }
+//   fn0: a 100-iteration loop that does nothing except waste time
+//
+// At the same time we are asynchronously running handler(), which does a lot
+// of allocation.  At some point that will trigger a GC.  Assuming that
+// handler() runs whilst fn0 is running (the most likely scenario, since fn0
+// consumes the majority of the wasm running time), the runtime will walk
+// the stack from the wasm exit frame, through fn0, fn1 and finally fn2.  As
+// with stackmaps1.js, there are some ref-typed args in use so as to provide
+// traceable stack slots to follow.
+//
+// The test runs until the loop in fn1 determines that handler() has allocated
+// enough memory to have caused at least three collections.  This helps keep
+// the test effective in the face of wide variations in the rate of progress
+// of handler()'s loop (e.g. x86+native is fast, arm64+simulator is slow).
+
+const {Module,Instance} = WebAssembly;
+
+const DEBUG = false;
+
+let t =
+  `(module
+     (gc_feature_opt_in 2)
+     (type $typeOfFn0
+           (func (result i32) (param i32) (param anyref) (param i32)
+                              (param anyref) (param anyref) (param i32)))
+     (table 1 1 anyfunc)
+     (elem (i32.const 0) $fn0)
+
+     (import $alloc "" "alloc" (func (result anyref)))
+     (import $quitp "" "quitp" (func (result i32)))
+     (import $check3 "" "check3" (func (param anyref) (param anyref) (param anyref)))
+
+     ;; -- fn 0
+     (func $fn0 (export "fn0")
+                (result i32) (param $arg1 i32) (param $arg2 anyref) (param $arg3 i32)
+                             (param $arg4 anyref) (param $arg5 anyref) (param $arg6 i32)
+       (local $i i32)
+
+       ;; spinloop to waste time
+       (loop
+         (set_local $i (i32.add (get_local $i) (i32.const 1)))
+         (br_if 0 (i32.lt_s (get_local $i) (i32.const 100)))
+       )
+
+       (i32.add (i32.add (get_local $arg1) (get_local $arg3)) (get_local $arg6))
+
+       ;; Poke the ref-typed arguments, to be sure that they got kept alive
+       ;; properly across any GC that might have happened.
+       (call $check3 (get_local $arg2) (get_local $arg4) (get_local $arg5))
+     )
+
+     ;; -- fn 1
+     (func $fn1 (export "fn1") (param $arg1 anyref) (result i32)
+       (loop i32
+         ;; call direct to $fn0
+         (call $fn0 (i32.const 10) (get_local $arg1) (i32.const 12)
+                    (get_local $arg1) (get_local $arg1) (i32.const 15))
+
+         ;; call indirect to table index 0, which is $fn0
+         (call_indirect $typeOfFn0
+                    (i32.const 10) (get_local $arg1) (i32.const 12)
+                    (get_local $arg1) (get_local $arg1) (i32.const 15)
+                    (i32.const 0)) ;; table index
+
+         i32.add
+
+         ;; Continue iterating until handler() has allocated enough
+         (br_if 0 (i32.eq (call $quitp) (i32.const 0)))
+       )
+     )
+
+     ;; -- fn 2
+     (func $fn2 (export "fn2") (param $arg1 anyref) (result i32)
+       (call $fn1 (get_local $arg1))
+     )
+   )`;
+
+function Croissant(chocolate, number) {
+    this.chocolate = chocolate;
+    this.number = number;
+}
+
+function allocates() {
+    return new Croissant(true, 271828);
+}
+
+let totAllocs = 0;
+
+function handler() {
+    if (DEBUG) {
+        print('XXXXXXXX icallback: START');
+    }
+    let q = allocates();
+    let sum = 0;
+    let iters = 15000;
+    for (let i = 0; i < iters; i++) {
+        let x = allocates();
+        // Without this hoop jumping to create an apparent use of |x|, Ion
+        // will remove the allocation call and make the test pointless.
+        if (x == q) { sum++; }
+    }
+    totAllocs += iters;
+    // Artificial use of |sum|.  See comment above.
+    if (sum == 133713371337) { print("unlikely!"); }
+    timeout(0.5, handler);
+    if (DEBUG) {
+        print('XXXXXXXX icallback: END');
+    }
+    return true;
+}
+
+function quitp() {
+    return totAllocs > 200000 ? 1 : 0;
+}
+
+function check3(a1, a2, a3) {
+    assertEq(a1.number, 31415927);
+    assertEq(a2.number, 31415927);
+    assertEq(a3.number, 31415927);
+}
+
+let i = wasmEvalText(t, {"":{alloc: allocates, quitp: quitp, check3: check3}});
+
+timeout(0.5, handler);
+print(i.exports.fn2( new Croissant(false, 31415927) ));
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/gc/stackmaps3.js
@@ -0,0 +1,204 @@
+// |jit-test| skip-if: !wasmGcEnabled()
+
+// Generates a bunch of numbers-on-the-heap, and tries to ensure that they are
+// held live -- at least for a short while -- only by references from the wasm
+// evaluation stack.  Then assembles them into a list and checks that the list
+// is as expected (and we don't segfault).  While all this is running we also
+// have a regular interrupt whose handler does a bunch of allocation, so as
+// to cause as much disruption as possible.
+
+// Note this makes an assumption about how the wasm compiler works.  There's
+// no particular reason that the wasm compiler needs to keep the results of
+// the $mkBoxedInt calls on the machine stack.  It could equally cache them in
+// registers or even reorder the call sequences so as to interleave
+// construction of the list elements with construction of the list itself.  It
+// just happens that our baseline compiler will behave as described.  That
+// said, it's hard to imagine how an implementation could complete
+// the list construction without having at least one root in a register or on
+// the stack, so the test still has value regardless of how the underlying
+// implementation works.
+
+const {Module,Instance} = WebAssembly;
+
+const DEBUG = false;
+
+let t =
+  `(module
+     (gc_feature_opt_in 2)
+     (import $mkCons "" "mkCons" (func (result anyref)
+                                       (param anyref) (param anyref)))
+     (import $mkBoxedInt "" "mkBoxedInt" (func (result anyref)))
+
+     (func $mkNil (result anyref)
+       ref.null
+     )
+
+     (func $mkConsIgnoringScalar (result anyref)
+              (param $hd anyref) (param i32) (param $tl anyref)
+        (get_local $hd)
+        (get_local $tl)
+        call $mkCons
+     )
+
+     (func $mkList (export "mkList") (result anyref)
+        call $mkList20
+     )
+
+     (func $mkList20 (result anyref)
+       ;; create 20 pointers to boxed ints on the stack, plus a few
+       ;; scalars for added confusion
+       (local $scalar99 i32)
+       (local $scalar97 i32)
+       (set_local $scalar99 (i32.const 99))
+       (set_local $scalar97 (i32.const 97))
+
+       call $mkBoxedInt
+       get_local $scalar99
+       call $mkBoxedInt
+       call $mkBoxedInt
+       get_local $scalar97
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkBoxedInt
+       call $mkNil
+       ;; Now we have (pointers to) 20 boxed ints and a NIL on the stack, and
+       ;; nothing else holding them live.  Build a list from the elements.
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkCons
+       call $mkConsIgnoringScalar
+       call $mkCons
+       call $mkConsIgnoringScalar
+     )
+   )`;
+
+let boxedIntCounter = 0;
+
+function BoxedInt() {
+    this.theInt = boxedIntCounter;
+    boxedIntCounter++;
+}
+
+function mkBoxedInt() {
+    return new BoxedInt();
+}
+
+function printBoxedInt(bi) {
+    print(bi.theInt);
+}
+
+function Cons(hd, tl) {
+    this.hd = hd;
+    this.tl = tl;
+}
+
+function mkCons(hd, tl) {
+    return new Cons(hd, tl);
+}
+
+function showList(list) {
+    print("[");
+    while (list) {
+        printBoxedInt(list.hd);
+        print(",");
+        list = list.tl;
+    }
+    print("]");
+}
+
+function checkList(list, expectedHdValue, expectedLength) {
+    while (list) {
+        if (expectedLength <= 0)
+            return false;
+        if (list.hd.theInt !== expectedHdValue) {
+            return false;
+        }
+        list = list.tl;
+        expectedHdValue++;
+        expectedLength--;
+    }
+    return expectedLength === 0;
+}
+
+let i = wasmEvalText(t, {"":{mkCons: mkCons, mkBoxedInt: mkBoxedInt}});
+
+
+function Croissant(chocolate) {
+    this.chocolate = chocolate;
+}
+
+function allocates() {
+    return new Croissant(true);
+}
+
+function handler() {
+    if (DEBUG) {
+        print('XXXXXXXX icallback: START');
+    }
+    let q = allocates();
+    let sum = 0;
+    for (let i = 0; i < 15000; i++) {
+        let x = allocates();
+        // Without this hoop jumping to create an apparent use of |x|, Ion
+        // will remove the allocation call and make the test pointless.
+        if (x == q) { sum++; }
+    }
+    // Artificial use of |sum|.  See comment above.
+    if (sum == 133713371337) { print("unlikely!"); }
+    timeout(1, handler);
+    if (DEBUG) {
+        print('XXXXXXXX icallback: END');
+    }
+    return true;
+}
+
+timeout(1, handler);
+
+for (let n = 0; n < 10000; n++) {
+    let listLowest = boxedIntCounter;
+
+    // Create the list in wasm land, possibly inducing GC on the way
+    let aList = i.exports.mkList();
+
+    // Check it is as we expect
+    let ok = checkList(aList, listLowest, 20/*expected length*/);
+    if (!ok) {
+        print("Failed on list: ");
+        showList(aList);
+    }
+    assertEq(ok, true);
+}
+
+// If we get here, the test finished successfully.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -13614,27 +13614,21 @@ void CodeGenerator::emitIonToWasmCallBas
       MOZ_CRASH("NullRef not expressible");
     case wasm::ExprType::Limit:
       MOZ_CRASH("Limit");
   }
 
   bool profilingEnabled = isProfilerInstrumentationEnabled();
   WasmInstanceObject* instObj = lir->mir()->instanceObject();
 
-  bool wasmGcEnabled = false;
-#ifdef ENABLE_WASM_GC
-  wasmGcEnabled = gen->options.wasmGcEnabled();
-#endif
-
   Register scratch = ToRegister(lir->temp());
 
   uint32_t callOffset;
   GenerateDirectCallFromJit(masm, funcExport, instObj->instance(), stackArgs,
-                            profilingEnabled, wasmGcEnabled, scratch,
-                            &callOffset);
+                            profilingEnabled, scratch, &callOffset);
 
   // Add the instance object to the constant pool, so it is transferred to
   // the owning IonScript and so that it gets traced as long as the IonScript
   // lives.
 
   uint32_t unused;
   masm.propagateOOM(graph.addConstantToPool(ObjectValue(*instObj), &unused));
 
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1290,16 +1290,20 @@ static void TraceJitActivation(JSTracer*
     // when a GC happens.
     activation->setCheckRegs(false);
   }
 #endif
 
   activation->traceRematerializedFrames(trc);
   activation->traceIonRecovery(trc);
 
+  // This is used for sanity checking continuity of the sequence of wasm stack
+  // maps as we unwind.  It has no functional purpose.
+  uintptr_t highestByteVisitedInPrevWasmFrame = 0;
+
   for (JitFrameIter frames(activation); !frames.done(); ++frames) {
     if (frames.isJSJit()) {
       const JSJitFrameIter& jitFrame = frames.asJSJit();
       switch (jitFrame.type()) {
         case FrameType::Exit:
           TraceJitExitFrame(trc, jitFrame);
           break;
         case FrameType::BaselineJS:
@@ -1326,19 +1330,26 @@ static void TraceJitActivation(JSTracer*
           // in the next iteration.
           break;
         case FrameType::JSJitToWasm:
           TraceJSJitToWasmFrame(trc, jitFrame);
           break;
         default:
           MOZ_CRASH("unexpected frame type");
       }
+      highestByteVisitedInPrevWasmFrame = 0; /* "unknown" */
     } else {
       MOZ_ASSERT(frames.isWasm());
-      frames.asWasm().instance()->trace(trc);
+      uint8_t* nextPC = frames.returnAddressToFp();
+      MOZ_ASSERT(nextPC != 0);
+      wasm::WasmFrameIter& wasmFrameIter = frames.asWasm();
+      wasm::Instance* instance = wasmFrameIter.instance();
+      instance->trace(trc);
+      highestByteVisitedInPrevWasmFrame = instance->traceFrame(
+          trc, wasmFrameIter, nextPC, highestByteVisitedInPrevWasmFrame);
     }
   }
 }
 
 void TraceJitActivations(JSContext* cx, JSTracer* trc) {
   for (JitActivationIterator activations(cx); !activations.done();
        ++activations) {
     TraceJitActivation(trc, activations->asJit());
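
The highestByteVisitedInPrevWasmFrame bookkeeping added to TraceJitActivation above sanity-checks that, as the walk unwinds, consecutive wasm frames' stack maps scan non-overlapping, ascending byte ranges. A rough model of that check follows; the exact comparison is an assumption here, and 0 means "unknown" just as in the patch:

#include <cassert>
#include <cstdint>

struct FrameScan {
  uintptr_t lowestByte;   // first stack byte this frame's map visits
  uintptr_t highestByte;  // last stack byte it visits
};

// |younger| was traced first; |older| is the next frame up the stack.
static void CheckScanContinuity(const FrameScan& younger,
                                const FrameScan& older) {
  if (younger.highestByte != 0) {  // 0 == "unknown", e.g. after a JS frame
    assert(older.lowestByte > younger.highestByte);
  }
}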
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -46,37 +46,42 @@ CodeOffset MacroAssembler::PushWithPatch
   return PushWithPatch(ImmWord(uintptr_t(imm.value)));
 }
 
 // ===============================================================
 // Simple call functions.
 
 void MacroAssembler::call(TrampolinePtr code) { call(ImmPtr(code.value)); }
 
-void MacroAssembler::call(const wasm::CallSiteDesc& desc, const Register reg) {
+CodeOffset MacroAssembler::call(const wasm::CallSiteDesc& desc,
+                                const Register reg) {
   CodeOffset l = call(reg);
   append(desc, l);
+  return l;
 }
 
-void MacroAssembler::call(const wasm::CallSiteDesc& desc, uint32_t funcIndex) {
+CodeOffset MacroAssembler::call(const wasm::CallSiteDesc& desc,
+                                uint32_t funcIndex) {
   CodeOffset l = callWithPatch();
   append(desc, l, funcIndex);
+  return l;
 }
 
 void MacroAssembler::call(const wasm::CallSiteDesc& desc, wasm::Trap trap) {
   CodeOffset l = callWithPatch();
   append(desc, l, trap);
 }
 
-void MacroAssembler::call(const wasm::CallSiteDesc& desc,
-                          wasm::SymbolicAddress imm) {
+CodeOffset MacroAssembler::call(const wasm::CallSiteDesc& desc,
+                                wasm::SymbolicAddress imm) {
   MOZ_ASSERT(wasm::NeedsBuiltinThunk(imm),
              "only for functions which may appear in profiler");
-  call(imm);
-  append(desc, CodeOffset(currentOffset()));
+  CodeOffset raOffset = call(imm);
+  append(desc, raOffset);
+  return raOffset;
 }
 
 // ===============================================================
 // ABI function calls.
 
 void MacroAssembler::passABIArg(Register reg) {
   passABIArg(MoveOperand(reg), MoveOp::GENERAL);
 }
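
The point of making these call() overloads return a CodeOffset is that the offset of the return point identifies the call site, so a caller can key per-call-site metadata, such as a wasm stack map, by it. A sketch of that usage pattern with toy types; StackMapToy and RegisterCallSite are not the real API:

#include <cstdint>
#include <map>

struct StackMapToy {};  // say, which stack words hold GC pointers

static std::map<uint32_t, StackMapToy> stackMapTable;

// Key the table by the return point's offset, i.e. the CodeOffset returned
// by the call() overloads above.
static void RegisterCallSite(uint32_t returnPointOffset, StackMapToy map) {
  stackMapTable[returnPointOffset] = map;
}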
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -3147,37 +3147,41 @@ void MacroAssembler::callWithABINoProfil
     branch32(Assembler::Equal, flagAddr, Imm32(0), &ok);
     assumeUnreachable("callWithABI: callee did not use AutoUnsafeCallWithABI");
     bind(&ok);
     pop(ReturnReg);
   }
 #endif
 }
 
-void MacroAssembler::callWithABI(wasm::BytecodeOffset bytecode,
-                                 wasm::SymbolicAddress imm,
-                                 MoveOp::Type result) {
+CodeOffset MacroAssembler::callWithABI(wasm::BytecodeOffset bytecode,
+                                       wasm::SymbolicAddress imm,
+                                       MoveOp::Type result) {
   MOZ_ASSERT(wasm::NeedsBuiltinThunk(imm));
 
   // We clobber WasmTlsReg below in the loadWasmTlsRegFromFrame(), but Ion
   // assumes it is non-volatile, so preserve it manually.
   Push(WasmTlsReg);
 
   uint32_t stackAdjust;
   callWithABIPre(&stackAdjust, /* callFromWasm = */ true);
 
   // The TLS register is used in builtin thunks and must be set, by ABI:
   // reload it after passing arguments, which might have used it at spill
   // points when placing arguments.
   loadWasmTlsRegFromFrame();
 
-  call(wasm::CallSiteDesc(bytecode.offset(), wasm::CallSite::Symbolic), imm);
+  CodeOffset raOffset = call(
+      wasm::CallSiteDesc(bytecode.offset(), wasm::CallSite::Symbolic), imm);
+
   callWithABIPost(stackAdjust, result, /* callFromWasm = */ true);
 
   Pop(WasmTlsReg);
+
+  return raOffset;
 }
 
 // ===============================================================
 // Exit frame footer.
 
 void MacroAssembler::linkExitFrame(Register cxreg, Register scratch) {
   loadPtr(Address(cxreg, JSContext::offsetOfActivation()), scratch);
   storeStackPtr(Address(scratch, JitActivation::offsetOfPackedExitFP()));
@@ -3389,18 +3393,18 @@ void MacroAssembler::wasmReserveStackChe
     branchStackPtrRhs(Assembler::Below,
                       Address(WasmTlsReg, offsetof(wasm::TlsData, stackLimit)),
                       &ok);
     wasmTrap(wasm::Trap::StackOverflow, trapOffset);
     bind(&ok);
   }
 }
 
-void MacroAssembler::wasmCallImport(const wasm::CallSiteDesc& desc,
-                                    const wasm::CalleeDesc& callee) {
+CodeOffset MacroAssembler::wasmCallImport(const wasm::CallSiteDesc& desc,
+                                          const wasm::CalleeDesc& callee) {
   // Load the callee, before the caller's registers are clobbered.
   uint32_t globalDataOffset = callee.importGlobalDataOffset();
   loadWasmGlobalPtr(globalDataOffset + offsetof(wasm::FuncImportTls, code),
                     ABINonArgReg0);
 
 #ifndef JS_CODEGEN_NONE
   static_assert(ABINonArgReg0 != WasmTlsReg, "by constraint");
 #endif
@@ -3411,20 +3415,20 @@ void MacroAssembler::wasmCallImport(cons
   loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, cx)), ABINonArgReg2);
   storePtr(ABINonArgReg1, Address(ABINonArgReg2, JSContext::offsetOfRealm()));
 
   // Switch to the callee's TLS and pinned registers and make the call.
   loadWasmGlobalPtr(globalDataOffset + offsetof(wasm::FuncImportTls, tls),
                     WasmTlsReg);
   loadWasmPinnedRegsFromTls();
 
-  call(desc, ABINonArgReg0);
+  return call(desc, ABINonArgReg0);
 }
 
-void MacroAssembler::wasmCallBuiltinInstanceMethod(
+CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
     const wasm::CallSiteDesc& desc, const ABIArg& instanceArg,
     wasm::SymbolicAddress builtin) {
   MOZ_ASSERT(instanceArg != ABIArg());
 
   if (instanceArg.kind() == ABIArg::GPR) {
     loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)),
             instanceArg.gpr());
   } else if (instanceArg.kind() == ABIArg::Stack) {
@@ -3432,22 +3436,22 @@ void MacroAssembler::wasmCallBuiltinInst
     Register scratch = ABINonArgReg0;
     loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)), scratch);
     storePtr(scratch,
              Address(getStackPointer(), instanceArg.offsetFromArgBase()));
   } else {
     MOZ_CRASH("Unknown abi passing style for pointer");
   }
 
-  call(desc, builtin);
+  return call(desc, builtin);
 }
 
-void MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc,
-                                      const wasm::CalleeDesc& callee,
-                                      bool needsBoundsCheck) {
+CodeOffset MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc,
+                                            const wasm::CalleeDesc& callee,
+                                            bool needsBoundsCheck) {
   Register scratch = WasmTableCallScratchReg0;
   Register index = WasmTableCallIndexReg;
 
   // Optimization opportunity: when offsetof(FunctionTableElem, code) == 0, as
   // it is at present, we can probably generate better code here by folding
   // the address computation into the load.
 
   static_assert(sizeof(wasm::FunctionTableElem) == 8 ||
@@ -3460,18 +3464,17 @@ void MacroAssembler::wasmCallIndirect(co
     loadWasmGlobalPtr(callee.tableFunctionBaseGlobalDataOffset(), scratch);
     if (sizeof(wasm::FunctionTableElem) == 8) {
       computeEffectiveAddress(BaseIndex(scratch, index, TimesEight), scratch);
     } else {
       lshift32(Imm32(4), index);
       addPtr(index, scratch);
     }
     loadPtr(Address(scratch, offsetof(wasm::FunctionTableElem, code)), scratch);
-    call(desc, scratch);
-    return;
+    return call(desc, scratch);
   }
 
   MOZ_ASSERT(callee.which() == wasm::CalleeDesc::WasmTable);
 
   // Write the functype-id into the ABI functype-id register.
   wasm::FuncTypeIdDesc funcTypeId = callee.wasmTableSigId();
   switch (funcTypeId.kind()) {
     case wasm::FuncTypeIdDescKind::Global:
@@ -3514,17 +3517,17 @@ void MacroAssembler::wasmCallIndirect(co
   wasmTrap(wasm::Trap::IndirectCallToNull, trapOffset);
   bind(&nonNull);
 
   loadWasmPinnedRegsFromTls();
   switchToWasmTlsRealm(index, WasmTableCallScratchReg1);
 
   loadPtr(Address(scratch, offsetof(wasm::FunctionTableElem, code)), scratch);
 
-  call(desc, scratch);
+  return call(desc, scratch);
 }
 
 void MacroAssembler::emitPreBarrierFastPath(JSRuntime* rt, MIRType type,
                                             Register temp1, Register temp2,
                                             Register temp3, Label* noBarrier) {
   MOZ_ASSERT(temp1 != PreBarrierReg);
   MOZ_ASSERT(temp2 != PreBarrierReg);
   MOZ_ASSERT(temp3 != PreBarrierReg);
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -416,32 +416,36 @@ class MacroAssembler : public MacroAssem
   // Manipulated by the AutoGenericRegisterScope class.
   AllocatableRegisterSet debugTrackedRegisters_;
 #endif  // DEBUG
 
  public:
   // ===============================================================
   // Simple call functions.
 
+  // The returned CodeOffset is the assembler offset for the instruction
+  // immediately following the call; that is, for the return point.
   CodeOffset call(Register reg) PER_SHARED_ARCH;
   CodeOffset call(Label* label) PER_SHARED_ARCH;
+
   void call(const Address& addr) PER_SHARED_ARCH;
   void call(ImmWord imm) PER_SHARED_ARCH;
   // Call a target native function, which is neither traceable nor movable.
   void call(ImmPtr imm) PER_SHARED_ARCH;
-  void call(wasm::SymbolicAddress imm) PER_SHARED_ARCH;
-  inline void call(const wasm::CallSiteDesc& desc, wasm::SymbolicAddress imm);
+  CodeOffset call(wasm::SymbolicAddress imm) PER_SHARED_ARCH;
+  inline CodeOffset call(const wasm::CallSiteDesc& desc,
+                         wasm::SymbolicAddress imm);
 
   // Call a target JitCode, which must be traceable, and may be movable.
   void call(JitCode* c) PER_SHARED_ARCH;
 
   inline void call(TrampolinePtr code);
 
-  inline void call(const wasm::CallSiteDesc& desc, const Register reg);
-  inline void call(const wasm::CallSiteDesc& desc, uint32_t funcDefIndex);
+  inline CodeOffset call(const wasm::CallSiteDesc& desc, const Register reg);
+  inline CodeOffset call(const wasm::CallSiteDesc& desc, uint32_t funcDefIndex);
   inline void call(const wasm::CallSiteDesc& desc, wasm::Trap trap);
 
   CodeOffset callWithPatch() PER_SHARED_ARCH;
   void patchCall(uint32_t callerOffset, uint32_t calleeOffset) PER_SHARED_ARCH;
 
   // Push the return address and make a call. On platforms where this function
   // is not defined, push the link register (pushReturnAddress) at the entry
   // point of the callee.
@@ -577,18 +581,18 @@ class MacroAssembler : public MacroAssem
 
   inline void callWithABI(
       void* fun, MoveOp::Type result = MoveOp::GENERAL,
       CheckUnsafeCallWithABI check = CheckUnsafeCallWithABI::Check);
   inline void callWithABI(Register fun, MoveOp::Type result = MoveOp::GENERAL);
   inline void callWithABI(const Address& fun,
                           MoveOp::Type result = MoveOp::GENERAL);
 
-  void callWithABI(wasm::BytecodeOffset offset, wasm::SymbolicAddress fun,
-                   MoveOp::Type result = MoveOp::GENERAL);
+  CodeOffset callWithABI(wasm::BytecodeOffset offset, wasm::SymbolicAddress fun,
+                         MoveOp::Type result = MoveOp::GENERAL);
 
  private:
   // Reinitialize the variables which have to be cleared before making a call
   // with callWithABI.
   void setupABICall();
 
   // Reserve the stack and resolve the arguments move.
   void callWithABIPre(uint32_t* stackAdjust,
@@ -1858,29 +1862,30 @@ class MacroAssembler : public MacroAssem
       DEFINED_ON(arm64, x86, x64, mips64);
   void oolWasmTruncateCheckF32ToI64(FloatRegister input, Register64 output,
                                     TruncFlags flags, wasm::BytecodeOffset off,
                                     Label* rejoin)
       DEFINED_ON(arm, arm64, x86_shared, mips_shared);
 
   // This function takes care of loading the callee's TLS and pinned regs but
   // it is the caller's responsibility to save/restore TLS or pinned regs.
-  void wasmCallImport(const wasm::CallSiteDesc& desc,
-                      const wasm::CalleeDesc& callee);
+  CodeOffset wasmCallImport(const wasm::CallSiteDesc& desc,
+                            const wasm::CalleeDesc& callee);
 
   // WasmTableCallIndexReg must contain the index of the indirect call.
-  void wasmCallIndirect(const wasm::CallSiteDesc& desc,
-                        const wasm::CalleeDesc& callee, bool needsBoundsCheck);
+  CodeOffset wasmCallIndirect(const wasm::CallSiteDesc& desc,
+                              const wasm::CalleeDesc& callee,
+                              bool needsBoundsCheck);
 
   // This function takes care of loading the pointer to the current instance
   // as the implicit first argument. It preserves TLS and pinned registers.
   // (TLS & pinned regs are non-volatile registers in the system ABI).
-  void wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc,
-                                     const ABIArg& instanceArg,
-                                     wasm::SymbolicAddress builtin);
+  CodeOffset wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc,
+                                           const ABIArg& instanceArg,
+                                           wasm::SymbolicAddress builtin);
 
   // As enterFakeExitFrame(), but using register conventions appropriate for
   // wasm stubs.
   void enterFakeExitFrameForWasm(Register cxreg, Register scratch,
                                  ExitFrameType type) PER_SHARED_ARCH;
 
  public:
   // ========================================================================
--- a/js/src/jit/Registers.h
+++ b/js/src/jit/Registers.h
@@ -258,16 +258,19 @@ class MachineState {
 
   bool has(Register reg) const { return regs_[reg.code()] != nullptr; }
   bool has(FloatRegister reg) const { return fpregs_[reg.code()] != nullptr; }
   uintptr_t read(Register reg) const { return regs_[reg.code()]->r; }
   double read(FloatRegister reg) const { return fpregs_[reg.code()]->d; }
   void write(Register reg, uintptr_t value) const {
     regs_[reg.code()]->r = value;
   }
+  const Registers::RegisterContent* address(Register reg) const {
+    return regs_[reg.code()];
+  }
   const FloatRegisters::RegisterContent* address(FloatRegister reg) const {
     return fpregs_[reg.code()];
   }
 };
 
 class MacroAssembler;
 
 // Declares a register as owned within the scope of the object.
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -4172,19 +4172,19 @@ CodeOffset MacroAssembler::call(Label* l
 void MacroAssembler::call(ImmWord imm) { call(ImmPtr((void*)imm.value)); }
 
 void MacroAssembler::call(ImmPtr imm) {
   BufferOffset bo = m_buffer.nextOffset();
   addPendingJump(bo, imm, RelocationKind::HARDCODED);
   ma_call(imm);
 }
 
-void MacroAssembler::call(wasm::SymbolicAddress imm) {
+CodeOffset MacroAssembler::call(wasm::SymbolicAddress imm) {
   movePtr(imm, CallReg);
-  call(CallReg);
+  return call(CallReg);
 }
 
 void MacroAssembler::call(const Address& addr) {
   loadPtr(addr, CallReg);
   call(CallReg);
 }
 
 void MacroAssembler::call(JitCode* c) {
--- a/js/src/jit/arm64/Assembler-arm64.cpp
+++ b/js/src/jit/arm64/Assembler-arm64.cpp
@@ -286,26 +286,35 @@ void Assembler::bind(Label* label, Buffe
     branchOffset = nextOffset;
   }
 
   // Bind the label, so that future uses may encode the offset immediately.
   label->bind(targetOffset.getOffset());
 }
 
 void Assembler::bind(RepatchLabel* label) {
+  BufferOffset next = nextOffset();
+
   // Nothing has seen the label yet: just mark the location.
   // If we've run out of memory, don't attempt to modify the buffer which may
   // not be there. Just mark the label as bound to nextOffset().
   if (!label->used() || oom()) {
-    label->bind(nextOffset().getOffset());
+    label->bind(next.getOffset());
     return;
   }
   int branchOffset = label->offset();
-  Instruction* inst = getInstructionAt(BufferOffset(branchOffset));
-  inst->SetImmPCOffsetTarget(inst + nextOffset().getOffset() - branchOffset);
+  Instruction* branch = getInstructionAt(BufferOffset(branchOffset));
+  MOZ_ASSERT(branch->IsUncondB());
+
+  // The branch must be able to reach the label.
+  ptrdiff_t relativeByteOffset = next.getOffset() - branchOffset;
+  MOZ_ASSERT(branch->IsTargetReachable(branch + relativeByteOffset));
+  branch->SetImmPCOffsetTarget(branch + relativeByteOffset);
+
+  label->bind(next.getOffset());
 }
 
 void Assembler::addJumpRelocation(BufferOffset src, RelocationKind reloc) {
   // Only JITCODE relocations are patchable at runtime.
   MOZ_ASSERT(reloc == RelocationKind::JITCODE);
 
   // The jump relocation table starts with a fixed-width integer pointing
   // to the start of the extended jump table. But, we don't know the
@@ -341,18 +350,36 @@ size_t Assembler::addPatchableJump(Buffe
     addJumpRelocation(src, reloc);
   }
 
   size_t extendedTableIndex = pendingJumps_.length();
   enoughMemory_ &= pendingJumps_.append(RelativePatch(src, nullptr, reloc));
   return extendedTableIndex;
 }
 
+// PatchJump() is only used by the IonCacheIRCompiler.
+//
+// The CodeLocationJump is the jump to be patched.
+// The code for the jump is emitted by jumpWithPatch().
 void PatchJump(CodeLocationJump& jump_, CodeLocationLabel label) {
-  MOZ_CRASH("PatchJump");
+  MOZ_ASSERT(label.isSet());
+
+  Instruction* load = (Instruction*)jump_.raw();
+  MOZ_ASSERT(load->IsLDR());
+
+  Instruction* branch = (Instruction*)load->NextInstruction()->skipPool();
+  MOZ_ASSERT(branch->IsUncondB());
+
+  // FIXME: For the moment, just assume that the load isn't needed.
+  // FIXME: That assumption implies that the branch target is always in-range.
+  if (branch->IsTargetReachable((Instruction*)label.raw())) {
+    branch->SetImmPCOffsetTarget((Instruction*)label.raw());
+  } else {
+    MOZ_CRASH("PatchJump target not reachable");
+  }
 }
 
 void Assembler::PatchDataWithValueCheck(CodeLocationLabel label,
                                         PatchedImmPtr newValue,
                                         PatchedImmPtr expected) {
   Instruction* i = (Instruction*)label.raw();
   void** pValue = i->LiteralAddress<void**>();
   MOZ_ASSERT(*pValue == expected.value);
--- a/js/src/jit/arm64/CodeGenerator-arm64.cpp
+++ b/js/src/jit/arm64/CodeGenerator-arm64.cpp
@@ -962,17 +962,17 @@ void CodeGenerator::visitUnbox(LUnbox* u
   } else {
 #ifdef DEBUG
     JSValueTag tag = MIRTypeToTag(mir->type());
     Label ok;
 
     ValueOperand input = ToValue(unbox, LUnbox::Input);
     ScratchTagScope scratch(masm, input);
     masm.splitTagForTest(input, scratch);
-    masm.branchTest32(Assembler::Condition::Equal, scratch, Imm32(tag), &ok);
+    masm.branch32(Assembler::Condition::Equal, scratch, Imm32(tag), &ok);
     masm.assumeUnreachable("Infallible unbox type mismatch");
     masm.bind(&ok);
 #endif
   }
 
   ValueOperand input = ToValue(unbox, LUnbox::Input);
   Register result = ToRegister(unbox->output());
   switch (mir->type()) {
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -578,22 +578,22 @@ CodeOffset MacroAssembler::call(Label* l
 void MacroAssembler::call(ImmWord imm) { call(ImmPtr((void*)imm.value)); }
 
 void MacroAssembler::call(ImmPtr imm) {
   syncStackPtr();
   movePtr(imm, ip0);
   Blr(vixl::ip0);
 }
 
-void MacroAssembler::call(wasm::SymbolicAddress imm) {
+CodeOffset MacroAssembler::call(wasm::SymbolicAddress imm) {
   vixl::UseScratchRegisterScope temps(this);
   const Register scratch = temps.AcquireX().asUnsized();
   syncStackPtr();
   movePtr(imm, scratch);
-  call(scratch);
+  return call(scratch);
 }
 
 void MacroAssembler::call(const Address& addr) {
   vixl::UseScratchRegisterScope temps(this);
   const Register scratch = temps.AcquireX().asUnsized();
   syncStackPtr();
   loadPtr(addr, scratch);
   call(scratch);
--- a/js/src/jit/arm64/MacroAssembler-arm64.h
+++ b/js/src/jit/arm64/MacroAssembler-arm64.h
@@ -1220,32 +1220,32 @@ class MacroAssemblerCompat : public vixl
     syncStackPtr();
     BufferOffset loc =
         b(-1,
           LabelDoc());  // The jump target will be patched by executableCopy().
     addPendingJump(loc, ImmPtr(target->raw()), RelocationKind::JITCODE);
   }
 
   CodeOffsetJump jumpWithPatch(RepatchLabel* label) {
+    // jumpWithPatch() is only used by IonCacheIRCompiler::emitReturnFromIC().
+    // The RepatchLabel is unbound and unused.
+    MOZ_ASSERT(!label->used());
+    MOZ_ASSERT(!label->bound());
+
+    vixl::UseScratchRegisterScope temps(this);
+    const ARMRegister scratch64 = temps.AcquireX();
+
     ARMBuffer::PoolEntry pe;
     BufferOffset load_bo;
 
-    // Does not overwrite condition codes from the caller.
-    {
-      vixl::UseScratchRegisterScope temps(this);
-      const ARMRegister scratch64 = temps.AcquireX();
-      load_bo = immPool64(scratch64, (uint64_t)label, &pe);
-    }
+    // FIXME: This load is currently unused.
+    load_bo = immPool64(scratch64, (uint64_t)label, &pe);
+    BufferOffset branch_bo = b(-1, LabelDoc());
 
-    MOZ_ASSERT(!label->bound());
-
-    nop();
-    BufferOffset branch_bo = b(-1, LabelDoc());
     label->use(branch_bo.getOffset());
-
     return CodeOffsetJump(load_bo.getOffset(), pe.index());
   }
 
   void compareDouble(DoubleCondition cond, FloatRegister lhs,
                      FloatRegister rhs) {
     Fcmp(ARMFPRegister(lhs, 64), ARMFPRegister(rhs, 64));
   }
 
--- a/js/src/jit/arm64/vixl/Instructions-vixl.h
+++ b/js/src/jit/arm64/vixl/Instructions-vixl.h
@@ -310,17 +310,17 @@ class Instruction {
   bool IsLDR() const;
   bool IsNOP() const;
   bool IsCSDB() const;
   bool IsADR() const;
   bool IsADRP() const;
   bool IsMovz() const;
   bool IsMovk() const;
   bool IsBranchLinkImm() const;
-  bool IsTargetReachable(Instruction* target) const;
+  bool IsTargetReachable(const Instruction* target) const;
   ptrdiff_t ImmPCRawOffset() const;
   void SetImmPCRawOffset(ptrdiff_t offset);
   void SetBits32(int msb, int lsb, unsigned value);
 
   // Is this a stack pointer synchronization instruction as inserted by
   // MacroAssembler::syncStackPtr()?
   bool IsStackPtrSync() const;
 
--- a/js/src/jit/arm64/vixl/MozInstructions-vixl.cpp
+++ b/js/src/jit/arm64/vixl/MozInstructions-vixl.cpp
@@ -111,17 +111,17 @@ bool Instruction::IsMovk() const {
          (Mask(MoveWideImmediateMask) == MOVK_w);
 }
 
 bool Instruction::IsBranchLinkImm() const {
   return Mask(UnconditionalBranchFMask) == (UnconditionalBranchFixed | BL);
 }
 
 
-bool Instruction::IsTargetReachable(Instruction* target) const {
+bool Instruction::IsTargetReachable(const Instruction* target) const {
     VIXL_ASSERT(((target - this) & 3) == 0);
     int offset = (target - this) >> kInstructionSizeLog2;
     switch (BranchType()) {
       case CondBranchType:
         return is_int19(offset);
       case UncondBranchType:
         return is_int26(offset);
       case CompareBranchType:
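
Since Instruction pointers differ by bytes and these offsets are counted in
4-byte instruction units, the immediate widths checked here correspond to the
usual AArch64 branch ranges:

    // Reachability implied by the checks above (4 bytes per instruction):
    //   conditional branch:   is_int19 -> +/- 2^18 insns = +/- 1 MiB
    //   unconditional branch: is_int26 -> +/- 2^25 insns = +/- 128 MiB
    static_assert((1 << 18) * 4 == (1 << 20), "cond branch: +/- 1 MiB");
    static_assert((1 << 25) * 4 == (1 << 27), "uncond branch: +/- 128 MiB");
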
--- a/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
+++ b/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
@@ -1452,19 +1452,19 @@ CodeOffset MacroAssembler::farJumpWithPa
 
 void MacroAssembler::patchFarJump(CodeOffset farJump, uint32_t targetOffset) {
   uint32_t* u32 =
       reinterpret_cast<uint32_t*>(editSrc(BufferOffset(farJump.offset())));
   MOZ_ASSERT(*u32 == UINT32_MAX);
   *u32 = targetOffset - farJump.offset();
 }
 
-void MacroAssembler::call(wasm::SymbolicAddress target) {
+CodeOffset MacroAssembler::call(wasm::SymbolicAddress target) {
   movePtr(target, CallReg);
-  call(CallReg);
+  return call(CallReg);
 }
 
 void MacroAssembler::call(const Address& addr) {
   loadPtr(addr, CallReg);
   call(CallReg);
 }
 
 void MacroAssembler::call(ImmWord target) { call(ImmPtr((void*)target.value)); }
--- a/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
+++ b/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
@@ -594,19 +594,19 @@ void MacroAssembler::PopStackPtr() { Pop
 CodeOffset MacroAssembler::call(Register reg) { return Assembler::call(reg); }
 
 CodeOffset MacroAssembler::call(Label* label) { return Assembler::call(label); }
 
 void MacroAssembler::call(const Address& addr) {
   Assembler::call(Operand(addr.base, addr.offset));
 }
 
-void MacroAssembler::call(wasm::SymbolicAddress target) {
+CodeOffset MacroAssembler::call(wasm::SymbolicAddress target) {
   mov(target, eax);
-  Assembler::call(eax);
+  return Assembler::call(eax);
 }
 
 void MacroAssembler::call(ImmWord target) { Assembler::call(target); }
 
 void MacroAssembler::call(ImmPtr target) { Assembler::call(target); }
 
 void MacroAssembler::call(JitCode* target) { Assembler::call(target); }
 
--- a/js/src/jsapi-tests/testGCGrayMarking.cpp
+++ b/js/src/jsapi-tests/testGCGrayMarking.cpp
@@ -3,16 +3,17 @@
  */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "gc/Heap.h"
 #include "gc/WeakMap.h"
 #include "gc/Zone.h"
+#include "js/Proxy.h"
 #include "jsapi-tests/tests.h"
 
 using namespace js;
 using namespace js::gc;
 
 namespace js {
 
 struct GCManagedObjectWeakMap : public ObjectWeakMap {
@@ -222,17 +223,17 @@ bool TestWeakMaps() {
   JS_GC(cx);
   CHECK(IsMarkedBlack(weakMap));
   CHECK(IsMarkedBlack(key));
   CHECK(IsMarkedBlack(value));
 
   // Test that a weakmap key is marked gray if it has a gray delegate and the
   // map is either gray or black.
 
-  JSObject* delegate = AllocDelegateForKey(key);
+  JSObject* delegate = UncheckedUnwrapWithoutExpose(key);
   blackRoot1 = weakMap;
   blackRoot2 = nullptr;
   grayRoots.grayRoot1 = delegate;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedGray(delegate));
   CHECK(IsMarkedGray(key));
   CHECK(IsMarkedBlack(weakMap));
@@ -302,17 +303,16 @@ bool TestWeakMaps() {
   blackRoot2 = weakMap;
   grayRoots.grayRoot1 = nullptr;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedBlack(key));
   CHECK(IsMarkedBlack(weakMap));
   CHECK(IsMarkedBlack(value));
 
-  CHECK(AllocDelegateForKey(key));
   blackRoot1 = nullptr;
   blackRoot2 = nullptr;
   grayRoots.grayRoot1 = weakMap;
   grayRoots.grayRoot2 = key;
   JS_GC(cx);
   CHECK(IsMarkedGray(key));
   CHECK(IsMarkedGray(weakMap));
   CHECK(IsMarkedGray(value));
@@ -366,17 +366,17 @@ bool TestUnassociatedWeakMaps() {
   grayRoots.grayRoot1 = nullptr;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedBlack(key));
   CHECK(IsMarkedBlack(value));
 
   // Test that a weakmap key is marked gray if it has a gray delegate.
 
-  JSObject* delegate = AllocDelegateForKey(key);
+  JSObject* delegate = UncheckedUnwrapWithoutExpose(key);
   blackRoot = nullptr;
   grayRoots.grayRoot1 = delegate;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedGray(delegate));
   CHECK(IsMarkedGray(key));
   CHECK(IsMarkedGray(value));
 
@@ -404,17 +404,16 @@ bool TestUnassociatedWeakMaps() {
   delegate = nullptr;
   blackRoot = key;
   grayRoots.grayRoot1 = nullptr;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedBlack(key));
   CHECK(IsMarkedBlack(value));
 
-  CHECK(AllocDelegateForKey(key));
   blackRoot = nullptr;
   grayRoots.grayRoot1 = key;
   grayRoots.grayRoot2 = nullptr;
   JS_GC(cx);
   CHECK(IsMarkedGray(key));
   CHECK(IsMarkedGray(value));
 
   blackRoot = nullptr;
@@ -455,17 +454,17 @@ bool TestCCWs() {
 
   JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_INCREMENTAL);
   JS::PrepareForFullGC(cx);
   js::SliceBudget budget(js::WorkBudget(1));
   cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
   CHECK(JS::IsIncrementalGCInProgress(cx));
 
   CHECK(!IsMarkedBlack(wrapper));
-  CHECK(wrapper->zone()->isGCMarkingBlack());
+  CHECK(wrapper->zone()->isGCMarkingBlackOnly());
 
   CHECK(GetCrossCompartmentWrapper(target) == wrapper);
   CHECK(IsMarkedBlack(wrapper));
 
   JS::FinishIncrementalGC(cx, JS::gcreason::API);
 
   // Test behaviour of gray CCWs marked black by a barrier during incremental
   // GC.
@@ -479,31 +478,31 @@ bool TestCCWs() {
   CHECK(IsMarkedGray(target));
 
   // Incremental zone GC started: the source is now unmarked.
   JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_INCREMENTAL);
   JS::PrepareZoneForGC(wrapper->zone());
   budget = js::SliceBudget(js::WorkBudget(1));
   cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
   CHECK(JS::IsIncrementalGCInProgress(cx));
-  CHECK(wrapper->zone()->isGCMarkingBlack());
+  CHECK(wrapper->zone()->isGCMarkingBlackOnly());
   CHECK(!target->zone()->wasGCStarted());
   CHECK(!IsMarkedBlack(wrapper));
   CHECK(!IsMarkedGray(wrapper));
   CHECK(IsMarkedGray(target));
 
-  // Betweeen GC slices: source marked black by barrier, target is still
-  // gray. Target will be marked gray eventually. ObjectIsMarkedGray() is
-  // conservative and reports that target is not marked gray; ObjectIsNotGray
-  // reports the actual state.
+  // Between GC slices: source marked black by barrier, target is still
+  // gray. Target will be marked gray eventually. ObjectIsMarkedGray() is
+  // conservative and reports that target is not marked gray;
+  // AssertObjectIsNotGray() will assert.
   grayRoots.grayRoot1.get();
   CHECK(IsMarkedBlack(wrapper));
   CHECK(IsMarkedGray(target));
   CHECK(!JS::ObjectIsMarkedGray(target));
-  MOZ_ASSERT(!JS::ObjectIsNotGray(target));
 
   // Final state: source and target are black.
   JS::FinishIncrementalGC(cx, JS::gcreason::API);
   CHECK(IsMarkedBlack(wrapper));
   CHECK(IsMarkedBlack(target));
 
   grayRoots.grayRoot1 = nullptr;
   grayRoots.grayRoot2 = nullptr;
@@ -647,44 +646,28 @@ JSObject* GetCrossCompartmentWrapper(JSO
   }
 
   EvictNursery();
 
   MOZ_ASSERT(obj->compartment() == global2->compartment());
   return obj;
 }
 
-static JSObject* GetKeyDelegate(JSObject* obj) {
-  return static_cast<JSObject*>(obj->as<NativeObject>().getPrivate());
-}
-
 JSObject* AllocWeakmapKeyObject() {
-  static const js::ClassExtension KeyClassExtension = {GetKeyDelegate};
-
-  static const js::Class KeyClass = {"keyWithDelegate",  JSCLASS_HAS_PRIVATE,
-                                     JS_NULL_CLASS_OPS,  JS_NULL_CLASS_SPEC,
-                                     &KeyClassExtension, JS_NULL_OBJECT_OPS};
-
-  JS::RootedObject key(cx, JS_NewObject(cx, Jsvalify(&KeyClass)));
-  if (!key) {
+  JS::RootedObject delegate(cx, JS_NewPlainObject(cx));
+  if (!delegate) {
     return nullptr;
   }
 
+  JS::RootedObject key(
+      cx, js::Wrapper::New(cx, delegate, &js::Wrapper::singleton));
+
   EvictNursery();
   return key;
 }
 
-JSObject* AllocDelegateForKey(JSObject* key) {
-  JS::RootedObject obj(cx, JS_NewPlainObject(cx));
-  EvictNursery();
-
-  key->as<NativeObject>().setPrivate(obj);
-  return obj;
-}
-
 JSObject* AllocObjectChain(size_t length) {
   // Allocate a chain of linked JSObjects.
 
   // Use a unique property name so the shape is not shared with any other
   // objects.
   RootedString nextPropName(cx, JS_NewStringCopyZ(cx, "unique14142135"));
   RootedId nextId(cx);
   if (!JS_StringToId(cx, nextPropName, &nextId)) {
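
These test changes track the removal of the weakmapKeyDelegateOp class hook: a
key's delegate is now simply the target of a wrapper key, recovered by
unwrapping. The invariant the updated tests rely on, as a minimal sketch:

    // Sketch: after this change, a key's delegate is its wrapper target.
    JS::RootedObject delegate(cx, JS_NewPlainObject(cx));
    JS::RootedObject key(
        cx, js::Wrapper::New(cx, delegate, &js::Wrapper::singleton));
    MOZ_ASSERT(js::UncheckedUnwrapWithoutExpose(key) == delegate);
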
--- a/js/src/jsapi-tests/testWeakMap.cpp
+++ b/js/src/jsapi-tests/testWeakMap.cpp
@@ -65,21 +65,24 @@ BEGIN_TEST(testWeakMap_keyDelegates) {
   AutoLeaveZeal nozeal(cx);
 #endif /* JS_GC_ZEAL */
 
   JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_INCREMENTAL);
   JS_GC(cx);
   JS::RootedObject map(cx, JS::NewWeakMapObject(cx));
   CHECK(map);
 
-  JS::RootedObject key(cx, newKey());
+  JS::RootedObject delegate(cx, newDelegate());
+  JS::RootedObject key(cx, delegate);
+  if (!JS_WrapObject(cx, &key)) {
+    return false;
+  }
   CHECK(key);
+  CHECK(delegate);
 
-  JS::RootedObject delegate(cx, newDelegate());
-  CHECK(delegate);
   keyDelegate = delegate;
 
   JS::RootedObject delegateRoot(cx);
   {
     JSAutoRealm ar(cx, delegate);
     delegateRoot = JS_NewPlainObject(cx);
     CHECK(delegateRoot);
     JS::RootedValue delegateValue(cx, JS::ObjectValue(*delegate));
@@ -141,39 +144,35 @@ static size_t DelegateObjectMoved(JSObje
     return 0;  // Object got moved before we set keyDelegate to point to it.
   }
 
   MOZ_RELEASE_ASSERT(keyDelegate == old);
   keyDelegate = obj;
   return 0;
 }
 
-static JSObject* GetKeyDelegate(JSObject* obj) { return keyDelegate; }
-
 JSObject* newKey() {
-  static const js::ClassExtension keyClassExtension = {GetKeyDelegate};
-
   static const js::Class keyClass = {
       "keyWithDelegate",  JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1),
       JS_NULL_CLASS_OPS,  JS_NULL_CLASS_SPEC,
-      &keyClassExtension, JS_NULL_OBJECT_OPS};
+      JS_NULL_CLASS_EXT, JS_NULL_OBJECT_OPS};
 
   JS::RootedObject key(cx, JS_NewObject(cx, Jsvalify(&keyClass)));
   if (!key) {
     return nullptr;
   }
 
   return key;
 }
 
 JSObject* newCCW(JS::HandleObject sourceZone, JS::HandleObject destZone) {
   /*
    * Now ensure that this zone will be swept first by adding a cross
-   * compartment wrapper to a new objct in the same zone as the
-   * delegate obejct.
+   * compartment wrapper to a new object in the same zone as the
+   * delegate object.
    */
   JS::RootedObject object(cx);
   {
     JSAutoRealm ar(cx, destZone);
     object = JS_NewPlainObject(cx);
     if (!object) {
       return nullptr;
     }
@@ -203,17 +202,17 @@ JSObject* newDelegate() {
       nullptr, /* finalize */
       nullptr, /* call */
       nullptr, /* hasInstance */
       nullptr, /* construct */
       JS_GlobalObjectTraceHook,
   };
 
   static const js::ClassExtension delegateClassExtension = {
-      nullptr, DelegateObjectMoved};
+      DelegateObjectMoved};
 
   static const js::Class delegateClass = {
       "delegate",
       JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_RESERVED_SLOTS(1),
       &delegateClassOps,
       JS_NULL_CLASS_SPEC,
       &delegateClassExtension,
       JS_NULL_OBJECT_OPS};
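
With the weakmapKeyDelegateOp slot gone, js::ClassExtension initializers
throughout the tree shrink to the one remaining hook, as
delegateClassExtension above shows. A class that only needs a moved-object
hook now reads (MyObjectMoved is illustrative):

    // Sketch: ClassExtension after the weakmapKeyDelegateOp removal.
    static size_t MyObjectMoved(JSObject* obj, JSObject* old) { return 0; }

    static const js::ClassExtension MyClassExtension = {
        MyObjectMoved  /* objectMovedOp */
    };
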
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -705,17 +705,17 @@ static void ReleaseAssertObjectHasNoWrap
 JS_PUBLIC_API JSObject* JS_TransplantObject(JSContext* cx, HandleObject origobj,
                                             HandleObject target) {
   AssertHeapIsIdle();
   MOZ_ASSERT(origobj != target);
   MOZ_ASSERT(!origobj->is<CrossCompartmentWrapperObject>());
   MOZ_ASSERT(!target->is<CrossCompartmentWrapperObject>());
   MOZ_ASSERT(origobj->getClass() == target->getClass());
   ReleaseAssertObjectHasNoWrappers(cx, target);
-  MOZ_ASSERT(JS::CellIsNotGray(target));
+  JS::AssertCellIsNotGray(target);
 
   RootedValue origv(cx, ObjectValue(*origobj));
   RootedObject newIdentity(cx);
 
   // Don't allow a compacting GC to observe any intermediate state.
   AutoDisableCompactingGC nocgc(cx);
 
   AutoDisableProxyCheck adpc;
@@ -769,17 +769,17 @@ JS_PUBLIC_API JSObject* JS_TransplantObj
     if (!origobj->compartment()->putWrapper(
             cx, CrossCompartmentKey(newIdentity), origv)) {
       MOZ_CRASH();
     }
   }
 
   // The new identity object might be one of several things. Return it to avoid
   // ambiguity.
-  MOZ_ASSERT(JS::CellIsNotGray(newIdentity));
+  JS::AssertCellIsNotGray(newIdentity);
   return newIdentity;
 }
 
 /*
  * Recompute all cross-compartment wrappers for an object, resetting state.
  * Gecko uses this to clear Xray wrappers when doing a navigation that reuses
  * the inner window and global object.
  */
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -570,23 +570,16 @@ JS_FRIEND_API void js::VisitGrayWrapperT
   for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
     for (Compartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
       e.front().mutableKey().applyToWrapped(
           VisitGrayCallbackFunctor(callback, closure));
     }
   }
 }
 
-JS_FRIEND_API JSObject* js::GetWeakmapKeyDelegate(JSObject* key) {
-  if (JSWeakmapKeyDelegateOp op = key->getClass()->extWeakmapKeyDelegateOp()) {
-    return op(key);
-  }
-  return nullptr;
-}
-
 JS_FRIEND_API JSLinearString* js::StringToLinearStringSlow(JSContext* cx,
                                                            JSString* str) {
   return str->ensureLinear(cx);
 }
 
 JS_FRIEND_API void JS_SetAccumulateTelemetryCallback(
     JSContext* cx, JSAccumulateTelemetryDataCallback callback) {
   cx->runtime()->setTelemetryCallback(cx->runtime(), callback);
@@ -1041,17 +1034,17 @@ struct DumpHeapTracer : public JS::Callb
         js::WeakMapTracer(cx->runtime()),
         prefix(""),
         output(fp) {}
 
  private:
   void trace(JSObject* map, JS::GCCellPtr key, JS::GCCellPtr value) override {
     JSObject* kdelegate = nullptr;
     if (key.is<JSObject>()) {
-      kdelegate = js::GetWeakmapKeyDelegate(&key.as<JSObject>());
+      kdelegate = UncheckedUnwrapWithoutExpose(&key.as<JSObject>());
     }
 
     fprintf(output, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n", map,
             key.asCell(), kdelegate, value.asCell());
   }
 
   void onChild(const JS::GCCellPtr& thing) override;
 };
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -146,16 +146,17 @@ enum {
   JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS,
   JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS,
   JS_TELEMETRY_GC_MINOR_REASON,
   JS_TELEMETRY_GC_MINOR_REASON_LONG,
   JS_TELEMETRY_GC_MINOR_US,
   JS_TELEMETRY_GC_NURSERY_BYTES,
   JS_TELEMETRY_GC_PRETENURE_COUNT,
   JS_TELEMETRY_GC_NURSERY_PROMOTION_RATE,
+  JS_TELEMETRY_GC_MARK_RATE,
   JS_TELEMETRY_PRIVILEGED_PARSER_COMPILE_LAZY_AFTER_MS,
   JS_TELEMETRY_WEB_PARSER_COMPILE_LAZY_AFTER_MS,
   JS_TELEMETRY_END
 };
 
 typedef void (*JSAccumulateTelemetryDataCallback)(int id, uint32_t sample,
                                                   const char* key);
 
@@ -489,18 +490,16 @@ extern JS_FRIEND_API bool ZoneGlobalsAre
 extern JS_FRIEND_API bool IsObjectZoneSweepingOrCompacting(JSObject* obj);
 
 typedef void (*GCThingCallback)(void* closure, JS::GCCellPtr thing);
 
 extern JS_FRIEND_API void VisitGrayWrapperTargets(JS::Zone* zone,
                                                   GCThingCallback callback,
                                                   void* closure);
 
-extern JS_FRIEND_API JSObject* GetWeakmapKeyDelegate(JSObject* key);
-
 /**
  * Invoke cellCallback on every gray JSObject in the given zone.
  */
 extern JS_FRIEND_API void IterateGrayObjects(JS::Zone* zone,
                                              GCThingCallback cellCallback,
                                              void* data);
 
 /**
--- a/js/src/proxy/BaseProxyHandler.cpp
+++ b/js/src/proxy/BaseProxyHandler.cpp
@@ -372,20 +372,16 @@ bool BaseProxyHandler::isArray(JSContext
 void BaseProxyHandler::trace(JSTracer* trc, JSObject* proxy) const {}
 
 void BaseProxyHandler::finalize(JSFreeOp* fop, JSObject* proxy) const {}
 
 size_t BaseProxyHandler::objectMoved(JSObject* proxy, JSObject* old) const {
   return 0;
 }
 
-JSObject* BaseProxyHandler::weakmapKeyDelegate(JSObject* proxy) const {
-  return nullptr;
-}
-
 bool BaseProxyHandler::getPrototype(JSContext* cx, HandleObject proxy,
                                     MutableHandleObject protop) const {
   MOZ_CRASH("must override getPrototype with dynamic prototype");
 }
 
 bool BaseProxyHandler::setPrototype(JSContext* cx, HandleObject proxy,
                                     HandleObject proto,
                                     ObjectOpResult& result) const {
--- a/js/src/proxy/Proxy.cpp
+++ b/js/src/proxy/Proxy.cpp
@@ -740,21 +740,16 @@ static bool proxy_DeleteProperty(JSConte
       continue;
     }
     TraceEdge(trc, proxy->reservedSlotPtr(i), "proxy_reserved");
   }
 
   Proxy::trace(trc, obj);
 }
 
-static JSObject* proxy_WeakmapKeyDelegate(JSObject* obj) {
-  MOZ_ASSERT(obj->is<ProxyObject>());
-  return obj->as<ProxyObject>().handler()->weakmapKeyDelegate(obj);
-}
-
 static void proxy_Finalize(FreeOp* fop, JSObject* obj) {
   // Suppress a bogus warning about finalize().
   JS::AutoSuppressGCAnalysis nogc;
 
   MOZ_ASSERT(obj->is<ProxyObject>());
   obj->as<ProxyObject>().handler()->finalize(fop, obj);
 
   if (!obj->as<ProxyObject>().usingInlineValueArray()) {
@@ -784,18 +779,17 @@ const ClassOps js::ProxyClassOps = {
     nullptr,            /* mayResolve  */
     proxy_Finalize,     /* finalize    */
     nullptr,            /* call        */
     Proxy::hasInstance, /* hasInstance */
     nullptr,            /* construct   */
     ProxyObject::trace, /* trace       */
 };
 
-const ClassExtension js::ProxyClassExtension = {proxy_WeakmapKeyDelegate,
-                                                proxy_ObjectMoved};
+const ClassExtension js::ProxyClassExtension = {proxy_ObjectMoved};
 
 const ObjectOps js::ProxyObjectOps = {
     proxy_LookupProperty, Proxy::defineProperty,
     Proxy::has,           Proxy::get,
     Proxy::set,           Proxy::getOwnPropertyDescriptor,
     proxy_DeleteProperty, Proxy::getElements,
     Proxy::fun_toString};
 
--- a/js/src/proxy/Wrapper.cpp
+++ b/js/src/proxy/Wrapper.cpp
@@ -275,21 +275,16 @@ bool ForwardingProxyHandler::isCallable(
   return target->isCallable();
 }
 
 bool ForwardingProxyHandler::isConstructor(JSObject* obj) const {
   JSObject* target = obj->as<ProxyObject>().target();
   return target->isConstructor();
 }
 
-JSObject* Wrapper::weakmapKeyDelegate(JSObject* proxy) const {
-  // This may be called during GC.
-  return UncheckedUnwrapWithoutExpose(proxy);
-}
-
 JSObject* Wrapper::New(JSContext* cx, JSObject* obj, const Wrapper* handler,
                        const WrapperOptions& options) {
   // If this is a cross-compartment wrapper allocate it in the compartment's
   // first realm. See Realm::realmForNewCCW.
   mozilla::Maybe<AutoRealmUnchecked> ar;
   if (handler->isCrossCompartmentWrapper()) {
     ar.emplace(cx, cx->compartment()->realmForNewCCW());
   }
@@ -309,22 +304,24 @@ JSObject* Wrapper::wrappedObject(JSObjec
 
   if (target) {
     // A cross-compartment wrapper should never wrap a CCW. We rely on this
     // in the wrapper handlers (we use AutoRealm on our return value, and
     // AutoRealm cannot be used with CCWs).
     MOZ_ASSERT_IF(IsCrossCompartmentWrapper(wrapper),
                   !IsCrossCompartmentWrapper(target));
 
+#ifdef DEBUG
     // An incremental GC will eventually mark the targets of black wrappers
     // black but while it is in progress we can observe gray targets.
-    MOZ_ASSERT_IF(
-        !wrapper->runtimeFromMainThread()->gc.isIncrementalGCInProgress() &&
-            wrapper->isMarkedBlack(),
-        JS::ObjectIsNotGray(target));
+    if (!wrapper->runtimeFromMainThread()->gc.isIncrementalGCInProgress() &&
+        wrapper->isMarkedBlack()) {
+      JS::AssertObjectIsNotGray(target);
+    }
+#endif
 
     // Unmark wrapper targets that should be black in case an incremental GC
     // hasn't marked them the correct color yet.
     if (!wrapper->isMarkedGray()) {
       JS::ExposeObjectToActiveJS(target);
     }
   }
 
@@ -333,18 +330,18 @@ JSObject* Wrapper::wrappedObject(JSObjec
 
 JS_FRIEND_API JSObject* js::UncheckedUnwrapWithoutExpose(JSObject* wrapped) {
   while (true) {
     if (!wrapped->is<WrapperObject>() || MOZ_UNLIKELY(IsWindowProxy(wrapped))) {
       break;
     }
     wrapped = wrapped->as<WrapperObject>().target();
 
-    // This can be called from Wrapper::weakmapKeyDelegate() on a wrapper
-    // whose referent has been moved while it is still unmarked.
+    // This can be called when getting a weakmap key delegate on a
+    // wrapper whose referent has been moved while it is still unmarked.
     if (wrapped) {
       wrapped = MaybeForwarded(wrapped);
     }
   }
   return wrapped;
 }
 
 JS_FRIEND_API JSObject* js::UncheckedUnwrap(JSObject* wrapped,
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -7672,23 +7672,16 @@ static bool AddMarkObservers(JSContext* 
     return false;
   }
 
   if (!args.get(0).isObject()) {
     JS_ReportErrorASCII(cx, "argument must be an Array of objects");
     return false;
   }
 
-#ifdef ENABLE_WASM_GC
-  if (gc::GCRuntime::temporaryAbortIfWasmGc(cx)) {
-    JS_ReportErrorASCII(cx, "API temporarily unavailable under wasm gc");
-    return false;
-  }
-#endif
-
   // WeakCaches are not swept during a minor GC. To prevent nursery-allocated
   // contents from having the mark bits be deceptively black until the second
   // GC, they would need to be marked weakly (cf NurseryAwareHashMap). It is
   // simpler to evict the nursery to prevent nursery objects from being
   // observed.
   cx->runtime()->gc.evictNursery();
 
   RootedObject observersArg(cx, &args[0].toObject());
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -977,17 +977,16 @@ const ClassOps MappedArgumentsObject::cl
     ArgumentsObject::obj_mayResolve,
     ArgumentsObject::finalize,
     nullptr, /* call        */
     nullptr, /* hasInstance */
     nullptr, /* construct   */
     ArgumentsObject::trace};
 
 const js::ClassExtension MappedArgumentsObject::classExt_ = {
-    nullptr,                     /* weakmapKeyDelegateOp */
     ArgumentsObject::objectMoved /* objectMovedOp */
 };
 
 const ObjectOps MappedArgumentsObject::objectOps_ = {
     nullptr, /* lookupProperty */
     MappedArgumentsObject::obj_defineProperty};
 
 const Class MappedArgumentsObject::class_ = {
@@ -1014,17 +1013,16 @@ const ClassOps UnmappedArgumentsObject::
     ArgumentsObject::obj_mayResolve,
     ArgumentsObject::finalize,
     nullptr, /* call        */
     nullptr, /* hasInstance */
     nullptr, /* construct   */
     ArgumentsObject::trace};
 
 const js::ClassExtension UnmappedArgumentsObject::classExt_ = {
-    nullptr,                     /* weakmapKeyDelegateOp */
     ArgumentsObject::objectMoved /* objectMovedOp */
 };
 
 const Class UnmappedArgumentsObject::class_ = {
     "Arguments",
     JSCLASS_DELAY_METADATA_BUILDER |
         JSCLASS_HAS_RESERVED_SLOTS(UnmappedArgumentsObject::RESERVED_SLOTS) |
         JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -288,17 +288,16 @@ static const ClassSpec ArrayBufferObject
                              gc::AllocKind::FUNCTION>,
     GenericCreatePrototype<ArrayBufferObject>,
     arraybuffer_functions,
     arraybuffer_properties,
     arraybuffer_proto_functions,
     arraybuffer_proto_properties};
 
 static const ClassExtension ArrayBufferObjectClassExtension = {
-    nullptr, /* weakmapKeyDelegateOp */
     ArrayBufferObject::objectMoved};
 
 const Class ArrayBufferObject::class_ = {
     "ArrayBuffer",
     JSCLASS_DELAY_METADATA_BUILDER |
         JSCLASS_HAS_RESERVED_SLOTS(RESERVED_SLOTS) |
         JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer) |
         JSCLASS_BACKGROUND_FINALIZE,
--- a/js/src/vm/Compartment-inl.h
+++ b/js/src/vm/Compartment-inl.h
@@ -78,17 +78,17 @@ inline bool JS::Compartment::wrap(JSCont
    * script. Unwrap and prewrap are both steps that we take to get to the
   * identity of incoming objects, and as such, they should never map
    * one identity object to another object. This means that we can safely
    * check the cache immediately, and only risk false negatives. Do this
    * in opt builds, and do both in debug builds so that we can assert
    * that we get the same answer.
    */
 #ifdef DEBUG
-  MOZ_ASSERT(JS::ValueIsNotGray(vp));
+  JS::AssertValueIsNotGray(vp);
   JS::RootedObject cacheResult(cx);
 #endif
   JS::RootedValue v(cx, vp);
   if (js::WrapperMap::Ptr p =
           crossCompartmentWrappers.lookup(js::CrossCompartmentKey(v))) {
 #ifdef DEBUG
     cacheResult = &p->value().get().toObject();
 #else
--- a/js/src/vm/Compartment.cpp
+++ b/js/src/vm/Compartment.cpp
@@ -304,17 +304,17 @@ bool Compartment::wrap(JSContext* cx, Mu
   if (!obj) {
     return true;
   }
 
   AutoDisableProxyCheck adpc;
 
   // Anything we're wrapping has already escaped into script, so must have
   // been unmarked-gray at some point in the past.
-  MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+  JS::AssertObjectIsNotGray(obj);
 
   // The passed object may already be wrapped, or may fit a number of special
   // cases that we need to check for and manually correct.
   if (!getNonWrapperObjectForCurrentCompartment(cx, obj)) {
     return false;
   }
 
   // If the reification above did not result in a same-compartment object,
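
Throughout the patch, MOZ_ASSERT over the boolean predicates (CellIsNotGray,
ObjectIsNotGray, ValueIsNotGray) becomes a call to a dedicated assertion
function, centralizing the check and its failure report in one DEBUG-only
entry point. The call-site transformation is mechanical:

    // Before: predicate wrapped in an assert at every call site.
    //   MOZ_ASSERT(JS::ObjectIsNotGray(obj));
    // After: an assertion entry point, a no-op in non-DEBUG builds.
    JS::AssertObjectIsNotGray(obj);
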
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -4861,21 +4861,16 @@ class MOZ_STACK_CLASS Debugger::ScriptQu
     }
   }
 };
 
 /* static */ bool Debugger::findScripts(JSContext* cx, unsigned argc,
                                         Value* vp) {
   THIS_DEBUGGER(cx, argc, vp, "findScripts", args, dbg);
 
-  if (gc::GCRuntime::temporaryAbortIfWasmGc(cx)) {
-    JS_ReportErrorASCII(cx, "API temporarily unavailable under wasm gc");
-    return false;
-  }
-
   ScriptQuery query(cx, dbg);
 
   if (args.length() >= 1) {
     RootedObject queryObject(cx, NonNullObject(cx, args[0]));
     if (!queryObject || !query.parseQuery(queryObject)) {
       return false;
     }
   } else {
@@ -5053,21 +5048,16 @@ static inline DebuggerSourceReferent AsS
   }
   return AsVariant(&obj->as<WasmInstanceObject>());
 }
 
 /* static */ bool Debugger::findSources(JSContext* cx, unsigned argc,
                                         Value* vp) {
   THIS_DEBUGGER(cx, argc, vp, "findSources", args, dbg);
 
-  if (gc::GCRuntime::temporaryAbortIfWasmGc(cx)) {
-    JS_ReportErrorASCII(cx, "API temporarily unavailable under wasm gc");
-    return false;
-  }
-
   SourceQuery query(cx, dbg);
   if (!query.findSources()) {
     return false;
   }
 
   Handle<SourceQuery::SourceSet> sources(query.foundSources());
 
   size_t resultLength = sources.count();
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -436,17 +436,17 @@ class Debugger : private mozilla::Linked
   };
 
   // Barrier methods so we can have ReadBarriered<Debugger*>.
   static void readBarrier(Debugger* dbg) {
     InternalBarrierMethods<JSObject*>::readBarrier(dbg->object);
   }
   static void writeBarrierPost(Debugger** vp, Debugger* prev, Debugger* next) {}
 #ifdef DEBUG
-  static bool thingIsNotGray(Debugger* dbg) { return true; }
+  static void assertThingIsNotGray(Debugger* dbg) { return; }
 #endif
 
  private:
   GCPtrNativeObject object; /* The Debugger object. Strong reference. */
   WeakGlobalObjectSet
       debuggees; /* Debuggee globals. Cross-compartment weak references. */
   JS::ZoneSet debuggeeZones; /* Set of zones that we have debuggees in. */
   js::GCPtrObject uncaughtExceptionHook; /* Strong reference. */
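
The DEBUG hook that barrier-method providers supply changes shape to match:
bool thingIsNotGray(T) becomes void assertThingIsNotGray(T). A hypothetical
provider for an object-holding type would follow the TaggedProto
specialization later in this patch (MyHandle is illustrative):

    // Sketch: the hook shape expected after this change.
    #ifdef DEBUG
    static void assertThingIsNotGray(const MyHandle& thing) {
      if (thing.isObject()) {
        JS::AssertObjectIsNotGray(thing.toObject());
      }
    }
    #endif
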
--- a/js/src/vm/DebuggerMemory.cpp
+++ b/js/src/vm/DebuggerMemory.cpp
@@ -394,23 +394,16 @@ using JS::ubi::CountTypePtr;
 //    go.
 //
 // 3) We walk the tree of counts and produce JavaScript objects reporting the
 //    accumulated results.
 bool DebuggerMemory::takeCensus(JSContext* cx, unsigned argc, Value* vp) {
   THIS_DEBUGGER_MEMORY(cx, argc, vp, "Debugger.Memory.prototype.census", args,
                        memory);
 
-#ifdef ENABLE_WASM_GC
-  if (gc::GCRuntime::temporaryAbortIfWasmGc(cx)) {
-    JS_ReportErrorASCII(cx, "API temporarily unavailable under wasm gc");
-    return false;
-  }
-#endif
-
   Census census(cx);
   CountTypePtr rootType;
 
   RootedObject options(cx);
   if (args.get(0).isObject()) {
     options = &args[0].toObject();
   }
 
--- a/js/src/vm/JSContext-inl.h
+++ b/js/src/vm/JSContext-inl.h
@@ -65,17 +65,17 @@ class ContextChecks {
   void check(JS::Zone* z, int argIndex) {
     if (zone() && z != zone()) {
       fail(zone(), z, argIndex);
     }
   }
 
   void check(JSObject* obj, int argIndex) {
     if (obj) {
-      MOZ_ASSERT(JS::ObjectIsNotGray(obj));
+      JS::AssertObjectIsNotGray(obj);
       MOZ_ASSERT(!js::gc::IsAboutToBeFinalizedUnbarriered(&obj));
       check(obj->compartment(), argIndex);
     }
   }
 
   template <typename T>
   void checkAtom(T* thing, int argIndex) {
     static_assert(mozilla::IsSame<T, JSAtom>::value ||
@@ -90,17 +90,17 @@ class ContextChecks {
         MOZ_CRASH_UNSAFE_PRINTF(
             "*** Atom not marked for zone %p at argument %d", zone(), argIndex);
       }
     }
 #endif
   }
 
   void check(JSString* str, int argIndex) {
-    MOZ_ASSERT(JS::CellIsNotGray(str));
+    JS::AssertCellIsNotGray(str);
     if (str->isAtom()) {
       checkAtom(&str->asAtom(), argIndex);
     } else {
       check(str->zone(), argIndex);
     }
   }
 
   void check(JS::Symbol* symbol, int argIndex) { checkAtom(symbol, argIndex); }
@@ -154,17 +154,17 @@ class ContextChecks {
     } else if (JSID_IS_SYMBOL(id)) {
       checkAtom(JSID_TO_SYMBOL(id), argIndex);
     } else {
       MOZ_ASSERT(!JSID_IS_GCTHING(id));
     }
   }
 
   void check(JSScript* script, int argIndex) {
-    MOZ_ASSERT(JS::CellIsNotGray(script));
+    JS::AssertCellIsNotGray(script);
     if (script) {
       check(script->realm(), argIndex);
     }
   }
 
   void check(AbstractFramePtr frame, int argIndex);
 
   void check(Handle<PropertyDescriptor> desc, int argIndex) {
@@ -380,27 +380,27 @@ inline void JSContext::setZone(js::Zone*
     MOZ_ASSERT(!zone_->wasGCStarted());
     freeLists_ = atomsZoneFreeLists_;
   } else {
     freeLists_ = &zone_->arenas.freeLists();
   }
 }
 
 inline void JSContext::enterRealmOf(JSObject* target) {
-  MOZ_ASSERT(JS::CellIsNotGray(target));
+  JS::AssertCellIsNotGray(target);
   enterRealm(target->nonCCWRealm());
 }
 
 inline void JSContext::enterRealmOf(JSScript* target) {
-  MOZ_ASSERT(JS::CellIsNotGray(target));
+  JS::AssertCellIsNotGray(target);
   enterRealm(target->realm());
 }
 
 inline void JSContext::enterRealmOf(js::ObjectGroup* target) {
-  MOZ_ASSERT(JS::CellIsNotGray(target));
+  JS::AssertCellIsNotGray(target);
   enterRealm(target->realm());
 }
 
 inline void JSContext::enterNullRealm() {
   // We should never enter a realm while in the atoms zone.
   MOZ_ASSERT_IF(zone(), !zone()->isAtomsZone());
 
   setRealm(nullptr);
--- a/js/src/vm/JSScript.cpp
+++ b/js/src/vm/JSScript.cpp
@@ -3825,17 +3825,17 @@ bool js::detail::CopyScript(JSContext* c
   if (src->treatAsRunOnce() && !src->functionNonDelazifying()) {
     JS_ReportErrorASCII(cx, "No cloning toplevel run-once scripts");
     return false;
   }
 
   /* NB: Keep this in sync with XDRScript. */
 
   /* Some embeddings are not careful to use ExposeObjectToActiveJS as needed. */
-  MOZ_ASSERT(!src->sourceObject()->isMarkedGray());
+  JS::AssertObjectIsNotGray(src->sourceObject());
 
   uint32_t nscopes = src->scopes().size();
   uint32_t nconsts = src->hasConsts() ? src->consts().size() : 0;
   uint32_t nobjects = src->hasObjects() ? src->objects().size() : 0;
 
   /* Script data */
 
   size_t size = src->dataSize();
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -1460,17 +1460,21 @@ class NativeObject : public ShapedObject
   void* getPrivate() const { return privateRef(numFixedSlots()); }
   void setPrivate(void* data) {
     void** pprivate = &privateRef(numFixedSlots());
     privateWriteBarrierPre(pprivate);
     *pprivate = data;
   }
 
   void setPrivateGCThing(gc::Cell* cell) {
-    MOZ_ASSERT_IF(IsMarkedBlack(this), !cell->isMarkedGray());
+#ifdef DEBUG
+    if (IsMarkedBlack(this)) {
+      JS::AssertCellIsNotGray(cell);
+    }
+#endif
     void** pprivate = &privateRef(numFixedSlots());
     privateWriteBarrierPre(pprivate);
     *pprivate = reinterpret_cast<void*>(cell);
     privateWriteBarrierPost(pprivate);
   }
 
   void setPrivateUnbarriered(void* data) {
     void** pprivate = &privateRef(numFixedSlots());
--- a/js/src/vm/ProxyObject.cpp
+++ b/js/src/vm/ProxyObject.cpp
@@ -48,23 +48,26 @@ static gc::AllocKind GetProxyGCObjectKin
 /* static */ ProxyObject* ProxyObject::New(JSContext* cx,
                                            const BaseProxyHandler* handler,
                                            HandleValue priv, TaggedProto proto_,
                                            const ProxyOptions& options) {
   Rooted<TaggedProto> proto(cx, proto_);
 
   const Class* clasp = options.clasp();
 
+#ifdef DEBUG
   MOZ_ASSERT(isValidProxyClass(clasp));
   MOZ_ASSERT(clasp->shouldDelayMetadataBuilder());
   MOZ_ASSERT_IF(proto.isObject(),
                 cx->compartment() == proto.toObject()->compartment());
   MOZ_ASSERT(clasp->hasFinalize());
-  MOZ_ASSERT_IF(priv.isGCThing(),
-                !JS::GCThingIsMarkedGray(JS::GCCellPtr(priv)));
+  if (priv.isGCThing()) {
+    JS::AssertCellIsNotGray(priv.toGCThing());
+  }
+#endif
 
   /*
    * Eagerly mark properties unknown for proxies, so we don't try to track
    * their properties and so that we don't need to walk the compartment if
    * their prototype changes later.  But don't do this for DOM proxies,
    * because we want to be able to keep track of them in typesets in useful
    * ways.
    */
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -535,16 +535,23 @@ JS::Realm* JitFrameIter::realm() const {
 
   if (isWasm()) {
     return asWasm().instance()->realm();
   }
 
   return asJSJit().script()->realm();
 }
 
+uint8_t* JitFrameIter::returnAddressToFp() const {
+  if (isWasm()) {
+    return asWasm().returnAddressToFp();
+  }
+  return asJSJit().returnAddressToFp();
+}
+
 bool JitFrameIter::done() const {
   if (!isSome()) {
     return true;
   }
   if (isJSJit()) {
     return asJSJit().done();
   }
   if (isWasm()) {
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1878,16 +1878,20 @@ class JitFrameIter {
 
   // Operations common to all frame iterators.
   const jit::JitActivation* activation() const { return act_; }
   bool done() const;
   void operator++();
 
   JS::Realm* realm() const;
 
+  // Returns the return address of the frame above this one (that is, the
+  // return address that returns back to the current frame).
+  uint8_t* returnAddressToFp() const;
+
   // Operations which have an effect only on JIT frames.
   void skipNonScriptedJSFrames();
 
   // Returns true iff this is a JIT frame with a self-hosted script. Note: be
   // careful, JitFrameIter does not consider functions inlined by Ion.
   bool isSelfHostedIgnoringInlining() const;
 };
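
returnAddressToFp() dispatches to whichever concrete iterator is active, so
callers can treat wasm and JS JIT frames uniformly. A sketch of a frame walk
using it (noteReturnAddress is an illustrative sink, not an engine API):

    // Sketch: read each frame's incoming return address during a walk.
    void WalkReturnAddresses(js::JitFrameIter& iter) {
      for (; !iter.done(); ++iter) {
        noteReturnAddress(iter.returnAddressToFp());  // illustrative sink
      }
    }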
 
--- a/js/src/vm/StringType.h
+++ b/js/src/vm/StringType.h
@@ -589,17 +589,17 @@ class JSString : public js::gc::Cell {
   // access 32-bit length and flags fields so do endian trickery here.
 #if JS_BITS_PER_WORD == 32
   static constexpr size_t offsetOfFlags() {
     return offsetof(JSString, d.flags_);
   }
   static constexpr size_t offsetOfLength() {
     return offsetof(JSString, d.length_);
   }
-#elif defined(MOZ_LITTLE_ENDIAN)
+#elif MOZ_LITTLE_ENDIAN
   static constexpr size_t offsetOfFlags() {
     return offsetof(JSString, d.flags_);
   }
   static constexpr size_t offsetOfLength() {
     return offsetof(JSString, d.flags_) + sizeof(uint32_t);
   }
 #else
   static constexpr size_t offsetOfFlags() {
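
This one-token fix matters on big-endian 64-bit targets: assuming
mfbt/EndianUtils.h defines MOZ_LITTLE_ENDIAN unconditionally (1 on
little-endian, 0 otherwise), the old defined() test was true everywhere and
selected the little-endian offsets even on big-endian builds. The directive
before and after:

    #elif defined(MOZ_LITTLE_ENDIAN)  // old: true on every target
    #elif MOZ_LITTLE_ENDIAN           // new: true only on little-endian
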
--- a/js/src/vm/TaggedProto.h
+++ b/js/src/vm/TaggedProto.h
@@ -82,38 +82,36 @@ struct MovableCellHasher<TaggedProto> {
   static bool match(const Key& k, const Lookup& l) {
     return k.isDynamic() == l.isDynamic() && k.isObject() == l.isObject() &&
            (!k.isObject() ||
             MovableCellHasher<JSObject*>::match(k.toObject(), l.toObject()));
   }
 };
 
 #ifdef DEBUG
-MOZ_ALWAYS_INLINE bool TaggedProtoIsNotGray(const TaggedProto& proto) {
-  if (!proto.isObject()) {
-    return true;
+MOZ_ALWAYS_INLINE void AssertTaggedProtoIsNotGray(const TaggedProto& proto) {
+  if (proto.isObject()) {
+    JS::AssertObjectIsNotGray(proto.toObject());
   }
-
-  return JS::ObjectIsNotGray(proto.toObject());
 }
 #endif
 
 template <>
 struct InternalBarrierMethods<TaggedProto> {
   static void preBarrier(TaggedProto& proto);
 
   static void postBarrier(TaggedProto* vp, TaggedProto prev, TaggedProto next);
 
   static void readBarrier(const TaggedProto& proto);
 
   static bool isMarkable(const TaggedProto& proto) { return proto.isObject(); }
 
 #ifdef DEBUG
-  static bool thingIsNotGray(const TaggedProto& proto) {
-    return TaggedProtoIsNotGray(proto);
+  static void assertThingIsNotGray(const TaggedProto& proto) {
+    AssertTaggedProtoIsNotGray(proto);
   }
 #endif
 };
 
 template <class Wrapper>
 class WrappedPtrOperations<TaggedProto, Wrapper> {
   const TaggedProto& value() const {
     return static_cast<const Wrapper*>(this)->get();
--- a/js/src/vm/Time.cpp
+++ b/js/src/vm/Time.cpp
@@ -155,22 +155,17 @@ void PRMJ_NowShutdown() { DeleteCritical
 #define MUTEX_SETSPINCOUNT(m, c) SetCriticalSectionSpinCount((m), (c))
 
 // Please see bug 363258 for why the win32 timing code is so complex.
 static int64_t PRMJ_NowImpl() {
   if (pGetSystemTimePreciseAsFileTime) {
     // Windows 8 has a new API function that does all the work.
     FILETIME ft;
     pGetSystemTimePreciseAsFileTime(&ft);
-    int64_t now = int64_t(FileTimeToUnixMicroseconds(ft));
-    // We check the FuzzyFox clock in case it was recently disabled, to prevent
-    // time from going backwards.
-    return mozilla::TimeStamp::NowFuzzyTime() > now
-               ? mozilla::TimeStamp::NowFuzzyTime()
-               : now;
+    return int64_t(FileTimeToUnixMicroseconds(ft));
   }
 
   bool calibrated = false;
   bool needsCalibration = !calibration.calibrated;
   double cachedOffset = 0.0;
   while (true) {
     if (needsCalibration) {
       MUTEX_LOCK(&calibration.data_lock);
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -1900,17 +1900,16 @@ static const ClassOps TypedArrayClassOps
     TypedArrayObject::finalize,   /* finalize    */
     nullptr,                      /* call        */
     nullptr,                      /* hasInstance */
     nullptr,                      /* construct   */
     ArrayBufferViewObject::trace, /* trace  */
 };
 
 static const ClassExtension TypedArrayClassExtension = {
-    nullptr,
     TypedArrayObject::objectMoved,
 };
 
 #define IMPL_TYPED_ARRAY_PROPERTIES(_type)                            \
   {                                                                   \
     JS_INT32_PS("BYTES_PER_ELEMENT", _type##Array::BYTES_PER_ELEMENT, \
                 JSPROP_READONLY | JSPROP_PERMANENT),                  \
         JS_PS_END                                                     \
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -137,16 +137,17 @@
 #include "jit/mips-shared/Assembler-mips-shared.h"
 #include "jit/mips64/Assembler-mips64.h"
 #endif
 
 #include "wasm/WasmGenerator.h"
 #include "wasm/WasmInstance.h"
 #include "wasm/WasmOpIter.h"
 #include "wasm/WasmSignalHandlers.h"
+#include "wasm/WasmStubs.h"
 #include "wasm/WasmValidate.h"
 
 #include "jit/MacroAssembler-inl.h"
 
 using mozilla::DebugOnly;
 using mozilla::FloorLog2;
 using mozilla::IsPowerOfTwo;
 using mozilla::Maybe;
@@ -1577,20 +1578,20 @@ class BaseStackFrame final : public Base
   void storeLocalF64(RegF64 src, const Local& dest) {
     masm.storeDouble(src, Address(sp_, localOffset(dest)));
   }
 
   void storeLocalF32(RegF32 src, const Local& dest) {
     masm.storeFloat32(src, Address(sp_, localOffset(dest)));
   }
 
- private:
   // Offset off of sp_ for `local`.
   int32_t localOffset(const Local& local) { return localOffset(local.offs); }
 
+ private:
   // Offset off of sp_ for a local with offset `offset` from Frame.
   int32_t localOffset(int32_t offset) { return masm.framePushed() - offset; }
 
  public:
   ///////////////////////////////////////////////////////////////////////////
   //
   // Dynamic area
 
@@ -1822,16 +1823,562 @@ void BaseStackFrame::zeroLocals(BaseRegA
     masm.storePtr(zero, Address(p, -(wordSize * i)));
   }
 
   ra->freeI32(p);
   ra->freeI32(lim);
   ra->freeI32(zero);
 }
 
+// Value stack: stack elements
+
+struct Stk {
+ private:
+  Stk() : kind_(Unknown), i64val_(0) {}
+
+ public:
+  enum Kind {
+    // The Mem opcodes are all clustered at the beginning to
+    // allow for a quick test within sync().
+    MemI32,  // 32-bit integer stack value ("offs")
+    MemI64,  // 64-bit integer stack value ("offs")
+    MemF32,  // 32-bit floating stack value ("offs")
+    MemF64,  // 64-bit floating stack value ("offs")
+    MemRef,  // reftype (pointer wide) stack value ("offs")
+
+    // The Local opcodes follow the Mem opcodes for a similar
+    // quick test within hasLocal().
+    LocalI32,  // Local int32 var ("slot")
+    LocalI64,  // Local int64 var ("slot")
+    LocalF32,  // Local float32 var ("slot")
+    LocalF64,  // Local double var ("slot")
+    LocalRef,  // Local reftype (pointer wide) var ("slot")
+
+    RegisterI32,  // 32-bit integer register ("i32reg")
+    RegisterI64,  // 64-bit integer register ("i64reg")
+    RegisterF32,  // 32-bit floating register ("f32reg")
+    RegisterF64,  // 64-bit floating register ("f64reg")
+    RegisterRef,  // reftype (pointer wide) register ("refReg")
+
+    ConstI32,  // 32-bit integer constant ("i32val")
+    ConstI64,  // 64-bit integer constant ("i64val")
+    ConstF32,  // 32-bit floating constant ("f32val")
+    ConstF64,  // 64-bit floating constant ("f64val")
+    ConstRef,  // reftype (pointer wide) constant ("refval")
+
+    Unknown,
+  };
+
+  Kind kind_;
+
+  static const Kind MemLast = MemRef;
+  static const Kind LocalLast = LocalRef;
+
+  union {
+    RegI32 i32reg_;
+    RegI64 i64reg_;
+    RegPtr refReg_;
+    RegF32 f32reg_;
+    RegF64 f64reg_;
+    int32_t i32val_;
+    int64_t i64val_;
+    intptr_t refval_;
+    float f32val_;
+    double f64val_;
+    uint32_t slot_;
+    uint32_t offs_;
+  };
+
+  explicit Stk(RegI32 r) : kind_(RegisterI32), i32reg_(r) {}
+  explicit Stk(RegI64 r) : kind_(RegisterI64), i64reg_(r) {}
+  explicit Stk(RegPtr r) : kind_(RegisterRef), refReg_(r) {}
+  explicit Stk(RegF32 r) : kind_(RegisterF32), f32reg_(r) {}
+  explicit Stk(RegF64 r) : kind_(RegisterF64), f64reg_(r) {}
+  explicit Stk(int32_t v) : kind_(ConstI32), i32val_(v) {}
+  explicit Stk(int64_t v) : kind_(ConstI64), i64val_(v) {}
+  explicit Stk(float v) : kind_(ConstF32), f32val_(v) {}
+  explicit Stk(double v) : kind_(ConstF64), f64val_(v) {}
+  explicit Stk(Kind k, uint32_t v) : kind_(k), slot_(v) {
+    MOZ_ASSERT(k > MemLast && k <= LocalLast);
+  }
+  static Stk StkRef(intptr_t v) {
+    Stk s;
+    s.kind_ = ConstRef;
+    s.refval_ = v;
+    return s;
+  }
+
+  void setOffs(Kind k, uint32_t v) {
+    MOZ_ASSERT(k <= MemLast);
+    kind_ = k;
+    offs_ = v;
+  }
+
+  Kind kind() const { return kind_; }
+  bool isMem() const { return kind_ <= MemLast; }
+
+  RegI32 i32reg() const {
+    MOZ_ASSERT(kind_ == RegisterI32);
+    return i32reg_;
+  }
+  RegI64 i64reg() const {
+    MOZ_ASSERT(kind_ == RegisterI64);
+    return i64reg_;
+  }
+  RegPtr refReg() const {
+    MOZ_ASSERT(kind_ == RegisterRef);
+    return refReg_;
+  }
+  RegF32 f32reg() const {
+    MOZ_ASSERT(kind_ == RegisterF32);
+    return f32reg_;
+  }
+  RegF64 f64reg() const {
+    MOZ_ASSERT(kind_ == RegisterF64);
+    return f64reg_;
+  }
+
+  int32_t i32val() const {
+    MOZ_ASSERT(kind_ == ConstI32);
+    return i32val_;
+  }
+  int64_t i64val() const {
+    MOZ_ASSERT(kind_ == ConstI64);
+    return i64val_;
+  }
+  intptr_t refval() const {
+    MOZ_ASSERT(kind_ == ConstRef);
+    return refval_;
+  }
+
+  // For these two, use an out-param instead of simply returning, to
+  // use the normal stack and not the x87 FP stack (which can alter
+  // NaNs with the signaling bit set).
+
+  void f32val(float* out) const {
+    MOZ_ASSERT(kind_ == ConstF32);
+    *out = f32val_;
+  }
+  void f64val(double* out) const {
+    MOZ_ASSERT(kind_ == ConstF64);
+    *out = f64val_;
+  }
+
+  uint32_t slot() const {
+    MOZ_ASSERT(kind_ > MemLast && kind_ <= LocalLast);
+    return slot_;
+  }
+  uint32_t offs() const {
+    MOZ_ASSERT(isMem());
+    return offs_;
+  }
+};
+
+typedef Vector<Stk, 8, SystemAllocPolicy> StkVector;
+
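Stk is a compact tagged union: kind_ selects the live arm, each accessor
asserts it, and the Mem and Local kinds are clustered so isMem() and the local
test are single comparisons. The f32val/f64val out-params keep constants off
the x87 stack on 32-bit x86, as the comment above notes. A minimal sketch of
its use:

    // Sketch: constructing and inspecting value-stack entries.
    Stk c(int32_t(42));
    MOZ_ASSERT(c.kind() == Stk::ConstI32);
    MOZ_ASSERT(c.i32val() == 42);
    MOZ_ASSERT(!c.isMem());  // Mem kinds come first in Kind

    Stk local(Stk::LocalI32, 3);  // a local in slot 3
    MOZ_ASSERT(local.slot() == 3);
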
+// MachineStackTracker, used for stack-slot pointerness tracking.
+
+class MachineStackTracker {
+  // Simulates the machine's stack, with one bool per word.  Index zero in
+  // this vector corresponds to the highest address in the machine stack.  The
+  // last entry corresponds to what SP currently points at.  This all assumes
+  // a grow-down stack.
+  //
+  // numPtrs_ contains the number of "true" values in vec_, and is therefore
+  // redundant.  But it serves as a constant-time way to detect the common
+  // case where vec_ holds no "true" values.
+  size_t numPtrs_;
+  Vector<bool, 64, SystemAllocPolicy> vec_;
+
+ public:
+  MachineStackTracker() : numPtrs_(0) {}
+
+  ~MachineStackTracker() {
+#ifdef DEBUG
+    size_t n = 0;
+    for (bool b : vec_) {
+      n += (b ? 1 : 0);
+    }
+    MOZ_ASSERT(n == numPtrs_);
+#endif
+  }
+
+  // Clone this MachineStackTracker, writing the result at |dst|.
+  MOZ_MUST_USE bool cloneTo(MachineStackTracker* dst) {
+    MOZ_ASSERT(dst->vec_.empty());
+    if (!dst->vec_.appendAll(vec_)) {
+      return false;
+    }
+    dst->numPtrs_ = numPtrs_;
+    return true;
+  }
+
+  // Notionally push |n| non-pointers on the stack.
+  MOZ_MUST_USE bool pushNonGCPointers(size_t n) {
+    return vec_.appendN(false, n);
+  }
+
+  // Mark the stack slot |offsetFromSP| up from the bottom as holding a
+  // pointer.
+  void setGCPointer(size_t offsetFromSP) {
+    // Offset 0 is the most recently pushed, offset 1 is the second most
+    // recently pushed item, etc.
+    MOZ_ASSERT(offsetFromSP < vec_.length());
+
+    size_t offsetFromTop = vec_.length() - 1 - offsetFromSP;
+    numPtrs_ = numPtrs_ + 1 - (vec_[offsetFromTop] ? 1 : 0);
+    vec_[offsetFromTop] = true;
+  }
+
+  // Query the pointerness of the slot |offsetFromSP| up from the bottom.
+  bool isGCPointer(size_t offsetFromSP) {
+    MOZ_ASSERT(offsetFromSP < vec_.length());
+    return vec_[offsetFromSP];
+  }
+
+  // Return the number of words tracked by this MachineStackTracker.
+  size_t length() { return vec_.length(); }
+
+  // Return the number of pointer-typed words tracked by this
+  // MachineStackTracker.
+  size_t numPtrs() {
+    MOZ_ASSERT(numPtrs_ <= length());
+    return numPtrs_;
+  }
+};
+
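MachineStackTracker mirrors machine-stack pushes one bool per word, with
numPtrs_ caching how many of those words hold GC pointers. The intended
protocol, in a minimal sketch:

    // Sketch: mirror a push of four words, then mark one as a pointer.
    MachineStackTracker mst;
    if (mst.pushNonGCPointers(4)) {
      mst.setGCPointer(2);  // the word two slots up from SP is a pointer
      MOZ_ASSERT(mst.length() == 4);
      MOZ_ASSERT(mst.numPtrs() == 1);
      MOZ_ASSERT(mst.isGCPointer(2));
    }
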
+// StackMapGenerator, which carries all state needed to create stack maps.
+
+enum class HasRefTypedDebugFrame { No, Yes };
+
+struct StackMapGenerator {
+ private:
+  // --- These are constant for the life of the function's compilation ---
+
+  // For generating stack maps, we'll need to know the offsets of registers
+  // as saved by the trap exit stub.
+  const MachineState& trapExitLayout_;
+  const size_t trapExitLayoutNumWords_;
+
+  // Completed stackmaps are added here
+  StackMaps* stackMaps_;
+
+  // So as to be able to get current offset when creating stack maps
+  const MacroAssembler& masm_;
+
+ public:
+  // --- These are constant once we've completed beginFunction() ---
+
+  // The number of words of arguments passed to this function in memory.
+  size_t numStackArgWords_;
+
+  MachineStackTracker mst_;  // tracks machine stack pointerness
+
+  // This holds masm.framePushed at entry to the function's body.  It is a
+  // Maybe because createStackMap needs to know whether or not we're still
+  // in the prologue.  It makes a Nothing-to-Some transition just once per
+  // function.
+  Maybe<uint32_t> framePushedAtEntryToBody_;
+
+  // --- These can change at any point ---
+
+  // This holds masm.framePushed immediately before we move the stack
+  // pointer down so as to reserve space, in a function call, for arguments
+  // passed in memory.  To be more precise: this holds the value
+  // masm.framePushed would have had after moving the stack pointer over any
+  // alignment padding pushed before the arguments proper, but before the
+  // downward movement of the stack pointer that allocates space for the
+  // arguments proper.
+  //
+  // When not inside a function call setup/teardown sequence, it is Nothing.
+  // It can make Nothing-to/from-Some transitions arbitrarily as we progress
+  // through the function body.
+  Maybe<uint32_t> framePushedBeforePushingCallArgs_;
+
+  // The number of memory-resident, ref-typed entries on the containing
+  // BaseCompiler::stk_.
+  size_t memRefsOnStk_;
+
+  StackMapGenerator(StackMaps* stackMaps, const MachineState& trapExitLayout,
+                    const size_t trapExitLayoutNumWords,
+                    const MacroAssembler& masm)
+      : trapExitLayout_(trapExitLayout),
+        trapExitLayoutNumWords_(trapExitLayoutNumWords),
+        stackMaps_(stackMaps),
+        masm_(masm),
+        memRefsOnStk_(0) {}
+
+  // At the beginning of a function, we may have live roots in registers (as
+  // arguments) at the point where we perform a stack overflow check.  This
+  // method generates the "extra" stackmap entries to describe that, in the
+  // case that the check fails and we wind up calling into the wasm exit
+  // stub, as generated by GenerateTrapExit().
+  //
+  // The resulting map must correspond precisely with the stack layout
+  // created for the integer registers as saved by (code generated by)
+  // GenerateTrapExit().  To do that we use trapExitLayout_ and
+  // trapExitLayoutNumWords_, which together comprise a description of the
+  // layout and are created by GenerateTrapExitMachineState().
+  MOZ_MUST_USE bool generateStackmapEntriesForTrapExit(
+      const ValTypeVector& args, ExitStubMapVector& extras) {
+    MOZ_ASSERT(extras.empty());
+
+    // If this doesn't hold, we can't distinguish saved and not-saved
+    // registers in the MachineState.  See MachineState::MachineState().
+    MOZ_ASSERT(trapExitLayoutNumWords_ < 0x100);
+
+    if (!extras.appendN(false, trapExitLayoutNumWords_)) {
+      return false;
+    }
+
+    for (ABIArgIter<const ValTypeVector> i(args); !i.done(); i++) {
+      if (!i->argInRegister() || i.mirType() != MIRType::Pointer) {
+        continue;
+      }
+
+      size_t offsetFromTop =
+          reinterpret_cast<size_t>(trapExitLayout_.address(i->gpr()));
+
+      // If this doesn't hold, the associated register wasn't saved by
+      // the trap exit stub.  Better to crash now than much later, in
+      // some obscure place, and possibly with security consequences.
+      MOZ_RELEASE_ASSERT(offsetFromTop < trapExitLayoutNumWords_);
+
+      // offsetFromTop is an offset in words down from the highest
+      // address in the exit stub save area.  Switch it around to be an
+      // offset up from the bottom of the (integer register) save area.
+      size_t offsetFromBottom = trapExitLayoutNumWords_ - 1 - offsetFromTop;
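+      // Worked example (illustrative): if trapExitLayoutNumWords_ is 16
+      // and a register's save slot is 3 words down from the top
+      // (offsetFromTop == 3), then offsetFromBottom == 16 - 1 - 3 == 12.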
+
+      extras[offsetFromBottom] = true;
+    }
+
+    return true;
+  }
+
+  // Creates a stackmap associated with the instruction denoted by
+  // |assemblerOffset|, incorporating pointers from the current operand
+  // stack |stk|, incorporating possible extra pointers in |extras| at the
+  // lower-addressed end, and possibly with the associated frame having a
+  // ref-typed DebugFrame, as indicated by |refDebugFrame|.
+  MOZ_MUST_USE bool createStackMap(const char* who,
+                                   const ExitStubMapVector& extras,
+                                   uint32_t assemblerOffset,
+                                   HasRefTypedDebugFrame refDebugFrame,
+                                   const StkVector& stk) {
+    size_t countedPointers = mst_.numPtrs() + memRefsOnStk_;
+#ifndef DEBUG
+    // An important optimization.  If there are obviously no pointers, as
+    // we expect in the majority of cases, exit quickly.
+    if (countedPointers == 0 && extras.empty() &&
+        refDebugFrame == HasRefTypedDebugFrame::No) {
+      return true;
+    }
+#else
+    // In the debug case, create the stack map regardless, and cross-check
+    // the pointer-counting below.  We expect the final map to have
+    // |countedPointers| in total.  This doesn't include those in the
+    // DebugFrame, but they do not appear in the map's bitmap.  Note that
+    // |countedPointers| is debug-only from this point onwards.
+    for (bool b : extras) {
+      countedPointers += (b ? 1 : 0);
+    }
+#endif
+
+    // Start with the frame-setup map, and add operand-stack information
+    // to that.
+    MachineStackTracker augmentedMst;
+    if (!mst_.cloneTo(&augmentedMst)) {
+      return false;
+    }
+
+    // At this point, augmentedMst only contains entries covering the
+    // incoming argument area (if any) and for the area allocated by this
+    // function's prologue.  We now need to calculate how far the machine's
+    // stack pointer is below where it was at the start of the body.  But we
+    // must take care not to include any words pushed as arguments to an
+    // upcoming function call, since those words "belong" to the stackmap of
+    // the callee, not to the stackmap of this function.  Note however that
+    // any alignment padding pushed prior to pushing the args *does* belong to
+    // this function.  That padding is taken into account at the point where
+    // framePushedBeforePushingCallArgs_ is set.
+    Maybe<uint32_t> framePushedExcludingArgs;
+    if (framePushedAtEntryToBody_.isNothing()) {
+      // Still in the prologue.  framePushedExcludingArgs remains Nothing.
+      MOZ_ASSERT(framePushedBeforePushingCallArgs_.isNothing());
+    } else {
+      // In the body.
+      MOZ_ASSERT(masm_.framePushed() >= framePushedAtEntryToBody_.value());
+      if (framePushedBeforePushingCallArgs_.isSome()) {
+        // In the body, and we've potentially pushed some args onto the stack.
+        // We must ignore them when sizing the stackmap.
+        MOZ_ASSERT(masm_.framePushed() >=
+                   framePushedBeforePushingCallArgs_.value());
+        MOZ_ASSERT(framePushedBeforePushingCallArgs_.value() >=
+                   framePushedAtEntryToBody_.value());
+        framePushedExcludingArgs =
+            Some(framePushedBeforePushingCallArgs_.value());
+      } else {
+        // In the body, but not with call args on the stack.  The stackmap
+        // must be sized so as to extend all the way "down" to
+        // masm_.framePushed().
+        framePushedExcludingArgs = Some(masm_.framePushed());
+      }
+    }
+
+    if (framePushedExcludingArgs.isSome()) {
+      uint32_t bodyPushedBytes =
+          framePushedExcludingArgs.value() - framePushedAtEntryToBody_.value();
+      MOZ_ASSERT(0 == bodyPushedBytes % sizeof(void*));
+      if (!augmentedMst.pushNonGCPointers(bodyPushedBytes / sizeof(void*))) {
+        return false;
+      }
+    }
+
+    // Scan the operand stack, marking pointers in the just-added new
+    // section.
+    MOZ_ASSERT_IF(framePushedAtEntryToBody_.isNothing(), stk.empty());
+    MOZ_ASSERT_IF(framePushedExcludingArgs.isNothing(), stk.empty());
+
+    for (const Stk& v : stk) {
+#ifndef DEBUG
+      // We don't track roots in registers, per rationale below, so if this
+      // doesn't hold, something is seriously wrong, and we're likely to get a
+      // GC-related crash.
+      MOZ_RELEASE_ASSERT(v.kind() != Stk::RegisterRef);
+      if (v.kind() != Stk::MemRef) {
+        continue;
+      }
+#else
+      // Take the opportunity to check everything we reasonably can about
+      // operand stack elements.
+      switch (v.kind()) {
+        case Stk::MemI32:
+        case Stk::MemI64:
+        case Stk::MemF32:
+        case Stk::MemF64:
+        case Stk::ConstI32:
+        case Stk::ConstI64:
+        case Stk::ConstF32:
+        case Stk::ConstF64:
+          // All of these have uninteresting type.
+          continue;
+        case Stk::LocalI32:
+        case Stk::LocalI64:
+        case Stk::LocalF32:
+        case Stk::LocalF64:
+          // These also have uninteresting type.  Check that they live in the
+          // section of stack set up by beginFunction().  The unguarded use of
+          // |value()| here is safe due to the assertion above this loop.
+          MOZ_ASSERT(v.offs() <= framePushedAtEntryToBody_.value());
+          continue;
+        case Stk::RegisterI32:
+        case Stk::RegisterI64:
+        case Stk::RegisterF32:
+        case Stk::RegisterF64:
+          // These also have uninteresting type, but more to the point: all
+          // registers holding live values should have been flushed to the
+          // machine stack immediately prior to the instruction to which this
+          // stackmap pertains.  So these can't happen.
+          MOZ_CRASH("createStackMap: operand stack has Register-non-Ref");
+        case Stk::MemRef:
+          // This is the only case we care about.  We'll handle it after the
+          // switch.
+          break;
+        case Stk::LocalRef:
+          // We need the stackmap to mention this pointer, but it should
+          // already be in the mst_ section created by beginFunction().
+          MOZ_ASSERT(v.offs() <= framePushedAtEntryToBody_.value());
+          continue;
+        case Stk::ConstRef:
+          // This can currently only be a null pointer.
+          MOZ_ASSERT(v.refval() == 0);
+          continue;
+        case Stk::RegisterRef:
+          // This can't happen, per rationale above.
+          MOZ_CRASH("createStackMap: operand stack contains RegisterRef");
+        default:
+          MOZ_CRASH("createStackMap: unknown operand stack element");
+      }
+#endif
+      // v.offs() holds masm.framePushed() at the point immediately after it
+      // was pushed on the stack.  Since it's still on the stack,
+      // masm.framePushed() can't be less.
+      MOZ_ASSERT(v.offs() <= framePushedExcludingArgs.value());
+      uint32_t offsFromMapLowest = framePushedExcludingArgs.value() - v.offs();
+      MOZ_ASSERT(0 == offsFromMapLowest % sizeof(void*));
+      augmentedMst.setGCPointer(offsFromMapLowest / sizeof(void*));
+    }
+
+    // Create the final StackMap.  The initial map is zeroed out, so there's
+    // no need to write zero bits in it.
+    const uint32_t extraWords = extras.length();
+    const uint32_t augmentedMstWords = augmentedMst.length();
+    const uint32_t numMappedWords = extraWords + augmentedMstWords;
+    StackMap* stackMap = StackMap::create(numMappedWords);
+    if (!stackMap) {
+      return false;
+    }
+
+    {
+      // First the exit stub extra words, if any.
+      uint32_t i = 0;
+      for (bool b : extras) {
+        if (b) {
+          stackMap->setBit(i);
+        }
+        i++;
+      }
+    }
+    // Followed by the "main" part of the map.
+    for (uint32_t i = 0; i < augmentedMstWords; i++) {
+      if (augmentedMst.isGCPointer(i)) {
+        stackMap->setBit(numMappedWords - 1 - i);
+      }
+    }
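+    // Worked example (illustrative): with extraWords == 2 and
+    // augmentedMstWords == 4, numMappedWords == 6.  The extras occupy bits
+    // 0 and 1, and augmentedMst index 0 (the first word pushed, at the
+    // highest address) maps to bit 6 - 1 - 0 == 5.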
+
+    stackMap->setExitStubWords(extraWords);
+
+    // Record in the map how far down from the highest address the Frame* is.
+    // Take the opportunity to check that we haven't marked any part of the
+    // Frame itself as a pointer.
+    stackMap->setFrameOffsetFromTop(numStackArgWords_ +
+                                    sizeof(Frame) / sizeof(void*));
+#ifdef DEBUG
+    for (uint32_t i = 0; i < sizeof(Frame) / sizeof(void*); i++) {
+      MOZ_ASSERT(stackMap->getBit(stackMap->numMappedWords -
+                                  stackMap->frameOffsetFromTop + i) == 0);
+    }
+#endif
+
+    // Note the presence of a ref-typed DebugFrame, if any.
+    if (refDebugFrame == HasRefTypedDebugFrame::Yes) {
+      stackMap->setHasRefTypedDebugFrame();
+    }
+
+    // Add the completed map to the running collection thereof.
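+    // The assembler offset is stored as a pointer-typed key here; the
+    // assumption (editorial) is that it is rebased to a real code address
+    // once the module's code has been placed in memory.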
+    if (!stackMaps_->add((uint8_t*)(uintptr_t)assemblerOffset, stackMap)) {
+      return false;
+    }
+
+#ifdef DEBUG
+    {
+      // Crosscheck the map pointer counting.
+      uint32_t nw = stackMap->numMappedWords;
+      uint32_t np = 0;
+      for (uint32_t i = 0; i < nw; i++) {
+        np += stackMap->getBit(i);
+      }
+      MOZ_ASSERT(size_t(np) == countedPointers);
+    }
+#endif
+
+    return true;
+  }
+};
+
 // The baseline compiler proper.
 
 class BaseCompiler final : public BaseCompilerInterface {
   using Local = BaseStackFrame::Local;
   using LabelVector = Vector<NonAssertingLabel, 8, SystemAllocPolicy>;
   using MIRTypeVector = Vector<MIRType, 8, SystemAllocPolicy>;
 
   // Bit set used for simple bounds check elimination.  Capping this at 64
@@ -1974,16 +2521,18 @@ class BaseCompiler final : public BaseCo
       latentDoubleCmp_;  // Comparison operator, if latentOp_ == Compare, float
                          // types
 
   FuncOffsets offsets_;
   MacroAssembler& masm;  // No '_' suffix - too tedious...
   BaseRegAlloc ra;       // Ditto
   BaseStackFrame fr;
 
+  StackMapGenerator smgen_;
+
   BaseStackFrame::LocalVector localInfo_;
   Vector<OutOfLineCode*, 8, SystemAllocPolicy> outOfLine_;
 
   // On specific platforms we sometimes need to use specific registers.
 
   SpecificRegs specific_;
 
   // The join registers are used to carry values out of blocks.
@@ -1995,19 +2544,20 @@ class BaseCompiler final : public BaseCo
   RegPtr joinRegPtr_;
   RegF32 joinRegF32_;
   RegF64 joinRegF64_;
 
   // There are more members scattered throughout.
 
  public:
   BaseCompiler(const ModuleEnvironment& env, const FuncCompileInput& input,
-               const ValTypeVector& locals, Decoder& decoder,
+               const ValTypeVector& locals, const MachineState& trapExitLayout,
+               size_t trapExitLayoutNumWords, Decoder& decoder,
                ExclusiveDeferredValidationState& dvs, TempAllocator* alloc,
-               MacroAssembler* masm);
+               MacroAssembler* masm, StackMaps* stackMaps);
 
   MOZ_MUST_USE bool init();
 
   FuncOffsets finish();
 
   MOZ_MUST_USE bool emitFunction();
   void emitInitStackLocals();
 
@@ -2296,163 +2846,35 @@ class BaseCompiler final : public BaseCo
   // and immediate-constant use.  It tracks constants, latent
   // references to locals, register contents, and values on the CPU
   // stack.
   //
   // The stack can be flushed to memory using sync().  This is handy
   // to avoid problems with control flow and messy register usage
   // patterns.
 
-  struct Stk {
-   private:
-    Stk() : kind_(Unknown), i64val_(0) {}
-
-   public:
-    enum Kind {
-      // The Mem opcodes are all clustered at the beginning to
-      // allow for a quick test within sync().
-      MemI32,  // 32-bit integer stack value ("offs")
-      MemI64,  // 64-bit integer stack value ("offs")
-      MemF32,  // 32-bit floating stack value ("offs")
-      MemF64,  // 64-bit floating stack value ("offs")
-      MemRef,  // reftype (pointer wide) stack value ("offs")
-
-      // The Local opcodes follow the Mem opcodes for a similar
-      // quick test within hasLocal().
-      LocalI32,  // Local int32 var ("slot")
-      LocalI64,  // Local int64 var ("slot")
-      LocalF32,  // Local float32 var ("slot")
-      LocalF64,  // Local double var ("slot")
-      LocalRef,  // Local reftype (pointer wide) var ("slot")
-
-      RegisterI32,  // 32-bit integer register ("i32reg")
-      RegisterI64,  // 64-bit integer register ("i64reg")
-      RegisterF32,  // 32-bit floating register ("f32reg")
-      RegisterF64,  // 64-bit floating register ("f64reg")
-      RegisterRef,  // reftype (pointer wide) register ("refReg")
-
-      ConstI32,  // 32-bit integer constant ("i32val")
-      ConstI64,  // 64-bit integer constant ("i64val")
-      ConstF32,  // 32-bit floating constant ("f32val")
-      ConstF64,  // 64-bit floating constant ("f64val")
-      ConstRef,  // reftype (pointer wide) constant ("refval")
-
-      Unknown,
-    };
-
-    Kind kind_;
-
-    static const Kind MemLast = MemRef;
-    static const Kind LocalLast = LocalRef;
-
-    union {
-      RegI32 i32reg_;
-      RegI64 i64reg_;
-      RegPtr refReg_;
-      RegF32 f32reg_;
-      RegF64 f64reg_;
-      int32_t i32val_;
-      int64_t i64val_;
-      intptr_t refval_;
-      float f32val_;
-      double f64val_;
-      uint32_t slot_;
-      uint32_t offs_;
-    };
-
-    explicit Stk(RegI32 r) : kind_(RegisterI32), i32reg_(r) {}
-    explicit Stk(RegI64 r) : kind_(RegisterI64), i64reg_(r) {}
-    explicit Stk(RegPtr r) : kind_(RegisterRef), refReg_(r) {}
-    explicit Stk(RegF32 r) : kind_(RegisterF32), f32reg_(r) {}
-    explicit Stk(RegF64 r) : kind_(RegisterF64), f64reg_(r) {}
-    explicit Stk(int32_t v) : kind_(ConstI32), i32val_(v) {}
-    explicit Stk(int64_t v) : kind_(ConstI64), i64val_(v) {}
-    explicit Stk(float v) : kind_(ConstF32), f32val_(v) {}
-    explicit Stk(double v) : kind_(ConstF64), f64val_(v) {}
-    explicit Stk(Kind k, uint32_t v) : kind_(k), slot_(v) {
-      MOZ_ASSERT(k > MemLast && k <= LocalLast);
-    }
-    static Stk StkRef(intptr_t v) {
-      Stk s;
-      s.kind_ = ConstRef;
-      s.refval_ = v;
-      return s;
-    }
-
-    void setOffs(Kind k, uint32_t v) {
-      MOZ_ASSERT(k <= MemLast);
-      kind_ = k;
-      offs_ = v;
-    }
-
-    Kind kind() const { return kind_; }
-    bool isMem() const { return kind_ <= MemLast; }
-
-    RegI32 i32reg() const {
-      MOZ_ASSERT(kind_ == RegisterI32);
-      return i32reg_;
-    }
-    RegI64 i64reg() const {
-      MOZ_ASSERT(kind_ == RegisterI64);
-      return i64reg_;
-    }
-    RegPtr refReg() const {
-      MOZ_ASSERT(kind_ == RegisterRef);
-      return refReg_;
-    }
-    RegF32 f32reg() const {
-      MOZ_ASSERT(kind_ == RegisterF32);
-      return f32reg_;
-    }
-    RegF64 f64reg() const {
-      MOZ_ASSERT(kind_ == RegisterF64);
-      return f64reg_;
-    }
-
-    int32_t i32val() const {
-      MOZ_ASSERT(kind_ == ConstI32);
-      return i32val_;
-    }
-    int64_t i64val() const {
-      MOZ_ASSERT(kind_ == ConstI64);
-      return i64val_;
-    }
-    intptr_t refval() const {
-      MOZ_ASSERT(kind_ == ConstRef);
-      return refval_;
-    }
-
-    // For these two, use an out-param instead of simply returning, to
-    // use the normal stack and not the x87 FP stack (which has effect on
-    // NaNs with the signaling bit set).
-
-    void f32val(float* out) const {
-      MOZ_ASSERT(kind_ == ConstF32);
-      *out = f32val_;
-    }
-    void f64val(double* out) const {
-      MOZ_ASSERT(kind_ == ConstF64);
-      *out = f64val_;
-    }
-
-    uint32_t slot() const {
-      MOZ_ASSERT(kind_ > MemLast && kind_ <= LocalLast);
-      return slot_;
-    }
-    uint32_t offs() const {
-      MOZ_ASSERT(isMem());
-      return offs_;
-    }
-  };
-
-  Vector<Stk, 8, SystemAllocPolicy> stk_;
-
-  template <typename... Args>
-  void push(Args&&... args) {
-    stk_.infallibleEmplaceBack(Stk(std::forward<Args>(args)...));
+  StkVector stk_;
+
+#ifdef DEBUG
+  size_t countMemRefsOnStk() {
+    size_t nRefs = 0;
+    for (Stk& v : stk_) {
+      if (v.kind() == Stk::MemRef) {
+        nRefs++;
+      }
+    }
+    return nRefs;
+  }
+#endif
+
+  template <typename T>
+  void push(T item) {
+    // None of the single-arg Stk constructors create a Stk::MemRef, so
+    // there's no need to increment smgen_.memRefsOnStk_ here.
+    stk_.infallibleEmplaceBack(Stk(item));
   }
 
   void pushConstRef(intptr_t v) { stk_.infallibleEmplaceBack(Stk::StkRef(v)); }
 
   void loadConstI32(const Stk& src, RegI32 dest) {
     moveImm32(src.i32val(), dest);
   }
 
@@ -2774,29 +3196,71 @@ class BaseCompiler final : public BaseCo
           v.setOffs(Stk::MemF32, offs);
           break;
         }
         case Stk::LocalRef: {
           ScratchPtr scratch(*this);
           loadLocalRef(v, scratch);
           uint32_t offs = fr.pushPtr(scratch);
           v.setOffs(Stk::MemRef, offs);
+          smgen_.memRefsOnStk_++;
           break;
         }
         case Stk::RegisterRef: {
           uint32_t offs = fr.pushPtr(v.refReg());
           freeRef(v.refReg());
           v.setOffs(Stk::MemRef, offs);
+          smgen_.memRefsOnStk_++;
           break;
         }
         default: { break; }
       }
     }
   }
 
+  // Various methods for creating a stack map.  Stack maps are indexed by the
+  // lowest address of the instruction immediately *after* the instruction of
+  // interest.  In practice that means either: the return point of a call, the
+  // instruction immediately after a trap instruction (the "resume"
+  // instruction), or the instruction immediately following a no-op (when
+  // debugging is enabled).
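+  //
+  // For a call, for example, the CodeOffset returned by the masm call
+  // emitters is the return point, so passing it to createStackMap
+  // associates the map with the first instruction executed after the
+  // callee returns.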
+
+  // Create a vanilla stack map.
+  MOZ_MUST_USE bool createStackMap(const char* who) {
+    const ExitStubMapVector noExtras;
+    return smgen_.createStackMap(who, noExtras, masm.currentOffset(),
+                                 HasRefTypedDebugFrame::No, stk_);
+  }
+
+  // Create a stack map as vanilla, but for a custom assembler offset.
+  MOZ_MUST_USE bool createStackMap(const char* who,
+                                   CodeOffset assemblerOffset) {
+    const ExitStubMapVector noExtras;
+    return smgen_.createStackMap(who, noExtras, assemblerOffset.offset(),
+                                 HasRefTypedDebugFrame::No, stk_);
+  }
+
+  // Create a stack map as vanilla, and note the presence of a ref-typed
+  // DebugFrame on the stack.
+  MOZ_MUST_USE bool createStackMap(const char* who,
+                                   HasRefTypedDebugFrame refDebugFrame) {
+    const ExitStubMapVector noExtras;
+    return smgen_.createStackMap(who, noExtras, masm.currentOffset(),
+                                 refDebugFrame, stk_);
+  }
+
+  // The most general stack map construction.
+  MOZ_MUST_USE bool createStackMap(const char* who,
+                                   const ExitStubMapVector& extras,
+                                   uint32_t assemblerOffset,
+                                   HasRefTypedDebugFrame refDebugFrame) {
+    return smgen_.createStackMap(who, extras, assemblerOffset, refDebugFrame,
+                                 stk_);
+  }
+
   // This is an optimization used to avoid calling sync() for
   // setLocal(): if the local does not exist unresolved on the stack
   // then we can skip the sync.
 
   bool hasLocal(uint32_t slot) {
     for (size_t i = stk_.length(); i > 0; i--) {
       // Memory opcodes are first in the enum, single check against MemLast is
       // fine.
@@ -2819,64 +3283,74 @@ class BaseCompiler final : public BaseCo
       sync();  // TODO / OPTIMIZE: Improve this?  (Bug 1316817)
     }
   }
 
   // Push the register r onto the stack.
 
   void pushI32(RegI32 r) {
     MOZ_ASSERT(!isAvailableI32(r));
-    push(r);
+    push(Stk(r));
   }
 
   void pushI64(RegI64 r) {
     MOZ_ASSERT(!isAvailableI64(r));
-    push(r);
+    push(Stk(r));
   }
 
   void pushRef(RegPtr r) {
     MOZ_ASSERT(!isAvailableRef(r));
-    push(r);
+    push(Stk(r));
   }
 
   void pushF64(RegF64 r) {
     MOZ_ASSERT(!isAvailableF64(r));
-    push(r);
+    push(Stk(r));
   }
 
   void pushF32(RegF32 r) {
     MOZ_ASSERT(!isAvailableF32(r));
-    push(r);
+    push(Stk(r));
   }
 
   // Push the value onto the stack.
 
-  void pushI32(int32_t v) { push(v); }
-
-  void pushI64(int64_t v) { push(v); }
+  void pushI32(int32_t v) { push(Stk(v)); }
+
+  void pushI64(int64_t v) { push(Stk(v)); }
 
   void pushRef(intptr_t v) { pushConstRef(v); }
 
-  void pushF64(double v) { push(v); }
-
-  void pushF32(float v) { push(v); }
+  void pushF64(double v) { push(Stk(v)); }
+
+  void pushF32(float v) { push(Stk(v)); }
 
   // Push the local slot onto the stack.  The slot will not be read
   // here; it will be read when it is consumed, or when a side
   // effect to the slot forces its value to be saved.
 
-  void pushLocalI32(uint32_t slot) { push(Stk::LocalI32, slot); }
-
-  void pushLocalI64(uint32_t slot) { push(Stk::LocalI64, slot); }
-
-  void pushLocalRef(uint32_t slot) { push(Stk::LocalRef, slot); }
-
-  void pushLocalF64(uint32_t slot) { push(Stk::LocalF64, slot); }
-
-  void pushLocalF32(uint32_t slot) { push(Stk::LocalF32, slot); }
+  void pushLocalI32(uint32_t slot) {
+    stk_.infallibleEmplaceBack(Stk(Stk::LocalI32, slot));
+  }
+
+  void pushLocalI64(uint32_t slot) {
+    stk_.infallibleEmplaceBack(Stk(Stk::LocalI64, slot));
+  }
+
+  void pushLocalRef(uint32_t slot) {
+    stk_.infallibleEmplaceBack(Stk(Stk::LocalRef, slot));
+  }
+
+  void pushLocalF64(uint32_t slot) {
+    stk_.infallibleEmplaceBack(Stk(Stk::LocalF64, slot));
+  }
+
+  void pushLocalF32(uint32_t slot) {
+    stk_.infallibleEmplaceBack(Stk(Stk::LocalF32, slot));
+  }
 
   // Call only from other popI32() variants.
   // v must be the stack top.  May pop the CPU stack.
 
   void popI32(const Stk& v, RegI32 dest) {
     MOZ_ASSERT(&v == &stk_.back());
     switch (v.kind()) {
       case Stk::ConstI32:
@@ -3013,28 +3487,34 @@ class BaseCompiler final : public BaseCo
       needRef(specific);
       popRef(v, specific);
       if (v.kind() == Stk::RegisterRef) {
         freeRef(v.refReg());
       }
     }
 
+    // Check the kind before popBack(), so as not to read a popped element.
+    if (v.kind() == Stk::MemRef) {
+      smgen_.memRefsOnStk_--;
+    }
     stk_.popBack();
     return specific;
   }
 
   MOZ_MUST_USE RegPtr popRef() {
     Stk& v = stk_.back();
     RegPtr r;
     if (v.kind() == Stk::RegisterRef) {
       r = v.refReg();
     } else {
       popRef(v, (r = needRef()));
     }
+    // As above, check the kind before popBack().
+    if (v.kind() == Stk::MemRef) {
+      smgen_.memRefsOnStk_--;
+    }
     stk_.popBack();
     return r;
   }
 
   // Call only from other popF64() variants.
   // v must be the stack top.  May pop the CPU stack.
 
   void popF64(const Stk& v, RegF64 dest) {
     MOZ_ASSERT(&v == &stk_.back());
@@ -3400,16 +3880,19 @@ class BaseCompiler final : public BaseCo
           freeF64(v.f64reg());
           break;
         case Stk::RegisterF32:
           freeF32(v.f32reg());
           break;
         case Stk::RegisterRef:
           freeRef(v.refReg());
           break;
+        case Stk::MemRef:
+          smgen_.memRefsOnStk_--;
+          break;
         default:
           break;
       }
     }
     stk_.shrinkTo(stackSize);
   }
 
   void popValueStackBy(uint32_t items) {
@@ -3522,79 +4005,166 @@ class BaseCompiler final : public BaseCo
     // loading of TLS into the FarJumpIsland created by linkCallSites.
     masm.nopPatchableToCall(CallSiteDesc(iter_.lastOpcodeOffset(), kind));
   }
 
   //////////////////////////////////////////////////////////////////////
   //
   // Function prologue and epilogue.
 
-  void beginFunction() {
+  MOZ_MUST_USE bool beginFunction() {
+    JitSpew(JitSpew_Codegen, "# ========================================");
     JitSpew(JitSpew_Codegen, "# Emitting wasm baseline code");
+    JitSpew(JitSpew_Codegen,
+            "# beginFunction: start of function prologue for index %d",
+            (int)func_.index);
+
+    // Make a start on the stack map for this function.  Inspect the args so
+    // as to determine which of them are both in-memory and pointer-typed, and
+    // add entries to mst_ as appropriate.
+
+    const ValTypeVector& argTys = env_.funcTypes[func_.index]->args();
+
+    size_t nStackArgBytes = stackArgAreaSize(argTys);
+    MOZ_ASSERT(nStackArgBytes % sizeof(void*) == 0);
+    smgen_.numStackArgWords_ = nStackArgBytes / sizeof(void*);
+
+    MOZ_ASSERT(smgen_.mst_.length() == 0);
+    if (!smgen_.mst_.pushNonGCPointers(smgen_.numStackArgWords_)) {
+      return false;
+    }
+
+    for (ABIArgIter<const ValTypeVector> i(argTys); !i.done(); i++) {
+      ABIArg argLoc = *i;
+      if (argLoc.kind() != ABIArg::Stack) {
+        continue;
+      }
+      const ValType& ty = argTys[i.index()];
+      if (!ty.isReference()) {
+        continue;
+      }
+      uint32_t offset = argLoc.offsetFromArgBase();
+      MOZ_ASSERT(offset < nStackArgBytes);
+      MOZ_ASSERT(offset % sizeof(void*) == 0);
+      smgen_.mst_.setGCPointer(offset / sizeof(void*));
+    }
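+    // Illustrative example (editorial): on a 64-bit target, an anyref arg
+    // passed in memory at argLoc.offsetFromArgBase() == 8 causes stack-arg
+    // word 1 to be marked as a GC pointer in mst_.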
 
     GenerateFunctionPrologue(
         masm, env_.funcTypes[func_.index]->id,
         env_.mode() == CompileMode::Tier1 ? Some(func_.index) : Nothing(),
         &offsets_);
 
+    // GenerateFunctionPrologue pushes exactly one wasm::Frame's worth of
+    // stuff, and none of the values are GC pointers.  Hence:
+    if (!smgen_.mst_.pushNonGCPointers(sizeof(Frame) / sizeof(void*))) {
+      return false;
+    }
+
     // Initialize DebugFrame fields before the stack overflow trap so that
     // we have the invariant that all observable Frames in a debugEnabled
     // Module have valid DebugFrames.
     if (env_.debugEnabled()) {
 #ifdef JS_CODEGEN_ARM64
       static_assert(DebugFrame::offsetOfFrame() % WasmStackAlignment == 0,
                     "aligned");
 #endif
       masm.reserveStack(DebugFrame::offsetOfFrame());
+      if (!smgen_.mst_.pushNonGCPointers(DebugFrame::offsetOfFrame() /
+                                         sizeof(void*))) {
+        return false;
+      }
+
       masm.store32(
           Imm32(func_.index),
           Address(masm.getStackPointer(), DebugFrame::offsetOfFuncIndex()));
       masm.storePtr(ImmWord(0), Address(masm.getStackPointer(),
                                         DebugFrame::offsetOfFlagsWord()));
-    }
+      // Zero out DebugFrame::cachedReturnJSValue_ and ::resultRef_ for
+      // safety, since it's not easy to establish whether they will always be
+      // defined before a GC.
+      masm.storePtr(ImmWord(0), Address(masm.getStackPointer(),
+                                        DebugFrame::offsetOfResults()));
+      for (size_t i = 0; i < sizeof(js::Value) / sizeof(void*); i++) {
+        masm.storePtr(ImmWord(0),
+                      Address(masm.getStackPointer(),
+                              DebugFrame::offsetOfCachedReturnJSValue() +
+                                  i * sizeof(void*)));
+      }
+    }
+
+    // Generate a stack-overflow check and its associated stack map.
 
     fr.checkStack(ABINonArgReg0, BytecodeOffset(func_.lineOrBytecode));
-    masm.reserveStack(fr.fixedSize() - masm.framePushed());
+
+    const ValTypeVector& args = funcType().args();
+    ExitStubMapVector extras;
+    if (!smgen_.generateStackmapEntriesForTrapExit(args, extras)) {
+      return false;
+    }
+    if (!createStackMap("stack check", extras, masm.currentOffset(),
+                        HasRefTypedDebugFrame::No)) {
+      return false;
+    }
+
+    size_t reservedBytes = fr.fixedSize() - masm.framePushed();
+    MOZ_ASSERT(0 == (reservedBytes % sizeof(void*)));
+
+    masm.reserveStack(reservedBytes);
     fr.onFixedStackAllocated();
+    if (!smgen_.mst_.pushNonGCPointers(reservedBytes / sizeof(void*))) {
+      return false;
+    }
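+    // mst_ is thereby kept in step with masm.framePushed(): every word
+    // reserved above starts out marked as a non-pointer.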
 
     // Copy arguments from registers to stack.
-
-    const ValTypeVector& args = funcType().args();
-
     for (ABIArgIter<const ValTypeVector> i(args); !i.done(); i++) {
       if (!i->argInRegister()) {
         continue;
       }
       Local& l = localInfo_[i.index()];
       switch (i.mirType()) {
         case MIRType::Int32:
           fr.storeLocalI32(RegI32(i->gpr()), l);
           break;
         case MIRType::Int64:
           fr.storeLocalI64(RegI64(i->gpr64()), l);
           break;
-        case MIRType::Pointer:
+        case MIRType::Pointer: {
+          uint32_t offs = fr.localOffset(l);
+          MOZ_ASSERT(0 == (offs % sizeof(void*)));
           fr.storeLocalPtr(RegPtr(i->gpr()), l);
+          smgen_.mst_.setGCPointer(offs / sizeof(void*));
           break;
+        }
         case MIRType::Double:
           fr.storeLocalF64(RegF64(i->fpu()), l);
           break;
         case MIRType::Float32:
           fr.storeLocalF32(RegF32(i->fpu()), l);
           break;
         default:
           MOZ_CRASH("Function argument type");
       }
     }
 
     fr.zeroLocals(&ra);
 
     if (env_.debugEnabled()) {
       insertBreakablePoint(CallSiteDesc::EnterFrame);
-    }
+      if (!createStackMap("debug: breakable point")) {
+        return false;
+      }
+    }
+
+    JitSpew(JitSpew_Codegen,
+            "# beginFunction: enter body with masm.framePushed = %u",
+            masm.framePushed());
+    MOZ_ASSERT(smgen_.framePushedAtEntryToBody_.isNothing());
+    smgen_.framePushedAtEntryToBody_.emplace(masm.framePushed());
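+    // From this point on, createStackMap() sizes each map relative to
+    // this baseline value.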
+
+    return true;
   }
 
   void saveResult() {
     MOZ_ASSERT(env_.debugEnabled());
     size_t debugFrameOffset = masm.framePushed() - DebugFrame::offsetOfFrame();
     Address resultsAddress(masm.getStackPointer(),
                            debugFrameOffset + DebugFrame::offsetOfResults());
     switch (funcType().ret().code()) {
@@ -3647,17 +4217,19 @@ class BaseCompiler final : public BaseCo
         masm.loadPtr(resultsAddress, RegPtr(ReturnReg));
         break;
       case ExprType::NullRef:
       default:
         MOZ_CRASH("Function return type");
     }
   }
 
-  bool endFunction() {
+  MOZ_MUST_USE bool endFunction() {
+    JitSpew(JitSpew_Codegen, "# endFunction: start of function epilogue");
+
     // Always branch to returnLabel_.
     masm.breakpoint();
 
     // Patch the add in the prologue so that it checks against the correct
     // frame size. Flush the constant pool in case it needs to be patched.
     masm.flush();
 
     // Precondition for patching.
@@ -3665,43 +4237,62 @@ class BaseCompiler final : public BaseCo
       return false;
     }
 
     fr.patchCheckStack();
 
     masm.bind(&returnLabel_);
 
     if (env_.debugEnabled()) {
+      // If the return type is a ref, we need to note that in the stack maps
+      // generated here.  Note that this assumes that DebugFrame::result* and
+      // DebugFrame::cachedReturnJSValue_ are either both ref-typed or they
+      // are both not ref-typed.  It can't represent the situation where one
+      // is and the other isn't.
+      HasRefTypedDebugFrame refDebugFrame = funcType().ret().isReference()
+                                                ? HasRefTypedDebugFrame::Yes
+                                                : HasRefTypedDebugFrame::No;
+
       // Store and reload the return value from DebugFrame::return so that
       // it can be clobbered, and/or modified by the debug trap.
       saveResult();
       insertBreakablePoint(CallSiteDesc::Breakpoint);
+      if (!createStackMap("debug: breakpoint", refDebugFrame)) {
+        return false;
+      }
       insertBreakablePoint(CallSiteDesc::LeaveFrame);
+      if (!createStackMap("debug: leave frame", refDebugFrame)) {
+        return false;
+      }
       restoreResult();
     }
 
     GenerateFunctionEpilogue(masm, fr.fixedSize(), &offsets_);
 
 #if defined(JS_ION_PERF)
     // FIXME - profiling code missing.  No bug for this.
 
     // Note the end of the inline code and start of the OOL code.
     // gen->perfSpewer().noteEndInlineCode(masm);
 #endif
 
+    JitSpew(JitSpew_Codegen, "# endFunction: end of function epilogue");
+    JitSpew(JitSpew_Codegen, "# endFunction: start of OOL code");
     if (!generateOutOfLineCode()) {
       return false;
     }
 
     offsets_.end = masm.currentOffset();
 
     if (!fr.checkStackHeight()) {
       return false;
     }
 
+    JitSpew(JitSpew_Codegen, "# endFunction: end of OOL code for index %d",
+            (int)func_.index);
     return !masm.oom();
   }
 
   //////////////////////////////////////////////////////////////////////
   //
   // Calls.
 
   struct FunctionCall {
@@ -3754,16 +4345,19 @@ class BaseCompiler final : public BaseCo
     call.frameAlignAdjustment = ComputeByteAlignment(
         masm.framePushed() + sizeof(Frame), JitStackAlignment);
   }
 
   void endCall(FunctionCall& call, size_t stackSpace) {
     size_t adjustment = call.stackArgAreaSize + call.frameAlignAdjustment;
     fr.freeArgAreaAndPopBytes(adjustment, stackSpace);
 
+    MOZ_ASSERT(smgen_.framePushedBeforePushingCallArgs_.isSome());
+    smgen_.framePushedBeforePushingCallArgs_.reset();
+
     if (call.isInterModule) {
       masm.loadWasmTlsRegFromFrame();
       masm.loadWasmPinnedRegsFromTls();
       masm.switchToWasmTlsRealm(ABINonArgReturnReg0, ABINonArgReturnReg1);
     } else if (call.usesSystemAbi) {
       // On x86 there are no pinned registers, so don't waste time
       // reloading the Tls.
 #ifndef JS_CODEGEN_X86
@@ -3785,16 +4379,24 @@ class BaseCompiler final : public BaseCo
     ABIArgIter<const T> i(args);
     while (!i.done()) {
       i++;
     }
     return AlignBytes(i.stackBytesConsumedSoFar(), 16u);
   }
 
   void startCallArgs(size_t stackArgAreaSize, FunctionCall* call) {
+    // Record the masm.framePushed() value at this point, before we push args
+    // for the call, but including the alignment space placed above the args.
+    // This defines the lower limit of the stackmap that will be created for
+    // this call.
+    MOZ_ASSERT(smgen_.framePushedBeforePushingCallArgs_.isNothing());
+    smgen_.framePushedBeforePushingCallArgs_.emplace(
+        masm.framePushed() + call->frameAlignAdjustment);
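+    // Worked example (illustrative): if masm.framePushed() is 40 and
+    // call->frameAlignAdjustment is 8, the recorded value is 48: the 8
+    // bytes of alignment padding are covered by this function's stackmaps,
+    // while the argument words pushed below them are not.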
+
     call->stackArgAreaSize = stackArgAreaSize;
 
     size_t adjustment = call->stackArgAreaSize + call->frameAlignAdjustment;
     fr.allocArgArea(adjustment);
   }
 
   const ABIArg reservePointerArgument(FunctionCall* call) {
     return call->abi.next(MIRType::Pointer);
@@ -3942,62 +4544,62 @@ class BaseCompiler final : public BaseCo
       }
       case ValType::NullRef:
         MOZ_CRASH("NullRef not expressible");
       default:
         MOZ_CRASH("Function argument type");
     }
   }
 
-  void callDefinition(uint32_t funcIndex, const FunctionCall& call) {
+  CodeOffset callDefinition(uint32_t funcIndex, const FunctionCall& call) {
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Func);
-    masm.call(desc, funcIndex);
-  }
-
-  void callSymbolic(SymbolicAddress callee, const FunctionCall& call) {
+    return masm.call(desc, funcIndex);
+  }
+
+  CodeOffset callSymbolic(SymbolicAddress callee, const FunctionCall& call) {
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic);
-    masm.call(desc, callee);
+    return masm.call(desc, callee);
   }
 
   // Precondition: sync()
 
-  void callIndirect(uint32_t funcTypeIndex, uint32_t tableIndex,
-                    const Stk& indexVal, const FunctionCall& call) {
+  CodeOffset callIndirect(uint32_t funcTypeIndex, uint32_t tableIndex,
+                          const Stk& indexVal, const FunctionCall& call) {
     const FuncTypeWithId& funcType = env_.types[funcTypeIndex].funcType();
     MOZ_ASSERT(funcType.id.kind() != FuncTypeIdDescKind::None);
 
     const TableDesc& table = env_.tables[tableIndex];
 
     loadI32(indexVal, RegI32(WasmTableCallIndexReg));
 
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Dynamic);
     CalleeDesc callee = CalleeDesc::wasmTable(table, funcType.id);
-    masm.wasmCallIndirect(desc, callee, NeedsBoundsCheck(true));
+    return masm.wasmCallIndirect(desc, callee, NeedsBoundsCheck(true));
   }
 
   // Precondition: sync()
 
-  void callImport(unsigned globalDataOffset, const FunctionCall& call) {
+  CodeOffset callImport(unsigned globalDataOffset, const FunctionCall& call) {
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Dynamic);
     CalleeDesc callee = CalleeDesc::import(globalDataOffset);
-    masm.wasmCallImport(desc, callee);
-  }
-
-  void builtinCall(SymbolicAddress builtin, const FunctionCall& call) {
-    callSymbolic(builtin, call);
-  }
-
-  void builtinInstanceMethodCall(SymbolicAddress builtin,
-                                 const ABIArg& instanceArg,
-                                 const FunctionCall& call) {
+    return masm.wasmCallImport(desc, callee);
+  }
+
+  CodeOffset builtinCall(SymbolicAddress builtin, const FunctionCall& call) {
+    return callSymbolic(builtin, call);
+  }
+
+  CodeOffset builtinInstanceMethodCall(SymbolicAddress builtin,
+                                       const ABIArg& instanceArg,
+                                       const FunctionCall& call) {
     // Builtin method calls assume the TLS register has been set.
     masm.loadWasmTlsRegFromFrame();
 
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic);
-    masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin);
+    return masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin);
   }
 
   //////////////////////////////////////////////////////////////////////
   //
   // Sundry low-level code generators.
 
   // The compiler depends on moveImm32() clearing the high bits of a 64-bit
   // register on 64-bit systems except MIPS64 where high bits are sign extended
@@ -4008,20 +4610,21 @@ class BaseCompiler final : public BaseCo
   void moveImm64(int64_t v, RegI64 dest) { masm.move64(Imm64(v), dest); }
 
   void moveImmRef(intptr_t v, RegPtr dest) { masm.movePtr(ImmWord(v), dest); }
 
   void moveImmF32(float f, RegF32 dest) { masm.loadConstantFloat32(f, dest); }
 
   void moveImmF64(double d, RegF64 dest) { masm.loadConstantDouble(d, dest); }
 
-  void addInterruptCheck() {
+  MOZ_MUST_USE bool addInterruptCheck() {
     ScratchI32 tmp(*this);
     masm.loadWasmTlsRegFromFrame(tmp);
     masm.wasmInterruptCheck(tmp, bytecodeOffset());
+    return createStackMap("addInterruptCheck");
   }
 
   void jumpTable(const LabelVector& labels, Label* theTable) {
     // Flush constant pools to ensure that the table is never interrupted by
     // constant pool entries.
     masm.flush();
 
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
@@ -5867,47 +6470,55 @@ class BaseCompiler final : public BaseCo
 
     // If the pointer being stored is to a tenured object, no barrier.
     masm.branchPtrInNurseryChunk(Assembler::NotEqual, setValue, otherScratch,
                                  skipBarrier);
   }
 
   // This frees the register `valueAddr`.
 
-  void emitPostBarrier(RegPtr valueAddr) {
+  MOZ_MUST_USE bool emitPostBarrier(RegPtr valueAddr) {
     uint32_t bytecodeOffset = iter_.lastOpcodeOffset();
 
     // The `valueAddr` is a raw pointer to the cell within some GC object or
     // TLS area, and we guarantee that the GC will not run while the
     // postbarrier call is active, so push a uintptr_t value.
 #ifdef JS_64BIT
     pushI64(RegI64(Register64(valueAddr)));
-    emitInstanceCall(bytecodeOffset, SigPL_, ExprType::Void,
-                     SymbolicAddress::PostBarrier);
+    if (!emitInstanceCall(bytecodeOffset, SigPL_, ExprType::Void,
+                          SymbolicAddress::PostBarrier)) {
+      return false;
+    }
 #else
     pushI32(RegI32(valueAddr));
-    emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void,
-                     SymbolicAddress::PostBarrier);
-#endif
-  }
-
-  void emitBarrieredStore(const Maybe<RegPtr>& object, RegPtr valueAddr,
-                          RegPtr value) {
+    if (!emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void,
+                          SymbolicAddress::PostBarrier)) {
+      return false;
+    }
+#endif
+    return true;
+  }
+
+  MOZ_MUST_USE bool emitBarrieredStore(const Maybe<RegPtr>& object,
+                                       RegPtr valueAddr, RegPtr value) {
     emitPreBarrier(valueAddr);  // Preserves valueAddr
     masm.storePtr(value, Address(valueAddr, 0));
 
     Label skipBarrier;
     sync();
 
     RegPtr otherScratch = needRef();
     emitPostBarrierGuard(object, otherScratch, value, &skipBarrier);
     freeRef(otherScratch);
 
-    emitPostBarrier(valueAddr);
+    if (!emitPostBarrier(valueAddr)) {
+      return false;
+    }
     masm.bind(&skipBarrier);
+    return true;
   }
 
   ////////////////////////////////////////////////////////////
   //
   // Machinery for optimized conditional branches.
   //
   // To disable this optimization it is enough always to return false from
   // sniffConditionalControl{Cmp,Eqz}.
@@ -6116,17 +6727,18 @@ class BaseCompiler final : public BaseCo
   void emitMultiplyI64();
   void emitMultiplyF32();
   void emitMultiplyF64();
   void emitQuotientI32();
   void emitQuotientU32();
   void emitRemainderI32();
   void emitRemainderU32();
 #ifdef RABALDR_INT_DIV_I64_CALLOUT
-  void emitDivOrModI64BuiltinCall(SymbolicAddress callee, ValType operandType);
+  MOZ_MUST_USE bool emitDivOrModI64BuiltinCall(SymbolicAddress callee,
+                                               ValType operandType);
 #else
   void emitQuotientI64();
   void emitQuotientU64();
   void emitRemainderI64();
   void emitRemainderU64();
 #endif
   void emitDivideF32();
   void emitDivideF64();
@@ -6204,18 +6816,19 @@ class BaseCompiler final : public BaseCo
   void emitConvertI64ToF32();
   void emitConvertU64ToF32();
   void emitConvertI64ToF64();
   void emitConvertU64ToF64();
 #endif
   void emitReinterpretI32AsF32();
   void emitReinterpretI64AsF64();
   void emitRound(RoundingMode roundingMode, ValType operandType);
-  void emitInstanceCall(uint32_t lineOrBytecode, const MIRTypeVector& sig,
-                        ExprType retType, SymbolicAddress builtin);
+  MOZ_MUST_USE bool emitInstanceCall(uint32_t lineOrBytecode,
+                                     const MIRTypeVector& sig, ExprType retType,
+                                     SymbolicAddress builtin);
   MOZ_MUST_USE bool emitGrowMemory();
   MOZ_MUST_USE bool emitCurrentMemory();
 
   MOZ_MUST_USE bool emitRefNull();
   void emitRefIsNull();
 
   MOZ_MUST_USE bool emitAtomicCmpXchg(ValType type, Scalar::Type viewType);
   MOZ_MUST_USE bool emitAtomicLoad(ValType type, Scalar::Type viewType);
@@ -7526,17 +8139,19 @@ bool BaseCompiler::emitLoop() {
   }
 
   initControl(controlItem());
   bceSafe_ = 0;
 
   if (!deadCode_) {
     masm.nopAlign(CodeAlignment);
     masm.bind(&controlItem(0).label);
-    addInterruptCheck();
+    if (!addInterruptCheck()) {
+      return false;
+    }
   }
 
   return true;
 }
 
 void BaseCompiler::endLoop(ExprType type) {
   Control& block = controlItem();
 
@@ -8026,20 +8641,26 @@ bool BaseCompiler::emitCall() {
   FunctionCall baselineCall(lineOrBytecode);
   beginCall(baselineCall, UseABI::Wasm,
             import ? InterModule::True : InterModule::False);
 
   if (!emitCallArgs(funcType.args(), &baselineCall)) {
     return false;
   }
 
+  CodeOffset raOffset;
   if (import) {
-    callImport(env_.funcImportGlobalDataOffsets[funcIndex], baselineCall);
+    raOffset =
+        callImport(env_.funcImportGlobalDataOffsets[funcIndex], baselineCall);
   } else {
-    callDefinition(funcIndex, baselineCall);
+    raOffset = callDefinition(funcIndex, baselineCall);
+  }
+
+  if (!createStackMap("emitCall", raOffset)) {
+    return false;
   }
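+
+  // Note that the map was created while the outgoing argument area was
+  // still allocated; the argument words are nonetheless excluded from it
+  // via framePushedBeforePushingCallArgs_.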
 
   endCall(baselineCall, stackSpace);
 
   popValueStackBy(numArgs);
 
   pushReturnedIfNonVoid(baselineCall, funcType.ret());
 
@@ -8078,17 +8699,21 @@ bool BaseCompiler::emitCallIndirect() {
 
   FunctionCall baselineCall(lineOrBytecode);
   beginCall(baselineCall, UseABI::Wasm, InterModule::True);
 
   if (!emitCallArgs(funcType.args(), &baselineCall)) {
     return false;
   }
 
-  callIndirect(funcTypeIndex, tableIndex, callee, baselineCall);
+  CodeOffset raOffset =
+      callIndirect(funcTypeIndex, tableIndex, callee, baselineCall);
+  if (!createStackMap("emitCallIndirect", raOffset)) {
+    return false;
+  }
 
   endCall(baselineCall, stackSpace);
 
   popValueStackBy(numArgs);
 
   pushReturnedIfNonVoid(baselineCall, funcType.ret());
 
   return true;
@@ -8138,29 +8763,32 @@ bool BaseCompiler::emitUnaryMathBuiltinC
 
   FunctionCall baselineCall(lineOrBytecode);
   beginCall(baselineCall, UseABI::Builtin, InterModule::False);
 
   if (!emitCallArgs(signature, &baselineCall)) {
     return false;
   }
 
-  builtinCall(callee, baselineCall);
+  CodeOffset raOffset = builtinCall(callee, baselineCall);
+  if (!createStackMap("emitUnaryMathBuiltin[..]", raOffset)) {
+    return false;
+  }
 
   endCall(baselineCall, stackSpace);
 
   popValueStackBy(numArgs);
 
   pushReturnedIfNonVoid(baselineCall, retType);
 
   return true;
 }
 
 #ifdef RABALDR_INT_DIV_I64_CALLOUT
-void BaseCompiler::emitDivOrModI64BuiltinCall(SymbolicAddress callee,
+bool BaseCompiler::emitDivOrModI64BuiltinCall(SymbolicAddress callee,
                                               ValType operandType) {
   MOZ_ASSERT(operandType == ValType::I64);
   MOZ_ASSERT(!deadCode_);
 
   sync();
 
   needI64(specific_.abiReturnRegI64);
 
@@ -8177,22 +8805,26 @@ void BaseCompiler::emitDivOrModI64Builti
     checkDivideSignedOverflowI64(rhs, srcDest, &done, ZeroOnOverflow(true));
   }
 
   masm.setupWasmABICall();
   masm.passABIArg(srcDest.high);
   masm.passABIArg(srcDest.low);
   masm.passABIArg(rhs.high);
   masm.passABIArg(rhs.low);
-  masm.callWithABI(bytecodeOffset(), callee);
+  CodeOffset raOffset = masm.callWithABI(bytecodeOffset(), callee);
+  if (!createStackMap("emitDivOrModI64Bui[..]", raOffset)) {
+    return false;
+  }
 
   masm.bind(&done);
 
   freeI64(rhs);
   pushI64(srcDest);
+  return true;
 }
 #endif  // RABALDR_INT_DIV_I64_CALLOUT
 
 #ifdef RABALDR_I64_TO_FLOAT_CALLOUT
 bool BaseCompiler::emitConvertInt64ToFloatingCallout(SymbolicAddress callee,
                                                      ValType operandType,
                                                      ValType resultType) {
   sync();
@@ -8203,19 +8835,22 @@ bool BaseCompiler::emitConvertInt64ToFlo
 
   masm.setupWasmABICall();
 #ifdef JS_PUNBOX64
   MOZ_CRASH("BaseCompiler platform hook: emitConvertInt64ToFloatingCallout");
 #else
   masm.passABIArg(input.high);
   masm.passABIArg(input.low);
 #endif
-  masm.callWithABI(
+  CodeOffset raOffset = masm.callWithABI(
       bytecodeOffset(), callee,
       resultType == ValType::F32 ? MoveOp::FLOAT32 : MoveOp::DOUBLE);
+  if (!createStackMap("emitConvertInt64To[..]", raOffset)) {
+    return false;
+  }
 
   freeI64(input);
 
   if (resultType == ValType::F32) {
     pushF32(captureReturnedF32(call));
   } else {
     pushF64(captureReturnedF64(call));
   }
@@ -8245,17 +8880,20 @@ bool BaseCompiler::emitConvertFloatingTo
   pushF64(otherReg);
 
   sync();
 
   FunctionCall call(0);
 
   masm.setupWasmABICall();
   masm.passABIArg(doubleInput, MoveOp::DOUBLE);
-  masm.callWithABI(bytecodeOffset(), callee);
+  CodeOffset raOffset = masm.callWithABI(bytecodeOffset(), callee);
+  if (!createStackMap("emitConvertFloatin[..]", raOffset)) {
+    return false;
+  }
 
   freeF64(doubleInput);
 
   RegI64 rv = captureReturnedI64();
 
   RegF64 inputVal = popF64();
 
   TruncFlags flags = 0;
@@ -8551,17 +9189,20 @@ bool BaseCompiler::emitSetGlobal() {
       RegPtr valueAddr(PreBarrierReg);
       needRef(valueAddr);
       {
         ScratchI32 tmp(*this);
         masm.computeEffectiveAddress(addressOfGlobalVar(global, tmp),
                                      valueAddr);
       }
       RegPtr rv = popRef();
-      emitBarrieredStore(Nothing(), valueAddr, rv);  // Consumes valueAddr
+      // This consumes valueAddr.
+      if (!emitBarrieredStore(Nothing(), valueAddr, rv)) {
+        return false;
+      }
       freeRef(rv);
       break;
     }
     case ValType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     default:
       MOZ_CRASH("Global variable type");
       break;
@@ -9056,17 +9697,17 @@ void BaseCompiler::emitCompareRef(Assemb
   pop2xRef(&rs1, &rs2);
   RegI32 rd = needI32();
   masm.cmpPtrSet(compareOp, rs1, rs2, rd);
   freeRef(rs1);
   freeRef(rs2);
   pushI32(rd);
 }
 
-void BaseCompiler::emitInstanceCall(uint32_t lineOrBytecode,
+bool BaseCompiler::emitInstanceCall(uint32_t lineOrBytecode,
                                     const MIRTypeVector& sig, ExprType retType,
                                     SymbolicAddress builtin) {
   MOZ_ASSERT(sig[0] == MIRType::Pointer);
 
   sync();
 
   uint32_t numArgs = sig.length() - 1 /* instance */;
   size_t stackSpace = stackConsumed(numArgs);
@@ -9089,67 +9730,69 @@ void BaseCompiler::emitInstanceCall(uint
       case MIRType::Pointer:
         t = ValType::AnyRef;
         break;
       default:
         MOZ_CRASH("Unexpected type");
     }
     passArg(t, peek(numArgs - i), &baselineCall);
   }
-  builtinInstanceMethodCall(builtin, instanceArg, baselineCall);
+  CodeOffset raOffset =
+      builtinInstanceMethodCall(builtin, instanceArg, baselineCall);
+  if (!createStackMap("emitInstanceCall", raOffset)) {
+    return false;
+  }
+
   endCall(baselineCall, stackSpace);
 
   popValueStackBy(numArgs);
 
   // Note, many clients of emitInstanceCall currently assume that pushing the
   // result here does not destroy ReturnReg.
   //
   // Furthermore, clients assume that even if retType == ExprType::Void, the
   // callee may have returned a status result and left it in ReturnReg for us
   // to find, and that that register will not be destroyed here (or above).
   // In this case the callee will have a C++ declaration stating that there is
   // a return value.  Examples include memory and table operations that are
   // implemented as callouts.
 
   pushReturnedIfNonVoid(baselineCall, retType);
+  return true;
 }
 
 bool BaseCompiler::emitGrowMemory() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   Nothing arg;
   if (!iter_.readGrowMemory(&arg)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // infallible
-  emitInstanceCall(lineOrBytecode, SigPI_, ExprType::I32,
-                   SymbolicAddress::GrowMemory);
-  return true;
+  return emitInstanceCall(lineOrBytecode, SigPI_, ExprType::I32,
+                          SymbolicAddress::GrowMemory);
 }
 
 bool BaseCompiler::emitCurrentMemory() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   if (!iter_.readCurrentMemory()) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // infallible
-  emitInstanceCall(lineOrBytecode, SigP_, ExprType::I32,
-                   SymbolicAddress::CurrentMemory);
-  return true;
+  return emitInstanceCall(lineOrBytecode, SigP_, ExprType::I32,
+                          SymbolicAddress::CurrentMemory);
 }
 
 bool BaseCompiler::emitRefNull() {
   if (!iter_.readRefNull()) {
     return false;
   }
 
   if (deadCode_) {
@@ -9453,22 +10096,26 @@ bool BaseCompiler::emitWait(ValType type
 
   if (deadCode_) {
     return true;
   }
 
   // Returns -1 on trap, otherwise nonnegative result.
   switch (type.code()) {
     case ValType::I32:
-      emitInstanceCall(lineOrBytecode, SigPIIL_, ExprType::I32,
-                       SymbolicAddress::WaitI32);
+      if (!emitInstanceCall(lineOrBytecode, SigPIIL_, ExprType::I32,
+                            SymbolicAddress::WaitI32)) {
+        return false;
+      }
       break;
     case ValType::I64:
-      emitInstanceCall(lineOrBytecode, SigPILL_, ExprType::I32,
-                       SymbolicAddress::WaitI64);
+      if (!emitInstanceCall(lineOrBytecode, SigPILL_, ExprType::I32,
+                            SymbolicAddress::WaitI64)) {
+        return false;
+      }
       break;
     default:
       MOZ_CRASH();
   }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
@@ -9486,18 +10133,20 @@ bool BaseCompiler::emitWake() {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
   // Returns -1 on trap, otherwise nonnegative result.
-  emitInstanceCall(lineOrBytecode, SigPII_, ExprType::I32,
-                   SymbolicAddress::Wake);
+  if (!emitInstanceCall(lineOrBytecode, SigPII_, ExprType::I32,
+                        SymbolicAddress::Wake)) {
+    return false;
+  }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
   return true;
 }
@@ -9517,23 +10166,27 @@ bool BaseCompiler::emitMemOrTableCopy(bo
   if (deadCode_) {
     return true;
   }
 
   // Returns -1 on trap, otherwise 0.
   if (isMem) {
     MOZ_ASSERT(srcMemOrTableIndex == 0);
     MOZ_ASSERT(dstMemOrTableIndex == 0);
-    emitInstanceCall(lineOrBytecode, SigPIII_, ExprType::Void,
-                     SymbolicAddress::MemCopy);
+    if (!emitInstanceCall(lineOrBytecode, SigPIII_, ExprType::Void,
+                          SymbolicAddress::MemCopy)) {
+      return false;
+    }
   } else {
     pushI32(dstMemOrTableIndex);
     pushI32(srcMemOrTableIndex);
-    emitInstanceCall(lineOrBytecode, SigPIIIII_, ExprType::Void,
-                     SymbolicAddress::TableCopy);
+    if (!emitInstanceCall(lineOrBytecode, SigPIIIII_, ExprType::Void,
+                          SymbolicAddress::TableCopy)) {
+      return false;
+    }
   }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
   return true;
@@ -9552,17 +10205,19 @@ bool BaseCompiler::emitMemOrTableDrop(bo
   }
 
   // Despite the cast to int32_t, the callee regards the value as unsigned.
   //
   // Returns -1 on trap, otherwise 0.
   pushI32(int32_t(segIndex));
   SymbolicAddress callee =
       isMem ? SymbolicAddress::MemDrop : SymbolicAddress::TableDrop;
-  emitInstanceCall(lineOrBytecode, SigPI_, ExprType::Void, callee);
+  if (!emitInstanceCall(lineOrBytecode, SigPI_, ExprType::Void, callee)) {
+    return false;
+  }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
   return true;
 }
@@ -9575,18 +10230,20 @@ bool BaseCompiler::emitMemFill() {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
   // Returns -1 on trap, otherwise 0.
-  emitInstanceCall(lineOrBytecode, SigPIII_, ExprType::Void,
-                   SymbolicAddress::MemFill);
+  if (!emitInstanceCall(lineOrBytecode, SigPIII_, ExprType::Void,
+                        SymbolicAddress::MemFill)) {
+    return false;
+  }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
   return true;
 }
@@ -9604,22 +10261,26 @@ bool BaseCompiler::emitMemOrTableInit(bo
 
   if (deadCode_) {
     return true;
   }
 
   // Returns -1 on trap, otherwise 0.
   pushI32(int32_t(segIndex));
   if (isMem) {
-    emitInstanceCall(lineOrBytecode, SigPIIII_, ExprType::Void,
-                     SymbolicAddress::MemInit);
+    if (!emitInstanceCall(lineOrBytecode, SigPIIII_, ExprType::Void,
+                          SymbolicAddress::MemInit)) {
+      return false;
+    }
   } else {
     pushI32(dstTableIndex);
-    emitInstanceCall(lineOrBytecode, SigPIIIII_, ExprType::Void,
-                     SymbolicAddress::TableInit);
+    if (!emitInstanceCall(lineOrBytecode, SigPIIIII_, ExprType::Void,
+                          SymbolicAddress::TableInit)) {
+      return false;
+    }
   }
 
   Label ok;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
   return true;
@@ -9637,18 +10298,20 @@ bool BaseCompiler::emitTableGet() {
   if (deadCode_) {
     return true;
   }
   // get(index:u32, table:u32) -> anyref
   //
   // Returns (void*)-1 on error; this value cannot be confused with a real
   // ref value.
   pushI32(tableIndex);
-  emitInstanceCall(lineOrBytecode, SigPII_, ExprType::AnyRef,
-                   SymbolicAddress::TableGet);
+  if (!emitInstanceCall(lineOrBytecode, SigPII_, ExprType::AnyRef,
+                        SymbolicAddress::TableGet)) {
+    return false;
+  }
   Label noTrap;
   masm.branchPtr(Assembler::NotEqual, ReturnReg, Imm32(-1), &noTrap);
   trap(Trap::ThrowReported);
   masm.bind(&noTrap);
 
   return true;
 }
 
@@ -9663,20 +10326,18 @@ bool BaseCompiler::emitTableGrow() {
   }
   if (deadCode_) {
     return true;
   }
   // grow(delta:u32, initValue:anyref, table:u32) -> u32
   //
   // Infallible.
   pushI32(tableIndex);
-  emitInstanceCall(lineOrBytecode, SigPIPI_, ExprType::I32,
-                   SymbolicAddress::TableGrow);
-
-  return true;
+  return emitInstanceCall(lineOrBytecode, SigPIPI_, ExprType::I32,
+                          SymbolicAddress::TableGrow);
 }
 
 MOZ_MUST_USE
 bool BaseCompiler::emitTableSet() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
   Nothing index, value;
   uint32_t tableIndex;
   if (!iter_.readTableSet(&tableIndex, &index, &value)) {
@@ -9684,18 +10345,20 @@ bool BaseCompiler::emitTableSet() {
   }
   if (deadCode_) {
     return true;
   }
   // set(index:u32, value:ref, table:u32) -> i32
   //
   // Returns -1 on range error, otherwise 0 (which is then ignored).
   pushI32(tableIndex);
-  emitInstanceCall(lineOrBytecode, SigPIPI_, ExprType::Void,
-                   SymbolicAddress::TableSet);
+  if (!emitInstanceCall(lineOrBytecode, SigPIPI_, ExprType::Void,
+                        SymbolicAddress::TableSet)) {
+    return false;
+  }
   Label noTrap;
   masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
   trap(Trap::ThrowReported);
   masm.bind(&noTrap);
   return true;
 }
 
 MOZ_MUST_USE
@@ -9707,19 +10370,18 @@ bool BaseCompiler::emitTableSize() {
   }
   if (deadCode_) {
     return true;
   }
   // size(table:u32) -> u32
   //
   // Infallible.
   pushI32(tableIndex);
-  emitInstanceCall(lineOrBytecode, SigPI_, ExprType::I32,
-                   SymbolicAddress::TableSize);
-  return true;
+  return emitInstanceCall(lineOrBytecode, SigPI_, ExprType::I32,
+                          SymbolicAddress::TableSize);
 }
 
 bool BaseCompiler::emitStructNew() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   uint32_t typeIndex;
   BaseOpIter::ValueVector args;
   if (!iter_.readStructNew(&typeIndex, &args)) {
@@ -9733,18 +10395,20 @@ bool BaseCompiler::emitStructNew() {
   // Allocate zeroed storage.  The parameter to StructNew is an index into a
   // descriptor table that the instance has.
   //
   // Returns null on OOM.
 
   const StructType& structType = env_.types[typeIndex].structType();
 
   pushI32(structType.moduleIndex_);
-  emitInstanceCall(lineOrBytecode, SigPI_, ExprType::AnyRef,
-                   SymbolicAddress::StructNew);
+  if (!emitInstanceCall(lineOrBytecode, SigPI_, ExprType::AnyRef,
+                        SymbolicAddress::StructNew)) {
+    return false;
+  }
 
   // Null pointer check.
 
   Label ok;
   masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &ok);
   trap(Trap::ThrowReported);
   masm.bind(&ok);
 
@@ -9826,18 +10490,20 @@ bool BaseCompiler::emitStructNew() {
           freeRef(rowner);
         }
 
         freeRef(value);
 
         pushRef(rp);  // Save rp across the call
         RegPtr valueAddr = needRef();
         masm.computeEffectiveAddress(Address(rdata, offs), valueAddr);
-        emitPostBarrier(valueAddr);  // Consumes valueAddr
-        popRef(rp);                  // Restore rp
+        if (!emitPostBarrier(valueAddr)) {  // Consumes valueAddr
+          return false;
+        }
+        popRef(rp);  // Restore rp
         if (!structType.isInline_) {
           masm.loadPtr(Address(rp, OutlineTypedObject::offsetOfData()), rdata);
         }
 
         masm.bind(&skipBarrier);
         break;
       }
       case ValType::NullRef:
@@ -10006,17 +10672,19 @@ bool BaseCompiler::emitStructSet() {
     case ValType::F64: {
       masm.storeDouble(rd, Address(rp, offs));
       freeF64(rd);
       break;
     }
     case ValType::Ref:
     case ValType::AnyRef: {
       masm.computeEffectiveAddress(Address(rp, offs), valueAddr);
-      emitBarrieredStore(Some(rp), valueAddr, rr);  // Consumes valueAddr
+      if (!emitBarrieredStore(Some(rp), valueAddr, rr)) {  // Consumes valueAddr
+        return false;
+      }
       freeRef(rr);
       break;
     }
     case ValType::NullRef: {
       MOZ_CRASH("NullRef not expressible");
     }
     default: { MOZ_CRASH("Unexpected field type"); }
   }
@@ -10055,23 +10723,23 @@ bool BaseCompiler::emitStructNarrow() {
   //
   // Infallible.
   const StructType& outputStruct =
       env_.types[outputType.refTypeIndex()].structType();
 
   pushI32(mustUnboxAnyref);
   pushI32(outputStruct.moduleIndex_);
   pushRef(rp);
-  emitInstanceCall(lineOrBytecode, SigPIIP_, ExprType::AnyRef,
-                   SymbolicAddress::StructNarrow);
-
-  return true;
+  return emitInstanceCall(lineOrBytecode, SigPIIP_, ExprType::AnyRef,
+                          SymbolicAddress::StructNarrow);
 }
 
 bool BaseCompiler::emitBody() {
+  MOZ_ASSERT(smgen_.framePushedAtEntryToBody_.isSome());
+
   if (!iter_.readFunctionStart(funcType().ret())) {
     return false;
   }
 
   initControl(controlItem());
 
   uint32_t overhead = 0;
 
@@ -10102,24 +10770,44 @@ bool BaseCompiler::emitBody() {
   iter_.readConversion(inType, outType, &unused_a) && (deadCode_ || doEmit())
 
 #define emitCalloutConversionOOM(doEmit, symbol, inType, outType) \
   iter_.readConversion(inType, outType, &unused_a) &&             \
       (deadCode_ || doEmit(symbol, inType, outType))
 
 #define emitIntDivCallout(doEmit, symbol, type)   \
   iter_.readBinary(type, &unused_a, &unused_b) && \
-      (deadCode_ || (doEmit(symbol, type), true))
+      (deadCode_ || doEmit(symbol, type))
+
+#ifdef DEBUG
+// Check that the number of ref-typed entries on the operand stack matches
+// the count maintained by the stackmap generator.
+#define CHECK_POINTER_COUNT                                  \
+  do {                                                       \
+    MOZ_ASSERT(countMemRefsOnStk() == smgen_.memRefsOnStk_); \
+  } while (0)
+#else
+#define CHECK_POINTER_COUNT \
+  do {                      \
+  } while (0)
+#endif
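+
+// Illustrative only (not part of this patch): conceptually, countMemRefsOnStk()
+// re-derives the count by scanning the operand stack, along the lines of
+//
+//   size_t refs = 0;
+//   for (const Stk& v : stk_) {
+//     if (v.isMemRef()) {  // hypothetical predicate
+//       refs++;
+//     }
+//   }
+//   return refs;
+//
+// which CHECK_POINTER_COUNT then compares against smgen_.memRefsOnStk_.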
 
 #define CHECK(E) \
   if (!(E)) return false
-#define NEXT() continue
+#define NEXT()           \
+  {                      \
+    CHECK_POINTER_COUNT; \
+    continue;            \
+  }
 #define CHECK_NEXT(E)     \
   if (!(E)) return false; \
-  continue
+  {                       \
+    CHECK_POINTER_COUNT;  \
+    continue;             \
+  }
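+
+// Typical use in the dispatch switch below, schematically:
+//
+//   case uint16_t(Op::I32Add):
+//     CHECK_NEXT(emitBinary(emitAddI32, ValType::I32));
+//
+// i.e. bail out on failure, and re-verify the ref count on the operand
+// stack before decoding the next opcode.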
 
     // TODO / EVALUATE (bug 1316845): Not obvious that this attempt at
     // reducing overhead is really paying off relative to making the check
     // every iteration.
 
     if (overhead == 0) {
       // Check every 50 expressions -- a happy medium between
       // memory usage and checking overhead.
@@ -10142,17 +10830,28 @@ bool BaseCompiler::emitBody() {
     // When env_.debugEnabled(), every operator has a breakpoint site except
     // Op::End.
     if (env_.debugEnabled() && op.b0 != (uint16_t)Op::End) {
       // TODO: sync only the registers that can be clobbered by the exit
       // prologue/epilogue, or disable these registers for use in the
       // baseline compiler when env_.debugEnabled() is set.
       sync();
 
       insertBreakablePoint(CallSiteDesc::Breakpoint);
-    }
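+      // The breakable point can call out to the debugger, so it needs its
+      // own stackmap in case a GC happens while execution is paused there.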
+      if (!createStackMap("debug: per insn")) {
+        return false;
+      }
+    }
+
+    // Going below framePushedAtEntryToBody_ would imply that we've popped
+    // part of the frame created by beginFunction() off the machine stack.
+    MOZ_ASSERT(masm.framePushed() >= smgen_.framePushedAtEntryToBody_.value());
+
+    // At this point we're definitely not generating code for a function call.
+    MOZ_ASSERT(smgen_.framePushedBeforePushingCallArgs_.isNothing());
 
     switch (op.b0) {
       case uint16_t(Op::End):
         if (!emitEnd()) {
           return false;
         }
 
         if (iter_.controlStackEmpty()) {
@@ -11034,63 +11733,70 @@ bool BaseCompiler::emitBody() {
 
       default:
         return iter_.unrecognizedOpcode(&op);
     }
 
 #undef CHECK
 #undef NEXT
 #undef CHECK_NEXT
+#undef CHECK_POINTER_COUNT
 #undef emitBinary
 #undef emitUnary
 #undef emitComparison
 #undef emitConversion
 #undef emitConversionOOM
 #undef emitCalloutConversionOOM
 
     MOZ_CRASH("unreachable");
   }
 
   MOZ_CRASH("unreachable");
 }
 
 bool BaseCompiler::emitFunction() {
-  beginFunction();
+  if (!beginFunction()) {
+    return false;
+  }
 
   if (!emitBody()) {
     return false;
   }
 
   if (!endFunction()) {
     return false;
   }
 
   return true;
 }
 
 BaseCompiler::BaseCompiler(const ModuleEnvironment& env,
                            const FuncCompileInput& func,
-                           const ValTypeVector& locals, Decoder& decoder,
+                           const ValTypeVector& locals,
+                           const MachineState& trapExitLayout,
+                           size_t trapExitLayoutNumWords, Decoder& decoder,
                            ExclusiveDeferredValidationState& dvs,
-                           TempAllocator* alloc, MacroAssembler* masm)
+                           TempAllocator* alloc, MacroAssembler* masm,
+                           StackMaps* stackMaps)
     : env_(env),
       iter_(env, decoder, dvs),
       func_(func),
       lastReadCallSite_(0),
       alloc_(*alloc),
       locals_(locals),
       deadCode_(false),
       bceSafe_(0),
       latentOp_(LatentOp::None),
       latentType_(ValType::I32),
       latentIntCmp_(Assembler::Equal),
       latentDoubleCmp_(Assembler::DoubleEqual),
       masm(*masm),
       ra(*this),
       fr(*masm),
+      smgen_(stackMaps, trapExitLayout, trapExitLayoutNumWords, *masm),
       joinRegI32_(RegI32(ReturnReg)),
       joinRegI64_(RegI64(ReturnReg64)),
       joinRegPtr_(RegPtr(ReturnReg)),
       joinRegF32_(RegF32(ReturnFloat32Reg)),
       joinRegF64_(RegF64(ReturnDoubleReg)) {}
 
 bool BaseCompiler::init() {
   if (!SigD_.append(ValType::F64)) {
@@ -11153,16 +11859,19 @@ bool BaseCompiler::init() {
 
   return true;
 }
 
 FuncOffsets BaseCompiler::finish() {
   MOZ_ASSERT(done(), "all bytes must be consumed");
   MOZ_ASSERT(func_.callSiteLineNums.length() == lastReadCallSite_);
 
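+  // The operand stack must be empty at this point, so the stackmap
+  // generator's DEBUG-only count of stack-resident refs must be back to zero.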
+  MOZ_ASSERT(stk_.empty());
+  MOZ_ASSERT(smgen_.memRefsOnStk_ == 0);
+
   masm.flushBuffer();
 
   return offsets_;
 }
 
 }  // namespace wasm
 }  // namespace js
 
@@ -11205,33 +11914,39 @@ bool js::wasm::BaselineCompileFunctions(
   WasmMacroAssembler masm(alloc);
 
   // Swap in already-allocated empty vectors to avoid malloc/free.
   MOZ_ASSERT(code->empty());
   if (!code->swap(masm)) {
     return false;
   }
 
+  // Create a description of the stack layout created by GenerateTrapExit().
+  MachineState trapExitLayout;
+  size_t trapExitLayoutNumWords;
+  GenerateTrapExitMachineState(&trapExitLayout, &trapExitLayoutNumWords);
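+  // The resulting MachineState tells the stackmap generator where the trap
+  // exit stub spills each register, so that stackmaps for trap sites can
+  // describe refs held in those spill slots.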
+
   for (const FuncCompileInput& func : inputs) {
     Decoder d(func.begin, func.end, func.lineOrBytecode, error);
 
     // Build the local types vector.
 
     ValTypeVector locals;
     if (!locals.appendAll(env.funcTypes[func.index]->args())) {
       return false;
     }
     if (!DecodeLocalEntries(d, env.kind, env.types, env.gcTypesEnabled(),
                             &locals)) {
       return false;
     }
 
     // One-pass baseline compilation.
 
-    BaseCompiler f(env, func, locals, d, dvs, &alloc, &masm);
+    BaseCompiler f(env, func, locals, trapExitLayout, trapExitLayoutNumWords, d,
+                   dvs, &alloc, &masm, &code->stackMaps);
     if (!f.init()) {
       return false;
     }
     if (!f.emitFunction()) {
       return false;
     }
     if (!code->codeRanges.emplaceBack(func.index, func.lineOrBytecode,
                                       f.finish())) {
@@ -11242,11 +11957,49 @@ bool js::wasm::BaselineCompileFunctions(
   masm.finish();
   if (masm.oom()) {
     return false;
   }
 
   return code->swap(masm);
 }
 
+#ifdef DEBUG
+bool js::wasm::IsValidStackMapKey(bool debugEnabled, const uint8_t* nextPC) {
+#if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_X86)
+  const uint8_t* insn = nextPC;
+  return (insn[-2] == 0x0F && insn[-1] == 0x0B) ||  // ud2
+         (insn[-2] == 0xFF && insn[-1] == 0xD0) ||  // call *%{rax,eax}
+         insn[-5] == 0xE8 ||                        // call simm32
+         (debugEnabled && insn[-5] == 0x0F && insn[-4] == 0x1F &&
+          insn[-3] == 0x44 && insn[-2] == 0x00 &&
+          insn[-1] == 0x00);  // nop_five
+
+#elif defined(JS_CODEGEN_ARM)
+  const uint32_t* insn = (const uint32_t*)nextPC;
+  return ((uintptr_t(insn) & 3) == 0) &&              // must be ARM, not Thumb
+         (insn[-1] == 0xe7f000f0 ||                   // udf
+          (insn[-1] & 0xfffffff0) == 0xe12fff30 ||    // blx reg (ARM, enc A1)
+          (insn[-1] & 0xff000000) == 0xeb000000 ||    // bl simm24 (ARM, enc A1)
+          (debugEnabled && insn[-1] == 0xe320f000));  // "as_nop"
+
+#elif defined(JS_CODEGEN_ARM64)
+#ifdef JS_SIMULATOR_ARM64
+  const uint32_t hltInsn = 0xd45bd600;
+#else
+  const uint32_t hltInsn = 0xd4a00000;
+#endif
+  const uint32_t* insn = (const uint32_t*)nextPC;
+  return ((uintptr_t(insn) & 3) == 0) &&
+         (insn[-1] == hltInsn ||                      // hlt
+          (insn[-1] & 0xfffffc1f) == 0xd63f0000 ||    // blr reg
+          (insn[-1] & 0xfc000000) == 0x94000000 ||    // bl simm26
+          (debugEnabled && insn[-1] == 0xd503201f));  // nop
+
+#else
+  MOZ_CRASH("IsValidStackMapKey: requires implementation on this platform");
+#endif
+}
+#endif
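+
+// A plausible DEBUG-only call site (hypothetical, not shown in this patch):
+// assert the key's validity whenever a stackmap is created or looked up,
+//
+//   MOZ_ASSERT(IsValidStackMapKey(env_.debugEnabled(), nextPC));
+//
+// so that every map is anchored at the byte just past a call, trap, or
+// breakable-point instruction.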
+
 #undef RABALDR_INT_DIV_I64_CALLOUT
 #undef RABALDR_I64_TO_FLOAT_CALLOUT
 #undef RABALDR_FLOAT_TO_I64_CALLOUT
--- a/js/src/wasm/WasmBaselineCompile.h
+++ b/js/src/wasm/WasmBaselineCompile.h
@@ -76,12 +76,18 @@ class BaseLocalIter {
 #ifdef DEBUG
   bool isArg() const {
     MOZ_ASSERT(!done_);
     return !argsIter_.done();
   }
 #endif
 };
 
+#ifdef DEBUG
+// Check whether |nextPC| is a valid code address for a stackmap created by
+// this compiler.
+bool IsValidStackMapKey(bool debugEnabled, const uint8_t* nextPC);
+#endif
+
 }  // namespace wasm
 }  // namespace js
 
 #endif  // asmjs_wasm_baseline_compile_h
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -1256,16 +1256,26 @@ const CodeRange* Code::lookupFuncRange(v
     const CodeRange* result = codeTier(t).lookupRange(pc);
     if (result && result->isFunction()) {
       return result;
     }
   }
   return nullptr;
 }
 
+const StackMap* Code::lookupStackMap(uint8_t* nextPC) const {
+  for (Tier t : tiers()) {
+    const StackMap* result = metadata(t).stackMaps.findMap(nextPC);
+    if (result) {
+      return result;
+    }
+  }
+  return nullptr;
+}
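+
+// Sketch of the intended consumer (hypothetical here): a GC tracer walking a
+// suspended wasm activation can key each frame's map off its return address:
+//
+//   if (const StackMap* map = code->lookupStackMap(iter.returnAddressToFp())) {
+//     // scan the frame words that the map flags as refs
+//   }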
+
 struct TrapSitePCOffset {
   const TrapSiteVector& trapSites;
   explicit TrapSitePCOffset(const TrapSiteVector& trapSites)
       : trapSites(trapSites) {}
   uint32_t operator[](size_t index) const { return trapSites[index].pcOffset; }
 };
 
 bool Code::lookupTrap(void* pc, Trap* trapOut, BytecodeOffset* bytecode) const {
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -406,16 +406,17 @@ struct MetadataTier {
   const Tier tier;
 
   Uint32Vector funcToCodeRange;
   CodeRangeVector codeRanges;
   CallSiteVector callSites;
   TrapSiteVectorArray trapSites;
   FuncImportVector funcImports;
   FuncExportVector funcExports;
+  StackMaps stackMaps;
 
   // Debug information, not serialized.
   Uint32Vector debugTrapFarJumpOffsets;
 
   FuncExport& lookupFuncExport(uint32_t funcIndex,
                                size_t* funcExportIndex = nullptr);
   const FuncExport& lookupFuncExport(uint32_t funcIndex,
                                      size_t* funcExportIndex = nullptr) const;
@@ -694,16 +695,17 @@ class Code : public ShareableBase<Code> 
   const MetadataTier& metadata(Tier iter) const {
     return codeTier(iter).metadata();
   }
 
   // Metadata lookup functions:
 
   const CallSite* lookupCallSite(void* returnAddress) const;
   const CodeRange* lookupFuncRange(void* pc) const;
+  const StackMap* lookupStackMap(uint8_t* nextPC) const;
   bool containsCodePC(const void* pc) const;
   bool lookupTrap(void* pc, Trap* trap, BytecodeOffset* bytecode) const;
 
   // To save memory, profilingLabels_ are generated lazily when profiling mode
   // is enabled.
 
   void ensureProfilingLabels(bool profilingEnabled) const;
   const char* profilingLabel(uint32_t funcIndex) const;
--- a/js/src/wasm/WasmFrameIter.cpp
+++ b/js/src/wasm/WasmFrameIter.cpp
@@ -37,17 +37,18 @@ WasmFrameIter::WasmFrameIter(JitActivati
     : activation_(activation),
       code_(nullptr),
       codeRange_(nullptr),
       lineOrBytecode_(0),
       fp_(fp ? fp : activation->wasmExitFP()),
       unwoundIonCallerFP_(nullptr),
       unwoundIonFrameType_(jit::FrameType(-1)),
       unwind_(Unwind::False),
-      unwoundAddressOfReturnAddress_(nullptr) {
+      unwoundAddressOfReturnAddress_(nullptr),
+      returnAddressToFp_(nullptr) {
   MOZ_ASSERT(fp_);
 
   // When the stack is captured during a trap (viz., to create the .stack
   // for an Error object), use the pc/bytecode information captured by the
   // signal handler in the runtime.
 
   if (activation->isWasmTrapping()) {
     const TrapData& trapData = activation->wasmTrapData();
@@ -102,16 +103,17 @@ void WasmFrameIter::operator++() {
   }
 
   popFrame();
 }
 
 void WasmFrameIter::popFrame() {
   Frame* prevFP = fp_;
   fp_ = prevFP->callerFP;
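+  // Record the return address that points back into the frame fp_ now
+  // refers to; stackmap lookups for that frame are keyed on this address.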
+  returnAddressToFp_ = (uint8_t*)prevFP->returnAddress;
 
   if (uintptr_t(fp_) & ExitOrJitEntryFPTag) {
     // We just unwound a frame pointer which has the low bit set,
     // indicating this is a direct call from the jit into the wasm
     // function's body. The call stack resembles this at this point:
     //
     // |---------------------|
     // |      JIT FRAME      |
@@ -298,16 +300,25 @@ DebugFrame* WasmFrameIter::debugFrame() 
 }
 
 jit::FrameType WasmFrameIter::unwoundIonFrameType() const {
   MOZ_ASSERT(unwoundIonCallerFP_);
   MOZ_ASSERT(unwoundIonFrameType_ != jit::FrameType(-1));
   return unwoundIonFrameType_;
 }
 
+uint8_t* WasmFrameIter::returnAddressToFp() const {
+  if (returnAddressToFp_) {
+    return returnAddressToFp_;
+  }
+  MOZ_ASSERT(activation_->isWasmTrapping());
+  // We are trapping, so the resume PC serves as the return address: it
+  // points at the instruction following the trap instruction.
+  return (uint8_t*)activation_->wasmTrapData().resumePC;
+}
+
 /*****************************************************************************/
 // Prologue/epilogue code generation
 
 // These constants reflect statically-determined offsets in the
 // prologue/epilogue. The offsets are dynamically asserted during code
 // generation.
 #if defined(JS_CODEGEN_X64)
 static const unsigned PushedRetAddr = 0;
--- a/js/src/wasm/WasmFrameIter.h
+++ b/js/src/wasm/W