Bug 1324002 - Mark atoms separately in each zone, r=jonco,mccr8,peterv.
authorBrian Hackett <bhackett1024@gmail.com>
Mon, 30 Jan 2017 06:31:47 -0700
changeset 331701 7311c06a7271a8f33f4f73aa0c88e8d4806e488b
parent 331700 c1cc45eb50c4e001b62db5f1fcd5459e3f9dd72f
child 331702 e717a96e766da2cf93c6d0457cac19c99b38e4b7
push id31281
push userkwierso@gmail.com
push dateMon, 30 Jan 2017 23:45:09 +0000
treeherdermozilla-central@1fe66bd0efba [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersjonco, mccr8, peterv
bugs1324002
milestone54.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1324002 - Mark atoms separately in each zone, r=jonco,mccr8,peterv.
dom/bindings/BindingUtils.h
dom/bindings/Codegen.py
dom/plugins/base/nsJSNPRuntime.cpp
js/public/MemoryMetrics.h
js/src/builtin/Intl.cpp
js/src/builtin/RegExp.cpp
js/src/gc/AtomMarking.cpp
js/src/gc/AtomMarking.h
js/src/gc/GCRuntime.h
js/src/gc/Heap-inl.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/gc/StoreBuffer-inl.h
js/src/gc/StoreBuffer.cpp
js/src/gc/Zone.h
js/src/jit/CodeGenerator.cpp
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jscompartment.cpp
js/src/jscompartmentinlines.h
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsobj.cpp
js/src/jsscript.cpp
js/src/moz.build
js/src/proxy/CrossCompartmentWrapper.cpp
js/src/threading/Mutex.cpp
js/src/threading/Mutex.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/MemoryMetrics.cpp
js/src/vm/NativeObject-inl.h
js/src/vm/NativeObject.h
js/src/vm/RegExpObject.cpp
js/src/vm/Runtime.h
js/src/vm/SavedFrame.h
js/src/vm/SavedStacks.cpp
js/src/vm/Scope.cpp
js/src/vm/Scope.h
js/src/vm/Shape.h
js/src/vm/StructuredClone.cpp
js/src/vm/Symbol.cpp
js/src/vm/TypeInference.cpp
js/xpconnect/loader/mozJSComponentLoader.cpp
js/xpconnect/src/ExportHelpers.cpp
js/xpconnect/src/Sandbox.cpp
js/xpconnect/src/XPCJSContext.cpp
js/xpconnect/wrappers/XrayWrapper.cpp
--- a/dom/bindings/BindingUtils.h
+++ b/dom/bindings/BindingUtils.h
@@ -807,25 +807,27 @@ MaybeWrapNonDOMObjectOrNullValue(JSConte
 }
 
 // If rval is a gcthing and is not in the compartment of cx, wrap rval
 // into the compartment of cx (typically by replacing it with an Xray or
 // cross-compartment wrapper around the original object).
 MOZ_ALWAYS_INLINE bool
 MaybeWrapValue(JSContext* cx, JS::MutableHandle<JS::Value> rval)
 {
-  if (rval.isString()) {
-    return MaybeWrapStringValue(cx, rval);
+  if (rval.isGCThing()) {
+    if (rval.isString()) {
+      return MaybeWrapStringValue(cx, rval);
+    }
+    if (rval.isObject()) {
+      return MaybeWrapObjectValue(cx, rval);
+    }
+    MOZ_ASSERT(rval.isSymbol());
+    JS_MarkCrossZoneId(cx, SYMBOL_TO_JSID(rval.toSymbol()));
   }
-
-  if (!rval.isObject()) {
-    return true;
-  }
-
-  return MaybeWrapObjectValue(cx, rval);
+  return true;
 }
 
 namespace binding_detail {
 enum GetOrCreateReflectorWrapBehavior {
   eWrapIntoContextCompartment,
   eDontWrapIntoContextCompartment
 };
 
--- a/dom/bindings/Codegen.py
+++ b/dom/bindings/Codegen.py
@@ -10950,16 +10950,17 @@ class CGResolveOwnPropertyViaResolve(CGA
             {
               // Since we're dealing with an Xray, do the resolve on the
               // underlying object first.  That gives it a chance to
               // define properties on the actual object as needed, and
               // then use the fact that it created the objects as a flag
               // to avoid re-resolving the properties if someone deletes
               // them.
               JSAutoCompartment ac(cx, obj);
+              JS_MarkCrossZoneId(cx, id);
               JS::Rooted<JS::PropertyDescriptor> objDesc(cx);
               if (!self->DoResolve(cx, obj, id, &objDesc)) {
                 return false;
               }
               // If desc.value() is undefined, then the DoResolve call
               // has already defined the property on the object.  Don't
               // try to also define it.
               if (objDesc.object() &&
--- a/dom/plugins/base/nsJSNPRuntime.cpp
+++ b/dom/plugins/base/nsJSNPRuntime.cpp
@@ -717,16 +717,22 @@ GetProperty(JSContext *cx, JSObject *obj
 {
   NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
                "id must be either string or int!\n");
   JS::Rooted<JSObject *> obj(cx, objArg);
   JS::Rooted<jsid> id(cx, NPIdentifierToJSId(npid));
   return ::JS_GetPropertyById(cx, obj, id, rval);
 }
 
+static void
+MarkCrossZoneNPIdentifier(JSContext* cx, NPIdentifier npid)
+{
+  JS_MarkCrossZoneId(cx, NPIdentifierToJSId(npid));
+}
+
 // static
 bool
 nsJSObjWrapper::NP_HasMethod(NPObject *npobj, NPIdentifier id)
 {
   NPP npp = NPPStack::Peek();
   nsIGlobalObject* globalObject = GetGlobalObject(npp);
   if (NS_WARN_IF(!globalObject)) {
     return false;
@@ -740,16 +746,17 @@ nsJSObjWrapper::NP_HasMethod(NPObject *n
                           "Null npobj in nsJSObjWrapper::NP_HasMethod!");
 
     return false;
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
 
   JSAutoCompartment ac(cx, npjsobj->mJSObj);
+  MarkCrossZoneNPIdentifier(cx, id);
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
 
   JS::Rooted<JS::Value> v(cx);
   bool ok = GetProperty(cx, npjsobj->mJSObj, id, &v);
 
   return ok && !v.isPrimitive() &&
     ::JS_ObjectIsFunction(cx, v.toObjectOrNull());
@@ -779,16 +786,17 @@ doInvoke(NPObject *npobj, NPIdentifier m
 
   // Initialize *result
   VOID_TO_NPVARIANT(*result);
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
 
   JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
   JSAutoCompartment ac(cx, jsobj);
+  MarkCrossZoneNPIdentifier(cx, method);
   JS::Rooted<JS::Value> fv(cx);
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
 
   if (method != NPIdentifier_VOID) {
     if (!GetProperty(cx, jsobj, method, &fv) ||
         ::JS_TypeOfValue(cx, fv) != JSTYPE_FUNCTION) {
       return false;
@@ -871,16 +879,17 @@ nsJSObjWrapper::NP_HasProperty(NPObject 
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
   bool found, ok = false;
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
   JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
   JSAutoCompartment ac(cx, jsobj);
+  MarkCrossZoneNPIdentifier(cx, npid);
 
   NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
                "id must be either string or int!\n");
   JS::Rooted<jsid> id(cx, NPIdentifierToJSId(npid));
   ok = ::JS_HasPropertyById(cx, jsobj, id, &found);
   return ok && found;
 }
 
@@ -907,16 +916,17 @@ nsJSObjWrapper::NP_GetProperty(NPObject 
 
     return false;
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
   JSAutoCompartment ac(cx, npjsobj->mJSObj);
+  MarkCrossZoneNPIdentifier(cx, id);
 
   JS::Rooted<JS::Value> v(cx);
   return (GetProperty(cx, npjsobj->mJSObj, id, &v) &&
           JSValToNPVariant(npp, cx, v, result));
 }
 
 // static
 bool
@@ -943,16 +953,17 @@ nsJSObjWrapper::NP_SetProperty(NPObject 
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
   bool ok = false;
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
   JS::Rooted<JSObject*> jsObj(cx, npjsobj->mJSObj);
   JSAutoCompartment ac(cx, jsObj);
+  MarkCrossZoneNPIdentifier(cx, npid);
 
   JS::Rooted<JS::Value> v(cx, NPVariantToJSVal(npp, cx, value));
 
   NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
                "id must be either string or int!\n");
   JS::Rooted<jsid> id(cx, NPIdentifierToJSId(npid));
   ok = ::JS_SetPropertyById(cx, jsObj, id, v);
 
@@ -980,16 +991,17 @@ nsJSObjWrapper::NP_RemoveProperty(NPObje
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
   JS::ObjectOpResult result;
   JS::Rooted<JSObject*> obj(cx, npjsobj->mJSObj);
   JSAutoCompartment ac(cx, obj);
+  MarkCrossZoneNPIdentifier(cx, npid);
 
   NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
                "id must be either string or int!\n");
   JS::Rooted<jsid> id(cx, NPIdentifierToJSId(npid));
   if (!::JS_DeletePropertyById(cx, obj, id, result))
     return false;
 
   if (result) {
@@ -2308,15 +2320,16 @@ nsJSObjWrapper::HasOwnProperty(NPObject 
   }
 
   nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
   bool found, ok = false;
 
   AutoJSExceptionSuppressor suppressor(aes, npjsobj);
   JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
   JSAutoCompartment ac(cx, jsobj);
+  MarkCrossZoneNPIdentifier(cx, npid);
 
   NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
                "id must be either string or int!\n");
   JS::Rooted<jsid> id(cx, NPIdentifierToJSId(npid));
   ok = ::JS_AlreadyHasOwnPropertyById(cx, jsobj, id, &found);
   return ok && found;
 }
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -502,16 +502,17 @@ struct NotableScriptSourceInfo : public 
  * These measurements relate directly to the JSRuntime, and not to zones and
  * compartments within it.
  */
 struct RuntimeSizes
 {
 #define FOR_EACH_SIZE(macro) \
     macro(_, MallocHeap, object) \
     macro(_, MallocHeap, atomsTable) \
+    macro(_, MallocHeap, atomsMarkBitmaps) \
     macro(_, MallocHeap, contexts) \
     macro(_, MallocHeap, temporary) \
     macro(_, MallocHeap, interpreterStack) \
     macro(_, MallocHeap, mathCache) \
     macro(_, MallocHeap, sharedImmutableStringsCache) \
     macro(_, MallocHeap, sharedIntlData) \
     macro(_, MallocHeap, uncompressedSourceCache) \
     macro(_, MallocHeap, scriptData)
--- a/js/src/builtin/Intl.cpp
+++ b/js/src/builtin/Intl.cpp
@@ -2845,20 +2845,22 @@ js::intl_IsValidTimeZoneName(JSContext* 
 
     SharedIntlData& sharedIntlData = cx->sharedIntlData;
 
     RootedString timeZone(cx, args[0].toString());
     RootedString validatedTimeZone(cx);
     if (!sharedIntlData.validateTimeZoneName(cx, timeZone, &validatedTimeZone))
         return false;
 
-    if (validatedTimeZone)
+    if (validatedTimeZone) {
+        cx->markAtom(validatedTimeZone);
         args.rval().setString(validatedTimeZone);
-    else
+    } else {
         args.rval().setNull();
+    }
 
     return true;
 }
 
 bool
 js::intl_canonicalizeTimeZone(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
--- a/js/src/builtin/RegExp.cpp
+++ b/js/src/builtin/RegExp.cpp
@@ -340,16 +340,17 @@ regexp_compile_impl(JSContext* cx, const
             // Step 3b.
             RegExpGuard g(cx);
             if (!RegExpToShared(cx, patternObj, &g))
                 return false;
 
             sourceAtom = g->getSource();
             flags = g->getFlags();
         }
+        cx->markAtom(sourceAtom);
 
         // Step 5, minus lastIndex zeroing.
         regexp->initIgnoringLastIndex(sourceAtom, flags);
     } else {
         // Step 4.
         RootedValue P(cx, patternValue);
         RootedValue F(cx, args.get(1));
 
new file mode 100644
--- /dev/null
+++ b/js/src/gc/AtomMarking.cpp
@@ -0,0 +1,351 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "gc/AtomMarking.h"
+
+#include "jscompartment.h"
+
+#include "jsgcinlines.h"
+#include "gc/Heap-inl.h"
+
+namespace js {
+namespace gc {
+
+// Atom Marking Overview
+//
+// Things in the atoms zone (which includes atomized strings and other things,
+// all of which we will refer to as 'atoms' here) may be pointed to freely by
+// things in other zones. To avoid the need to perform garbage collections of
+// the entire runtime to collect atoms, we compute a separate atom mark bitmap
+// for each zone that is always an overapproximation of the atoms that zone is
+// using. When an atom is not in the mark bitmap for any zone, it can be
+// destroyed.
+//
+// To minimize interference with the rest of the GC, atom marking and sweeping
+// is done by manipulating the mark bitmaps in the chunks used for the atoms.
+// When the atoms zone is being collected, the mark bitmaps for the chunk(s)
+// used by the atoms are updated normally during marking. After marking
+// finishes, the chunk mark bitmaps are translated to a more efficient atom
+// mark bitmap (see below) that is stored on the zones which the GC collected
+// (computeBitmapFromChunkMarkBits). Before sweeping begins, the chunk mark
+// bitmaps are updated with any atoms that might be referenced by zones which
+// weren't collected (updateChunkMarkBits). The GC sweeping will then release
+// all atoms which are not marked by any zone.
+//
+// The representation of atom mark bitmaps is as follows:
+//
+// Each arena in the atoms zone has an atomBitmapStart() value indicating the
+// word index into the bitmap of the first thing in the arena. Each arena uses
+// ArenaBitmapWords of data to store its bitmap, which uses the same
+// representation as chunk mark bitmaps: one bit is allocated per Cell, with
+// bits for space between things being unused when things are larger than a
+// single Cell.
+
+static inline void
+SetBit(uintptr_t* bitmap, size_t bit)
+{
+    bitmap[bit / JS_BITS_PER_WORD] |= uintptr_t(1) << (bit % JS_BITS_PER_WORD);
+}
+
+static inline bool
+GetBit(uintptr_t* bitmap, size_t bit)
+{
+    return bitmap[bit / JS_BITS_PER_WORD] & (uintptr_t(1) << (bit % JS_BITS_PER_WORD));
+}
+
+static inline bool
+EnsureBitmapLength(AtomMarkingRuntime::Bitmap& bitmap, size_t nwords)
+{
+    if (nwords > bitmap.length()) {
+        size_t needed = nwords - bitmap.length();
+        if (needed)
+            return bitmap.appendN(0, needed);
+    }
+    return true;
+}
+
+void
+AtomMarkingRuntime::registerArena(Arena* arena)
+{
+    MOZ_ASSERT(arena->getThingSize() != 0);
+    MOZ_ASSERT(arena->getThingSize() % CellSize == 0);
+    MOZ_ASSERT(arena->zone->isAtomsZone());
+    MOZ_ASSERT(arena->zone->runtimeFromAnyThread()->currentThreadHasExclusiveAccess());
+
+    // We need to find a range of bits from the atoms bitmap for this arena.
+
+    // Look for a free range of bits compatible with this arena.
+    if (freeArenaIndexes.length()) {
+        arena->atomBitmapStart() = freeArenaIndexes.popCopy();
+        return;
+    }
+
+    // Allocate a range of bits from the end for this arena.
+    arena->atomBitmapStart() = allocatedWords;
+    allocatedWords += ArenaBitmapWords;
+}
+
+void
+AtomMarkingRuntime::unregisterArena(Arena* arena)
+{
+    MOZ_ASSERT(arena->zone->isAtomsZone());
+
+    // Leak these atom bits if we run out of memory.
+    mozilla::Unused << freeArenaIndexes.emplaceBack(arena->atomBitmapStart());
+}
+
+bool
+AtomMarkingRuntime::computeBitmapFromChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap)
+{
+    MOZ_ASSERT(runtime->currentThreadHasExclusiveAccess());
+
+    MOZ_ASSERT(bitmap.empty());
+    if (!EnsureBitmapLength(bitmap, allocatedWords))
+        return false;
+
+    Zone* atomsZone = runtime->unsafeAtomsCompartment()->zone();
+    for (auto thingKind : AllAllocKinds()) {
+        for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
+            Arena* arena = aiter.get();
+            uintptr_t* chunkWords = arena->chunk()->bitmap.arenaBits(arena);
+            uintptr_t* bitmapWords = &bitmap[arena->atomBitmapStart()];
+            mozilla::PodCopy(bitmapWords, chunkWords, ArenaBitmapWords);
+        }
+    }
+
+    return true;
+}
+
+void
+AtomMarkingRuntime::updateZoneBitmap(Zone* zone, const Bitmap& bitmap)
+{
+    if (zone->isAtomsZone())
+        return;
+
+    // |bitmap| was produced by computeBitmapFromChunkMarkBits, so it should
+    // have the maximum possible size.
+    MOZ_ASSERT(zone->markedAtoms.length() <= bitmap.length());
+
+    // Take the bitwise and between the two mark bitmaps to get the best new
+    // overapproximation we can. |bitmap| might include bits that are not in
+    // the zone's mark bitmap, if additional zones were collected by the GC.
+    for (size_t i = 0; i < zone->markedAtoms.length(); i++)
+        zone->markedAtoms[i] &= bitmap[i];
+}
+
+// Set any bits in the chunk mark bitmaps for atoms which are marked in bitmap.
+static void
+AddBitmapToChunkMarkBits(JSRuntime* runtime, AtomMarkingRuntime::Bitmap& bitmap)
+{
+    // Make sure that by copying the mark bits for one arena in word sizes we
+    // do not affect the mark bits for other arenas.
+    static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
+                  "ArenaBitmapWords must evenly divide ArenaBitmapBits");
+
+    Zone* atomsZone = runtime->unsafeAtomsCompartment()->zone();
+    for (auto thingKind : AllAllocKinds()) {
+        for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
+            Arena* arena = aiter.get();
+            uintptr_t* chunkWords = arena->chunk()->bitmap.arenaBits(arena);
+
+            // The bitmap might not be long enough, in which case remaining
+            // bits are implicitly zero.
+            if (bitmap.length() <= arena->atomBitmapStart())
+                continue;
+            MOZ_ASSERT(bitmap.length() >= arena->atomBitmapStart() + ArenaBitmapWords);
+
+            uintptr_t* bitmapWords = &bitmap[arena->atomBitmapStart()];
+            for (size_t i = 0; i < ArenaBitmapWords; i++)
+                chunkWords[i] |= bitmapWords[i];
+        }
+    }
+}
+
+void
+AtomMarkingRuntime::updateChunkMarkBits(JSRuntime* runtime)
+{
+    MOZ_ASSERT(runtime->currentThreadHasExclusiveAccess());
+
+    // Try to compute a simple union of the zone atom bitmaps before updating
+    // the chunk mark bitmaps. If this allocation fails then fall back to
+    // updating the chunk mark bitmaps separately for each zone.
+    Bitmap markedUnion;
+    if (EnsureBitmapLength(markedUnion, allocatedWords)) {
+        for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
+            // We only need to update the chunk mark bits for zones which were
+            // not collected in the current GC. Atoms which are referenced by
+            // collected zones have already been marked.
+            if (!zone->isCollectingFromAnyThread()) {
+                MOZ_ASSERT(zone->markedAtoms.length() <= allocatedWords);
+                for (size_t i = 0; i < zone->markedAtoms.length(); i++)
+                    markedUnion[i] |= zone->markedAtoms[i];
+            }
+        }
+        AddBitmapToChunkMarkBits(runtime, markedUnion);
+    } else {
+        for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
+            if (!zone->isCollectingFromAnyThread())
+                AddBitmapToChunkMarkBits(runtime, zone->markedAtoms);
+        }
+    }
+}
+
+static inline size_t
+GetAtomBit(TenuredCell* thing)
+{
+    MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());
+    Arena* arena = thing->arena();
+    size_t arenaBit = (reinterpret_cast<uintptr_t>(thing) - arena->address()) / CellSize;
+    return arena->atomBitmapStart() * JS_BITS_PER_WORD + arenaBit;
+}
+
+static bool
+ThingIsPermanent(TenuredCell* thing)
+{
+    JS::TraceKind kind = thing->getTraceKind();
+    if (kind == JS::TraceKind::String && static_cast<JSString*>(thing)->isPermanentAtom())
+        return true;
+    if (kind == JS::TraceKind::Symbol && static_cast<JS::Symbol*>(thing)->isWellKnownSymbol())
+        return true;
+    return false;
+}
+
+void
+AtomMarkingRuntime::markAtom(ExclusiveContext* cx, TenuredCell* thing)
+{
+    // The context's zone will be null during initialization of the runtime.
+    if (!thing || !cx->zone())
+        return;
+    MOZ_ASSERT(!cx->zone()->isAtomsZone());
+
+    if (ThingIsPermanent(thing) || !thing->zoneFromAnyThread()->isAtomsZone())
+        return;
+
+    size_t bit = GetAtomBit(thing);
+
+    {
+        AutoEnterOOMUnsafeRegion oomUnsafe;
+        if (!EnsureBitmapLength(cx->zone()->markedAtoms, allocatedWords))
+            oomUnsafe.crash("Atom bitmap OOM");
+    }
+
+    SetBit(cx->zone()->markedAtoms.begin(), bit);
+
+    if (cx->isJSContext()) {
+        // Trigger a read barrier on the atom, in case there is an incremental
+        // GC in progress. This is necessary if the atom is being marked
+        // because a reference to it was obtained from another zone which is
+        // not being collected by the incremental GC.
+        TenuredCell::readBarrier(thing);
+    }
+}
+
+void
+AtomMarkingRuntime::markId(ExclusiveContext* cx, jsid id)
+{
+    if (JSID_IS_GCTHING(id))
+        markAtom(cx, &JSID_TO_GCTHING(id).asCell()->asTenured());
+}
+
+void
+AtomMarkingRuntime::markAtomValue(ExclusiveContext* cx, const Value& value)
+{
+    if (value.isGCThing()) {
+        Cell* thing = value.toGCThing();
+        if (thing && !IsInsideNursery(thing))
+            markAtom(cx, &thing->asTenured());
+    }
+}
+
+void
+AtomMarkingRuntime::adoptMarkedAtoms(Zone* target, Zone* source)
+{
+    MOZ_ASSERT(target->runtimeFromAnyThread()->currentThreadHasExclusiveAccess());
+
+    Bitmap* targetBitmap = &target->markedAtoms;
+    Bitmap* sourceBitmap = &source->markedAtoms;
+    if (targetBitmap->length() < sourceBitmap->length())
+        std::swap(targetBitmap, sourceBitmap);
+    for (size_t i = 0; i < sourceBitmap->length(); i++)
+        (*targetBitmap)[i] |= (*sourceBitmap)[i];
+
+    if (targetBitmap != &target->markedAtoms)
+        target->markedAtoms = Move(source->markedAtoms);
+    else
+        source->markedAtoms.clear();
+}
+
+#ifdef DEBUG
+
+bool
+AtomMarkingRuntime::atomIsMarked(Zone* zone, Cell* thingArg)
+{
+    if (!thingArg || IsInsideNursery(thingArg))
+        return true;
+    TenuredCell* thing = &thingArg->asTenured();
+
+    if (!zone->runtimeFromAnyThread()->permanentAtoms)
+        return true;
+
+    if (ThingIsPermanent(thing) || !thing->zoneFromAnyThread()->isAtomsZone())
+        return true;
+
+    JS::TraceKind kind = thing->getTraceKind();
+    if (kind == JS::TraceKind::String) {
+        JSAtom* atom = static_cast<JSAtom*>(thing);
+        if (AtomIsPinnedInRuntime(zone->runtimeFromAnyThread(), atom))
+            return true;
+    }
+
+    size_t bit = GetAtomBit(thing);
+    if (bit >= zone->markedAtoms.length() * JS_BITS_PER_WORD)
+        return false;
+    return GetBit(zone->markedAtoms.begin(), bit);
+}
+
+bool
+AtomMarkingRuntime::idIsMarked(Zone* zone, jsid id)
+{
+    if (JSID_IS_GCTHING(id))
+        return atomIsMarked(zone, JSID_TO_GCTHING(id).asCell());
+    return true;
+}
+
+bool
+AtomMarkingRuntime::valueIsMarked(Zone* zone, const Value& value)
+{
+    if (value.isGCThing())
+        return atomIsMarked(zone, value.toGCThing());
+    return true;
+}
+
+#endif // DEBUG
+
+} // namespace gc
+
+#ifdef DEBUG
+
+bool
+AtomIsMarked(Zone* zone, JSAtom* atom)
+{
+    return zone->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(zone, atom);
+}
+
+bool
+AtomIsMarked(Zone* zone, jsid id)
+{
+    return zone->runtimeFromAnyThread()->gc.atomMarking.idIsMarked(zone, id);
+}
+
+bool
+AtomIsMarked(Zone* zone, const Value& value)
+{
+    return zone->runtimeFromAnyThread()->gc.atomMarking.valueIsMarked(zone, value);
+}
+
+#endif // DEBUG
+
+} // namespace js
new file mode 100644
--- /dev/null
+++ b/js/src/gc/AtomMarking.h
@@ -0,0 +1,72 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef gc_AtomMarking_h
+#define gc_AtomMarking_h
+
+#include "NamespaceImports.h"
+#include "gc/Heap.h"
+
+namespace js {
+namespace gc {
+
+// This class manages state used for marking atoms during GCs.
+// See AtomMarking.cpp for details.
+class AtomMarkingRuntime
+{
+    // Unused arena atom bitmap indexes. Protected by the GC lock.
+    Vector<size_t, 0, SystemAllocPolicy> freeArenaIndexes;
+
+    // The extent of all allocated and free words in atom mark bitmaps.
+    // This monotonically increases and may be read from without locking.
+    mozilla::Atomic<size_t> allocatedWords;
+
+  public:
+    typedef Vector<uintptr_t, 0, SystemAllocPolicy> Bitmap;
+
+    AtomMarkingRuntime()
+      : allocatedWords(0)
+    {}
+
+    // Mark an arena as holding things in the atoms zone.
+    void registerArena(Arena* arena);
+
+    // Mark an arena as no longer holding things in the atoms zone.
+    void unregisterArena(Arena* arena);
+
+    // Fill |bitmap| with an atom marking bitmap based on the things that are
+    // currently marked in the chunks used by atoms zone arenas. This returns
+    // false on an allocation failure (but does not report an exception).
+    bool computeBitmapFromChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap);
+
+    // Update the atom marking bitmap in |zone| according to another
+    // overapproximation of the reachable atoms in |bitmap|.
+    void updateZoneBitmap(Zone* zone, const Bitmap& bitmap);
+
+    // Set any bits in the chunk mark bitmaps for atoms which are marked in any
+    // zone in the runtime.
+    void updateChunkMarkBits(JSRuntime* runtime);
+
+    // Mark an atom or id as being newly reachable by the context's zone.
+    void markAtom(ExclusiveContext* cx, TenuredCell* thing);
+    void markId(ExclusiveContext* cx, jsid id);
+    void markAtomValue(ExclusiveContext* cx, const Value& value);
+
+    // Mark all atoms in |source| as being reachable within |target|.
+    void adoptMarkedAtoms(Zone* target, Zone* source);
+
+#ifdef DEBUG
+    // Return whether |thing/id| is in the atom marking bitmap for |zone|.
+    bool atomIsMarked(Zone* zone, Cell* thing);
+    bool idIsMarked(Zone* zone, jsid id);
+    bool valueIsMarked(Zone* zone, const Value& value);
+#endif
+};
+
+} // namespace gc
+} // namespace js
+
+#endif // gc_AtomMarking_h
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -8,16 +8,17 @@
 #define gc_GCRuntime_h
 
 #include "mozilla/Atomics.h"
 #include "mozilla/EnumSet.h"
 
 #include "jsfriendapi.h"
 #include "jsgc.h"
 
+#include "gc/AtomMarking.h"
 #include "gc/Heap.h"
 #include "gc/Nursery.h"
 #include "gc/Statistics.h"
 #include "gc/StoreBuffer.h"
 #include "gc/Tracer.h"
 #include "js/GCAnnotations.h"
 
 namespace js {
@@ -1035,16 +1036,20 @@ class GCRuntime
     HeapUsage usage;
 
     /* GC scheduling state and parameters. */
     GCSchedulingTunables tunables;
     GCSchedulingState schedulingState;
 
     MemProfiler mMemProfiler;
 
+    // State used for managing atom mark bitmaps in each zone. Protected by the
+    // exclusive access lock.
+    AtomMarkingRuntime atomMarking;
+
   private:
     // When empty, chunks reside in the emptyChunks pool and are re-used as
     // needed or eventually expired if not re-used. The emptyChunks pool gets
     // refilled from the background allocation task heuristically so that empty
     // chunks should always available for immediate allocation without syscalls.
     ChunkPool             emptyChunks_;
 
     // Chunks which have had some, but not all, of their arenas allocated live
--- a/js/src/gc/Heap-inl.h
+++ b/js/src/gc/Heap-inl.h
@@ -2,28 +2,56 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Heap_inl_h
 #define gc_Heap_inl_h
 
+#include "gc/Heap.h"
+
 #include "gc/StoreBuffer.h"
+#include "gc/Zone.h"
 
 inline void
 js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
 {
     MOZ_ASSERT(firstFreeSpan.isEmpty());
     MOZ_ASSERT(!zone);
     MOZ_ASSERT(!allocated());
     MOZ_ASSERT(!hasDelayedMarking);
     MOZ_ASSERT(!allocatedDuringIncremental);
     MOZ_ASSERT(!markOverflow);
     MOZ_ASSERT(!auxNextLink);
 
     zone = zoneArg;
     allocKind = size_t(kind);
     setAsFullyUnused();
-    bufferedCells = &ArenaCellSet::Empty;
+    if (zone->isAtomsZone())
+        zone->runtimeFromAnyThread()->gc.atomMarking.registerArena(this);
+    else
+        bufferedCells() = &ArenaCellSet::Empty;
+}
+
+inline void
+js::gc::Arena::release()
+{
+    if (zone->isAtomsZone())
+        zone->runtimeFromAnyThread()->gc.atomMarking.unregisterArena(this);
+    setAsNotAllocated();
+}
+
+inline js::gc::ArenaCellSet*&
+js::gc::Arena::bufferedCells()
+{
+    MOZ_ASSERT(zone && !zone->isAtomsZone());
+    return bufferedCells_;
+}
+
+inline size_t&
+js::gc::Arena::atomBitmapStart()
+{
+    MOZ_ASSERT(zone && zone->isAtomsZone());
+    return atomBitmapStart_;
 }
 
 #endif
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -527,21 +527,34 @@ class Arena
     size_t hasDelayedMarking : 1;
     size_t allocatedDuringIncremental : 1;
     size_t markOverflow : 1;
     size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
     static_assert(ArenaShift >= 8 + 1 + 1 + 1,
                   "Arena::auxNextLink packing assumes that ArenaShift has "
                   "enough bits to cover allocKind and hasDelayedMarking.");
 
-    /*
-     * If non-null, points to an ArenaCellSet that represents the set of cells
-     * in this arena that are in the nursery's store buffer.
-     */
-    ArenaCellSet* bufferedCells;
+  private:
+    union {
+        /*
+         * For arenas in zones other than the atoms zone, if non-null, points
+         * to an ArenaCellSet that represents the set of cells in this arena
+         * that are in the nursery's store buffer.
+         */
+        ArenaCellSet* bufferedCells_;
+
+        /*
+         * For arenas in the atoms zone, the starting index into zone atom
+         * marking bitmaps (see AtomMarking.h) of the things in this zone.
+         * Atoms never refer to nursery things, so no store buffer index is
+         * needed.
+         */
+        size_t atomBitmapStart_;
+    };
+  public:
 
     /*
      * The size of data should be |ArenaSize - offsetof(data)|, but the offset
      * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
      * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
      * obvious. This constant is stored in js/HeapAPI.h.
      */
     uint8_t data[ArenaSize - ArenaHeaderSize];
@@ -553,27 +566,32 @@ class Arena
     void setAsFullyUnused() {
         AllocKind kind = getAllocKind();
         firstFreeSpan.first = firstThingOffset(kind);
         firstFreeSpan.last = lastThingOffset(kind);
         FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
         last->initAsEmpty();
     }
 
+    // Initialize an arena to its unallocated state. For arenas that were
+    // previously allocated for some zone, use release() instead.
     void setAsNotAllocated() {
         firstFreeSpan.initAsEmpty();
         zone = nullptr;
         allocKind = size_t(AllocKind::LIMIT);
         hasDelayedMarking = 0;
         allocatedDuringIncremental = 0;
         markOverflow = 0;
         auxNextLink = 0;
-        bufferedCells = nullptr;
+        bufferedCells_ = nullptr;
     }
 
+    // Return an allocated arena to its unallocated state.
+    inline void release();
+
     uintptr_t address() const {
         checkAddress();
         return uintptr_t(this);
     }
 
     inline void checkAddress() const;
 
     inline Chunk* chunk() const;
@@ -595,18 +613,19 @@ class Arena
     static size_t thingsSpan(AllocKind kind) { return thingsPerArena(kind) * thingSize(kind); }
 
     static size_t firstThingOffset(AllocKind kind) { return FirstThingOffsets[size_t(kind)]; }
     static size_t lastThingOffset(AllocKind kind) { return ArenaSize - thingSize(kind); }
 
     size_t getThingSize() const { return thingSize(getAllocKind()); }
     size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
     size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
+    size_t getFirstThingOffset() const { return firstThingOffset(getAllocKind()); }
 
-    uintptr_t thingsStart() const { return address() + firstThingOffset(getAllocKind()); }
+    uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
     uintptr_t thingsEnd() const { return address() + ArenaSize; }
 
     bool isEmpty() const {
         // Arena is empty if its first span covers the whole arena.
         firstFreeSpan.checkSpan(this);
         AllocKind kind = getAllocKind();
         return firstFreeSpan.first == firstThingOffset(kind) &&
                firstFreeSpan.last == lastThingOffset(kind);
@@ -680,26 +699,25 @@ class Arena
     }
 
     void unsetAllocDuringSweep() {
         MOZ_ASSERT(allocatedDuringIncremental);
         allocatedDuringIncremental = 0;
         auxNextLink = 0;
     }
 
+    inline ArenaCellSet*& bufferedCells();
+    inline size_t& atomBitmapStart();
+
     template <typename T>
     size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
 
     static void staticAsserts();
 
     void unmarkAll();
-
-    static size_t offsetOfBufferedCells() {
-        return offsetof(Arena, bufferedCells);
-    }
 };
 
 static_assert(ArenaZoneOffset == offsetof(Arena, zone),
               "The hardcoded API zone offset must match the actual offset.");
 
 static_assert(sizeof(Arena) == ArenaSize,
               "ArenaSize must match the actual size of the Arena structure.");
 
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -923,16 +923,23 @@ CheckTraversedEdge(S source, T* target)
 {
     // Atoms and Symbols do not have or mark their internal pointers, respectively.
     MOZ_ASSERT(!ThingIsPermanentAtomOrWellKnownSymbol(source));
 
     // The Zones must match, unless the target is an atom.
     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target),
                   target->zone()->isAtomsZone() || target->zone() == source->zone());
 
+    // If we are marking an atom, that atom must be marked in the source zone's
+    // atom bitmap.
+    MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target) &&
+                  target->zone()->isAtomsZone() && !source->zone()->isAtomsZone(),
+                  target->runtimeFromAnyThread()->gc.atomMarking
+                      .atomIsMarked(source->zone(), reinterpret_cast<TenuredCell*>(target)));
+
     // Atoms and Symbols do not have access to a compartment pointer, or we'd need
     // to adjust the subsequent check to catch that case.
     MOZ_ASSERT_IF(ThingIsPermanentAtomOrWellKnownSymbol(target), !target->maybeCompartment());
     MOZ_ASSERT_IF(target->zoneFromAnyThread()->isAtomsZone(), !target->maybeCompartment());
     // If we have access to a compartment pointer for both things, they must match.
     MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                   source->maybeCompartment() == target->maybeCompartment());
 }
@@ -2349,18 +2356,18 @@ TraceBufferedCells(TenuringTracer& mover
 }
 
 void
 js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
 {
     for (ArenaCellSet* cells = bufferWholeCell; cells; cells = cells->next) {
         Arena* arena = cells->arena;
 
-        MOZ_ASSERT(arena->bufferedCells == cells);
-        arena->bufferedCells = &ArenaCellSet::Empty;
+        MOZ_ASSERT(arena->bufferedCells() == cells);
+        arena->bufferedCells() = &ArenaCellSet::Empty;
 
         JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
         switch (kind) {
           case JS::TraceKind::Object:
             TraceBufferedCells<JSObject>(mover, arena, cells);
             break;
           case JS::TraceKind::Script:
             TraceBufferedCells<JSScript>(mover, arena, cells);
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -154,17 +154,16 @@ static const PhaseInfo phases[] = {
             { PHASE_SWEEP_MARK_WEAK, "Mark Weak", PHASE_SWEEP_MARK, 13 },
             { PHASE_SWEEP_MARK_INCOMING_GRAY, "Mark Incoming Gray Pointers", PHASE_SWEEP_MARK, 14 },
             { PHASE_SWEEP_MARK_GRAY, "Mark Gray", PHASE_SWEEP_MARK, 15 },
             { PHASE_SWEEP_MARK_GRAY_WEAK, "Mark Gray and Weak", PHASE_SWEEP_MARK, 16 },
         { PHASE_FINALIZE_START, "Finalize Start Callbacks", PHASE_SWEEP, 17 },
             { PHASE_WEAK_ZONEGROUP_CALLBACK, "Per-Slice Weak Callback", PHASE_FINALIZE_START, 57 },
             { PHASE_WEAK_COMPARTMENT_CALLBACK, "Per-Compartment Weak Callback", PHASE_FINALIZE_START, 58 },
         { PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP, 18 },
-        { PHASE_SWEEP_SYMBOL_REGISTRY, "Sweep Symbol Registry", PHASE_SWEEP, 19 },
         { PHASE_SWEEP_COMPARTMENTS, "Sweep Compartments", PHASE_SWEEP, 20 },
             { PHASE_SWEEP_DISCARD_CODE, "Sweep Discard Code", PHASE_SWEEP_COMPARTMENTS, 21 },
             { PHASE_SWEEP_INNER_VIEWS, "Sweep Inner Views", PHASE_SWEEP_COMPARTMENTS, 22 },
             { PHASE_SWEEP_CC_WRAPPER, "Sweep Cross Compartment Wrappers", PHASE_SWEEP_COMPARTMENTS, 23 },
             { PHASE_SWEEP_BASE_SHAPE, "Sweep Base Shapes", PHASE_SWEEP_COMPARTMENTS, 24 },
             { PHASE_SWEEP_INITIAL_SHAPE, "Sweep Initial Shapes", PHASE_SWEEP_COMPARTMENTS, 25 },
             { PHASE_SWEEP_TYPE_OBJECT, "Sweep Type Objects", PHASE_SWEEP_COMPARTMENTS, 26 },
             { PHASE_SWEEP_BREAKPOINT, "Sweep Breakpoints", PHASE_SWEEP_COMPARTMENTS, 27 },
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -44,17 +44,16 @@ enum Phase : uint8_t {
     PHASE_SWEEP_MARK_WEAK,
     PHASE_SWEEP_MARK_INCOMING_GRAY,
     PHASE_SWEEP_MARK_GRAY,
     PHASE_SWEEP_MARK_GRAY_WEAK,
     PHASE_FINALIZE_START,
     PHASE_WEAK_ZONEGROUP_CALLBACK,
     PHASE_WEAK_COMPARTMENT_CALLBACK,
     PHASE_SWEEP_ATOMS,
-    PHASE_SWEEP_SYMBOL_REGISTRY,
     PHASE_SWEEP_COMPARTMENTS,
     PHASE_SWEEP_DISCARD_CODE,
     PHASE_SWEEP_INNER_VIEWS,
     PHASE_SWEEP_CC_WRAPPER,
     PHASE_SWEEP_BASE_SHAPE,
     PHASE_SWEEP_INITIAL_SHAPE,
     PHASE_SWEEP_TYPE_OBJECT,
     PHASE_SWEEP_BREAKPOINT,
--- a/js/src/gc/StoreBuffer-inl.h
+++ b/js/src/gc/StoreBuffer-inl.h
@@ -6,16 +6,18 @@
 
 #ifndef gc_StoreBuffer_inl_h
 #define gc_StoreBuffer_inl_h
 
 #include "gc/StoreBuffer.h"
 
 #include "gc/Heap.h"
 
+#include "gc/Heap-inl.h"
+
 namespace js {
 namespace gc {
 
 inline /* static */ size_t
 ArenaCellSet::getCellIndex(const TenuredCell* cell)
 {
     MOZ_ASSERT((uintptr_t(cell) & ~ArenaMask) % CellSize == 0);
     return (uintptr_t(cell) & ArenaMask) / CellSize;
@@ -43,27 +45,27 @@ ArenaCellSet::putCell(size_t cellIndex)
 
 inline void
 ArenaCellSet::check() const
 {
 #ifdef DEBUG
     bool bitsZero = bits.isAllClear();
     MOZ_ASSERT(isEmpty() == bitsZero);
     MOZ_ASSERT(isEmpty() == !arena);
-    MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells == this);
+    MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells() == this);
 #endif
 }
 
 inline void
 StoreBuffer::putWholeCell(Cell* cell)
 {
     MOZ_ASSERT(cell->isTenured());
 
     Arena* arena = cell->asTenured().arena();
-    ArenaCellSet* cells = arena->bufferedCells;
+    ArenaCellSet* cells = arena->bufferedCells();
     if (cells->isEmpty()) {
         cells = AllocateWholeCellSet(arena);
         if (!cells)
             return;
     }
 
     cells->putCell(&cell->asTenured());
     cells->check();
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -75,17 +75,17 @@ StoreBuffer::clear()
     cancelIonCompilations_ = false;
 
     bufferVal.clear();
     bufferCell.clear();
     bufferSlot.clear();
     bufferGeneric.clear();
 
     for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
-         set->arena->bufferedCells = nullptr;
+        set->arena->bufferedCells() = nullptr;
     bufferWholeCell = nullptr;
 }
 
 void
 StoreBuffer::setAboutToOverflow()
 {
     if (!aboutToOverflow_) {
         aboutToOverflow_ = true;
@@ -138,16 +138,16 @@ js::gc::AllocateWholeCellSet(Arena* aren
         return nullptr;
     }
 
     if (nursery.freeSpace() < ArenaCellSet::NurseryFreeThresholdBytes)
         rt->gc.storeBuffer.setAboutToOverflow();
 
     auto cells = static_cast<ArenaCellSet*>(data);
     new (cells) ArenaCellSet(arena);
-    arena->bufferedCells = cells;
+    arena->bufferedCells() = cells;
     rt->gc.storeBuffer.addToWholeCellBuffer(cells);
     return cells;
 }
 
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -146,17 +146,18 @@ struct Zone : public JS::shadow::Zone,
     void findOutgoingEdges(js::gc::ZoneComponentFinder& finder);
 
     void discardJitCode(js::FreeOp* fop, bool discardBaselineCode = true);
 
     void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                 size_t* typePool,
                                 size_t* baselineStubsOptimized,
                                 size_t* uniqueIdMap,
-                                size_t* shapeTables);
+                                size_t* shapeTables,
+                                size_t* atomsMarkBitmaps);
 
     void resetGCMallocBytes();
     void setGCMaxMallocBytes(size_t value);
     void updateMallocCounter(size_t nbytes) {
         // Note: this code may be run from worker threads. We tolerate any
         // thread races when updating gcMallocBytes.
         gcMallocBytes -= ptrdiff_t(nbytes);
         if (MOZ_UNLIKELY(isTooMuchMalloc()))
@@ -376,16 +377,19 @@ struct Zone : public JS::shadow::Zone,
 
     // Whether a GC has been triggered as a result of gcMallocBytes falling
     // below zero.
     //
     // This should be a bool, but Atomic only supports 32-bit and pointer-sized
     // types.
     mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcMallocGCTriggered;
 
+    // Bitmap of atoms marked by this zone.
+    js::gc::AtomMarkingRuntime::Bitmap markedAtoms;
+
     // Track heap usage under this Zone.
     js::gc::HeapUsage usage;
 
     // Thresholds used to trigger GC.
     js::gc::ZoneHeapThreshold threshold;
 
     // Amount of data to allocate before triggering a new incremental slice for
     // the current GC.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -3555,17 +3555,17 @@ EmitStoreBufferCheckForConstant(MacroAss
                                 AllocatableGeneralRegisterSet& regs, Label* exit, Label* callVM)
 {
     Register temp = regs.takeAny();
 
     const gc::TenuredCell* cell = &object->asTenured();
     gc::Arena* arena = cell->arena();
 
     Register cells = temp;
-    masm.loadPtr(AbsoluteAddress(&arena->bufferedCells), cells);
+    masm.loadPtr(AbsoluteAddress(&arena->bufferedCells()), cells);
 
     size_t index = gc::ArenaCellSet::getCellIndex(cell);
     size_t word;
     uint32_t mask;
     gc::ArenaCellSet::getWordIndexAndMask(index, &word, &mask);
     size_t offset = gc::ArenaCellSet::offsetOfBits() + word * sizeof(uint32_t);
 
     masm.branchTest32(Assembler::NonZero, Address(cells, offset), Imm32(mask), exit);
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -778,16 +778,28 @@ JS_SetCompartmentPrivate(JSCompartment* 
 }
 
 JS_PUBLIC_API(void*)
 JS_GetCompartmentPrivate(JSCompartment* compartment)
 {
     return compartment->data;
 }
 
+JS_PUBLIC_API(void)
+JS_MarkCrossZoneId(JSContext* cx, jsid id)
+{
+    cx->markId(id);
+}
+
+JS_PUBLIC_API(void)
+JS_MarkCrossZoneIdValue(JSContext* cx, const Value& value)
+{
+    cx->markAtomValue(value);
+}
+
 JS_PUBLIC_API(JSAddonId*)
 JS::NewAddonId(JSContext* cx, HandleString str)
 {
     return static_cast<JSAddonId*>(JS_AtomizeAndPinJSString(cx, str));
 }
 
 JS_PUBLIC_API(JSString*)
 JS::StringOfAddonId(JSAddonId* id)
@@ -1137,16 +1149,17 @@ ProtoKeyToId(JSContext* cx, JSProtoKey k
 
 } /* namespace JS */
 
 JS_PUBLIC_API(JSProtoKey)
 JS_IdToProtoKey(JSContext* cx, HandleId id)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
+    assertSameCompartment(cx, id);
 
     if (!JSID_IS_ATOM(id))
         return JSProto_Null;
 
     JSAtom* atom = JSID_TO_ATOM(id);
     const JSStdName* stdnm = LookupStdName(cx->names(), atom, standard_class_names);
     if (!stdnm)
         return JSProto_Null;
@@ -1564,16 +1577,17 @@ JS_StringToId(JSContext* cx, HandleStrin
     return ValueToId<CanGC>(cx, value, idp);
 }
 
 JS_PUBLIC_API(bool)
 JS_IdToValue(JSContext* cx, jsid id, MutableHandleValue vp)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
+    assertSameCompartment(cx, id);
     vp.set(IdToValue(id));
     assertSameCompartment(cx, vp);
     return true;
 }
 
 JS_PUBLIC_API(bool)
 JS::ToPrimitive(JSContext* cx, HandleObject obj, JSType hint, MutableHandleValue vp)
 {
@@ -2015,17 +2029,17 @@ JS_GetOwnUCPropertyDescriptor(JSContext*
     RootedId id(cx, AtomToId(atom));
     return JS_GetOwnPropertyDescriptorById(cx, obj, id, desc);
 }
 
 JS_PUBLIC_API(bool)
 JS_GetPropertyDescriptorById(JSContext* cx, HandleObject obj, HandleId id,
                              MutableHandle<PropertyDescriptor> desc)
 {
-    assertSameCompartment(cx, obj);
+    assertSameCompartment(cx, obj, id);
     return GetPropertyDescriptor(cx, obj, id, desc);
 }
 
 JS_PUBLIC_API(bool)
 JS_GetPropertyDescriptor(JSContext* cx, HandleObject obj, const char* name,
                          MutableHandle<PropertyDescriptor> desc)
 {
     JSAtom* atom = Atomize(cx, name, strlen(name));
@@ -3400,16 +3414,17 @@ JS_NewFunction(JSContext* cx, JSNative n
 }
 
 JS_PUBLIC_API(JSFunction*)
 JS::GetSelfHostedFunction(JSContext* cx, const char* selfHostedName, HandleId id, unsigned nargs)
 {
     MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
+    assertSameCompartment(cx, id);
 
     RootedAtom name(cx, IdToFunctionName(cx, id));
     if (!name)
         return nullptr;
 
     JSAtom* shAtom = Atomize(cx, selfHostedName, strlen(selfHostedName));
     if (!shAtom)
         return nullptr;
@@ -3418,16 +3433,18 @@ JS::GetSelfHostedFunction(JSContext* cx,
     if (!GlobalObject::getSelfHostedFunction(cx, cx->global(), shName, name, nargs, &funVal))
         return nullptr;
     return &funVal.toObject().as<JSFunction>();
 }
 
 JS_PUBLIC_API(JSFunction*)
 JS::NewFunctionFromSpec(JSContext* cx, const JSFunctionSpec* fs, HandleId id)
 {
+    assertSameCompartment(cx, id);
+
     // Delay cloning self-hosted functions until they are called. This is
     // achieved by passing DefineFunction a nullptr JSNative which produces an
     // interpreted JSFunction where !hasScript. Interpreted call paths then
     // call InitializeLazyFunctionScript if !hasScript.
     if (fs->selfHostedName) {
         MOZ_ASSERT(!fs->call.op);
         MOZ_ASSERT(!fs->call.info);
 
@@ -3704,17 +3721,17 @@ JS_DefineUCFunction(JSContext* cx, Handl
 
 extern JS_PUBLIC_API(JSFunction*)
 JS_DefineFunctionById(JSContext* cx, HandleObject obj, HandleId id, JSNative call,
                       unsigned nargs, unsigned attrs)
 {
     MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
-    assertSameCompartment(cx, obj);
+    assertSameCompartment(cx, obj, id);
     return DefineFunction(cx, obj, id, call, nargs, attrs);
 }
 
 /* Use the fastest available getc. */
 #if defined(HAVE_GETC_UNLOCKED)
 # define fast_getc getc_unlocked
 #elif defined(HAVE__GETC_NOLOCK)
 # define fast_getc _getc_nolock
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1420,16 +1420,32 @@ typedef void (*JSIterateCompartmentCallb
  * there is no guarantee that the compartment will survive after the callback
  * returns. Also, barriers are disabled via the TraceSession.
  */
 extern JS_PUBLIC_API(void)
 JS_IterateCompartments(JSContext* cx, void* data,
                        JSIterateCompartmentCallback compartmentCallback);
 
 /**
+ * Mark a jsid after entering a new compartment. Different zones separately
+ * mark the ids in a runtime, and this must be used any time an id is obtained
+ * from one compartment and then used in another compartment, unless the two
+ * compartments are guaranteed to be in the same zone.
+ */
+extern JS_PUBLIC_API(void)
+JS_MarkCrossZoneId(JSContext* cx, jsid id);
+
+/**
+ * If the value stores a jsid (an atomized string or symbol), mark that id as
+ * described for JS_MarkCrossZoneId.
+ */
+extern JS_PUBLIC_API(void)
+JS_MarkCrossZoneIdValue(JSContext* cx, const JS::Value& value);
+
+/**
  * Initialize standard JS class constructors, prototypes, and any top-level
  * functions and constants associated with the standard classes (e.g. isNaN
  * for Number).
  *
  * NB: This sets cx's global object to obj if it was null.
  */
 extern JS_PUBLIC_API(bool)
 JS_InitStandardClasses(JSContext* cx, JS::Handle<JSObject*> obj);
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -302,16 +302,35 @@ AtomIsPinned(JSContext* cx, JSAtom* atom
 
     p = cx->runtime()->atoms(lock).lookup(lookup);
     if (!p)
         return false;
 
     return p->isPinned();
 }
 
+#ifdef DEBUG
+
+bool
+AtomIsPinnedInRuntime(JSRuntime* rt, JSAtom* atom)
+{
+    Maybe<AutoLockForExclusiveAccess> lock;
+    if (!rt->currentThreadHasExclusiveAccess())
+        lock.emplace(rt);
+
+    AtomHasher::Lookup lookup(atom);
+
+    AtomSet::Ptr p = rt->unsafeAtoms().lookup(lookup);
+    MOZ_ASSERT(p);
+
+    return p->isPinned();
+}
+
+#endif // DEBUG
+
 /* |tbchars| must not point into an inline or short string. */
 template <typename CharT>
 MOZ_ALWAYS_INLINE
 static JSAtom*
 AtomizeAndCopyChars(ExclusiveContext* cx, const CharT* tbchars, size_t length, PinningBehavior pin)
 {
     if (JSAtom* s = cx->staticStrings().lookup(tbchars, length))
          return s;
@@ -331,41 +350,46 @@ AtomizeAndCopyChars(ExclusiveContext* cx
 
     AutoLockForExclusiveAccess lock(cx);
 
     AtomSet& atoms = cx->atoms(lock);
     AtomSet::AddPtr p = atoms.lookupForAdd(lookup);
     if (p) {
         JSAtom* atom = p->asPtr(cx);
         p->setPinned(bool(pin));
+        cx->markAtom(atom);
         return atom;
     }
 
-    AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
+    JSAtom* atom;
+    {
+        AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
 
-    JSFlatString* flat = NewStringCopyN<NoGC>(cx, tbchars, length);
-    if (!flat) {
-        // Grudgingly forgo last-ditch GC. The alternative would be to release
-        // the lock, manually GC here, and retry from the top. If you fix this,
-        // please also fix or comment the similar case in Symbol::new_.
-        ReportOutOfMemory(cx);
-        return nullptr;
+        JSFlatString* flat = NewStringCopyN<NoGC>(cx, tbchars, length);
+        if (!flat) {
+            // Grudgingly forgo last-ditch GC. The alternative would be to release
+            // the lock, manually GC here, and retry from the top. If you fix this,
+            // please also fix or comment the similar case in Symbol::new_.
+            ReportOutOfMemory(cx);
+            return nullptr;
+        }
+
+        atom = flat->morphAtomizedStringIntoAtom(lookup.hash);
+        MOZ_ASSERT(atom->hash() == lookup.hash);
+
+        // We have held the lock since looking up p, and the operations we've done
+        // since then can't GC; therefore the atoms table has not been modified and
+        // p is still valid.
+        if (!atoms.add(p, AtomStateEntry(atom, bool(pin)))) {
+            ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
+            return nullptr;
+        }
     }
 
-    JSAtom* atom = flat->morphAtomizedStringIntoAtom(lookup.hash);
-    MOZ_ASSERT(atom->hash() == lookup.hash);
-
-    // We have held the lock since looking up p, and the operations we've done
-    // since then can't GC; therefore the atoms table has not been modified and
-    // p is still valid.
-    if (!atoms.add(p, AtomStateEntry(atom, bool(pin)))) {
-        ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
-        return nullptr;
-    }
-
+    cx->markAtom(atom);
     return atom;
 }
 
 template JSAtom*
 AtomizeAndCopyChars(ExclusiveContext* cx, const char16_t* tbchars, size_t length, PinningBehavior pin);
 
 template JSAtom*
 AtomizeAndCopyChars(ExclusiveContext* cx, const Latin1Char* tbchars, size_t length, PinningBehavior pin);
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -127,16 +127,24 @@ public:
 
 class PropertyName;
 
 }  /* namespace js */
 
 extern bool
 AtomIsPinned(JSContext* cx, JSAtom* atom);
 
+#ifdef DEBUG
+
+// This may be called either with or without the atoms lock held.
+extern bool
+AtomIsPinnedInRuntime(JSRuntime* rt, JSAtom* atom);
+
+#endif // DEBUG
+
 /* Well-known predefined C strings. */
 #define DECLARE_PROTO_STR(name,code,init,clasp) extern const char js_##name##_str[];
 JS_FOR_EACH_PROTOTYPE(DECLARE_PROTO_STR)
 #undef DECLARE_PROTO_STR
 
 #define DECLARE_CONST_CHAR_STR(idpart, id, text)  extern const char js_##idpart##_str[];
 FOR_EACH_COMMON_PROPERTYNAME(DECLARE_CONST_CHAR_STR)
 #undef DECLARE_CONST_CHAR_STR
@@ -230,11 +238,19 @@ enum XDRMode {
 
 template <XDRMode mode>
 class XDRState;
 
 template<XDRMode mode>
 bool
 XDRAtom(XDRState<mode>* xdr, js::MutableHandleAtom atomp);
 
+#ifdef DEBUG
+
+bool AtomIsMarked(Zone* zone, JSAtom* atom);
+bool AtomIsMarked(Zone* zone, jsid id);
+bool AtomIsMarked(Zone* zone, const Value& value);
+
+#endif // DEBUG
+
 } /* namespace js */
 
 #endif /* jsatom_h */
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -307,16 +307,30 @@ class ExclusiveContext : public ContextF
     }
     SymbolRegistry& symbolRegistry(js::AutoLockForExclusiveAccess& lock) {
         return runtime_->symbolRegistry(lock);
     }
     ScriptDataTable& scriptDataTable(AutoLockForExclusiveAccess& lock) {
         return runtime_->scriptDataTable(lock);
     }
 
+    // Methods to access other runtime data that checks locking internally.
+    gc::AtomMarkingRuntime& atomMarking() {
+        return runtime_->gc.atomMarking;
+    }
+    void markAtom(gc::TenuredCell* atom) {
+        atomMarking().markAtom(this, atom);
+    }
+    void markId(jsid id) {
+        atomMarking().markId(this, id);
+    }
+    void markAtomValue(const Value& value) {
+        atomMarking().markAtomValue(this, value);
+    }
+
     // Methods specific to any HelperThread for the context.
     bool addPendingCompileError(frontend::CompileError** err);
     void addPendingOverRecursed();
     void addPendingOutOfMemory();
 
   private:
     static JS::Error reportedError;
     static JS::OOM reportedOOM;
@@ -364,16 +378,17 @@ struct JSContext : public js::ExclusiveC
     using ExclusiveContext::make_unique;
     using ExclusiveContext::new_;
     using ExclusiveContext::permanentAtoms;
     using ExclusiveContext::pod_calloc;
     using ExclusiveContext::pod_malloc;
     using ExclusiveContext::staticStrings;
     using ExclusiveContext::updateMallocCounter;
     using ExclusiveContext::wellKnownSymbols;
+    using ExclusiveContext::atomMarking;
 
     JSRuntime* runtime() { return this; }
     js::PerThreadData& mainThread() { return this->JSRuntime::mainThread; }
 
     static size_t offsetOfActivation() {
         return offsetof(JSContext, activation_);
     }
     static size_t offsetOfWasmActivation() {
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -78,27 +78,47 @@ class CompartmentChecker
         check(rooted.get());
     }
 
     template<typename T>
     void check(Handle<T> handle) {
         check(handle.get());
     }
 
+    void checkAtom(gc::Cell* cell) {
+#ifdef DEBUG
+        // Atoms which move across zone boundaries need to be marked in the new
+        // zone; see JS_MarkCrossZoneId.
+        if (compartment) {
+            JSRuntime* rt = compartment->runtimeFromAnyThread();
+            MOZ_ASSERT(rt->gc.atomMarking.atomIsMarked(compartment->zone(), cell));
+        }
+#endif
+    }
+
     void check(JSString* str) {
         MOZ_ASSERT(!str->isMarked(gc::GRAY));
-        if (!str->isAtom())
+        if (str->isAtom()) {
+            checkAtom(str);
+        } else {
             checkZone(str->zone());
+        }
+    }
+
+    void check(JS::Symbol* symbol) {
+        checkAtom(symbol);
     }
 
     void check(const js::Value& v) {
         if (v.isObject())
             check(&v.toObject());
         else if (v.isString())
             check(v.toString());
+        else if (v.isSymbol())
+            check(v.toSymbol());
     }
 
     void check(const ValueArray& arr) {
         for (size_t i = 0; i < arr.length; i++)
             check(arr.array[i]);
     }
 
     void check(const JSValueArray& arr) {
@@ -111,17 +131,20 @@ class CompartmentChecker
             check(arr[i]);
     }
 
     void check(const CallArgs& args) {
         for (Value* p = args.base(); p != args.end(); ++p)
             check(*p);
     }
 
-    void check(jsid id) {}
+    void check(jsid id) {
+        if (JSID_IS_GCTHING(id))
+            checkAtom(JSID_TO_GCTHING(id).asCell());
+    }
 
     void check(JSScript* script) {
         MOZ_ASSERT_IF(script, !script->isMarked(gc::GRAY));
         if (script)
             check(script->compartment());
     }
 
     void check(InterpreterFrame* fp);
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -317,19 +317,22 @@ JSCompartment::wrap(JSContext* cx, Mutab
     MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(this));
     MOZ_ASSERT(cx->compartment() == this);
 
     /* If the string is already in this compartment, we are done. */
     JSString* str = strp;
     if (str->zoneFromAnyThread() == zone())
         return true;
 
-    /* If the string is an atom, we don't have to copy. */
+    /*
+     * If the string is an atom, we don't have to copy, but we do need to mark
+     * the atom as being in use by the new zone.
+     */
     if (str->isAtom()) {
-        MOZ_ASSERT(str->isPermanentAtom() || str->zone()->isAtomsZone());
+        cx->markAtom(str);
         return true;
     }
 
     /* Check the cache. */
     RootedValue key(cx, StringValue(str));
     if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(CrossCompartmentKey(key))) {
         strp.set(p->value().get().toString());
         return true;
--- a/js/src/jscompartmentinlines.h
+++ b/js/src/jscompartmentinlines.h
@@ -61,20 +61,23 @@ inline bool
 JSCompartment::wrap(JSContext* cx, JS::MutableHandleValue vp)
 {
     /* Only GC things have to be wrapped or copied. */
     if (!vp.isGCThing())
         return true;
 
     /*
      * Symbols are GC things, but never need to be wrapped or copied because
-     * they are always allocated in the atoms compartment.
+     * they are always allocated in the atoms compartment. They still need to
+     * be marked in the new compartment's zone, however.
      */
-    if (vp.isSymbol())
+    if (vp.isSymbol()) {
+        cx->markAtomValue(vp);
         return true;
+    }
 
     /* Handle strings. */
     if (vp.isString()) {
         JS::RootedString str(cx, vp.toString());
         if (!wrap(cx, &str))
             return false;
         vp.setString(str);
         return true;
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -474,16 +474,17 @@ js::NewFunctionWithReserved(JSContext* c
 
 JS_FRIEND_API(JSFunction*)
 js::NewFunctionByIdWithReserved(JSContext* cx, JSNative native, unsigned nargs, unsigned flags,
                                 jsid id)
 {
     MOZ_ASSERT(JSID_IS_STRING(id));
     MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
     CHECK_REQUEST(cx);
+    assertSameCompartment(cx, id);
 
     RootedAtom atom(cx, JSID_TO_ATOM(id));
     return (flags & JSFUN_CONSTRUCTOR) ?
         NewNativeConstructor(cx, native, nargs, atom, gc::AllocKind::FUNCTION_EXTENDED) :
         NewNativeFunction(cx, native, nargs, atom, gc::AllocKind::FUNCTION_EXTENDED);
 }
 
 JS_FRIEND_API(const Value&)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -9,22 +9,16 @@
  * most sweeping carried out in the background on a parallel thread.
  *
  * Full vs. zone GC
  * ----------------
  *
  * The collector can collect all zones at once, or a subset. These types of
  * collection are referred to as a full GC and a zone GC respectively.
  *
- * The atoms zone is only collected in a full GC since objects in any zone may
- * have pointers to atoms, and these are not recorded in the cross compartment
- * pointer map. Also, the atoms zone is not collected if any thread has an
- * AutoKeepAtoms instance on the stack, or there are any exclusive threads using
- * the runtime.
- *
  * It is possible for an incremental collection that started out as a full GC to
  * become a zone GC if new zones are created during the course of the
  * collection.
  *
  * Incremental collection
  * ----------------------
  *
  * For a collection to be carried out incrementally the following conditions
@@ -174,16 +168,26 @@
  * -------------
  *
  * Compacting GC happens at the end of a major GC as part of the last slice.
  * There are three parts:
  *
  *  - Arenas are selected for compaction.
  *  - The contents of those arenas are moved to new arenas.
  *  - All references to moved things are updated.
+ *
+ * Collecting Atoms
+ * ----------------
+ *
+ * Atoms are collected differently from other GC things. They are contained in
+ * a special zone and things in other zones may have pointers to them that are
+ * not recorded in the cross compartment pointer map. Each zone holds a bitmap
+ * with the atoms it might be keeping alive, and atoms are only collected if
+ * they are not included in any zone's atom bitmap. See AtomMarking.cpp for how
+ * this bitmap is managed.
  */
 
 #include "jsgcinlines.h"
 
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/DebugOnly.h"
 #include "mozilla/MacroForEach.h"
 #include "mozilla/MemoryReporting.h"
@@ -233,16 +237,17 @@
 #include "vm/Symbol.h"
 #include "vm/Time.h"
 #include "vm/TraceLogging.h"
 #include "vm/WrapperObject.h"
 
 #include "jsobjinlines.h"
 #include "jsscriptinlines.h"
 
+#include "gc/Heap-inl.h"
 #include "vm/Stack-inl.h"
 #include "vm/String-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 using mozilla::ArrayLength;
 using mozilla::Get;
@@ -717,17 +722,17 @@ Chunk::recycleArena(Arena* arena, Sorted
 }
 
 void
 Chunk::releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
 {
     MOZ_ASSERT(arena->allocated());
     MOZ_ASSERT(!arena->hasDelayedMarking);
 
-    arena->setAsNotAllocated();
+    arena->release();
     addArenaToFreeList(rt, arena);
     updateChunkListAfterFree(rt, lock);
 }
 
 bool
 Chunk::decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock)
 {
     MOZ_ASSERT(info.numArenasFreeCommitted > 0);
@@ -1935,17 +1940,17 @@ RelocateCell(Zone* zone, TenuredCell* sr
 
 static void
 RelocateArena(Arena* arena, SliceBudget& sliceBudget)
 {
     MOZ_ASSERT(arena->allocated());
     MOZ_ASSERT(!arena->hasDelayedMarking);
     MOZ_ASSERT(!arena->markOverflow);
     MOZ_ASSERT(!arena->allocatedDuringIncremental);
-    MOZ_ASSERT(arena->bufferedCells->isEmpty());
+    MOZ_ASSERT(arena->bufferedCells()->isEmpty());
 
     Zone* zone = arena->zone;
 
     AllocKind thingKind = arena->getAllocKind();
     size_t thingSize = arena->getThingSize();
 
     for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
         RelocateCell(zone, i.getCell(), thingKind, thingSize);
@@ -3764,34 +3769,33 @@ GCRuntime::beginMarkPhase(JS::gcreason::
     }
 
     if (!rt->gc.cleanUpEverything) {
         if (JSCompartment* comp = jit::TopmostIonActivationCompartment(rt))
             comp->zone()->setPreservingCode(true);
     }
 
     /*
-     * Atoms are not in the cross-compartment map. If there are any zones that
-     * are not being collected then we cannot collect the atoms zone, otherwise
-     * the non-collected zones could contain pointers to atoms that we would
-     * miss.
-     *
      * If keepAtoms() is true then either an instance of AutoKeepAtoms is
      * currently on the stack or parsing is currently happening on another
      * thread. In either case we don't have information about which atoms are
      * roots, so we must skip collecting atoms.
      *
      * Note that only affects the first slice of an incremental GC since root
      * marking is completed before we return to the mutator.
      *
      * Off-main-thread parsing is inhibited after the start of GC which prevents
      * races between creating atoms during parsing and sweeping atoms on the
      * main thread.
+     *
+     * Otherwise, we always collect the atoms zone so that atoms used by
+     * the other collected zones are marked, and so that we can update the
+     * set of atoms in use by those zones at the end of the GC.
      */
-    if (isFull && !rt->keepAtoms()) {
+    if (!rt->keepAtoms()) {
         Zone* atomsZone = rt->atomsCompartment(lock)->zone();
         if (atomsZone->isGCScheduled()) {
             MOZ_ASSERT(!atomsZone->isCollecting());
             atomsZone->setGCState(Zone::Mark);
             any = true;
         }
     }
 
@@ -4840,17 +4844,29 @@ MAKE_GC_SWEEP_TASK(SweepInitialShapesTas
 MAKE_GC_SWEEP_TASK(SweepObjectGroupsTask);
 MAKE_GC_SWEEP_TASK(SweepRegExpsTask);
 MAKE_GC_SWEEP_TASK(SweepMiscTask);
 #undef MAKE_GC_SWEEP_TASK
 
 /* virtual */ void
 SweepAtomsTask::run()
 {
+    AtomMarkingRuntime::Bitmap marked;
+    if (runtime->gc.atomMarking.computeBitmapFromChunkMarkBits(runtime, marked)) {
+        for (GCZonesIter zone(runtime); !zone.done(); zone.next())
+            runtime->gc.atomMarking.updateZoneBitmap(zone, marked);
+    } else {
+        // Ignore OOM in computeBitmapFromChunkMarkBits. Skipping the
+        // updateZoneBitmap calls is safe: they can only remove atoms from a
+        // zone's bitmap, so leaving stale bits is merely conservative.
+    }
+
+    runtime->gc.atomMarking.updateChunkMarkBits(runtime);
     runtime->sweepAtoms();
+    runtime->unsafeSymbolRegistry().sweep();
     for (CompartmentsIter comp(runtime, SkipAtoms); !comp.done(); comp.next())
         comp->sweepVarNames();
 }
 
 /* virtual */ void
 SweepCCWrappersTask::run()
 {
     for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
@@ -5067,21 +5083,16 @@ GCRuntime::beginSweepingZoneGroup(AutoLo
 
         {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_BREAKPOINT);
             for (GCZoneGroupIter zone(rt); !zone.done(); zone.next())
                 zone->sweepUniqueIds(&fop);
         }
     }
 
-    if (sweepingAtoms) {
-        gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_SYMBOL_REGISTRY);
-        rt->symbolRegistry(lock).sweep();
-    }
-
     // Rejoin our off-main-thread tasks.
     if (sweepingAtoms) {
         AutoLockHelperThreadState helperLock;
         joinTask(sweepAtomsTask, gcstats::PHASE_SWEEP_ATOMS, helperLock);
     }
 
     {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_COMPARTMENTS);
@@ -6713,16 +6724,19 @@ gc::MergeCompartments(JSCompartment* sou
 
     // Merge the allocator, stats and UIDs in source's zone into target's zone.
     target->zone()->arenas.adoptArenas(cx, &source->zone()->arenas);
     target->zone()->usage.adopt(source->zone()->usage);
     target->zone()->adoptUniqueIds(source->zone());
 
     // Merge other info in source's zone into target's zone.
     target->zone()->types.typeLifoAlloc.transferFrom(&source->zone()->types.typeLifoAlloc);
+
+    // Atoms which are marked in source's zone are now marked in target's zone.
+    cx->atomMarking().adoptMarkedAtoms(target->zone(), source->zone());
 }
 
 void
 GCRuntime::runDebugGC()
 {
 #ifdef JS_GC_ZEAL
     if (rt->mainThread.suppressGC)
         return;
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1048,16 +1048,17 @@ JS_CopyPropertyFrom(JSContext* cx, Handl
         return true;
 
     if (copyBehavior == MakeNonConfigurableIntoConfigurable) {
         // Mask off the JSPROP_PERMANENT bit.
         desc.attributesRef() &= ~JSPROP_PERMANENT;
     }
 
     JSAutoCompartment ac(cx, target);
+    cx->markId(id);
     RootedId wrappedId(cx, id);
     if (!cx->compartment()->wrap(cx, &desc))
         return false;
 
     return DefineProperty(cx, target, wrappedId, desc);
 }
 
 JS_FRIEND_API(bool)
@@ -1209,16 +1210,18 @@ static bool
 DeepCloneValue(JSContext* cx, Value* vp, NewObjectKind newKind)
 {
     if (vp->isObject()) {
         RootedObject obj(cx, &vp->toObject());
         obj = DeepCloneObjectLiteral(cx, obj, newKind);
         if (!obj)
             return false;
         vp->setObject(*obj);
+    } else {
+        cx->markAtomValue(*vp);
     }
     return true;
 }
 
 JSObject*
 js::DeepCloneObjectLiteral(JSContext* cx, HandleObject obj, NewObjectKind newKind)
 {
     /* NB: Keep this in sync with XDRObjectLiteral. */
@@ -1247,16 +1250,17 @@ js::DeepCloneObjectLiteral(JSContext* cx
                                            arrayKind);
     }
 
     Rooted<IdValueVector> properties(cx, IdValueVector(cx));
     if (!GetScriptPlainObjectProperties(cx, obj, &properties))
         return nullptr;
 
     for (size_t i = 0; i < properties.length(); i++) {
+        cx->markId(properties[i].get().id);
         if (!DeepCloneValue(cx, &properties[i].get().value, newKind))
             return nullptr;
     }
 
     if (obj->isSingleton())
         newKind = SingletonObject;
 
     return ObjectGroup::newPlainObject(cx, properties.begin(), properties.length(), newKind);
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -3260,16 +3260,21 @@ js::detail::CopyScript(JSContext* cx, Ha
 
     /* This assignment must occur before all the Rebase calls. */
     dst->data = data.forget();
     dst->dataSize_ = size;
     MOZ_ASSERT(bool(dst->data) == bool(src->data));
     if (dst->data)
         memcpy(dst->data, src->data, size);
 
+    if (cx->zone() != src->zoneFromAnyThread()) {
+        for (size_t i = 0; i < src->scriptData()->natoms(); i++)
+            cx->markAtom(src->scriptData()->atoms()[i]);
+    }
+
     /* Script filenames, bytecodes and atoms are runtime-wide. */
     dst->setScriptData(src->scriptData());
 
     dst->lineno_ = src->lineno();
     dst->mainOffset_ = src->mainOffset();
     dst->nfixed_ = src->nfixed();
     dst->nslots_ = src->nslots();
     dst->bodyScopeIndex_ = src->bodyScopeIndex_;
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -180,16 +180,17 @@ UNIFIED_SOURCES += [
     'ds/MemoryProtectionExceptionHandler.cpp',
     'frontend/BytecodeCompiler.cpp',
     'frontend/BytecodeEmitter.cpp',
     'frontend/FoldConstants.cpp',
     'frontend/NameFunctions.cpp',
     'frontend/ParseNode.cpp',
     'frontend/TokenStream.cpp',
     'gc/Allocator.cpp',
+    'gc/AtomMarking.cpp',
     'gc/Barrier.cpp',
     'gc/GCTrace.cpp',
     'gc/Iteration.cpp',
     'gc/Marking.cpp',
     'gc/Memory.cpp',
     'gc/MemoryProfiler.cpp',
     'gc/Nursery.cpp',
     'gc/RootMarking.cpp',
--- a/js/src/proxy/CrossCompartmentWrapper.cpp
+++ b/js/src/proxy/CrossCompartmentWrapper.cpp
@@ -22,64 +22,79 @@ using namespace js;
             AutoCompartment call(cx, wrappedObject(wrapper));   \
             ok = (pre) && (op);                                 \
         }                                                       \
         return ok && (post);                                    \
     JS_END_MACRO
 
 #define NOTHING (true)
 
+static bool
+MarkAtoms(JSContext* cx, jsid id)
+{
+    cx->markId(id);
+    return true;
+}
+
+static bool
+MarkAtoms(JSContext* cx, const AutoIdVector& ids)
+{
+    for (size_t i = 0; i < ids.length(); i++)
+        cx->markId(ids[i]);
+    return true;
+}
+
 bool
 CrossCompartmentWrapper::getPropertyDescriptor(JSContext* cx, HandleObject wrapper, HandleId id,
                                                MutableHandle<PropertyDescriptor> desc) const
 {
     PIERCE(cx, wrapper,
-           NOTHING,
+           MarkAtoms(cx, id),
            Wrapper::getPropertyDescriptor(cx, wrapper, id, desc),
            cx->compartment()->wrap(cx, desc));
 }
 
 bool
 CrossCompartmentWrapper::getOwnPropertyDescriptor(JSContext* cx, HandleObject wrapper, HandleId id,
                                                   MutableHandle<PropertyDescriptor> desc) const
 {
     PIERCE(cx, wrapper,
-           NOTHING,
+           MarkAtoms(cx, id),
            Wrapper::getOwnPropertyDescriptor(cx, wrapper, id, desc),
            cx->compartment()->wrap(cx, desc));
 }
 
 bool
 CrossCompartmentWrapper::defineProperty(JSContext* cx, HandleObject wrapper, HandleId id,
                                         Handle<PropertyDescriptor> desc,
                                         ObjectOpResult& result) const
 {
     Rooted<PropertyDescriptor> desc2(cx, desc);
     PIERCE(cx, wrapper,
-           cx->compartment()->wrap(cx, &desc2),
+           MarkAtoms(cx, id) && cx->compartment()->wrap(cx, &desc2),
            Wrapper::defineProperty(cx, wrapper, id, desc2, result),
            NOTHING);
 }
 
 bool
 CrossCompartmentWrapper::ownPropertyKeys(JSContext* cx, HandleObject wrapper,
                                          AutoIdVector& props) const
 {
     PIERCE(cx, wrapper,
            NOTHING,
            Wrapper::ownPropertyKeys(cx, wrapper, props),
-           NOTHING);
+           MarkAtoms(cx, props));
 }
 
 bool
 CrossCompartmentWrapper::delete_(JSContext* cx, HandleObject wrapper, HandleId id,
                                  ObjectOpResult& result) const
 {
     PIERCE(cx, wrapper,
-           NOTHING,
+           MarkAtoms(cx, id),
            Wrapper::delete_(cx, wrapper, id, result),
            NOTHING);
 }
 
 bool
 CrossCompartmentWrapper::getPrototype(JSContext* cx, HandleObject wrapper,
                                       MutableHandleObject protop) const
 {
@@ -157,26 +172,26 @@ CrossCompartmentWrapper::isExtensible(JS
            Wrapper::isExtensible(cx, wrapper, extensible),
            NOTHING);
 }
 
 bool
 CrossCompartmentWrapper::has(JSContext* cx, HandleObject wrapper, HandleId id, bool* bp) const
 {
     PIERCE(cx, wrapper,
-           NOTHING,
+           MarkAtoms(cx, id),
            Wrapper::has(cx, wrapper, id, bp),
            NOTHING);
 }
 
 bool
 CrossCompartmentWrapper::hasOwn(JSContext* cx, HandleObject wrapper, HandleId id, bool* bp) const
 {
     PIERCE(cx, wrapper,
-           NOTHING,
+           MarkAtoms(cx, id),
            Wrapper::hasOwn(cx, wrapper, id, bp),
            NOTHING);
 }
 
 static bool
 WrapReceiver(JSContext* cx, HandleObject wrapper, MutableHandleValue receiver)
 {
     // Usually the receiver is the wrapper and we can just unwrap it. If the
@@ -198,46 +213,47 @@ WrapReceiver(JSContext* cx, HandleObject
 
 bool
 CrossCompartmentWrapper::get(JSContext* cx, HandleObject wrapper, HandleValue receiver,
                              HandleId id, MutableHandleValue vp) const
 {
     RootedValue receiverCopy(cx, receiver);
     {
         AutoCompartment call(cx, wrappedObject(wrapper));
-        if (!WrapReceiver(cx, wrapper, &receiverCopy))
+        if (!MarkAtoms(cx, id) || !WrapReceiver(cx, wrapper, &receiverCopy))
             return false;
 
         if (!Wrapper::get(cx, wrapper, receiverCopy, id, vp))
             return false;
     }
     return cx->compartment()->wrap(cx, vp);
 }
 
 bool
 CrossCompartmentWrapper::set(JSContext* cx, HandleObject wrapper, HandleId id, HandleValue v,
                              HandleValue receiver, ObjectOpResult& result) const
 {
     RootedValue valCopy(cx, v);
     RootedValue receiverCopy(cx, receiver);
     PIERCE(cx, wrapper,
+           MarkAtoms(cx, id) &&
            cx->compartment()->wrap(cx, &valCopy) &&
            WrapReceiver(cx, wrapper, &receiverCopy),
            Wrapper::set(cx, wrapper, id, valCopy, receiverCopy, result),
            NOTHING);
 }
 
 bool
 CrossCompartmentWrapper::getOwnEnumerablePropertyKeys(JSContext* cx, HandleObject wrapper,
                                                       AutoIdVector& props) const
 {
     PIERCE(cx, wrapper,
            NOTHING,
            Wrapper::getOwnEnumerablePropertyKeys(cx, wrapper, props),
-           NOTHING);
+           MarkAtoms(cx, props));
 }
 
 /*
  * We can reify non-escaping iterator objects instead of having to wrap them. This
  * allows fast iteration over objects across a compartment boundary.
  */
 static bool
 CanReify(HandleObject obj)
--- a/js/src/threading/Mutex.cpp
+++ b/js/src/threading/Mutex.cpp
@@ -66,9 +66,20 @@ void
 js::Mutex::unlock()
 {
   auto& stack = heldMutexStack();
   MOZ_ASSERT(stack.back() == this);
   MutexImpl::unlock();
   stack.popBack();
 }
 
+bool
+js::Mutex::ownedByCurrentThread() const
+{
+  auto& stack = heldMutexStack();
+  for (size_t i = 0; i < stack.length(); i++) {
+    if (stack[i] == this)
+      return true;
+  }
+  return false;
+}
+
 #endif
--- a/js/src/threading/Mutex.h
+++ b/js/src/threading/Mutex.h
@@ -103,16 +103,17 @@ public:
   explicit Mutex(const MutexId& id)
    : id_(id)
   {
     MOZ_ASSERT(id_.order != 0);
   }
 
   void lock();
   void unlock();
+  bool ownedByCurrentThread() const;
 
 private:
   const MutexId id_;
 
   using MutexVector = mozilla::Vector<const Mutex*>;
   static MOZ_THREAD_LOCAL(MutexVector*) HeldMutexStack;
   static MutexVector& heldMutexStack();
 };
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2300,16 +2300,17 @@ Debugger::appendAllocationSite(JSContext
         return false;
 
     RootedAtom ctorName(cx);
     {
         AutoCompartment ac(cx, obj);
         if (!JSObject::constructorDisplayAtom(cx, obj, &ctorName))
             return false;
     }
+    cx->markAtom(ctorName);
 
     auto className = obj->getClass()->name;
     auto size = JS::ubi::Node(obj.get()).size(cx->runtime()->debuggerMallocSizeOf);
     auto inNursery = gc::IsInsideNursery(obj);
 
     if (!allocationsLog.emplaceBack(wrappedFrame, when, className, ctorName, size, inNursery)) {
         ReportOutOfMemory(cx);
         return false;
@@ -8068,16 +8069,17 @@ DebuggerGenericEval(JSContext* cx, const
     /* If evalWithBindings, create the inner environment. */
     if (bindings) {
         RootedPlainObject nenv(cx, NewObjectWithGivenProto<PlainObject>(cx, nullptr));
         if (!nenv)
             return false;
         RootedId id(cx);
         for (size_t i = 0; i < keys.length(); i++) {
             id = keys[i];
+            cx->markId(id);
             MutableHandleValue val = values[i];
             if (!cx->compartment()->wrap(cx, val) ||
                 !NativeDefineProperty(cx, nenv, id, val, nullptr, nullptr, 0))
             {
                 return false;
             }
         }
 
@@ -9020,17 +9022,17 @@ DebuggerObject::nameGetter(JSContext* cx
 {
     THIS_DEBUGOBJECT(cx, argc, vp, "get name", args, object)
 
     if (!object->isFunction()) {
         args.rval().setUndefined();
         return true;
     }
 
-    RootedString result(cx, object->name());
+    RootedString result(cx, object->name(cx));
     if (result)
         args.rval().setString(result);
     else
         args.rval().setUndefined();
     return true;
 }
 
 /* static */ bool
@@ -9038,17 +9040,17 @@ DebuggerObject::displayNameGetter(JSCont
 {
     THIS_DEBUGOBJECT(cx, argc, vp, "get displayName", args, object)
 
     if (!object->isFunction()) {
         args.rval().setUndefined();
         return true;
     }
 
-    RootedString result(cx, object->displayName());
+    RootedString result(cx, object->displayName(cx));
     if (result)
         args.rval().setString(result);
     else
         args.rval().setUndefined();
     return true;
 }
 
 /* static */ bool
@@ -10071,29 +10073,33 @@ DebuggerObject::getGlobal(JSContext* cx,
     RootedObject referent(cx, object->referent());
     Debugger* dbg = object->owner();
 
     RootedObject global(cx, &referent->global());
     return dbg->wrapDebuggeeObject(cx, global, result);
 }
 
 JSAtom*
-DebuggerObject::name() const
+DebuggerObject::name(JSContext* cx) const
 {
     MOZ_ASSERT(isFunction());
 
-    return referent()->as<JSFunction>().explicitName();
+    JSAtom* atom = referent()->as<JSFunction>().explicitName();
+    cx->markAtom(atom);
+    return atom;
 }
 
 JSAtom*
-DebuggerObject::displayName() const
+DebuggerObject::displayName(JSContext* cx) const
 {
     MOZ_ASSERT(isFunction());
 
-    return referent()->as<JSFunction>().displayAtom();
+    JSAtom* atom = referent()->as<JSFunction>().displayAtom();
+    cx->markAtom(atom);
+    return atom;
 }
 
 JS::PromiseState
 DebuggerObject::promiseState() const
 {
     return promise()->state();
 }
 
@@ -10127,17 +10133,19 @@ DebuggerObject::getParameterNames(JSCont
             return false;
 
         MOZ_ASSERT(referent->nargs() == script->numArgs());
 
         if (referent->nargs() > 0) {
             PositionalFormalParameterIter fi(script);
             for (size_t i = 0; i < referent->nargs(); i++, fi++) {
                 MOZ_ASSERT(fi.argumentSlot() == i);
-                result[i].set(fi.name());
+                JSAtom* atom = fi.name();
+                cx->markAtom(atom);
+                result[i].set(atom);
             }
         }
     } else {
         for (size_t i = 0; i < referent->nargs(); i++)
             result[i].set(nullptr);
     }
 
     return true;
@@ -10389,16 +10397,19 @@ DebuggerObject::getOwnPropertyNames(JSCo
         Maybe<AutoCompartment> ac;
         ac.emplace(cx, referent);
 
         ErrorCopier ec(ac);
         if (!GetPropertyKeys(cx, referent, JSITER_OWNONLY | JSITER_HIDDEN, &ids))
             return false;
     }
 
+    for (size_t i = 0; i < ids.length(); i++)
+        cx->markId(ids[i]);
+
     return result.append(ids.begin(), ids.end());
 }
 
 /* static */ bool
 DebuggerObject::getOwnPropertySymbols(JSContext* cx, HandleDebuggerObject object,
                                       MutableHandle<IdVector> result)
 {
     RootedObject referent(cx, object->referent());
@@ -10410,30 +10421,34 @@ DebuggerObject::getOwnPropertySymbols(JS
 
         ErrorCopier ec(ac);
         if (!GetPropertyKeys(cx, referent,
                              JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS | JSITER_SYMBOLSONLY,
                              &ids))
             return false;
     }
 
+    for (size_t i = 0; i < ids.length(); i++)
+        cx->markId(ids[i]);
+
     return result.append(ids.begin(), ids.end());
 }
 
 /* static */ bool
 DebuggerObject::getOwnPropertyDescriptor(JSContext* cx, HandleDebuggerObject object,
                                          HandleId id, MutableHandle<PropertyDescriptor> desc)
 {
     RootedObject referent(cx, object->referent());
     Debugger* dbg = object->owner();
 
     /* Bug: This can cause the debuggee to run! */
     {
         Maybe<AutoCompartment> ac;
         ac.emplace(cx, referent);
+        cx->markId(id);
 
         ErrorCopier ec(ac);
         if (!GetOwnPropertyDescriptor(cx, referent, id, desc))
             return false;
     }
 
     if (desc.object()) {
         /* Rewrap the debuggee values in desc for the debugger. */
@@ -10507,16 +10522,17 @@ DebuggerObject::defineProperty(JSContext
     if (!dbg->unwrapPropertyDescriptor(cx, referent, &desc))
         return false;
     JS_TRY_OR_RETURN_FALSE(cx, CheckPropertyDescriptorAccessors(cx, desc));
 
     Maybe<AutoCompartment> ac;
     ac.emplace(cx, referent);
     if (!cx->compartment()->wrap(cx, &desc))
         return false;
+    cx->markId(id);
 
     ErrorCopier ec(ac);
     if (!DefineProperty(cx, referent, id, desc))
         return false;
 
     return true;
 }
 
@@ -10537,16 +10553,17 @@ DebuggerObject::defineProperties(JSConte
         JS_TRY_OR_RETURN_FALSE(cx, CheckPropertyDescriptorAccessors(cx, descs[i]));
     }
 
     Maybe<AutoCompartment> ac;
     ac.emplace(cx, referent);
     for (size_t i = 0; i < descs.length(); i++) {
         if (!cx->compartment()->wrap(cx, descs[i]))
             return false;
+        cx->markId(ids[i]);
     }
 
     ErrorCopier ec(ac);
     for (size_t i = 0; i < descs.length(); i++) {
         if (!DefineProperty(cx, referent, ids[i], descs[i]))
             return false;
     }
 
@@ -10557,16 +10574,18 @@ DebuggerObject::defineProperties(JSConte
 DebuggerObject::deleteProperty(JSContext* cx, HandleDebuggerObject object, HandleId id,
                                ObjectOpResult& result)
 {
     RootedObject referent(cx, object->referent());
 
     Maybe<AutoCompartment> ac;
     ac.emplace(cx, referent);
 
+    cx->markId(id);
+
     ErrorCopier ec(ac);
     return DeleteProperty(cx, referent, id, result);
 }
 
 /* static */ bool
 DebuggerObject::call(JSContext* cx, HandleDebuggerObject object, HandleValue thisv_,
                      Handle<ValueVector> args, MutableHandleValue result)
 {
@@ -11267,16 +11286,17 @@ DebuggerEnvironment::getNames(JSContext*
         ErrorCopier ec(ac);
         if (!GetPropertyKeys(cx, referent, JSITER_HIDDEN, &ids))
             return false;
     }
 
     for (size_t i = 0; i < ids.length(); ++i) {
         jsid id = ids[i];
         if (JSID_IS_ATOM(id) && IsIdentifier(JSID_TO_ATOM(id))) {
+            cx->markId(id);
             if (!result.append(id))
                 return false;
         }
     }
 
     return true;
 }
 
@@ -11288,16 +11308,18 @@ DebuggerEnvironment::find(JSContext* cx,
 
     Rooted<Env*> env(cx, environment->referent());
     Debugger* dbg = environment->owner();
 
     {
         Maybe<AutoCompartment> ac;
         ac.emplace(cx, env);
 
+        cx->markId(id);
+
         /* This can trigger resolve hooks. */
         ErrorCopier ec(ac);
         for (; env; env = env->enclosingEnvironment()) {
             bool found;
             if (!HasProperty(cx, env, id, &found))
                 return false;
             if (found)
                 break;
@@ -11320,16 +11342,18 @@ DebuggerEnvironment::getVariable(JSConte
 
     Rooted<Env*> referent(cx, environment->referent());
     Debugger* dbg = environment->owner();
 
     {
         Maybe<AutoCompartment> ac;
         ac.emplace(cx, referent);
 
+        cx->markId(id);
+
         /* This can trigger getters. */
         ErrorCopier ec(ac);
 
         bool found;
         if (!HasProperty(cx, referent, id, &found))
             return false;
         if (!found) {
             result.setUndefined();
@@ -11376,16 +11400,17 @@ DebuggerEnvironment::setVariable(JSConte
     if (!dbg->unwrapDebuggeeValue(cx, &value))
         return false;
 
     {
         Maybe<AutoCompartment> ac;
         ac.emplace(cx, referent);
         if (!cx->compartment()->wrap(cx, &value))
             return false;
+        cx->markId(id);
 
         /* This can trigger setters. */
         ErrorCopier ec(ac);
 
         /* Make sure the environment actually has the specified binding. */
         bool found;
         if (!HasProperty(cx, referent, id, &found))
             return false;
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -1440,18 +1440,18 @@ class DebuggerObject : public NativeObje
     bool isCallable() const;
     bool isFunction() const;
     bool isDebuggeeFunction() const;
     bool isBoundFunction() const;
     bool isArrowFunction() const;
     bool isGlobal() const;
     bool isScriptedProxy() const;
     bool isPromise() const;
-    JSAtom* name() const;
-    JSAtom* displayName() const;
+    JSAtom* name(JSContext* cx) const;
+    JSAtom* displayName(JSContext* cx) const;
     JS::PromiseState promiseState() const;
     double promiseLifetime() const;
     double promiseTimeToResolution() const;
 
   private:
     enum {
         OWNER_SLOT
     };
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -318,17 +318,18 @@ StatsZoneCallback(JSRuntime* rt, void* d
         MOZ_CRASH("oom");
     rtStats->initExtraZoneStats(zone, &zStats);
     rtStats->currZoneStats = &zStats;
 
     zone->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                  &zStats.typePool,
                                  &zStats.baselineStubsOptimized,
                                  &zStats.uniqueIdMap,
-                                 &zStats.shapeTables);
+                                 &zStats.shapeTables,
+                                 &rtStats->runtime.atomsMarkBitmaps);
 }
 
 static void
 StatsCompartmentCallback(JSContext* cx, void* data, JSCompartment* compartment)
 {
     // Append a new CompartmentStats to the vector.
     RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;
 
--- a/js/src/vm/NativeObject-inl.h
+++ b/js/src/vm/NativeObject-inl.h
@@ -13,16 +13,18 @@
 
 #include "builtin/TypedObject.h"
 #include "proxy/Proxy.h"
 #include "vm/ProxyObject.h"
 #include "vm/TypedArrayObject.h"
 
 #include "jsobjinlines.h"
 
+#include "gc/Heap-inl.h"
+
 namespace js {
 
 inline uint8_t*
 NativeObject::fixedData(size_t nslots) const
 {
     MOZ_ASSERT(ClassCanHaveFixedData(getClass()));
     MOZ_ASSERT(nslots == numFixedSlots() + (hasPrivate() ? 1 : 0));
     return reinterpret_cast<uint8_t*>(&fixedSlots()[nslots]);
@@ -291,16 +293,24 @@ NativeObject::setSlotWithType(ExclusiveC
 inline void
 NativeObject::updateShapeAfterMovingGC()
 {
     Shape* shape = shape_;
     if (IsForwarded(shape))
         shape_.unsafeSet(Forwarded(shape));
 }
 
+inline bool
+NativeObject::isInWholeCellBuffer() const
+{
+    const gc::TenuredCell* cell = &asTenured();
+    gc::ArenaCellSet* cells = cell->arena()->bufferedCells();
+    return cells && cells->hasCell(cell);
+}
+
 /* Make an object with pregenerated shape from a NEWOBJECT bytecode. */
 static inline PlainObject*
 CopyInitializerObject(JSContext* cx, HandlePlainObject baseobj, NewObjectKind newKind = GenericObject)
 {
     MOZ_ASSERT(!baseobj->inDictionaryMode());
 
     gc::AllocKind allocKind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
     allocKind = gc::GetBackgroundAllocKind(allocKind);
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -470,21 +470,17 @@ class NativeObject : public ShapedObject
     // Newly-created TypedArrays that map a SharedArrayBuffer are
     // marked as shared by giving them an ObjectElements that has the
     // ObjectElements::SHARED_MEMORY flag set.
     void setIsSharedMemory() {
         MOZ_ASSERT(elements_ == emptyObjectElements);
         elements_ = emptyObjectElementsShared;
     }
 
-    bool isInWholeCellBuffer() const {
-        const gc::TenuredCell* cell = &asTenured();
-        gc::ArenaCellSet* cells = cell->arena()->bufferedCells;
-        return cells && cells->hasCell(cell);
-    }
+    inline bool isInWholeCellBuffer() const;
 
   protected:
 #ifdef DEBUG
     void checkShapeConsistency();
 #else
     void checkShapeConsistency() { }
 #endif
 
@@ -837,26 +833,32 @@ class NativeObject : public ShapedObject
         return *getSlotAddress(slot);
     }
 
     const HeapSlot& getSlotRef(uint32_t slot) const {
         MOZ_ASSERT(slotInRange(slot));
         return *getSlotAddress(slot);
     }
 
+    // Check requirements on values stored to this object.
+    inline void checkStoredValue(const Value& v) {
+        MOZ_ASSERT(IsObjectValueInCompartment(v, compartment()));
+        MOZ_ASSERT(AtomIsMarked(zoneFromAnyThread(), v));
+    }
+
     void setSlot(uint32_t slot, const Value& value) {
         MOZ_ASSERT(slotInRange(slot));
-        MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
+        checkStoredValue(value);
         getSlotRef(slot).set(this, HeapSlot::Slot, slot, value);
     }
 
     void initSlot(uint32_t slot, const Value& value) {
         MOZ_ASSERT(getSlot(slot).isUndefined());
         MOZ_ASSERT(slotInRange(slot));
-        MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
+        checkStoredValue(value);
         initSlotUnchecked(slot, value);
     }
 
     void initSlotUnchecked(uint32_t slot, const Value& value) {
         getSlotAddressUnchecked(slot)->init(this, HeapSlot::Slot, slot, value);
     }
 
     // MAX_FIXED_SLOTS is the biggest number of fixed slots our GC
@@ -924,23 +926,23 @@ class NativeObject : public ShapedObject
 
     const Value& getFixedSlot(uint32_t slot) const {
         MOZ_ASSERT(slot < numFixedSlots());
         return fixedSlots()[slot];
     }
 
     void setFixedSlot(uint32_t slot, const Value& value) {
         MOZ_ASSERT(slot < numFixedSlots());
-        MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
+        checkStoredValue(value);
         fixedSlots()[slot].set(this, HeapSlot::Slot, slot, value);
     }
 
     void initFixedSlot(uint32_t slot, const Value& value) {
         MOZ_ASSERT(slot < numFixedSlots());
-        MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
+        checkStoredValue(value);
         fixedSlots()[slot].init(this, HeapSlot::Slot, slot, value);
     }
 
     /*
      * Get the number of dynamic slots to allocate to cover the properties in
      * an object with the given number of fixed slots and slot span. The slot
      * capacity is not stored explicitly, and the allocated size of the slot
      * array is kept in sync with this count.
@@ -1028,16 +1030,17 @@ class NativeObject : public ShapedObject
     }
 
     // Use this function with care.  This is done to allow sparsifying frozen
     // objects, but should only be called in a few places, and should be
     // audited carefully!
     void setDenseElementUnchecked(uint32_t index, const Value& val) {
         MOZ_ASSERT(index < getDenseInitializedLength());
         MOZ_ASSERT(!denseElementsAreCopyOnWrite());
+        checkStoredValue(val);
         elements_[index].set(this, HeapSlot::Element, index, val);
     }
 
   public:
     void setDenseInitializedLength(uint32_t length) {
         MOZ_ASSERT(!denseElementsAreFrozen());
         setDenseInitializedLengthUnchecked(length);
     }
@@ -1049,16 +1052,17 @@ class NativeObject : public ShapedObject
         MOZ_ASSERT(!denseElementsAreFrozen());
         setDenseElementUnchecked(index, val);
     }
 
     void initDenseElement(uint32_t index, const Value& val) {
         MOZ_ASSERT(index < getDenseInitializedLength());
         MOZ_ASSERT(!denseElementsAreCopyOnWrite());
         MOZ_ASSERT(!denseElementsAreFrozen());
+        checkStoredValue(val);
         elements_[index].init(this, HeapSlot::Element, index, val);
     }
 
     void setDenseElementMaybeConvertDouble(uint32_t index, const Value& val) {
         if (val.isInt32() && shouldConvertDoubleElements())
             setDenseElement(index, DoubleValue(val.toInt32()));
         else
             setDenseElement(index, val);
@@ -1073,29 +1077,37 @@ class NativeObject : public ShapedObject
                                                         HandleNativeObject obj, uint32_t index);
 
     inline Value getDenseOrTypedArrayElement(uint32_t idx);
 
     void copyDenseElements(uint32_t dstStart, const Value* src, uint32_t count) {
         MOZ_ASSERT(dstStart + count <= getDenseCapacity());
         MOZ_ASSERT(!denseElementsAreCopyOnWrite());
         MOZ_ASSERT(!denseElementsAreFrozen());
+#ifdef DEBUG
+        for (uint32_t i = 0; i < count; ++i)
+            checkStoredValue(src[i]);
+#endif
         if (JS::shadow::Zone::asShadowZone(zone())->needsIncrementalBarrier()) {
             for (uint32_t i = 0; i < count; ++i)
                 elements_[dstStart + i].set(this, HeapSlot::Element, dstStart + i, src[i]);
         } else {
             memcpy(&elements_[dstStart], src, count * sizeof(HeapSlot));
             elementsRangeWriteBarrierPost(dstStart, count);
         }
     }
 
     void initDenseElements(uint32_t dstStart, const Value* src, uint32_t count) {
         MOZ_ASSERT(dstStart + count <= getDenseCapacity());
         MOZ_ASSERT(!denseElementsAreCopyOnWrite());
         MOZ_ASSERT(!denseElementsAreFrozen());
+#ifdef DEBUG
+        for (uint32_t i = 0; i < count; ++i)
+            checkStoredValue(src[i]);
+#endif
         memcpy(&elements_[dstStart], src, count * sizeof(HeapSlot));
         elementsRangeWriteBarrierPost(dstStart, count);
     }
 
     void moveDenseElements(uint32_t dstStart, uint32_t srcStart, uint32_t count) {
         MOZ_ASSERT(dstStart + count <= getDenseCapacity());
         MOZ_ASSERT(srcStart + count <= getDenseInitializedLength());
         MOZ_ASSERT(!denseElementsAreCopyOnWrite());
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -1531,16 +1531,18 @@ template bool
 js::XDRScriptRegExpObject(XDRState<XDR_DECODE>* xdr, MutableHandle<RegExpObject*> objp);
 
 JSObject*
 js::CloneScriptRegExpObject(JSContext* cx, RegExpObject& reobj)
 {
     /* NB: Keep this in sync with XDRScriptRegExpObject. */
 
     RootedAtom source(cx, reobj.getSource());
+    cx->markAtom(source);
+
     return RegExpObject::create(cx, source, reobj.getFlags(), nullptr, cx->tempLifoAlloc());
 }
 
 JS_FRIEND_API(bool)
 js::RegExpToSharedNonInline(JSContext* cx, HandleObject obj, js::RegExpGuard* g)
 {
     return RegExpToShared(cx, obj, g);
 }
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -689,16 +689,23 @@ struct JSRuntime : public JS::shadow::Ru
   public:
     void setUsedByExclusiveThread(JS::Zone* zone);
     void clearUsedByExclusiveThread(JS::Zone* zone);
 
     bool exclusiveThreadsPresent() const {
         return numExclusiveThreads > 0;
     }
 
+#ifdef DEBUG
+    bool currentThreadHasExclusiveAccess() const {
+        return (!exclusiveThreadsPresent() && mainThreadHasExclusiveAccess) ||
+            exclusiveAccessLock.ownedByCurrentThread();
+    }
+#endif
+
     // How many compartments there are across all zones. This number includes
     // ExclusiveContext compartments, so it isn't necessarily equal to the
     // number of compartments visited by CompartmentsIter.
     size_t              numCompartments;
 
     /* Locale-specific callbacks for string conversion. */
     const JSLocaleCallbacks* localeCallbacks;
 
@@ -1075,32 +1082,41 @@ struct JSRuntime : public JS::shadow::Ru
     void finishAtoms();
     bool atomsAreFinished() const { return !atoms_; }
 
     void sweepAtoms();
 
     js::AtomSet& atoms(js::AutoLockForExclusiveAccess& lock) {
         return *atoms_;
     }
+    js::AtomSet& unsafeAtoms() {
+        return *atoms_;
+    }
     JSCompartment* atomsCompartment(js::AutoLockForExclusiveAccess& lock) {
         return atomsCompartment_;
     }
+    JSCompartment* unsafeAtomsCompartment() {
+        return atomsCompartment_;
+    }
 
     bool isAtomsCompartment(JSCompartment* comp) {
         return comp == atomsCompartment_;
     }
 
     // The atoms compartment is the only one in its zone.
     inline bool isAtomsZone(const JS::Zone* zone) const;
 
     bool activeGCInAtomsZone();
 
     js::SymbolRegistry& symbolRegistry(js::AutoLockForExclusiveAccess& lock) {
         return symbolRegistry_;
     }
+    js::SymbolRegistry& unsafeSymbolRegistry() {
+        return symbolRegistry_;
+    }
 
     // Permanent atoms are fixed during initialization of the runtime and are
     // not modified or collected until the runtime is destroyed. These may be
     // shared with another, longer living runtime through |parentRuntime| and
     // can be freely accessed with no locking necessary.
 
     // Permanent atoms pre-allocated for general use.
     js::StaticStrings* staticStrings;
--- a/js/src/vm/SavedFrame.h
+++ b/js/src/vm/SavedFrame.h
@@ -154,17 +154,17 @@ class SavedFrame : public NativeObject {
       public:
         inline Lookup& get() { return lookup; }
         inline Lookup* operator->() { return &lookup; }
     };
 
   private:
     static SavedFrame* create(JSContext* cx);
     static MOZ_MUST_USE bool finishSavedFrameInit(JSContext* cx, HandleObject ctor, HandleObject proto);
-    void initFromLookup(HandleLookup lookup);
+    void initFromLookup(JSContext* cx, HandleLookup lookup);
     void initSource(JSAtom* source);
     void initLine(uint32_t line);
     void initColumn(uint32_t column);
     void initFunctionDisplayName(JSAtom* maybeName);
     void initAsyncCause(JSAtom* maybeCause);
     void initParent(SavedFrame* maybeParent);
     void initPrincipalsAlreadyHeld(JSPrincipals* principals);
     void initPrincipals(JSPrincipals* principals);
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -484,18 +484,29 @@ SavedFrame::initAsyncCause(JSAtom* maybe
 
 void
 SavedFrame::initParent(SavedFrame* maybeParent)
 {
     initReservedSlot(JSSLOT_PARENT, ObjectOrNullValue(maybeParent));
 }
 
 void
-SavedFrame::initFromLookup(SavedFrame::HandleLookup lookup)
+SavedFrame::initFromLookup(JSContext* cx, SavedFrame::HandleLookup lookup)
 {
+    // Make sure any atoms used in the lookup are marked in the current zone.
+    // Normally we would try to keep these mark bits up to date around the
+    // points where the context moves between compartments, but Lookups live on
+    // the stack (where the atoms are kept alive regardless) and this is a
+    // more convenient pinchpoint.
+    cx->markAtom(lookup->source);
+    if (lookup->functionDisplayName)
+        cx->markAtom(lookup->functionDisplayName);
+    if (lookup->asyncCause)
+        cx->markAtom(lookup->asyncCause);
+
     initSource(lookup->source);
     initLine(lookup->line);
     initColumn(lookup->column);
     initFunctionDisplayName(lookup->functionDisplayName);
     initAsyncCause(lookup->asyncCause);
     initParent(lookup->parent);
     initPrincipals(lookup->principals);
 }
@@ -724,24 +735,27 @@ UnwrapSavedFrame(JSContext* cx, HandleOb
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameSource(JSContext* cx, HandleObject savedFrame, MutableHandleString sourcep,
                     SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     MOZ_RELEASE_ASSERT(cx->compartment());
 
-    AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
-    bool skippedAsync;
-    js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
-    if (!frame) {
-        sourcep.set(cx->runtime()->emptyString);
-        return SavedFrameResult::AccessDenied;
+    {
+        AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
+        bool skippedAsync;
+        js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
+        if (!frame) {
+            sourcep.set(cx->runtime()->emptyString);
+            return SavedFrameResult::AccessDenied;
+        }
+        sourcep.set(frame->getSource());
     }
-    sourcep.set(frame->getSource());
+    cx->markAtom(sourcep);
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameLine(JSContext* cx, HandleObject savedFrame, uint32_t* linep,
                   SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
     AssertHeapIsIdle(cx);
@@ -783,51 +797,59 @@ GetSavedFrameColumn(JSContext* cx, Handl
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameFunctionDisplayName(JSContext* cx, HandleObject savedFrame, MutableHandleString namep,
                                  SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     MOZ_RELEASE_ASSERT(cx->compartment());
 
-    AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
-    bool skippedAsync;
-    js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
-    if (!frame) {
-        namep.set(nullptr);
-        return SavedFrameResult::AccessDenied;
+    {
+        AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
+        bool skippedAsync;
+        js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
+        if (!frame) {
+            namep.set(nullptr);
+            return SavedFrameResult::AccessDenied;
+        }
+        namep.set(frame->getFunctionDisplayName());
     }
-    namep.set(frame->getFunctionDisplayName());
+    if (namep)
+        cx->markAtom(namep);
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameAsyncCause(JSContext* cx, HandleObject savedFrame, MutableHandleString asyncCausep,
                         SavedFrameSelfHosted unused_ /* = SavedFrameSelfHosted::Include */)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     MOZ_RELEASE_ASSERT(cx->compartment());
 
-    AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
-    bool skippedAsync;
-    // This function is always called with self-hosted frames excluded by
-    // GetValueIfNotCached in dom/bindings/Exceptions.cpp. However, we want
-    // to include them because our Promise implementation causes us to have
-    // the async cause on a self-hosted frame. So we just ignore the
-    // parameter and always include self-hosted frames.
-    js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, SavedFrameSelfHosted::Include,
-                                                    skippedAsync));
-    if (!frame) {
-        asyncCausep.set(nullptr);
-        return SavedFrameResult::AccessDenied;
+    {
+        AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
+        bool skippedAsync;
+        // This function is always called with self-hosted frames excluded by
+        // GetValueIfNotCached in dom/bindings/Exceptions.cpp. However, we want
+        // to include them because our Promise implementation causes us to have
+        // the async cause on a self-hosted frame. So we just ignore the
+        // parameter and always include self-hosted frames.
+        js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, SavedFrameSelfHosted::Include,
+                                                        skippedAsync));
+        if (!frame) {
+            asyncCausep.set(nullptr);
+            return SavedFrameResult::AccessDenied;
+        }
+        asyncCausep.set(frame->getAsyncCause());
+        if (!asyncCausep && skippedAsync)
+            asyncCausep.set(cx->names().Async);
     }
-    asyncCausep.set(frame->getAsyncCause());
-    if (!asyncCausep && skippedAsync)
-        asyncCausep.set(cx->names().Async);
+    if (asyncCausep)
+        cx->markAtom(asyncCausep);
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameAsyncParent(JSContext* cx, HandleObject savedFrame, MutableHandleObject asyncParentp,
                          SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
     AssertHeapIsIdle(cx);
@@ -1487,17 +1509,17 @@ SavedStacks::getOrCreateSavedFrame(JSCon
 }
 
 SavedFrame*
 SavedStacks::createFrameFromLookup(JSContext* cx, SavedFrame::HandleLookup lookup)
 {
     RootedSavedFrame frame(cx, SavedFrame::create(cx));
     if (!frame)
         return nullptr;
-    frame->initFromLookup(lookup);
+    frame->initFromLookup(cx, lookup);
 
     if (!FreezeObject(cx, frame))
         return nullptr;
 
     return frame;
 }
 
 bool
--- a/js/src/vm/Scope.cpp
+++ b/js/src/vm/Scope.cpp
@@ -123,29 +123,40 @@ CreateEnvironmentShape(ExclusiveContext*
         return nullptr;
 
     RootedAtom name(cx);
     StackBaseShape stackBase(cx, cls, baseShapeFlags);
     for (; bi; bi++) {
         BindingLocation loc = bi.location();
         if (loc.kind() == BindingLocation::Kind::Environment) {
             name = bi.name();
+            cx->markAtom(name);
             shape = NextEnvironmentShape(cx, name, bi.kind(), loc.slot(), stackBase, shape);
             if (!shape)
                 return nullptr;
         }
     }
 
     return shape;
 }
 
 template <typename ConcreteScope>
 static UniquePtr<typename ConcreteScope::Data>
 CopyScopeData(ExclusiveContext* cx, Handle<typename ConcreteScope::Data*> data)
 {
+    // Make sure the binding names are marked in the context's zone, if we are
+    // copying data from another zone.
+    BindingName* names = nullptr;
+    uint32_t length = 0;
+    ConcreteScope::getDataNamesAndLength(data, &names, &length);
+    for (size_t i = 0; i < length; i++) {
+        if (JSAtom* name = names[i].name())
+            cx->markAtom(name);
+    }
+
     size_t dataSize = ConcreteScope::sizeOfData(data->length);
     uint8_t* copyBytes = cx->zone()->pod_malloc<uint8_t>(dataSize);
     if (!copyBytes) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
 
     mozilla::PodCopy<uint8_t>(copyBytes, reinterpret_cast<uint8_t*>(data.get()), dataSize);
--- a/js/src/vm/Scope.h
+++ b/js/src/vm/Scope.h
@@ -369,16 +369,21 @@ class LexicalScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static LexicalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
                                 uint32_t firstFrameSlot, HandleScope enclosing);
 
     template <XDRMode mode>
     static bool XDR(XDRState<mode>* xdr, ScopeKind kind, HandleScope enclosing,
                     MutableHandleScope scope);
 
   protected:
@@ -481,16 +486,21 @@ class FunctionScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static FunctionScope* create(ExclusiveContext* cx, Handle<Data*> data,
                                  bool hasParameterExprs, bool needsEnvironment,
                                  HandleFunction fun, HandleScope enclosing);
 
     static FunctionScope* clone(JSContext* cx, Handle<FunctionScope*> scope, HandleFunction fun,
                                 HandleScope enclosing);
 
     template <XDRMode mode>
@@ -574,16 +584,21 @@ class VarScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static VarScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
                             uint32_t firstFrameSlot, bool needsEnvironment,
                             HandleScope enclosing);
 
     template <XDRMode mode>
     static bool XDR(XDRState<mode>* xdr, ScopeKind kind, HandleScope enclosing,
                     MutableHandleScope scope);
 
@@ -662,16 +677,21 @@ class GlobalScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static GlobalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data);
 
     static GlobalScope* createEmpty(ExclusiveContext* cx, ScopeKind kind) {
         return create(cx, kind, nullptr);
     }
 
     static GlobalScope* clone(JSContext* cx, Handle<GlobalScope*> scope, ScopeKind kind);
 
@@ -761,16 +781,21 @@ class EvalScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static EvalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
                              HandleScope enclosing);
 
     template <XDRMode mode>
     static bool XDR(XDRState<mode>* xdr, ScopeKind kind, HandleScope enclosing,
                     MutableHandleScope scope);
 
   private:
@@ -862,16 +887,21 @@ class ModuleScope : public Scope
 
         void trace(JSTracer* trc);
     };
 
     static size_t sizeOfData(uint32_t length) {
         return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
     }
 
+    static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
+        *names = data->names;
+        *length = data->length;
+    }
+
     static ModuleScope* create(ExclusiveContext* cx, Handle<Data*> data,
                                Handle<ModuleObject*> module, HandleScope enclosing);
 
   private:
     static UniquePtr<Data> copyData(ExclusiveContext* cx, Handle<Data*> data,
                                     MutableHandleShape envShape);
 
     Data& data() {
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -10,16 +10,17 @@
 #include "mozilla/Attributes.h"
 #include "mozilla/GuardObjects.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/MemoryReporting.h"
 #include "mozilla/TemplateLib.h"
 
 #include "jsapi.h"
+#include "jsatom.h"
 #include "jsfriendapi.h"
 #include "jspropertytree.h"
 #include "jstypes.h"
 #include "NamespaceImports.h"
 
 #include "gc/Barrier.h"
 #include "gc/Heap.h"
 #include "gc/Marking.h"
@@ -1401,16 +1402,18 @@ Shape::Shape(const StackShape& other, ui
     parent(nullptr)
 {
 #ifdef DEBUG
     gc::AllocKind allocKind = getAllocKind();
     MOZ_ASSERT_IF(other.isAccessorShape(), allocKind == gc::AllocKind::ACCESSOR_SHAPE);
     MOZ_ASSERT_IF(allocKind == gc::AllocKind::SHAPE, !other.isAccessorShape());
 #endif
 
+    MOZ_ASSERT_IF(!isEmptyShape(), AtomIsMarked(zone(), propid()));
+
     MOZ_ASSERT_IF(attrs & (JSPROP_GETTER | JSPROP_SETTER), attrs & JSPROP_SHARED);
     kids.setNull();
 }
 
 // This class is used to add a post barrier on the AccessorShape's getter/setter
 // objects. It updates the pointers and the shape's entry in the parent's
 // KidsHash table.
 class ShapeGetterSetterRef : public gc::BufferableRef
--- a/js/src/vm/StructuredClone.cpp
+++ b/js/src/vm/StructuredClone.cpp
@@ -2245,16 +2245,17 @@ JSStructuredCloneReader::readSavedFrame(
     if (!startRead(&name) || !(name.isString() || name.isNull()))
         return nullptr;
     JSAtom* atomName = nullptr;
     if (name.isString()) {
         atomName = AtomizeString(context(), name.toString());
         if (!atomName)
             return nullptr;
     }
+
     savedFrame->initFunctionDisplayName(atomName);
 
     RootedValue cause(context());
     if (!startRead(&cause) || !(cause.isString() || cause.isNull()))
         return nullptr;
     JSAtom* atomCause = nullptr;
     if (cause.isString()) {
         atomCause = AtomizeString(context(), cause.toString());
--- a/js/src/vm/Symbol.cpp
+++ b/js/src/vm/Symbol.cpp
@@ -42,46 +42,57 @@ Symbol::new_(ExclusiveContext* cx, JS::S
         atom = AtomizeString(cx, description);
         if (!atom)
             return nullptr;
     }
 
     // Lock to allocate. If symbol allocation becomes a bottleneck, this can
     // probably be replaced with an assertion that we're on the main thread.
     AutoLockForExclusiveAccess lock(cx);
-    AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
-    return newInternal(cx, code, cx->compartment()->randomHashCode(), atom, lock);
+    Symbol* sym;
+    {
+        AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
+        sym = newInternal(cx, code, cx->compartment()->randomHashCode(), atom, lock);
+    }
+    cx->markAtom(sym);
+    return sym;
 }
 
 Symbol*
 Symbol::for_(js::ExclusiveContext* cx, HandleString description)
 {
     JSAtom* atom = AtomizeString(cx, description);
     if (!atom)
         return nullptr;
 
     AutoLockForExclusiveAccess lock(cx);
 
     SymbolRegistry& registry = cx->symbolRegistry(lock);
     SymbolRegistry::AddPtr p = registry.lookupForAdd(atom);
-    if (p)
+    if (p) {
+        cx->markAtom(*p);
         return *p;
-
-    AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
-    Symbol* sym = newInternal(cx, SymbolCode::InSymbolRegistry, atom->hash(), atom, lock);
-    if (!sym)
-        return nullptr;
+    }
 
-    // p is still valid here because we have held the lock since the
-    // lookupForAdd call, and newInternal can't GC.
-    if (!registry.add(p, sym)) {
-        // SystemAllocPolicy does not report OOM.
-        ReportOutOfMemory(cx);
-        return nullptr;
+    Symbol* sym;
+    {
+        AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
+        sym = newInternal(cx, SymbolCode::InSymbolRegistry, atom->hash(), atom, lock);
+        if (!sym)
+            return nullptr;
+
+        // p is still valid here because we have held the lock since the
+        // lookupForAdd call, and newInternal can't GC.
+        if (!registry.add(p, sym)) {
+            // SystemAllocPolicy does not report OOM.
+            ReportOutOfMemory(cx);
+            return nullptr;
+        }
     }
+    cx->markAtom(sym);
     return sym;
 }
 
 #ifdef DEBUG
 void
 Symbol::dump(FILE* fp)
 {
     if (isWellKnownSymbol()) {
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -4461,26 +4461,28 @@ TypeScript::destroy()
     js_free(this);
 }
 
 void
 Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                              size_t* typePool,
                              size_t* baselineStubsOptimized,
                              size_t* uniqueIdMap,
-                             size_t* shapeTables)
+                             size_t* shapeTables,
+                             size_t* atomsMarkBitmaps)
 {
     *typePool += types.typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
     if (jitZone()) {
         *baselineStubsOptimized +=
             jitZone()->optimizedStubSpace()->sizeOfExcludingThis(mallocSizeOf);
     }
     *uniqueIdMap += uniqueIds_.sizeOfExcludingThis(mallocSizeOf);
     *shapeTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                   + initialShapes.sizeOfExcludingThis(mallocSizeOf);
+    *atomsMarkBitmaps += markedAtoms.sizeOfExcludingThis(mallocSizeOf);
 }
 
 TypeZone::TypeZone(Zone* zone)
   : zone_(zone),
     typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     generation(0),
     compilerOutputs(nullptr),
     sweepTypeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
--- a/js/xpconnect/loader/mozJSComponentLoader.cpp
+++ b/js/xpconnect/loader/mozJSComponentLoader.cpp
@@ -1309,16 +1309,18 @@ mozJSComponentLoader::ImportInto(const n
                 rv = info.GetLocation(location);
                 NS_ENSURE_SUCCESS(rv, rv);
                 return ReportOnCallerUTF8(cxhelper, ERROR_GETTING_SYMBOL,
                                           location.get(), bytes.ptr());
             }
 
             JSAutoCompartment target_ac(cx, targetObj);
 
+            JS_MarkCrossZoneId(cx, symbolId);
+
             if (!JS_WrapValue(cx, &value) ||
                 !JS_SetPropertyById(cx, targetObj, symbolId, value)) {
                 JSAutoByteString bytes;
                 RootedString symbolStr(cx, JSID_TO_STRING(symbolId));
                 if (!bytes.encodeUtf8(cx, symbolStr))
                     return NS_ERROR_FAILURE;
                 nsCString location;
                 rv = info.GetLocation(location);
--- a/js/xpconnect/src/ExportHelpers.cpp
+++ b/js/xpconnect/src/ExportHelpers.cpp
@@ -403,19 +403,22 @@ ExportFunction(JSContext* cx, HandleValu
         RootedId id(cx, options.defineAs);
         if (JSID_IS_VOID(id)) {
             // If there wasn't any function name specified,
             // copy the name from the function being imported.
             JSFunction* fun = JS_GetObjectFunction(funObj);
             RootedString funName(cx, JS_GetFunctionId(fun));
             if (!funName)
                 funName = JS_AtomizeAndPinString(cx, "");
+            JS_MarkCrossZoneIdValue(cx, StringValue(funName));
 
             if (!JS_StringToId(cx, funName, &id))
                 return false;
+        } else {
+            JS_MarkCrossZoneId(cx, id);
         }
         MOZ_ASSERT(JSID_IS_STRING(id));
 
         // The function forwarder will live in the target compartment. Since
         // this function will be referenced from its private slot, to avoid a
         // GC hazard, we must wrap it to the same compartment.
         if (!JS_WrapObject(cx, &funObj))
             return false;
@@ -468,16 +471,18 @@ CreateObjectIn(JSContext* cx, HandleValu
     if (define && js::IsScriptedProxy(scope)) {
         JS_ReportErrorASCII(cx, "Defining property on proxy object is not allowed");
         return false;
     }
 
     RootedObject obj(cx);
     {
         JSAutoCompartment ac(cx, scope);
+        JS_MarkCrossZoneId(cx, options.defineAs);
+
         obj = JS_NewPlainObject(cx);
         if (!obj)
             return false;
 
         if (define) {
             if (!JS_DefinePropertyById(cx, scope, options.defineAs, obj,
                                        JSPROP_ENUMERATE,
                                        JS_STUBGETTER, JS_STUBSETTER))
--- a/js/xpconnect/src/Sandbox.cpp
+++ b/js/xpconnect/src/Sandbox.cpp
@@ -192,16 +192,17 @@ SandboxImport(JSContext* cx, unsigned ar
 
         // Use the actual function name as the name.
         funname = JS_GetFunctionId(fun);
         if (!funname) {
             XPCThrower::Throw(NS_ERROR_INVALID_ARG, cx);
             return false;
         }
     }
+    JS_MarkCrossZoneIdValue(cx, StringValue(funname));
 
     RootedId id(cx);
     if (!JS_StringToId(cx, funname, &id))
         return false;
 
     // We need to resolve the this object, because this function is used
     // unbound and should still work and act on the original sandbox.
     RootedObject thisObject(cx, JS_THIS_OBJECT(cx, vp));
--- a/js/xpconnect/src/XPCJSContext.cpp
+++ b/js/xpconnect/src/XPCJSContext.cpp
@@ -2365,16 +2365,20 @@ ReportJSRuntimeExplicitTreeStats(const J
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/runtime-object"),
         KIND_HEAP, rtStats.runtime.object,
         "The JSRuntime object.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/atoms-table"),
         KIND_HEAP, rtStats.runtime.atomsTable,
         "The atoms table.");
 
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/atoms-mark-bitmaps"),
+        KIND_HEAP, rtStats.runtime.atomsMarkBitmaps,
+        "Mark bitmaps for atoms held by each zone.");
+
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/contexts"),
         KIND_HEAP, rtStats.runtime.contexts,
         "JSContext objects and structures that belong to them.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/temporary"),
         KIND_HEAP, rtStats.runtime.temporary,
         "Transient data (mostly parse nodes) held by the JSRuntime during "
         "compilation.");
--- a/js/xpconnect/wrappers/XrayWrapper.cpp
+++ b/js/xpconnect/wrappers/XrayWrapper.cpp
@@ -313,16 +313,17 @@ bool JSXrayTraits::getOwnPropertyFromWra
                                                    HandleObject wrapper,
                                                    HandleId id,
                                                    MutableHandle<PropertyDescriptor> outDesc)
 {
     MOZ_ASSERT(js::IsObjectInContextCompartment(wrapper, cx));
     RootedObject target(cx, getTargetObject(wrapper));
     {
         JSAutoCompartment ac(cx, target);
+        JS_MarkCrossZoneId(cx, id);
         if (!getOwnPropertyFromTargetIfSafe(cx, target, wrapper, id, outDesc))
             return false;
     }
     return JS_WrapPropertyDescriptor(cx, outDesc);
 }
 
 bool JSXrayTraits::getOwnPropertyFromTargetIfSafe(JSContext* cx,
                                                   HandleObject target,
@@ -343,47 +344,52 @@ bool JSXrayTraits::getOwnPropertyFromTar
 
     // If the property doesn't exist at all, we're done.
     if (!desc.object())
         return true;
 
     // Disallow accessor properties.
     if (desc.hasGetterOrSetter()) {
         JSAutoCompartment ac(cx, wrapper);
+        JS_MarkCrossZoneId(cx, id);
         return ReportWrapperDenial(cx, id, WrapperDenialForXray, "property has accessor");
     }
 
     // Apply extra scrutiny to objects.
     if (desc.value().isObject()) {
         RootedObject propObj(cx, js::UncheckedUnwrap(&desc.value().toObject()));
         JSAutoCompartment ac(cx, propObj);
 
         // Disallow non-subsumed objects.
         if (!AccessCheck::subsumes(target, propObj)) {
             JSAutoCompartment ac(cx, wrapper);
+            JS_MarkCrossZoneId(cx, id);
             return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value not same-origin with target");
         }
 
         // Disallow non-Xrayable objects.
         XrayType xrayType = GetXrayType(propObj);
         if (xrayType == NotXray || xrayType == XrayForOpaqueObject) {
             JSAutoCompartment ac(cx, wrapper);
+            JS_MarkCrossZoneId(cx, id);
             return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value not Xrayable");
         }
 
         // Disallow callables.
         if (JS::IsCallable(propObj)) {
             JSAutoCompartment ac(cx, wrapper);
+            JS_MarkCrossZoneId(cx, id);
             return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value is callable");
         }
     }
 
     // Disallow any property that shadows something on its (Xrayed)
     // prototype chain.
     JSAutoCompartment ac2(cx, wrapper);
+    JS_MarkCrossZoneId(cx, id);
     RootedObject proto(cx);
     bool foundOnProto = false;
     if (!JS_GetPrototype(cx, wrapper, &proto) ||
         (proto && !JS_HasPropertyById(cx, proto, id, &foundOnProto)))
     {
         return false;
     }
     if (foundOnProto)
@@ -549,16 +555,17 @@ JSXrayTraits::resolveOwnProperty(JSConte
         } else if (IsTypedArrayKey(key)) {
             if (IsArrayIndex(GetArrayIndexFromId(cx, id))) {
                 // WebExtensions can't use cloneInto(), so we just let them do
                 // the slow thing to maximize compatibility.
                 if (CompartmentPrivate::Get(CurrentGlobalOrNull(cx))->isWebExtensionContentScript) {
                     Rooted<PropertyDescriptor> innerDesc(cx);
                     {
                         JSAutoCompartment ac(cx, target);
+                        JS_MarkCrossZoneId(cx, id);
                         if (!JS_GetOwnPropertyDescriptorById(cx, target, id, &innerDesc))
                             return false;
                     }
                     if (innerDesc.isDataDescriptor() && innerDesc.value().isNumber()) {
                         desc.setValue(innerDesc.value());
                         desc.object().set(wrapper);
                     }
                     return true;
@@ -571,16 +578,18 @@ JSXrayTraits::resolveOwnProperty(JSConte
             }
         } else if (key == JSProto_Function) {
             if (id == GetJSIDByIndex(cx, XPCJSContext::IDX_LENGTH)) {
                 FillPropertyDescriptor(desc, wrapper, JSPROP_PERMANENT | JSPROP_READONLY,
                                        NumberValue(JS_GetFunctionArity(JS_GetObjectFunction(target))));
                 return true;
             } else if (id == GetJSIDByIndex(cx, XPCJSContext::IDX_NAME)) {
                 RootedString fname(cx, JS_GetFunctionId(JS_GetObjectFunction(target)));
+                if (fname)
+                    JS_MarkCrossZoneIdValue(cx, StringValue(fname));
                 FillPropertyDescriptor(desc, wrapper, JSPROP_PERMANENT | JSPROP_READONLY,
                                        fname ? StringValue(fname) : JS_GetEmptyStringValue(cx));
             } else {
                 // Look for various static properties/methods and the
                 // 'prototype' property.
                 JSProtoKey standardConstructor = constructorFor(holder);
                 if (standardConstructor != JSProto_Null) {
                     // Handle the 'prototype' property to make
@@ -701,16 +710,17 @@ JSXrayTraits::delete_(JSContext* cx, Han
     // property from the underlying object that they are able to resolve. Note
     // that this deleting may fail if the property is non-configurable.
     JSProtoKey key = getProtoKey(holder);
     bool isObjectOrArrayInstance = (key == JSProto_Object || key == JSProto_Array) &&
                                    !isPrototype(holder);
     if (isObjectOrArrayInstance) {
         RootedObject target(cx, getTargetObject(wrapper));
         JSAutoCompartment ac(cx, target);
+        JS_MarkCrossZoneId(cx, id);
         Rooted<PropertyDescriptor> desc(cx);
         if (!getOwnPropertyFromTargetIfSafe(cx, target, wrapper, id, &desc))
             return false;
         if (desc.object())
             return JS_DeletePropertyById(cx, target, id, result);
     }
     return result.succeed();
 }
@@ -758,16 +768,17 @@ JSXrayTraits::defineProperty(JSContext* 
         }
         if (existingDesc.object() && existingDesc.object() != wrapper) {
             JS_ReportErrorASCII(cx, "Not allowed to shadow non-own Xray-resolved property on [Object] or [Array] XrayWrapper");
             return false;
         }
 
         Rooted<PropertyDescriptor> wrappedDesc(cx, desc);
         JSAutoCompartment ac(cx, target);
+        JS_MarkCrossZoneId(cx, id);
         if (!JS_WrapPropertyDescriptor(cx, &wrappedDesc) ||
             !JS_DefinePropertyById(cx, target, id, wrappedDesc, result))
         {
             return false;
         }
         *defined = true;
         return true;
     }
@@ -776,16 +787,17 @@ JSXrayTraits::defineProperty(JSContext* 
     // validating that the key and value are both numbers, we can avoid doing any wrapping.
     if (isInstance && IsTypedArrayKey(key) &&
         CompartmentPrivate::Get(JS::CurrentGlobalOrNull(cx))->isWebExtensionContentScript &&
         desc.isDataDescriptor() && (desc.value().isNumber() || desc.value().isUndefined()) &&
         IsArrayIndex(GetArrayIndexFromId(cx, id)))
     {
         RootedObject target(cx, getTargetObject(wrapper));
         JSAutoCompartment ac(cx, target);
+        JS_MarkCrossZoneId(cx, id);
         if (!JS_DefinePropertyById(cx, target, id, desc, result))
             return false;
         *defined = true;
         return true;
     }
 
     return true;
 }
@@ -854,16 +866,18 @@ JSXrayTraits::enumerateNames(JSContext* 
                     Rooted<PropertyDescriptor> desc(cx);
                     RootedId id(cx, targetProps[i]);
                     if (!getOwnPropertyFromTargetIfSafe(cx, target, wrapper, id, &desc))
                         return false;
                     if (desc.object())
                         props.infallibleAppend(id);
                 }
             }
+            for (size_t i = 0; i < props.length(); ++i)
+                JS_MarkCrossZoneId(cx, props[i]);
             return true;
         } else if (IsTypedArrayKey(key)) {
             uint32_t length = JS_GetTypedArrayLength(target);
             // TypedArrays enumerate every indexed property in range, but
             // |length| is a getter that lives on the proto, like it should be.
             if (!props.reserve(length))
                 return false;
             for (int32_t i = 0; i <= int32_t(length - 1); ++i)
@@ -1486,16 +1500,17 @@ XrayTraits::resolveOwnProperty(JSContext
     if (!getExpandoObject(cx, target, wrapper, &expando))
         return false;
 
     // Check for expando properties first. Note that the expando object lives
     // in the target compartment.
     bool found = false;
     if (expando) {
         JSAutoCompartment ac(cx, expando);
+        JS_MarkCrossZoneId(cx, id);
         if (!JS_GetOwnPropertyDescriptorById(cx, expando, id, desc))
             return false;
         found = !!desc.object();
     }
 
     // Next, check for ES builtins.
     if (!found && JS_IsGlobalObject(target)) {
         JSProtoKey key = JS_IdToProtoKey(cx, id);
@@ -1581,16 +1596,17 @@ XPCWrappedNativeXrayTraits::enumerateNam
     // Go through the properties we found on the underlying object and see if
     // they appear on the XrayWrapper. If it throws (which may happen if the
     // wrapper is a SecurityWrapper), just clear the exception and move on.
     MOZ_ASSERT(!JS_IsExceptionPending(cx));
     if (!props.reserve(wnProps.length()))
         return false;
     for (size_t n = 0; n < wnProps.length(); ++n) {
         RootedId id(cx, wnProps[n]);
+        JS_MarkCrossZoneId(cx, id);
         bool hasProp;
         if (JS_HasPropertyById(cx, wrapper, id, &hasProp) && hasProp)
             props.infallibleAppend(id);
         JS_ClearPendingException(cx);
     }
     return true;
 }
 
@@ -2174,16 +2190,17 @@ XrayWrapper<Base, Traits>::definePropert
         return false;
     if (defined)
         return true;
 
     // We're placing an expando. The expando objects live in the target
     // compartment, so we need to enter it.
     RootedObject target(cx, Traits::singleton.getTargetObject(wrapper));
     JSAutoCompartment ac(cx, target);
+    JS_MarkCrossZoneId(cx, id);
 
     // Grab the relevant expando object.
     RootedObject expandoObject(cx, Traits::singleton.ensureExpandoObject(cx, wrapper,
                                                                          target));
     if (!expandoObject)
         return false;
 
     // Wrap the property descriptor for the target compartment.
@@ -2217,16 +2234,17 @@ XrayWrapper<Base, Traits>::delete_(JSCon
     // Check the expando object.
     RootedObject target(cx, Traits::getTargetObject(wrapper));
     RootedObject expando(cx);
     if (!Traits::singleton.getExpandoObject(cx, target, wrapper, &expando))
         return false;
 
     if (expando) {
         JSAutoCompartment ac(cx, expando);
+        JS_MarkCrossZoneId(cx, id);
         bool hasProp;
         if (!JS_HasPropertyById(cx, expando, id, &hasProp)) {
             return false;
         }
         if (hasProp) {
             return JS_DeletePropertyById(cx, expando, id, result);
         }
     }
@@ -2467,16 +2485,18 @@ XrayWrapper<Base, Traits>::getPropertyKe
     if (!Traits::singleton.getExpandoObject(cx, target, wrapper, &expando))
         return false;
 
     if (expando) {
         JSAutoCompartment ac(cx, expando);
         if (!js::GetPropertyKeys(cx, expando, flags, &props))
             return false;
     }
+    for (size_t i = 0; i < props.length(); ++i)
+        JS_MarkCrossZoneId(cx, props[i]);
 
     return Traits::singleton.enumerateNames(cx, wrapper, flags, props);
 }
 
 /*
  * The Permissive / Security variants should be used depending on whether the
  * compartment of the wrapper is guranteed to subsume the compartment of the
  * wrapped object (i.e. - whether it is safe from a security perspective to