Bug 1213005 - Try harder not to fire read barriers during GC; r=sfink
author Terrence Cole <terrence@mozilla.com>
Thu, 08 Oct 2015 11:24:16 -0700
changeset 304055 103c2e08b3187d0759b86437b12d4688bcb30b01
parent 304054 4dfb3e6470787d60558f27d70b6e447a865be65c
child 304056 59164fa9fe90766b454a101d02ab77d1db87c20b
push id 1001
push user raliiev@mozilla.com
push date Mon, 18 Jan 2016 19:06:03 +0000
treeherder mozilla-release@8b89261f3ac4
reviewers sfink
bugs 1213005
milestone 44.0a1
Bug 1213005 - Try harder not to fire read barriers during GC; r=sfink
js/src/gc/Barrier.h
js/src/gc/GCRuntime.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Zone.cpp
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jsatominlines.h
js/src/jscompartment.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jspubtd.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/ObjectGroup.cpp
js/src/vm/Runtime.h
js/src/vm/ScopeObject.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/Symbol.cpp
js/src/vm/TypeInference.cpp
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -816,18 +816,18 @@ struct DefaultHasher<PreBarriered<T>> : 
 /* Useful for hashtables with a ReadBarriered as key. */
 template <class T>
 struct ReadBarrieredHasher
 {
     typedef ReadBarriered<T> Key;
     typedef T Lookup;
 
     static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
-    static bool match(const Key& k, Lookup l) { return k.get() == l; }
-    static void rekey(Key& k, const Key& newKey) { k.set(newKey); }
+    static bool match(const Key& k, Lookup l) { return k.unbarrieredGet() == l; }
+    static void rekey(Key& k, const Key& newKey) { k.set(newKey.unbarrieredGet()); }
 };
 
 /* Specialized hashing policy for ReadBarriereds. */
 template <class T>
 struct DefaultHasher<ReadBarriered<T>> : ReadBarrieredHasher<T> { };
 
 class ArrayObject;
 class ArrayBufferObject;
@@ -889,24 +889,21 @@ typedef PreBarriered<jsid> PreBarrieredI
 typedef RelocatablePtr<jsid> RelocatableId;
 typedef HeapPtr<jsid> HeapId;
 
 typedef ImmutableTenuredPtr<PropertyName*> ImmutablePropertyNamePtr;
 typedef ImmutableTenuredPtr<JS::Symbol*> ImmutableSymbolPtr;
 
 typedef ReadBarriered<DebugScopeObject*> ReadBarrieredDebugScopeObject;
 typedef ReadBarriered<GlobalObject*> ReadBarrieredGlobalObject;
-typedef ReadBarriered<JSFunction*> ReadBarrieredFunction;
 typedef ReadBarriered<JSObject*> ReadBarrieredObject;
 typedef ReadBarriered<JSScript*> ReadBarrieredScript;
 typedef ReadBarriered<ScriptSourceObject*> ReadBarrieredScriptSourceObject;
 typedef ReadBarriered<Shape*> ReadBarrieredShape;
-typedef ReadBarriered<UnownedBaseShape*> ReadBarrieredUnownedBaseShape;
 typedef ReadBarriered<jit::JitCode*> ReadBarrieredJitCode;
 typedef ReadBarriered<ObjectGroup*> ReadBarrieredObjectGroup;
-typedef ReadBarriered<JSAtom*> ReadBarrieredAtom;
 typedef ReadBarriered<JS::Symbol*> ReadBarrieredSymbol;
 
 typedef ReadBarriered<Value> ReadBarrieredValue;
 
 } /* namespace js */
 
 #endif /* gc_Barrier_h */
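
The hasher change above is the heart of the patch: hashtable hooks can run while the collector itself is touching the table, so they must not go through the barriered get(). A minimal stand-in for ReadBarriered (the real template is defined earlier in this file) showing the two accessors:

    // Simplified stand-in, not the real js::ReadBarriered.
    template <typename T>
    class ReadBarrieredSketch
    {
        T value_;

        static void readBarrier(T v) {
            // The real barrier marks |v| while an incremental barrier is
            // active and unmarks gray cells; elided in this sketch.
            (void)v;
        }

      public:
        explicit ReadBarrieredSketch(T v) : value_(v) {}

        // Barriered read: safe only on the main thread outside of GC.
        T get() const { readBarrier(value_); return value_; }

        // Unbarriered read: the caller promises the GC cannot be confused,
        // e.g. because the value is only compared or traced in place.
        T unbarrieredGet() const { return value_; }

        void set(T v) { value_ = v; }
    };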
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -659,16 +659,17 @@ class GCRuntime
         MOZ_ASSERT(nextCellUniqueId_ > 0);
         return nextCellUniqueId_++;
     }
 
   public:
     // Internal public interface
     js::gc::State state() const { return incrementalState; }
     bool isHeapCompacting() const { return state() == COMPACT; }
+    bool isForegroundSweeping() const { return state() == SWEEP; }
     bool isBackgroundSweeping() { return helperState.isBackgroundSweeping(); }
     void waitBackgroundSweepEnd() { helperState.waitBackgroundSweepEnd(); }
     void waitBackgroundSweepOrAllocEnd() {
         helperState.waitBackgroundSweepEnd();
         allocTask.cancel(GCParallelTask::CancelAndWait);
     }
 
     void requestMinorGC(JS::gcreason::Reason reason);
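
The new isForegroundSweeping() predicate exists for one consumer added later in this patch, JSCompartment::updateDebuggerObservesFlag, which must pick an unbarriered read of the global while the foreground sweep is running. A compressed sketch of that choice (GlobalForDebuggerFlags is a hypothetical helper; the methods on rt->gc and comp are the ones this patch touches):

    static GlobalObject*
    GlobalForDebuggerFlags(JSRuntime* rt, JSCompartment* comp)
    {
        return rt->gc.isForegroundSweeping()
               ? comp->unsafeUnbarrieredMaybeGlobal()  // sweeping: no barrier
               : comp->maybeGlobal();                  // mutator: barrier is fine
    }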
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -1452,18 +1452,23 @@ TenuredCell::isInsideZone(JS::Zone* zone
 
 /* static */ MOZ_ALWAYS_INLINE void
 TenuredCell::readBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
     MOZ_ASSERT(!isNullLike(thing));
     if (thing->shadowRuntimeFromAnyThread()->isHeapBusy())
         return;
+    MOZ_ASSERT_IF(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()),
+                  !thing->shadowRuntimeFromAnyThread()->isHeapCollecting());
 
     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
+    MOZ_ASSERT_IF(!CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()),
+                  !shadowZone->needsIncrementalBarrier());
+
     if (shadowZone->needsIncrementalBarrier()) {
         MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == thing);
     }
     if (thing->isMarked(GRAY))
         UnmarkGrayCellRecursively(thing, thing->getTraceKind());
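
The two assertions added above encode the invariant this patch enforces: on the thread that owns the runtime a read barrier must never fire while the heap is collecting, and off that thread the zone must not have an incremental barrier pending. A hedged sketch with stand-in types (HeapStateSketch and RuntimeSketch are illustrative only):

    #include <cassert>

    enum class HeapStateSketch { Idle, MinorCollecting, MajorCollecting };

    struct RuntimeSketch {
        HeapStateSketch state = HeapStateSketch::Idle;
        bool isHeapCollecting() const { return state != HeapStateSketch::Idle; }
    };

    void readBarrierSketch(const RuntimeSketch& rt, bool onOwningThread,
                           bool zoneNeedsIncrementalBarrier)
    {
        assert(!onOwningThread || !rt.isHeapCollecting());      // first assert above
        assert(onOwningThread || !zoneNeedsIncrementalBarrier); // second assert above
        // ... trace the cell if zoneNeedsIncrementalBarrier, as in readBarrier ...
    }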
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -2441,21 +2441,21 @@ TypeSet::MarkTypeRoot(JSTracer* trc, Typ
     AssertRootMarkingPhase(trc);
     MarkTypeUnbarriered(trc, v, name);
 }
 
 void
 TypeSet::MarkTypeUnbarriered(JSTracer* trc, TypeSet::Type* v, const char* name)
 {
     if (v->isSingletonUnchecked()) {
-        JSObject* obj = v->singleton();
+        JSObject* obj = v->singletonNoBarrier();
         DispatchToTracer(trc, &obj, name);
         *v = TypeSet::ObjectType(obj);
     } else if (v->isGroupUnchecked()) {
-        ObjectGroup* group = v->group();
+        ObjectGroup* group = v->groupNoBarrier();
         DispatchToTracer(trc, &group, name);
         *v = TypeSet::ObjectType(group);
     }
 }
 
 
 /*** Cycle Collector Barrier Implementation *******************************************************/
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -142,17 +142,17 @@ Zone::logPromotionsToTenured()
     auto now = JS_GetCurrentEmbedderTime();
     JSRuntime* rt = runtimeFromAnyThread();
 
     for (auto** dbgp = dbgs->begin(); dbgp != dbgs->end(); dbgp++) {
         if (!(*dbgp)->isEnabled() || !(*dbgp)->isTrackingTenurePromotions())
             continue;
 
         for (auto range = awaitingTenureLogging.all(); !range.empty(); range.popFront()) {
-            if ((*dbgp)->isDebuggee(range.front()->compartment()))
+            if ((*dbgp)->isDebuggeeUnbarriered(range.front()->compartment()))
                 (*dbgp)->logTenurePromotion(rt, *range.front(), now);
         }
     }
 
     awaitingTenureLogging.clear();
 }
 
 void
@@ -306,17 +306,17 @@ Zone::canCollect()
     return true;
 }
 
 void
 Zone::notifyObservingDebuggers()
 {
     for (CompartmentsInZoneIter comps(this); !comps.done(); comps.next()) {
         JSRuntime* rt = runtimeFromAnyThread();
-        RootedGlobalObject global(rt, comps->maybeGlobal());
+        RootedGlobalObject global(rt, comps->unsafeUnbarrieredMaybeGlobal());
         if (!global)
             continue;
 
         GlobalObject::DebuggerVector* dbgs = global->getDebuggers();
         if (!dbgs)
             continue;
 
         for (GlobalObject::DebuggerVector::Range r = dbgs->all(); !r.empty(); r.popFront()) {
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -194,21 +194,19 @@ void
 js::MarkAtoms(JSTracer* trc)
 {
     JSRuntime* rt = trc->runtime();
     for (AtomSet::Enum e(rt->atoms()); !e.empty(); e.popFront()) {
         const AtomStateEntry& entry = e.front();
         if (!entry.isPinned())
             continue;
 
-        JSAtom* atom = entry.asPtr();
-        bool tagged = entry.isPinned();
+        JSAtom* atom = entry.asPtrUnbarriered();
         TraceRoot(trc, &atom, "interned_atom");
-        if (entry.asPtr() != atom)
-            e.rekeyFront(AtomHasher::Lookup(atom), AtomStateEntry(atom, tagged));
+        MOZ_ASSERT(entry.asPtrUnbarriered() == atom);
     }
 }
 
 void
 js::MarkPermanentAtoms(JSTracer* trc)
 {
     JSRuntime* rt = trc->runtime();
 
@@ -247,17 +245,17 @@ js::MarkWellKnownSymbols(JSTracer* trc)
 void
 JSRuntime::sweepAtoms()
 {
     if (!atoms_)
         return;
 
     for (AtomSet::Enum e(*atoms_); !e.empty(); e.popFront()) {
         AtomStateEntry entry = e.front();
-        JSAtom* atom = entry.asPtr();
+        JSAtom* atom = entry.asPtrUnbarriered();
         bool isDying = IsAboutToBeFinalizedUnbarriered(&atom);
 
         /* Pinned or interned key cannot be finalized. */
         MOZ_ASSERT_IF(hasContexts() && entry.isPinned(), !isDying);
 
         if (isDying)
             e.removeFront();
     }
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -70,16 +70,17 @@ class AtomStateEntry
      * Non-branching code sequence. Note that the const_cast is safe because
      * the hash function doesn't consider the tag to be a portion of the key.
      */
     void setPinned(bool pinned) const {
         const_cast<AtomStateEntry*>(this)->bits |= uintptr_t(pinned);
     }
 
     JSAtom* asPtr() const;
+    JSAtom* asPtrUnbarriered() const;
 };
 
 struct AtomHasher
 {
     struct Lookup
     {
         union {
             const JS::Latin1Char* latin1Chars;
--- a/js/src/jsatominlines.h
+++ b/js/src/jsatominlines.h
@@ -21,16 +21,23 @@ inline JSAtom*
 js::AtomStateEntry::asPtr() const
 {
     MOZ_ASSERT(bits != 0);
     JSAtom* atom = reinterpret_cast<JSAtom*>(bits & NO_TAG_MASK);
     JSString::readBarrier(atom);
     return atom;
 }
 
+inline JSAtom*
+js::AtomStateEntry::asPtrUnbarriered() const
+{
+    MOZ_ASSERT(bits != 0);
+    return reinterpret_cast<JSAtom*>(bits & NO_TAG_MASK);
+}
+
 namespace js {
 
 inline jsid
 AtomToId(JSAtom* atom)
 {
     JS_STATIC_ASSERT(JSID_INT_MIN == 0);
 
     uint32_t index;
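
Together the two accessors split the old asPtr() callers by context; a small sketch of the rule (readAtomSketch and insideGC are illustrative names, the accessors are the real ones declared in jsatom.h above): ordinary lookups keep the barrier, while GC iteration over the atoms table (see the jsatom.cpp hunks above) must avoid it.

    static JSAtom*
    readAtomSketch(const js::AtomStateEntry& entry, bool insideGC)
    {
        return insideGC ? entry.asPtrUnbarriered()  // marking/sweeping: no barrier
                        : entry.asPtr();            // normal lookup: barrier fires
    }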
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -259,17 +259,18 @@ JSCompartment::checkWrapperMapAfterMovin
      * Assert that the postbarriers have worked and that nothing is left in
      * wrapperMap that points into the nursery, and that the hash table entries
      * are discoverable.
      */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey key = e.front().key();
         CheckGCThingAfterMovingGC(key.debugger);
         CheckGCThingAfterMovingGC(key.wrapped);
-        CheckGCThingAfterMovingGC(static_cast<Cell*>(e.front().value().get().toGCThing()));
+        CheckGCThingAfterMovingGC(
+                static_cast<Cell*>(e.front().value().unbarrieredGet().toGCThing()));
 
         WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
         MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front());
     }
 }
 #endif
 
 bool
@@ -540,17 +541,17 @@ JSCompartment::getNonSyntacticLexicalSco
 
 void
 JSCompartment::traceOutgoingCrossCompartmentWrappers(JSTracer* trc)
 {
     MOZ_ASSERT(trc->runtime()->isHeapMajorCollecting());
     MOZ_ASSERT(!zone()->isCollecting() || trc->runtime()->gc.isHeapCompacting());
 
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
-        Value v = e.front().value();
+        Value v = e.front().value().unbarrieredGet();
         if (e.front().key().kind == CrossCompartmentKey::ObjectWrapper) {
             ProxyObject* wrapper = &v.toObject().as<ProxyObject>();
 
             /*
              * We have a cross-compartment wrapper. Its private pointer may
              * point into the compartment being collected, so we should mark it.
              */
             TraceEdge(trc, wrapper->slotOfPrivate(), "cross-compartment wrapper");
@@ -667,19 +668,19 @@ void
 JSCompartment::sweepSavedStacks()
 {
     savedStacks_.sweep(runtimeFromAnyThread());
 }
 
 void
 JSCompartment::sweepGlobalObject(FreeOp* fop)
 {
-    if (global_.unbarrieredGet() && IsAboutToBeFinalized(&global_)) {
+    if (global_ && IsAboutToBeFinalized(&global_)) {
         if (isDebuggee())
-            Debugger::detachAllDebuggersFromGlobal(fop, global_);
+            Debugger::detachAllDebuggersFromGlobal(fop, global_.unbarrieredGet());
         global_.set(nullptr);
     }
 }
 
 void
 JSCompartment::sweepObjectPendingMetadata()
 {
     if (objectMetadataState.is<PendingMetadata>()) {
@@ -1006,17 +1007,20 @@ JSCompartment::ensureDelazifyScriptsForD
 void
 JSCompartment::updateDebuggerObservesFlag(unsigned flag)
 {
     MOZ_ASSERT(isDebuggee());
     MOZ_ASSERT(flag == DebuggerObservesAllExecution ||
                flag == DebuggerObservesCoverage ||
                flag == DebuggerObservesAsmJS);
 
-    const GlobalObject::DebuggerVector* v = maybeGlobal()->getDebuggers();
+    GlobalObject* global = zone()->runtimeFromMainThread()->gc.isForegroundSweeping()
+                           ? unsafeUnbarrieredMaybeGlobal()
+                           : maybeGlobal();
+    const GlobalObject::DebuggerVector* v = global->getDebuggers();
     for (Debugger * const* p = v->begin(); p != v->end(); p++) {
         Debugger* dbg = *p;
         if (flag == DebuggerObservesAllExecution ? dbg->observesAllExecution() :
             flag == DebuggerObservesCoverage ? dbg->observesCoverage() :
             dbg->observesAsmJS())
         {
             debugModeBits |= flag;
             return;
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -5221,17 +5221,17 @@ GCRuntime::beginSweepPhase(bool destroyi
 
     releaseObservedTypes = shouldReleaseObservedTypes();
 
 #ifdef DEBUG
     for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         MOZ_ASSERT(!c->gcIncomingGrayPointers);
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key().kind != CrossCompartmentKey::StringWrapper)
-                AssertNotOnGrayList(&e.front().value().get().toObject());
+                AssertNotOnGrayList(&e.front().value().unbarrieredGet().toObject());
         }
     }
 #endif
 
     DropStringWrappers(rt);
 
     findZoneGroups();
     endMarkingZoneGroup();
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1272,17 +1272,17 @@ CheckGCThingAfterMovingGC(T* t)
         MOZ_RELEASE_ASSERT(!RelocationOverlay::isCellForwarded(t));
     }
 }
 
 template <typename T>
 inline void
 CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t)
 {
-    CheckGCThingAfterMovingGC(t.get());
+    CheckGCThingAfterMovingGC(t.unbarrieredGet());
 }
 
 struct CheckValueAfterMovingGCFunctor : public VoidDefaultAdaptor<Value> {
     template <typename T> void operator()(T* t) { CheckGCThingAfterMovingGC(t); }
 };
 
 inline void
 CheckValueAfterMovingGC(const JS::Value& value)
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -160,16 +160,19 @@ struct Runtime
 
   public:
     Runtime()
       : heapState_(JS::HeapState::Idle)
       , gcStoreBufferPtr_(nullptr)
     {}
 
     bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }
+    bool isHeapMajorCollecting() const { return heapState_ == JS::HeapState::MajorCollecting; }
+    bool isHeapMinorCollecting() const { return heapState_ == JS::HeapState::MinorCollecting; }
+    bool isHeapCollecting() const { return isHeapMinorCollecting() || isHeapMajorCollecting(); }
 
     js::gc::StoreBuffer* gcStoreBufferPtr() { return gcStoreBufferPtr_; }
 
     static JS::shadow::Runtime* asShadowRuntime(JSRuntime* rt) {
         return reinterpret_cast<JS::shadow::Runtime*>(rt);
     }
 
   protected:
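
Moving the predicates from JSRuntime (see the vm/Runtime.h hunk below) onto JS::shadow::Runtime lets headers that only see the shadow runtime, such as gc/Heap.h above, test the collection state. A hedged example (HeapIsQuiescent is a hypothetical helper; both methods it calls are declared in this hunk):

    static bool
    HeapIsQuiescent(const JS::shadow::Runtime* rt)
    {
        return !rt->isHeapBusy() && !rt->isHeapCollecting();
    }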
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1697,20 +1697,20 @@ Debugger::slowPathOnIonCompilation(JSCon
         cx->clearPendingException();
         return;
     }
 
     MOZ_ASSERT(status == JSTRAP_CONTINUE);
 }
 
 bool
-Debugger::isDebuggee(const JSCompartment* compartment) const
+Debugger::isDebuggeeUnbarriered(const JSCompartment* compartment) const
 {
     MOZ_ASSERT(compartment);
-    return compartment->isDebuggee() && debuggees.has(compartment->maybeGlobal());
+    return compartment->isDebuggee() && debuggees.has(compartment->unsafeUnbarrieredMaybeGlobal());
 }
 
 Debugger::TenurePromotionsLogEntry::TenurePromotionsLogEntry(JSRuntime* rt, JSObject& obj, double when)
   : className(obj.getClass()->name),
     when(when),
     frame(getObjectAllocationSite(obj)),
     size(JS::ubi::Node(&obj).size(rt->debuggerMallocSizeOf))
 { }
@@ -2466,17 +2466,17 @@ Debugger::markAllIteratively(GCMarker* t
 
     /*
      * Find all Debugger objects in danger of GC. This code is a little
      * convoluted since the easiest way to find them is via their debuggees.
      */
     JSRuntime* rt = trc->runtime();
     for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         if (c->isDebuggee()) {
-            GlobalObject* global = c->maybeGlobal();
+            GlobalObject* global = c->unsafeUnbarrieredMaybeGlobal();
             if (!IsMarkedUnbarriered(&global))
                 continue;
 
             /*
              * Every debuggee has at least one debugger, so in this case
              * getDebuggers can't return nullptr.
              */
             const GlobalObject::DebuggerVector* debuggers = global->getDebuggers();
@@ -2533,20 +2533,20 @@ Debugger::markAllIteratively(GCMarker* t
  */
 /* static */ void
 Debugger::markAll(JSTracer* trc)
 {
     JSRuntime* rt = trc->runtime();
     for (Debugger* dbg : rt->debuggerList) {
         WeakGlobalObjectSet& debuggees = dbg->debuggees;
         for (WeakGlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
-            GlobalObject* global = e.front();
+            GlobalObject* global = e.front().unbarrieredGet();
             TraceManuallyBarrieredEdge(trc, &global, "Global Object");
-            if (global != e.front())
-                e.rekeyFront(ReadBarrieredGlobalObject(global));
+            if (global != e.front().unbarrieredGet())
+                e.rekeyFront(global, ReadBarrieredGlobalObject(global));
         }
 
         HeapPtrNativeObject& dbgobj = dbg->toJSObjectRef();
         TraceEdge(trc, &dbgobj, "Debugger Object");
 
         dbg->scripts.trace(trc);
         dbg->sources.trace(trc);
         dbg->objects.trace(trc);
@@ -2610,17 +2610,17 @@ Debugger::sweepAll(FreeOp* fop)
     for (Debugger* dbg : rt->debuggerList) {
         if (IsAboutToBeFinalized(&dbg->object)) {
             /*
              * dbg is being GC'd. Detach it from its debuggees. The debuggee
              * might be GC'd too. Since detaching requires access to both
              * objects, this must be done before finalize time.
              */
             for (WeakGlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
-                dbg->removeDebuggeeGlobal(fop, e.front(), &e);
+                dbg->removeDebuggeeGlobal(fop, e.front().unbarrieredGet(), &e);
         }
     }
 }
 
 /* static */ void
 Debugger::detachAllDebuggersFromGlobal(FreeOp* fop, GlobalObject* global)
 {
     const GlobalObject::DebuggerVector* debuggers = global->getDebuggers();
@@ -3450,17 +3450,17 @@ Debugger::addDebuggeeGlobal(JSContext* c
 }
 
 void
 Debugger::recomputeDebuggeeZoneSet()
 {
     AutoEnterOOMUnsafeRegion oomUnsafe;
     debuggeeZones.clear();
     for (auto range = debuggees.all(); !range.empty(); range.popFront()) {
-        if (!debuggeeZones.put(range.front()->zone()))
+        if (!debuggeeZones.put(range.front().unbarrieredGet()->zone()))
             oomUnsafe.crash("Debugger::removeDebuggeeGlobal");
     }
 }
 
 template<typename V>
 static Debugger**
 findDebuggerInVector(Debugger* dbg, V* vec)
 {
@@ -3479,17 +3479,17 @@ Debugger::removeDebuggeeGlobal(FreeOp* f
 {
     /*
      * The caller might have found global by enumerating this->debuggees; if
      * so, use HashSet::Enum::removeFront rather than HashSet::remove below,
      * to avoid invalidating the live enumerator.
      */
     MOZ_ASSERT(debuggees.has(global));
     MOZ_ASSERT(debuggeeZones.has(global->zone()));
-    MOZ_ASSERT_IF(debugEnum, debugEnum->front() == global);
+    MOZ_ASSERT_IF(debugEnum, debugEnum->front().unbarrieredGet() == global);
 
     /*
      * FIXME Debugger::slowPathOnLeaveFrame needs to kill all Debugger.Frame
      * objects referring to a particular JS stack frame. This is hard if
      * Debugger objects that are no longer debugging the relevant global might
      * have live Frame objects. So we take the easy way out and kill them here.
      * This is a bug, since it's observable and contrary to the spec. One
      * possible fix would be to put such objects into a compartment-wide bag
@@ -4098,17 +4098,17 @@ class MOZ_STACK_CLASS Debugger::ObjectQu
          * exist some path from this non-debuggee node back to a node in our
          * debuggee compartments. However, if that were true, then the incoming
          * cross compartment edge back into a debuggee compartment is already
          * listed as an edge in the RootList we started traversal with, and
          * therefore we don't need to follow edges to or from this non-debuggee
          * node.
          */
         JSCompartment* comp = referent.compartment();
-        if (comp && !dbg->isDebuggee(comp)) {
+        if (comp && !dbg->isDebuggeeUnbarriered(comp)) {
             traversal.abandonReferent();
             return true;
         }
 
         /*
          * If the referent is an object and matches our query's restrictions,
          * add it to the vector accumulating results. Skip objects that should
          * never be exposed to JS, like ScopeObjects and internal functions.
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -254,17 +254,17 @@ class Debugger : private mozilla::Linked
     // must be 0 and Observing must be 1.
     enum IsObserving {
         NotObserving = 0,
         Observing = 1
     };
 
     // Return true if the given compartment is a debuggee of this debugger,
     // false otherwise.
-    bool isDebuggee(const JSCompartment* compartment) const;
+    bool isDebuggeeUnbarriered(const JSCompartment* compartment) const;
 
     // Return true if this Debugger observed a debuggee that participated in the
     // GC identified by the given GC number. Return false otherwise.
     bool observedGC(uint64_t majorGCNumber) const {
         return observedGCs.has(majorGCNumber);
     }
 
     // Notify this Debugger that one or more of its debuggees is participating
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -395,18 +395,18 @@ struct ObjectGroupCompartment::NewEntry
 
     static inline HashNumber hash(const Lookup& lookup) {
         return PointerHasher<JSObject*, 3>::hash(lookup.hashProto.raw()) ^
                PointerHasher<const Class*, 3>::hash(lookup.clasp) ^
                PointerHasher<JSObject*, 3>::hash(lookup.associated);
     }
 
     static inline bool match(const NewEntry& key, const Lookup& lookup) {
-        return key.group->proto() == lookup.matchProto &&
-               (!lookup.clasp || key.group->clasp() == lookup.clasp) &&
+        return key.group.unbarrieredGet()->proto() == lookup.matchProto &&
+               (!lookup.clasp || key.group.unbarrieredGet()->clasp() == lookup.clasp) &&
                key.associated == lookup.associated;
     }
 
     static void rekey(NewEntry& k, const NewEntry& newKey) { k = newKey; }
 };
 
 // This class is used to add a post barrier on a NewTable entry, as the key is
 // calculated from a prototype object which may be moved by generational GC.
@@ -1804,31 +1804,31 @@ ObjectGroupCompartment::fixupNewTableAft
     /*
      * Each entry's hash depends on the object's prototype and we can't tell
      * whether that has been moved or not in sweepNewObjectGroupTable().
      */
     if (table && table->initialized()) {
         for (NewTable::Enum e(*table); !e.empty(); e.popFront()) {
             NewEntry entry = e.front();
             bool needRekey = false;
-            if (IsForwarded(entry.group.get())) {
-                entry.group.set(Forwarded(entry.group.get()));
+            if (IsForwarded(entry.group.unbarrieredGet())) {
+                entry.group.set(Forwarded(entry.group.unbarrieredGet()));
                 needRekey = true;
             }
-            TaggedProto proto = entry.group->proto();
+            TaggedProto proto = entry.group.unbarrieredGet()->proto();
             if (proto.isObject() && IsForwarded(proto.toObject())) {
                 proto = TaggedProto(Forwarded(proto.toObject()));
                 needRekey = true;
             }
             if (entry.associated && IsForwarded(entry.associated)) {
                 entry.associated = Forwarded(entry.associated);
                 needRekey = true;
             }
             if (needRekey) {
-                const Class* clasp = entry.group->clasp();
+                const Class* clasp = entry.group.unbarrieredGet()->clasp();
                 if (entry.associated && entry.associated->is<JSFunction>())
                     clasp = nullptr;
                 NewEntry::Lookup lookup(clasp, proto, entry.associated);
                 e.rekeyFront(lookup, entry);
             }
         }
     }
 }
@@ -1842,23 +1842,23 @@ ObjectGroupCompartment::checkNewTableAft
      * Assert that nothing points into the nursery or needs to be relocated, and
      * that the hash table entries are discoverable.
      */
     if (!table || !table->initialized())
         return;
 
     for (NewTable::Enum e(*table); !e.empty(); e.popFront()) {
         NewEntry entry = e.front();
-        CheckGCThingAfterMovingGC(entry.group.get());
-        TaggedProto proto = entry.group->proto();
+        CheckGCThingAfterMovingGC(entry.group.unbarrieredGet());
+        TaggedProto proto = entry.group.unbarrieredGet()->proto();
         if (proto.isObject())
             CheckGCThingAfterMovingGC(proto.toObject());
         CheckGCThingAfterMovingGC(entry.associated);
 
-        const Class* clasp = entry.group->clasp();
+        const Class* clasp = entry.group.unbarrieredGet()->clasp();
         if (entry.associated && entry.associated->is<JSFunction>())
             clasp = nullptr;
 
         NewEntry::Lookup lookup(clasp, proto, entry.associated);
         NewTable::Ptr ptr = table->lookup(lookup);
         MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front());
     }
 }
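
Every fixup loop in this patch follows the same recipe, generalized in the sketch below under an assumed table API (Table, its Enum, mutableFront(), and rekeyFront(newKey) are simplifications of the js::HashSet interface; IsForwarded and Forwarded are the real helpers used above): read entries only through unbarrieredGet(), update forwarded cells with a plain set(), then rekey so the entry is findable under its new address.

    template <typename Table, typename T>
    void FixupAfterMovingGCSketch(Table& table)
    {
        for (typename Table::Enum e(table); !e.empty(); e.popFront()) {
            T* old = e.mutableFront().unbarrieredGet();  // never the barriered get()
            if (IsForwarded(old)) {
                T* moved = Forwarded(old);
                e.mutableFront().set(moved);  // plain store, no barrier fired
                e.rekeyFront(moved);          // re-hash under the new pointer
            }
        }
    }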
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -1029,20 +1029,16 @@ struct JSRuntime : public JS::shadow::Ru
 #endif
 
     /* Garbage collector state, used by jsgc.c. */
     js::gc::GCRuntime   gc;
 
     /* Garbage collector state has been successfully initialized. */
     bool                gcInitialized;
 
-    bool isHeapMajorCollecting() const { return heapState_ == JS::HeapState::MajorCollecting; }
-    bool isHeapMinorCollecting() const { return heapState_ == JS::HeapState::MinorCollecting; }
-    bool isHeapCollecting() const { return isHeapMinorCollecting() || isHeapMajorCollecting(); }
-
     int gcZeal() { return gc.zeal(); }
 
     void lockGC() {
         assertCanLock(js::GCLock);
         gc.lockGC();
     }
 
     void unlockGC() {
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -2259,17 +2259,17 @@ DebugScopes::sweep(JSRuntime* rt)
              * that the synthetic SO is also about to be finalized too, and thus
              * the loop below will take care of things. But complex GC behavior
              * means that marks are only conservative approximations of
              * liveness; we should assume that anything could be marked.
              *
              * Thus, we must explicitly remove the entries from both liveScopes
              * and missingScopes here.
              */
-            liveScopes.remove(&e.front().value()->scope());
+            liveScopes.remove(&e.front().value().unbarrieredGet()->scope());
             e.removeFront();
         } else {
             MissingScopeKey key = e.front().key();
             if (IsForwarded(key.staticScope())) {
                 key.updateStaticScope(Forwarded(key.staticScope()));
                 e.rekeyFront(key);
             }
         }
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1200,19 +1200,20 @@ Shape::setObjectFlags(ExclusiveContext* 
 StackBaseShape::hash(const Lookup& lookup)
 {
     HashNumber hash = lookup.flags;
     hash = RotateLeft(hash, 4) ^ (uintptr_t(lookup.clasp) >> 3);
     return hash;
 }
 
 /* static */ inline bool
-StackBaseShape::match(UnownedBaseShape* key, const Lookup& lookup)
+StackBaseShape::match(ReadBarriered<UnownedBaseShape*> key, const Lookup& lookup)
 {
-    return key->flags == lookup.flags && key->clasp_ == lookup.clasp;
+    return key.unbarrieredGet()->flags == lookup.flags &&
+           key.unbarrieredGet()->clasp_ == lookup.clasp;
 }
 
 inline
 BaseShape::BaseShape(const StackBaseShape& base)
   : clasp_(base.clasp),
     compartment_(base.compartment),
     flags(base.flags),
     slotSpan_(0),
@@ -1446,17 +1447,17 @@ JSCompartment::checkInitialShapesTableAf
     /*
      * Assert that the postbarriers have worked and that nothing is left in
      * initialShapes that points into the nursery, and that the hash table
      * entries are discoverable.
      */
     for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
         InitialShapeEntry entry = e.front();
         TaggedProto proto = entry.proto;
-        Shape* shape = entry.shape.get();
+        Shape* shape = entry.shape.unbarrieredGet();
 
         if (proto.isObject())
             CheckGCThingAfterMovingGC(proto.toObject());
 
         InitialShapeEntry::Lookup lookup(shape->getObjectClass(),
                                          proto,
                                          shape->numFixedSlots(),
                                          shape->getObjectFlags());
@@ -1626,29 +1627,29 @@ void
 JSCompartment::fixupInitialShapeTable()
 {
     if (!initialShapes.initialized())
         return;
 
     for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
         InitialShapeEntry entry = e.front();
         bool needRekey = false;
-        if (IsForwarded(entry.shape.get())) {
-            entry.shape.set(Forwarded(entry.shape.get()));
+        if (IsForwarded(entry.shape.unbarrieredGet())) {
+            entry.shape.set(Forwarded(entry.shape.unbarrieredGet()));
             needRekey = true;
         }
         if (entry.proto.isObject() && IsForwarded(entry.proto.toObject())) {
             entry.proto = TaggedProto(Forwarded(entry.proto.toObject()));
             needRekey = true;
         }
         if (needRekey) {
-            InitialShapeEntry::Lookup relookup(entry.shape->getObjectClass(),
+            InitialShapeEntry::Lookup relookup(entry.shape.unbarrieredGet()->getObjectClass(),
                                                entry.proto,
-                                               entry.shape->numFixedSlots(),
-                                               entry.shape->getObjectFlags());
+                                               entry.shape.unbarrieredGet()->numFixedSlots(),
+                                               entry.shape.unbarrieredGet()->getObjectFlags());
             e.rekeyFront(relookup, entry);
         }
     }
 }
 
 void
 AutoRooterGetterSetter::Inner::trace(JSTracer* trc)
 {
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -476,17 +476,17 @@ BaseShape::toUnowned()
 UnownedBaseShape*
 BaseShape::baseUnowned()
 {
     MOZ_ASSERT(isOwned() && unowned_);
     return unowned_;
 }
 
 /* Entries for the per-compartment baseShapes set of unowned base shapes. */
-struct StackBaseShape : public DefaultHasher<ReadBarrieredUnownedBaseShape>
+struct StackBaseShape : public DefaultHasher<ReadBarriered<UnownedBaseShape*>>
 {
     uint32_t flags;
     const Class* clasp;
     JSCompartment* compartment;
 
     explicit StackBaseShape(BaseShape* base)
       : flags(base->flags & BaseShape::OBJECT_FLAG_MASK),
         clasp(base->clasp_),
@@ -508,20 +508,20 @@ struct StackBaseShape : public DefaultHa
         MOZ_IMPLICIT Lookup(UnownedBaseShape* base)
           : flags(base->getObjectFlags()), clasp(base->clasp())
         {
             MOZ_ASSERT(!base->isOwned());
         }
     };
 
     static inline HashNumber hash(const Lookup& lookup);
-    static inline bool match(UnownedBaseShape* key, const Lookup& lookup);
+    static inline bool match(ReadBarriered<UnownedBaseShape*> key, const Lookup& lookup);
 };
 
-typedef HashSet<ReadBarrieredUnownedBaseShape,
+typedef HashSet<ReadBarriered<UnownedBaseShape*>,
                 StackBaseShape,
                 SystemAllocPolicy> BaseShapeSet;
 
 
 class Shape : public gc::TenuredCell
 {
     friend class ::JSObject;
     friend class ::JSFunction;
--- a/js/src/vm/Symbol.cpp
+++ b/js/src/vm/Symbol.cpp
@@ -104,21 +104,21 @@ Symbol::dump(FILE* fp)
     }
 }
 #endif  // DEBUG
 
 void
 SymbolRegistry::sweep()
 {
     for (Enum e(*this); !e.empty(); e.popFront()) {
-        Symbol* sym = e.front();
-        if (IsAboutToBeFinalizedUnbarriered(&sym))
+        mozilla::DebugOnly<Symbol*> sym = e.front().unbarrieredGet();
+        if (IsAboutToBeFinalized(&e.mutableFront()))
             e.removeFront();
         else
-            MOZ_ASSERT(sym == e.front());
+            MOZ_ASSERT(sym == e.front().unbarrieredGet());
     }
 }
 
 bool
 js::SymbolDescriptiveString(JSContext* cx, Symbol* sym, MutableHandleValue result)
 {
     // steps 2-5
     StringBuffer sb(cx);
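
The sweep above shows the pattern the other sweep methods in this patch share: liveness is decided by handing IsAboutToBeFinalized the barriered slot itself rather than get()-ing the value first. A generic sketch, with the enumerator API assumed from the loop above:

    template <typename Enum>
    void SweepWeakEntriesSketch(Enum& e)
    {
        for (; !e.empty(); e.popFront()) {
            // Passing the slot lets the GC update it in place if the cell
            // moved, and no read barrier runs during the sweep.
            if (IsAboutToBeFinalized(&e.mutableFront()))
                e.removeFront();
        }
    }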
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -3376,17 +3376,17 @@ PreliminaryObjectArray::sweep()
         if (*ptr && IsAboutToBeFinalizedUnbarriered(ptr)) {
             // Before we clear this reference, change the object's group to the
             // Object.prototype group. This is done to ensure JSObject::finalize
             // sees a NativeObject Class even if we change the current group's
             // Class to one of the unboxed object classes in the meantime. If
             // the compartment's global is dead, we don't do anything as the
             // group's Class is not going to change in that case.
             JSObject* obj = *ptr;
-            GlobalObject* global = obj->compartment()->maybeGlobal();
+            GlobalObject* global = obj->compartment()->unsafeUnbarrieredMaybeGlobal();
             if (global && !obj->isSingleton()) {
                 JSObject* objectProto = GetBuiltinPrototypePure(global, JSProto_Object);
                 obj->setGroup(objectProto->groupRaw());
                 MOZ_ASSERT(obj->is<NativeObject>());
                 MOZ_ASSERT(obj->getClass() == objectProto->getClass());
                 MOZ_ASSERT(!obj->getClass()->finalize);
             }
 
@@ -4069,17 +4069,18 @@ ConstraintTypeSet::sweep(Zone* zone, Aut
                     *pentry = key;
                 } else {
                     oom.setOOM();
                     flags |= TYPE_FLAG_ANYOBJECT;
                     clearObjects();
                     objectCount = 0;
                     break;
                 }
-            } else if (key->isGroup() && key->group()->unknownPropertiesDontCheckGeneration()) {
+            } else if (key->isGroup() &&
+                       key->groupNoBarrier()->unknownPropertiesDontCheckGeneration()) {
                 // Object sets containing objects with unknown properties might
                 // not be complete. Mark the type set as unknown, which it will
                 // be treated as during Ion compilation.
                 //
                 // Note that we don't have to do this when the type set might
                 // be missing the native group corresponding to an unboxed
                 // object group. In this case, the native group points to the
                 // unboxed object group via its addendum, so as long as objects
@@ -4093,17 +4094,17 @@ ConstraintTypeSet::sweep(Zone* zone, Aut
         setBaseObjectCount(objectCount);
     } else if (objectCount == 1) {
         ObjectKey* key = (ObjectKey*) objectSet;
         if (!IsObjectKeyAboutToBeFinalized(&key)) {
             objectSet = reinterpret_cast<ObjectKey**>(key);
         } else {
             // As above, mark type sets containing objects with unknown
             // properties as unknown.
-            if (key->isGroup() && key->group()->unknownPropertiesDontCheckGeneration())
+            if (key->isGroup() && key->groupNoBarrier()->unknownPropertiesDontCheckGeneration())
                 flags |= TYPE_FLAG_ANYOBJECT;
             objectSet = nullptr;
             setBaseObjectCount(0);
         }
     }
 
     /*
      * Type constraints only hold weak references. Copy constraints referring