Bug 1468867 - Rename heap state checking functions r=sfink
author Jon Coppeard <jcoppeard@mozilla.com>
Fri, 15 Jun 2018 15:05:06 -0700
changeset 479496 bbdcb2e0eb3e4e6184015a95083aab52bfe0d57e
parent 479495 461ae20806865d625e7c842e75d6453bde89445d
child 479497 05070f7aa5bf180a97a76dc23b3ab194f9cd4622
push id 1757
push user ffxbld-merge
push date Fri, 24 Aug 2018 17:02:43 +0000
treeherder mozilla-release@736023aebdb1
reviewers sfink
bugs 1468867
milestone 62.0a1
Bug 1468867 - Rename heap state checking functions r=sfink
js/public/HeapAPI.h
js/src/builtin/intl/SharedIntlData.cpp
js/src/gc/Allocator.cpp
js/src/gc/Barrier.cpp
js/src/gc/Cell.h
js/src/gc/DeletePolicy.h
js/src/gc/GC-inl.h
js/src/gc/GC.cpp
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/PrivateIterators-inl.h
js/src/gc/PublicIterators.cpp
js/src/gc/RootMarking.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Verifier.cpp
js/src/gc/WeakMap.h
js/src/gc/Zone.h
js/src/jit/Ion.cpp
js/src/jit/JSJitFrameIter.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitcodeMap.cpp
js/src/jsapi.cpp
js/src/jspubtd.h
js/src/proxy/Wrapper.cpp
js/src/shell/js.cpp
js/src/vm/BytecodeUtil.cpp
js/src/vm/Compartment.cpp
js/src/vm/JSContext-inl.h
js/src/vm/JSContext.cpp
js/src/vm/Realm.cpp
js/src/vm/RegExpObject.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/TypeInference.cpp
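
The change is a mechanical rename of the heap state query API: JS::CurrentThreadHeapState() becomes JS::RuntimeHeapState(), and each JS::CurrentThreadIsHeap*() predicate becomes the corresponding JS::RuntimeHeapIs*() form, since the state being queried belongs to the runtime's heap rather than to any particular thread. A minimal sketch of how a typical call site changes (illustrative only, mirroring the hunks below rather than quoting any one of them):

    // Before: the name suggested a per-thread property.
    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());

    // After: the same check, renamed to reflect that it reports the state
    // of the runtime's heap.
    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
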
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -586,20 +586,20 @@ namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnTenuredGCThing(const JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing);
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
 
-    // TODO: I'd like to assert !CurrentThreadIsHeapBusy() here but this gets
+    // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
     // called while we are tracing the heap, e.g. during memory reporting
     // (see bug 1313318).
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 static MOZ_ALWAYS_INLINE void
 ExposeGCThingToActiveJS(JS::GCCellPtr thing)
 {
@@ -627,17 +627,17 @@ extern JS_PUBLIC_API(bool)
 EdgeNeedsSweepUnbarrieredSlow(T* thingp);
 
 static MOZ_ALWAYS_INLINE bool
 EdgeNeedsSweepUnbarriered(JSObject** objp)
 {
     // This function does not handle updating nursery pointers. Raw JSObject
     // pointers should be updated separately or replaced with
     // JS::Heap<JSObject*> which handles this automatically.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
     if (IsInsideNursery(reinterpret_cast<Cell*>(*objp)))
         return false;
 
     auto zone = JS::shadow::Zone::asShadowZone(detail::GetGCThingZone(uintptr_t(*objp)));
     if (!zone->isGCSweepingOrCompacting())
         return false;
 
     return EdgeNeedsSweepUnbarrieredSlow(objp);
--- a/js/src/builtin/intl/SharedIntlData.cpp
+++ b/js/src/builtin/intl/SharedIntlData.cpp
@@ -398,17 +398,17 @@ js::intl::SharedIntlData::destroyInstanc
     ianaLinksCanonicalizedDifferentlyByICU.finish();
     upperCaseFirstLocales.finish();
 }
 
 void
 js::intl::SharedIntlData::trace(JSTracer* trc)
 {
     // Atoms are always tenured.
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         availableTimeZones.trace(trc);
         ianaZonesTreatedAsLinksByICU.trace(trc);
         ianaLinksCanonicalizedDifferentlyByICU.trace(trc);
         upperCaseFirstLocales.trace(trc);
     }
 }
 
 size_t
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -279,17 +279,17 @@ GCRuntime::checkAllocatorState(JSContext
                   kind == AllocKind::ATOM ||
                   kind == AllocKind::FAT_INLINE_ATOM ||
                   kind == AllocKind::SYMBOL ||
                   kind == AllocKind::JITCODE ||
                   kind == AllocKind::SCOPE);
     MOZ_ASSERT_IF(!cx->zone()->isAtomsZone(),
                   kind != AllocKind::ATOM &&
                   kind != AllocKind::FAT_INLINE_ATOM);
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(cx->isAllocAllowed());
 #endif
 
     // Crash if we perform a GC action when it is not safe.
     if (allowGC && !cx->suppressGC)
         cx->verifyIsSafeToGC();
 
     // For testing out of memory conditions
@@ -375,17 +375,17 @@ GCRuntime::refillFreeListFromAnyThread(J
 }
 
 /* static */ TenuredCell*
 GCRuntime::refillFreeListFromMainThread(JSContext* cx, AllocKind thingKind)
 {
     // It should not be possible to allocate on the main thread while we are
     // inside a GC.
     Zone *zone = cx->zone();
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy(), "allocating while under GC");
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy(), "allocating while under GC");
 
     return cx->arenas()->allocateFromArena(zone, thingKind, ShouldCheckThresholds::CheckThresholds);
 }
 
 /* static */ TenuredCell*
 GCRuntime::refillFreeListFromHelperThread(JSContext* cx, AllocKind thingKind)
 {
     // A GC may be happening on the main thread, but zones used by off thread
@@ -400,18 +400,18 @@ GCRuntime::refillFreeListFromHelperThrea
 GCRuntime::refillFreeListInGC(Zone* zone, AllocKind thingKind)
 {
     /*
      * Called by compacting GC to refill a free list while we are in a GC.
      */
 
     zone->arenas.checkEmptyFreeList(thingKind);
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
-    MOZ_ASSERT_IF(!JS::CurrentThreadIsHeapMinorCollecting(), !rt->gc.isBackgroundSweeping());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
+    MOZ_ASSERT_IF(!JS::RuntimeHeapIsMinorCollecting(), !rt->gc.isBackgroundSweeping());
 
     return zone->arenas.allocateFromArena(zone, thingKind, ShouldCheckThresholds::DontCheckThresholds);
 }
 
 TenuredCell*
 ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
                               ShouldCheckThresholds checkThresholds)
 {
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -19,17 +19,17 @@
 #include "wasm/WasmJS.h"
 
 namespace js {
 
 bool
 RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromMainThread()));
-    return JS::CurrentThreadIsHeapMajorCollecting();
+    return JS::RuntimeHeapIsMajorCollecting();
 }
 
 #ifdef DEBUG
 
 bool
 IsMarkedBlack(JSObject* obj)
 {
     return obj->isMarkedBlack();
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -388,17 +388,17 @@ TenuredCell::readBarrier(TenuredCell* th
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == thing);
     }
 
     if (thing->isMarkedGray()) {
         // There shouldn't be anything marked grey unless we're on the main thread.
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()));
-        if (!JS::CurrentThreadIsHeapCollecting())
+        if (!JS::RuntimeHeapIsCollecting())
             JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(thing, thing->getTraceKind()));
     }
 }
 
 void
 AssertSafeToSkipBarrier(TenuredCell* thing);
 
 /* static */ MOZ_ALWAYS_INLINE void
--- a/js/src/gc/DeletePolicy.h
+++ b/js/src/gc/DeletePolicy.h
@@ -69,17 +69,17 @@ IsClearEdgesTracer(JSTracer *trc)
  * into the object and make it safe to delete.
  */
 template <typename T>
 struct GCManagedDeletePolicy
 {
     void operator()(const T* constPtr) {
         if (constPtr) {
             auto ptr = const_cast<T*>(constPtr);
-            if (JS::CurrentThreadIsHeapCollecting()) {
+            if (JS::RuntimeHeapIsCollecting()) {
                 MOZ_ASSERT(js::CurrentThreadIsGCSweeping());
                 // Do not attempt to clear out storebuffer edges.
             } else {
                 gc::ClearEdgesTracer trc;
                 ptr->trace(&trc);
             }
             js_delete(ptr);
         }
--- a/js/src/gc/GC-inl.h
+++ b/js/src/gc/GC-inl.h
@@ -125,17 +125,17 @@ class ArenaCellIterImpl
     void init(Arena* arena, CellIterNeedsBarrier mayNeedBarrier) {
         MOZ_ASSERT(!initialized);
         MOZ_ASSERT(arena);
         initialized = true;
         AllocKind kind = arena->getAllocKind();
         firstThingOffset = Arena::firstThingOffset(kind);
         thingSize = Arena::thingSize(kind);
         traceKind = MapAllocToTraceKind(kind);
-        needsBarrier = mayNeedBarrier && !JS::CurrentThreadIsHeapCollecting();
+        needsBarrier = mayNeedBarrier && !JS::RuntimeHeapIsCollecting();
         reset(arena);
     }
 
     // Use this to move from an Arena of a particular kind to another Arena of
     // the same kind.
     void reset(Arena* arena) {
         MOZ_ASSERT(initialized);
         MOZ_ASSERT(arena);
@@ -183,17 +183,17 @@ JSObject*
 ArenaCellIterImpl::get<JSObject>() const;
 
 class ArenaCellIter : public ArenaCellIterImpl
 {
   public:
     explicit ArenaCellIter(Arena* arena)
       : ArenaCellIterImpl(arena, CellIterMayNeedBarrier)
     {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapTracing());
+        MOZ_ASSERT(JS::RuntimeHeapIsTracing());
     }
 };
 
 template <typename T>
 class ZoneCellIter;
 
 template <>
 class ZoneCellIter<TenuredCell> {
@@ -212,17 +212,17 @@ class ZoneCellIter<TenuredCell> {
         initForTenuredIteration(zone, kind);
     }
 
     void initForTenuredIteration(JS::Zone* zone, AllocKind kind) {
         JSRuntime* rt = zone->runtimeFromAnyThread();
 
         // If called from outside a GC, ensure that the heap is in a state
         // that allows us to iterate.
-        if (!JS::CurrentThreadIsHeapBusy()) {
+        if (!JS::RuntimeHeapIsBusy()) {
             // Assert that no GCs can occur while a ZoneCellIter is live.
             nogc.emplace();
         }
 
         // We have a single-threaded runtime, so there's no need to protect
         // against other threads iterating or allocating. However, we do have
         // background finalization; we may have to wait for this to finish if
         // it's currently active.
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -1729,17 +1729,17 @@ GCRuntime::getParameter(JSGCParamKey key
         MOZ_ASSERT(key == JSGC_NUMBER);
         return uint32_t(number);
     }
 }
 
 void
 GCRuntime::setMarkStackLimit(size_t limit, AutoLockGC& lock)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     AutoUnlockGC unlock(lock);
     AutoStopVerifyingBarriers pauseVerification(rt, false);
     marker.setMaxCapacity(limit);
 }
 
 bool
 GCRuntime::addBlackRootsTracer(JSTraceDataOp traceOp, void* data)
 {
@@ -3325,34 +3325,34 @@ GCRuntime::triggerGC(JS::gcreason::Reaso
     /*
      * Don't trigger GCs if this is being called off the main thread from
      * onTooMuchMalloc().
      */
     if (!CurrentThreadCanAccessRuntime(rt))
         return false;
 
     /* GC is already running. */
-    if (JS::CurrentThreadIsHeapCollecting())
+    if (JS::RuntimeHeapIsCollecting())
         return false;
 
     JS::PrepareForFullGC(rt->mainContextFromOwnThread());
     requestMajorGC(reason);
     return true;
 }
 
 void
 GCRuntime::maybeAllocTriggerZoneGC(Zone* zone, const AutoLockGC& lock)
 {
     if (!CurrentThreadCanAccessRuntime(rt)) {
         // Zones in use by a helper thread can't be collected.
         MOZ_ASSERT(zone->usedByHelperThread() || zone->isAtomsZone());
         return;
     }
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     size_t usedBytes = zone->usage.gcBytes();
     size_t thresholdBytes = zone->threshold.gcTriggerBytes();
 
     if (usedBytes >= thresholdBytes) {
         // The threshold has been surpassed, immediately trigger a GC, which
         // will be done non-incrementally.
         triggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER, usedBytes, thresholdBytes);
@@ -3389,17 +3389,17 @@ GCRuntime::maybeAllocTriggerZoneGC(Zone*
 }
 
 bool
 GCRuntime::triggerZoneGC(Zone* zone, JS::gcreason::Reason reason, size_t used, size_t threshold)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     /* GC is already running. */
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return false;
 
 #ifdef JS_GC_ZEAL
     if (hasZealMode(ZealMode::Alloc)) {
         MOZ_RELEASE_ASSERT(triggerGC(reason));
         return true;
     }
 #endif
@@ -3449,17 +3449,17 @@ GCRuntime::maybeGC(Zone* zone)
     }
 }
 
 void
 GCRuntime::triggerFullGCForAtoms(JSContext* cx)
 {
     MOZ_ASSERT(fullGCForAtomsRequested_);
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(cx->canCollectAtoms());
     fullGCForAtomsRequested_ = false;
     MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::DELAYED_ATOMS_GC));
 }
 
 // Do all possible decommit immediately from the current thread without
 // releasing the GC lock or allocating any memory.
 void
@@ -3700,25 +3700,25 @@ GCRuntime::queueZonesForBackgroundSweep(
     AutoLockGC lock(rt);
     backgroundSweepZones.ref().transferFrom(zones);
     helperState.maybeStartBackgroundSweep(lock, helperLock);
 }
 
 void
 GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     AutoLockGC lock(rt);
     blocksToFreeAfterSweeping.ref().transferUnusedFrom(lifo);
 }
 
 void
 GCRuntime::freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     AutoLockGC lock(rt);
     blocksToFreeAfterSweeping.ref().transferFrom(lifo);
 }
 
 void
 GCRuntime::freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo)
 {
     blocksToFreeAfterMinorGC.ref().transferFrom(lifo);
@@ -5888,17 +5888,17 @@ GCRuntime::endSweepingSweepGroup(FreeOp*
 }
 
 void
 GCRuntime::beginSweepPhase(JS::gcreason::Reason reason, AutoTraceSession& session)
 {
     /*
      * Sweep phase.
      *
-     * Finalize as we sweep, outside of lock but with CurrentThreadIsHeapBusy()
+     * Finalize as we sweep, outside of lock but with RuntimeHeapIsBusy()
      * true so that any attempt to allocate a GC-thing from a finalizer will
      * fail, rather than nest badly and leave the unmarked newborn to be swept.
      */
 
     MOZ_ASSERT(!abortSweepAfterCurrentGroup);
 
     AutoSetThreadIsSweeping threadIsSweeping;
 
@@ -6888,22 +6888,22 @@ AutoTraceSession::AutoTraceSession(JSRun
     // Session always begins with lock held, see comment in class definition.
     maybeLock.emplace(rt);
 
     rt->heapState_ = heapState;
 }
 
 AutoTraceSession::~AutoTraceSession()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     runtime->heapState_ = prevState;
 }
 
 JS_PUBLIC_API(JS::HeapState)
-JS::CurrentThreadHeapState()
+JS::RuntimeHeapState()
 {
     return TlsContext.get()->runtime()->heapState();
 }
 
 GCRuntime::IncrementalResult
 GCRuntime::resetIncrementalGC(gc::AbortReason reason, AutoTraceSession& session)
 {
     MOZ_ASSERT(reason != gc::AbortReason::None);
@@ -7633,17 +7633,17 @@ GCRuntime::maybeDoCycleCollection()
 }
 
 void
 GCRuntime::checkCanCallAPI()
 {
     MOZ_RELEASE_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     /* If we attempt to invoke the GC while we are running in the GC, assert. */
-    MOZ_RELEASE_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_RELEASE_ASSERT(!JS::RuntimeHeapIsBusy());
 
     MOZ_ASSERT(rt->mainContextFromOwnThread()->isAllocAllowed());
 }
 
 bool
 GCRuntime::checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason)
 {
     if (rt->mainContextFromOwnThread()->suppressGC)
@@ -7890,17 +7890,17 @@ GCRuntime::onOutOfMallocMemory(const Aut
     // might let the OS scrape together enough pages to satisfy the failing
     // malloc request.
     decommitAllWithoutUnlocking(lock);
 }
 
 void
 GCRuntime::minorGC(JS::gcreason::Reason reason, gcstats::PhaseKind phase)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 
     MOZ_ASSERT_IF(reason == JS::gcreason::EVICT_NURSERY,
                   !rt->mainContextFromOwnThread()->suppressGC);
     if (rt->mainContextFromOwnThread()->suppressGC)
         return;
 
     gcstats::AutoPhase ap(rt->gc.stats(), phase);
 
@@ -8289,17 +8289,17 @@ GCRuntime::runDebugGC()
     }
 
 #endif
 }
 
 void
 GCRuntime::setFullCompartmentChecks(bool enabled)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     fullCompartmentChecks = enabled;
 }
 
 void
 GCRuntime::notifyRootsRemoved()
 {
     rootsRemoved = true;
 
@@ -8309,30 +8309,30 @@ GCRuntime::notifyRootsRemoved()
         nextScheduled = 1;
 #endif
 }
 
 #ifdef JS_GC_ZEAL
 bool
 GCRuntime::selectForMarking(JSObject* object)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     return selectedForMarking.ref().append(object);
 }
 
 void
 GCRuntime::clearSelectedForMarking()
 {
     selectedForMarking.ref().clearAndFree();
 }
 
 void
 GCRuntime::setDeterministic(bool enabled)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     deterministicOnly = enabled;
 }
 #endif
 
 #ifdef ENABLE_WASM_GC
 /* static */ bool
 GCRuntime::temporaryAbortIfWasmGc(JSContext* cx) {
     return cx->options().wasmGc() && cx->suppressGC;
@@ -8497,30 +8497,30 @@ AutoAssertNoNurseryAlloc::~AutoAssertNoN
 {
     TlsContext.get()->allowNurseryAlloc();
 }
 
 JS::AutoEnterCycleCollection::AutoEnterCycleCollection(JSRuntime* rt)
   : runtime_(rt)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     runtime_->heapState_ = HeapState::CycleCollecting;
 }
 
 JS::AutoEnterCycleCollection::~AutoEnterCycleCollection()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCycleCollecting());
     runtime_->heapState_ = HeapState::Idle;
 }
 
 JS::AutoAssertGCCallback::AutoAssertGCCallback()
   : AutoSuppressGCAnalysis()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
 }
 
 #endif // DEBUG
 
 JS_FRIEND_API(const char*)
 JS::GCTraceKindToAscii(JS::TraceKind kind)
 {
     switch(kind) {
@@ -8805,44 +8805,44 @@ JS_PUBLIC_API(bool)
 JS::IsIncrementalGCInProgress(JSRuntime* rt)
 {
     return rt->gc.isIncrementalGCInProgress() && !rt->gc.isVerifyPreBarriersEnabled();
 }
 
 JS_PUBLIC_API(bool)
 JS::IsIncrementalBarrierNeeded(JSContext* cx)
 {
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return false;
 
     auto state = cx->runtime()->gc.state();
     return state != gc::State::NotActive && state <= gc::State::Sweep;
 }
 
 JS_PUBLIC_API(void)
 JS::IncrementalPreWriteBarrier(JSObject* obj)
 {
     if (!obj)
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     JSObject::writeBarrierPre(obj);
 }
 
 struct IncrementalReadBarrierFunctor {
     template <typename T> void operator()(T* t) { T::readBarrier(t); }
 };
 
 JS_PUBLIC_API(void)
 JS::IncrementalReadBarrier(GCCellPtr thing)
 {
     if (!thing)
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     DispatchTyped(IncrementalReadBarrierFunctor(), thing);
 }
 
 JS_PUBLIC_API(bool)
 JS::WasIncrementalGC(JSRuntime* rt)
 {
     return rt->gc.isIncrementalGc();
 }
@@ -9200,17 +9200,17 @@ js::gc::detail::CellIsNotGray(const Cell
     // of cells that will be marked black by the next GC slice in an incremental
     // GC. For performance reasons we don't do this in CellIsMarkedGrayIfKnown.
 
     if (!CanCheckGrayBits(cell))
         return true;
 
     // TODO: I'd like to AssertHeapIsIdle() here, but this ends up getting
     // called during GC and while iterating the heap for memory reporting.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
 
     auto tc = &cell->asTenured();
     if (!detail::CellIsMarkedGray(tc))
         return true;
 
     // The cell is gray, but may eventually be marked black if we are in an
     // incremental GC and the cell is reachable by something on the mark stack.
 
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -263,17 +263,17 @@ js::CheckTracedThing(JSTracer* trc, T* t
      * if it has not then we assume it is allocated, but if it has then it is
      * either free or uninitialized in which case we check the free list.
      *
      * Further complications are that background sweeping may be running and
      * concurrently modifying the free list and that tracing is done off
      * thread during compacting GC and reading the contents of the thing by
      * IsThingPoisoned would be racy in this case.
      */
-    MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy() &&
+    MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy() &&
                   !zone->isGCCompacting() &&
                   !rt->gc.isBackgroundSweeping(),
                   !IsThingPoisoned(thing) || !InFreeList(thing->asTenured().arena(), thing));
 #endif
 }
 
 template <typename S>
 struct CheckTracedFunctor : public VoidDefaultAdaptor<S> {
@@ -2570,17 +2570,17 @@ GCMarker::sizeOfExcludingThis(mozilla::M
         size += zone->gcGrayRoots().sizeOfExcludingThis(mallocSizeOf);
     return size;
 }
 
 #ifdef DEBUG
 Zone*
 GCMarker::stackContainsCrossZonePointerTo(const Cell* target) const
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     Zone* targetZone = target->asTenured().zone();
 
     for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
         if (iter.peekTag() != MarkStack::ObjectTag)
             continue;
 
         auto source = iter.peekPtr().as<JSObject>();
@@ -3209,17 +3209,17 @@ CheckIsMarkedThing(T* thingp)
 
 #ifdef DEBUG
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
                   CurrentThreadCanAccessRuntime(rt) ||
                   CurrentThreadCanAccessZone((*thingp)->zoneFromAnyThread()) ||
-                  (JS::CurrentThreadIsHeapCollecting() && rt->gc.state() == State::Sweep));
+                  (JS::RuntimeHeapIsCollecting() && rt->gc.state() == State::Sweep));
 #endif
 }
 
 template <typename T>
 static bool
 IsMarkedInternalCommon(T* thingp)
 {
     CheckIsMarkedThing(thingp);
@@ -3294,17 +3294,17 @@ js::gc::IsAboutToBeFinalizedInternal(T**
     T* thing = *thingp;
     JSRuntime* rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
     if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && TlsContext.get()->runtime() != rt)
         return false;
 
     if (IsInsideNursery(thing)) {
-        return JS::CurrentThreadIsHeapMinorCollecting() &&
+        return JS::RuntimeHeapIsMinorCollecting() &&
                !Nursery::getForwardedPointer(reinterpret_cast<Cell**>(thingp));
     }
 
     Zone* zone = thing->asTenured().zoneFromAnyThread();
     if (zone->isGCSweeping()) {
         return IsAboutToBeFinalizedDuringSweep(thing->asTenured());
     } else if (zone->isGCCompacting() && IsForwarded(thing)) {
         *thingp = Forwarded(thing);
@@ -3509,18 +3509,18 @@ UnmarkGrayGCThing(JSRuntime* rt, JS::GCC
     gcstats::AutoPhase innerPhase(rt->gc.stats(), gcstats::PhaseKind::UNMARK_GRAY);
     unmarker.unmark(thing);
     return unmarker.unmarkedAny;
 }
 
 JS_FRIEND_API(bool)
 JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
 
     JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
     gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
     return UnmarkGrayGCThing(rt, thing);
 }
 
 bool
 js::UnmarkGrayShapeRecursively(Shape* shape)
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -349,17 +349,17 @@ js::Nursery::allocateString(Zone* zone, 
     gcTracer.traceNurseryAlloc(cell, kind);
     return cell;
 }
 
 void*
 js::Nursery::allocate(size_t size)
 {
     MOZ_ASSERT(isEnabled());
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
     MOZ_ASSERT_IF(currentChunk_ == currentStartChunk_, position() >= currentStartPosition_);
     MOZ_ASSERT(position() % CellAlignBytes == 0);
     MOZ_ASSERT(size % CellAlignBytes == 0);
 
 #ifdef JS_GC_ZEAL
     static const size_t CanarySize = (sizeof(Nursery::Canary) + CellAlignBytes - 1) & ~CellAlignMask;
     if (runtime()->gc.hasZealMode(ZealMode::CheckNursery))
--- a/js/src/gc/PrivateIterators-inl.h
+++ b/js/src/gc/PrivateIterators-inl.h
@@ -58,17 +58,17 @@ class GrayObjectIter : public ZoneCellIt
 };
 
 class GCZonesIter
 {
     ZonesIter zone;
 
   public:
     explicit GCZonesIter(JSRuntime* rt, ZoneSelector selector = WithAtoms) : zone(rt, selector) {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(JS::RuntimeHeapIsBusy());
         MOZ_ASSERT_IF(rt->gc.atomsZone->isCollectingFromAnyThread(),
                       !rt->hasHelperThreadZones());
 
         if (!done() && !zone->isCollectingFromAnyThread())
             next();
     }
 
     bool done() const { return zone.done(); }
--- a/js/src/gc/PublicIterators.cpp
+++ b/js/src/gc/PublicIterators.cpp
@@ -115,26 +115,26 @@ IterateGrayObjects(Zone* zone, GCThingCa
                 cellCallback(data, JS::GCCellPtr(obj.get()));
         }
     }
 }
 
 void
 js::IterateGrayObjects(Zone* zone, GCThingCallback cellCallback, void* data)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     AutoPrepareForTracing prep(TlsContext.get());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
 
 void
 js::IterateGrayObjectsUnderCC(Zone* zone, GCThingCallback cellCallback, void* data)
 {
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCycleCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
 
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSContext* cx, void* data,
                        JSIterateCompartmentCallback compartmentCallback)
 {
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -384,17 +384,17 @@ js::gc::GCRuntime::traceRuntimeCommon(JS
     // parent pointer if traceRoots actually traces anything.
     for (RealmsIter r(rt); !r.done(); r.next())
         r->traceRoots(trc, traceOrMark);
 
     // Trace helper thread roots.
     HelperThreadState().trace(trc, session);
 
     // Trace the embedding's black and gray roots.
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_EMBEDDING);
 
         /*
          * The embedding can register additional roots here.
          *
          * We don't need to trace these in a minor GC because all pointers into
          * the nursery should be in the store buffer, and we want to avoid the
          * time taken to trace all these roots.
@@ -523,17 +523,17 @@ js::gc::GCRuntime::bufferGrayRoots()
       grayBufferState = GrayBufferState::Okay;
     }
 }
 
 template <typename T>
 inline void
 BufferGrayRootsTracer::bufferRoot(T* thing)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(thing);
     // Check if |thing| is corrupt by calling a method that touches the heap.
     MOZ_ASSERT(thing->getTraceKind() <= JS::TraceKind::Null);
 
     TenuredCell* tenured = &thing->asTenured();
 
     // This is run from a helper thread while the mutator is paused so we have
     // to use *FromAnyThread methods here.
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -401,27 +401,27 @@ class StoreBuffer
             static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; }
         } Hasher;
 
         static const auto FullBufferReason = JS::gcreason::FULL_SLOT_BUFFER;
     };
 
     template <typename Buffer, typename Edge>
     void unput(Buffer& buffer, const Edge& edge) {
-        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
         if (!isEnabled())
             return;
         mozilla::ReentrancyGuard g(*this);
         buffer.unput(this, edge);
     }
 
     template <typename Buffer, typename Edge>
     void put(Buffer& buffer, const Edge& edge) {
-        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
         if (!isEnabled())
             return;
         mozilla::ReentrancyGuard g(*this);
         if (edge.maybeInRememberedSet(nursery_))
             buffer.put(this, edge);
     }
 
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -713,17 +713,17 @@ CheckGrayMarkingTracer::check(AutoTraceS
         return true; // Ignore failure.
 
     return failures == 0;
 }
 
 JS_FRIEND_API(bool)
 js::CheckGrayMarkingState(JSRuntime* rt)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     if (!rt->gc.areGrayBitsValid())
         return true;
 
     gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
     AutoTraceSession session(rt, JS::HeapState::Tracing);
     CheckGrayMarkingTracer tracer(rt);
     if (!tracer.init())
--- a/js/src/gc/WeakMap.h
+++ b/js/src/gc/WeakMap.h
@@ -195,17 +195,17 @@ class WeakMap : public HashMap<Key, Valu
             TraceEdge(marker, &p->value(), "WeakMap ephemeron value");
             TraceEdge(marker, &key, "proxy-preserved WeakMap ephemeron key");
             MOZ_ASSERT(key == p->key()); // No moving
         }
         key.unsafeSet(nullptr); // Prevent destructor from running barriers.
     }
 
     void trace(JSTracer* trc) override {
-        MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy(), isInList());
+        MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), isInList());
 
         TraceNullableEdge(trc, &memberOf, "WeakMap owner");
 
         if (!Base::initialized())
             return;
 
         if (trc->isMarkingTracer()) {
             MOZ_ASSERT(trc->weakMapAction() == ExpandWeakMaps);
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -221,51 +221,51 @@ class Zone : public JS::shadow::Zone,
         return runtimeFromMainThread()->onOutOfMemory(allocFunc, nbytes, reallocPtr);
     }
     void reportAllocationOverflow() { js::ReportAllocationOverflow(nullptr); }
 
     void beginSweepTypes(bool releaseTypes);
 
     bool hasMarkedRealms();
 
-    void scheduleGC() { MOZ_ASSERT(!CurrentThreadIsHeapBusy()); gcScheduled_ = true; }
+    void scheduleGC() { MOZ_ASSERT(!RuntimeHeapIsBusy()); gcScheduled_ = true; }
     void unscheduleGC() { gcScheduled_ = false; }
     bool isGCScheduled() { return gcScheduled_; }
 
     void setPreservingCode(bool preserving) { gcPreserveCode_ = preserving; }
     bool isPreservingCode() const { return gcPreserveCode_; }
 
     // Whether this zone can currently be collected. This doesn't take account
     // of AutoKeepAtoms for the atoms zone.
     bool canCollect();
 
     void changeGCState(GCState prev, GCState next) {
-        MOZ_ASSERT(CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(RuntimeHeapIsBusy());
         MOZ_ASSERT(gcState() == prev);
         MOZ_ASSERT_IF(next != NoGC, canCollect());
         gcState_ = next;
     }
 
     bool isCollecting() const {
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromMainThread()));
         return isCollectingFromAnyThread();
     }
 
     bool isCollectingFromAnyThread() const {
-        if (CurrentThreadIsHeapCollecting())
+        if (RuntimeHeapIsCollecting())
             return gcState_ != NoGC;
         else
             return needsIncrementalBarrier();
     }
 
     // If this returns true, all object tracing must be done with a GC marking
     // tracer.
     bool requireGCTracer() const {
         JSRuntime* rt = runtimeFromAnyThread();
-        return CurrentThreadIsHeapMajorCollecting() && !rt->gc.isHeapCompacting() && gcState_ != NoGC;
+        return RuntimeHeapIsMajorCollecting() && !rt->gc.isHeapCompacting() && gcState_ != NoGC;
     }
 
     bool shouldMarkInZone() const {
         return needsIncrementalBarrier() || isGCMarking();
     }
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -579,17 +579,17 @@ jit::LazyLinkTopActivation(JSContext* cx
     MOZ_ASSERT(calleeScript->jitCodeRaw());
 
     return calleeScript->jitCodeRaw();
 }
 
 /* static */ void
 JitRuntime::Trace(JSTracer* trc, AutoLockForExclusiveAccess& lock)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 
     // Shared stubs are allocated in the atoms zone, so do not iterate
     // them after the atoms heap has been "finished."
     if (trc->runtime()->atomsAreFinished())
         return;
 
     Zone* zone = trc->runtime()->atomsZone(lock);
     for (auto i = zone->cellIter<JitCode>(); !i.done(); i.next()) {
@@ -773,17 +773,17 @@ JitCode::traceChildren(JSTracer* trc)
 
     if (jumpRelocTableBytes_) {
         uint8_t* start = code_ + jumpRelocTableOffset();
         CompactBufferReader reader(start, start + jumpRelocTableBytes_);
         MacroAssembler::TraceJumpRelocations(trc, this, reader);
     }
     if (dataRelocTableBytes_) {
         // If we're moving objects, we need writable JIT code.
-        bool movingObjects = JS::CurrentThreadIsHeapMinorCollecting() || zone()->isGCCompacting();
+        bool movingObjects = JS::RuntimeHeapIsMinorCollecting() || zone()->isGCCompacting();
         MaybeAutoWritableJitCode awjc(this, movingObjects ? Reprotect : DontReprotect);
 
         uint8_t* start = code_ + dataRelocTableOffset();
         CompactBufferReader reader(start, start + dataRelocTableBytes_);
         MacroAssembler::TraceDataRelocations(trc, this, reader);
     }
 }
 
--- a/js/src/jit/JSJitFrameIter.cpp
+++ b/js/src/jit/JSJitFrameIter.cpp
@@ -404,17 +404,17 @@ JSJitFrameIter::verifyReturnAddressUsing
     // Don't verify while off thread.
     if (!CurrentThreadCanAccessRuntime(rt))
         return true;
 
     // Don't verify if sampling is being suppressed.
     if (!TlsContext.get()->isProfilerSamplingEnabled())
         return true;
 
-    if (JS::CurrentThreadIsHeapMinorCollecting())
+    if (JS::RuntimeHeapIsMinorCollecting())
         return true;
 
     JitRuntime* jitrt = rt->jitRuntime();
 
     // Look up and print bytecode info for the native address.
     const JitcodeGlobalEntry* entry = jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_);
     if (!entry)
         return true;
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1335,17 +1335,17 @@ TraceJitActivations(JSContext* cx, JSTra
 {
     for (JitActivationIterator activations(cx); !activations.done(); ++activations)
         TraceJitActivation(trc, activations->asJit());
 }
 
 void
 UpdateJitActivationsForMinorGC(JSRuntime* rt)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
     JSContext* cx = rt->mainContextFromOwnThread();
     for (JitActivationIterator activations(cx); !activations.done(); ++activations) {
         for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter) {
             if (iter.frame().type() == JitFrame_IonJS)
                 UpdateIonJSFrameForMinorGC(rt, iter.frame());
         }
     }
 }
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -735,17 +735,17 @@ struct Unconditionally
 };
 
 void
 JitcodeGlobalTable::traceForMinorGC(JSTracer* trc)
 {
     // Trace only entries that can directly contain nursery pointers.
 
     MOZ_ASSERT(trc->runtime()->geckoProfiler().enabled());
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
 
     JSContext* cx = trc->runtime()->mainContextFromOwnThread();
     AutoSuppressProfilerSampling suppressSampling(cx);
     JitcodeGlobalEntry::IonEntry* entry = nurseryEntries_;
     while (entry) {
         entry->trace<Unconditionally>(trc);
         JitcodeGlobalEntry::IonEntry* prev = entry;
         entry = entry->nextNursery_;
@@ -785,17 +785,17 @@ JitcodeGlobalTable::markIteratively(GCMa
     // the frame was on-stack at the beginning of the sweep phase, or 2) the
     // frame was pushed between incremental sweep slices. Frames of case 1)
     // are already marked. Frames of case 2) must have been reachable to have
     // been newly pushed, and thus are already marked.
     //
     // The approach above obviates the need for read barriers. The assumption
     // above is checked in JitcodeGlobalTable::lookupForSampler.
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 
     AutoSuppressProfilerSampling suppressSampling(TlsContext.get());
 
     // If the profiler is off, rangeStart will be Nothing() and all entries are
     // considered to be expired.
     Maybe<uint64_t> rangeStart = marker->runtime()->profilerSampleBufferRangeStart();
 
     bool markedAny = false;
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -318,35 +318,35 @@ JS_GetEmptyString(JSContext* cx)
     return cx->emptyString();
 }
 
 namespace js {
 
 void
 AssertHeapIsIdle()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 }
 
 } // namespace js
 
 static void
 AssertHeapIsIdleOrIterating()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 }
 
 static void
 AssertHeapIsIdleOrStringIsFlat(JSString* str)
 {
     /*
      * We allow some functions to be called during a GC as long as the argument
      * is a flat string, since that will not cause allocation.
      */
-    MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy(), str->isFlat());
+    MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), str->isFlat());
 }
 
 JS_PUBLIC_API(bool)
 JS_ValueToObject(JSContext* cx, HandleValue value, MutableHandleObject objp)
 {
     AssertHeapIsIdle();
     CHECK_REQUEST(cx);
     assertSameCompartment(cx, value);
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -113,53 +113,53 @@ enum class HeapState {
     Idle,             // doing nothing with the GC heap
     Tracing,          // tracing the GC heap without collecting, e.g. IterateCompartments()
     MajorCollecting,  // doing a GC of the major heap
     MinorCollecting,  // doing a GC of the minor heap (nursery)
     CycleCollecting   // in the "Unlink" phase of cycle collection
 };
 
 JS_PUBLIC_API(HeapState)
-CurrentThreadHeapState();
+RuntimeHeapState();
 
 static inline bool
-CurrentThreadIsHeapBusy()
+RuntimeHeapIsBusy()
 {
-    return CurrentThreadHeapState() != HeapState::Idle;
+    return RuntimeHeapState() != HeapState::Idle;
 }
 
 static inline bool
-CurrentThreadIsHeapTracing()
+RuntimeHeapIsTracing()
 {
-    return CurrentThreadHeapState() == HeapState::Tracing;
+    return RuntimeHeapState() == HeapState::Tracing;
 }
 
 static inline bool
-CurrentThreadIsHeapMajorCollecting()
+RuntimeHeapIsMajorCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::MajorCollecting;
+    return RuntimeHeapState() == HeapState::MajorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapMinorCollecting()
+RuntimeHeapIsMinorCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::MinorCollecting;
+    return RuntimeHeapState() == HeapState::MinorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapCollecting()
+RuntimeHeapIsCollecting()
 {
-    HeapState state = CurrentThreadHeapState();
+    HeapState state = RuntimeHeapState();
     return state == HeapState::MajorCollecting || state == HeapState::MinorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapCycleCollecting()
+RuntimeHeapIsCycleCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::CycleCollecting;
+    return RuntimeHeapState() == HeapState::CycleCollecting;
 }
 
 // Decorates the Unlinking phase of CycleCollection so that accidental use
 // of barriered accessors results in assertions instead of leaks.
 class MOZ_STACK_CLASS JS_PUBLIC_API(AutoEnterCycleCollection)
 {
 #ifdef DEBUG
     JSRuntime* runtime_;
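
The jspubtd.h hunk above carries the substance of the rename: the HeapState enum is unchanged, and each renamed predicate remains a thin inline wrapper around RuntimeHeapState(), which, as the GC.cpp hunk shows, reads the heap state stored on the JSRuntime via the current thread's context. As an illustration only (a hypothetical helper written in the style of the assertions touched by this patch, not code from it), the renamed predicates compose like this:

    #include "jspubtd.h"
    #include "mozilla/Assertions.h"

    // Hypothetical helper: assert that it is safe to walk the heap. Tracing
    // (e.g. IterateCompartments) is allowed, but a major or minor collection,
    // or the cycle-collector Unlink phase, is not.
    static void AssertSafeToIterateHeap()
    {
        MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
        MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
    }
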
--- a/js/src/proxy/Wrapper.cpp
+++ b/js/src/proxy/Wrapper.cpp
@@ -370,17 +370,17 @@ js::UncheckedUnwrapWithoutExpose(JSObjec
             wrapped = MaybeForwarded(wrapped);
     }
     return wrapped;
 }
 
 JS_FRIEND_API(JSObject*)
 js::UncheckedUnwrap(JSObject* wrapped, bool stopAtWindowProxy, unsigned* flagsp)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(wrapped->runtimeFromAnyThread()));
 
     unsigned flags = 0;
     while (true) {
         if (!wrapped->is<WrapperObject>() ||
             MOZ_UNLIKELY(stopAtWindowProxy && IsWindowProxy(wrapped)))
         {
             break;
@@ -402,17 +402,17 @@ js::CheckedUnwrap(JSObject* obj, bool st
         if (!obj || obj == wrapper)
             return obj;
     }
 }
 
 JS_FRIEND_API(JSObject*)
 js::UnwrapOneChecked(JSObject* obj, bool stopAtWindowProxy)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(obj->runtimeFromAnyThread()));
 
     if (!obj->is<WrapperObject>() ||
         MOZ_UNLIKELY(stopAtWindowProxy && IsWindowProxy(obj)))
     {
         return obj;
     }
 
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1658,17 +1658,17 @@ ParseCompileOptions(JSContext* cx, Compi
 
 static void
 my_LargeAllocFailCallback()
 {
     JSContext* cx = TlsContext.get();
     if (!cx || cx->helperThread())
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 
     JS::PrepareForFullGC(cx);
     cx->runtime()->gc.gc(GC_NORMAL, JS::gcreason::SHARED_MEMORY_LIMIT);
 }
 
 static const uint32_t CacheEntry_SOURCE = 0;
 static const uint32_t CacheEntry_BYTECODE = 1;
 
--- a/js/src/vm/BytecodeUtil.cpp
+++ b/js/src/vm/BytecodeUtil.cpp
@@ -1144,17 +1144,17 @@ ToDisassemblySource(JSContext* cx, Handl
         if (!copy) {
             ReportOutOfMemory(cx);
             return false;
         }
         bytes->initBytes(std::move(copy));
         return true;
     }
 
-    if (JS::CurrentThreadIsHeapBusy() || !cx->isAllocAllowed()) {
+    if (JS::RuntimeHeapIsBusy() || !cx->isAllocAllowed()) {
         UniqueChars source = JS_smprintf("<value>");
         if (!source) {
             ReportOutOfMemory(cx);
             return false;
         }
         bytes->initBytes(std::move(source));
         return true;
     }
--- a/js/src/vm/Compartment.cpp
+++ b/js/src/vm/Compartment.cpp
@@ -380,17 +380,17 @@ Compartment::wrap(JSContext* cx, Mutable
             return false;
     }
     return true;
 }
 
 void
 Compartment::traceOutgoingCrossCompartmentWrappers(JSTracer* trc)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
     MOZ_ASSERT(!zone()->isCollectingFromAnyThread() || trc->runtime()->gc.isHeapCompacting());
 
     for (NonStringWrapperEnum e(this); !e.empty(); e.popFront()) {
         if (e.front().key().is<JSObject*>()) {
             Value v = e.front().value().unbarrieredGet();
             ProxyObject* wrapper = &v.toObject().as<ProxyObject>();
 
             /*
@@ -401,17 +401,17 @@ Compartment::traceOutgoingCrossCompartme
         }
     }
 }
 
 /* static */ void
 Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(JSTracer* trc)
 {
     gcstats::AutoPhase ap(trc->runtime()->gc.stats(), gcstats::PhaseKind::MARK_CCWS);
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
     for (CompartmentsIter c(trc->runtime()); !c.done(); c.next()) {
         if (!c->zone()->isCollecting())
             c->traceOutgoingCrossCompartmentWrappers(trc);
     }
     Debugger::traceIncomingCrossCompartmentEdges(trc);
 }
 
 void
--- a/js/src/vm/JSContext-inl.h
+++ b/js/src/vm/JSContext-inl.h
@@ -176,17 +176,17 @@ class CompartmentChecker
     }
 };
 
 /*
  * Don't perform these checks when called from a finalizer. The checking
  * depends on other objects not having been swept yet.
  */
 #define START_ASSERT_SAME_COMPARTMENT()                                 \
-    if (JS::CurrentThreadIsHeapCollecting())                            \
+    if (JS::RuntimeHeapIsCollecting())                            \
         return;                                                         \
     CompartmentChecker c(cx)
 
 template <class T1> inline void
 releaseAssertSameCompartment(JSContext* cx, const T1& t1)
 {
     START_ASSERT_SAME_COMPARTMENT();
     c.check(t1);
--- a/js/src/vm/JSContext.cpp
+++ b/js/src/vm/JSContext.cpp
@@ -1544,17 +1544,17 @@ JSContext::updateMallocCounter(size_t nb
 }
 
 #ifdef DEBUG
 
 JS::AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext* cxArg)
   : cx(cxArg->helperThread() ? nullptr : cxArg)
 {
     if (cx) {
-        MOZ_ASSERT(cx->requestDepth || JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(cx->requestDepth || JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
         cx->checkRequestDepth++;
     }
 }
 
 JS::AutoCheckRequestDepth::~AutoCheckRequestDepth()
 {
     if (cx) {
--- a/js/src/vm/Realm.cpp
+++ b/js/src/vm/Realm.cpp
@@ -277,17 +277,17 @@ void
 Realm::traceGlobal(JSTracer* trc)
 {
     // Trace things reachable from the realm's global. Note that these edges
     // must be swept too in case the realm is live but the global is not.
 
     savedStacks_.trace(trc);
 
     // Atoms are always tenured.
-    if (!JS::CurrentThreadIsHeapMinorCollecting())
+    if (!JS::RuntimeHeapIsMinorCollecting())
         varNames_.trace(trc);
 }
 
 void
 ObjectRealm::trace(JSTracer* trc)
 {
     if (lazyArrayBuffers)
         lazyArrayBuffers->trace(trc);
@@ -303,17 +303,17 @@ void
 Realm::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime traceOrMark)
 {
     if (objectMetadataState_.is<PendingMetadata>()) {
         TraceRoot(trc,
                   &objectMetadataState_.as<PendingMetadata>(),
                   "on-stack object pending metadata");
     }
 
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         // The global is never nursery allocated, so we don't need to
         // trace it when doing a minor collection.
         //
         // If a compartment is on-stack, we mark its global so that
         // JSContext::global() remains valid.
         if (shouldTraceGlobal() && global_.unbarrieredGet())
             TraceRoot(trc, global_.unsafeUnbarrieredForTracing(), "on-stack compartment global");
     }
@@ -338,17 +338,17 @@ Realm::traceRoots(JSTracer* trc, js::gc:
     // keys of the HashMap to avoid adding a strong reference to the JSScript
     // pointers.
     //
     // If the code coverage is either enabled with the --dump-bytecode command
     // line option, or with the PCCount JSFriend API functions, then we mark the
     // keys of the map to hold the JSScript alive.
     if (scriptCountsMap &&
         trc->runtime()->profilingScripts &&
-        !JS::CurrentThreadIsHeapMinorCollecting())
+        !JS::RuntimeHeapIsMinorCollecting())
     {
         MOZ_ASSERT_IF(!trc->runtime()->isBeingDestroyed(), collectCoverage());
         for (ScriptCountsMap::Range r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
             JSScript* script = const_cast<JSScript*>(r.front().key());
             MOZ_ASSERT(script->hasScriptCounts());
             TraceRoot(trc, &script, "profilingScripts");
             MOZ_ASSERT(script == r.front().key(), "const_cast is only a work-around");
         }
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -139,22 +139,22 @@ RegExpObject::trace(JSTracer* trc, JSObj
 }
 
 static inline bool
 IsMarkingTrace(JSTracer* trc)
 {
     // Determine whether tracing is happening during normal marking.  We need to
     // test all the following conditions, since:
     //
-    //   1. During TraceRuntime, CurrentThreadIsHeapBusy() is true, but the
+    //   1. During TraceRuntime, RuntimeHeapIsBusy() is true, but the
     //      tracer might not be a marking tracer.
     //   2. When a write barrier executes, IsMarkingTracer is true, but
-    //      CurrentThreadIsHeapBusy() will be false.
+    //      RuntimeHeapIsBusy() will be false.
 
-    return JS::CurrentThreadIsHeapCollecting() && trc->isMarkingTracer();
+    return JS::RuntimeHeapIsCollecting() && trc->isMarkingTracer();
 }
 
 void
 RegExpObject::trace(JSTracer* trc)
 {
     TraceNullableEdge(trc, &sharedRef(), "RegExpObject shared");
 }
 
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -251,17 +251,17 @@ JSRuntime::init(JSContext* cx, uint32_t 
         return false;
 
     return true;
 }
 
 void
 JSRuntime::destroyRuntime()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(childRuntimeCount == 0);
     MOZ_ASSERT(initialized_);
 
     sharedIntlData.ref().destroyInstance();
 
     if (gcInitialized) {
         /*
          * Finish any in-progress GCs first. This ensures the parseWaitingOnGC
@@ -722,17 +722,17 @@ JSRuntime::updateMallocCounter(size_t nb
     gc.updateMallocCounter(nbytes);
 }
 
 JS_FRIEND_API(void*)
 JSRuntime::onOutOfMemory(AllocFunction allocFunc, size_t nbytes, void* reallocPtr, JSContext* maybecx)
 {
     MOZ_ASSERT_IF(allocFunc != AllocFunction::Realloc, !reallocPtr);
 
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return nullptr;
 
     if (!oom::IsSimulatedOOMAllocation()) {
         /*
          * Retry when we are done with the background sweeping and have stopped
          * all the allocations and released the empty GC chunks.
          */
         gc.onOutOfMallocMemory();
@@ -773,17 +773,17 @@ JSRuntime::activeGCInAtomsZone()
     Zone* zone = unsafeAtomsZone();
     return (zone->needsIncrementalBarrier() && !gc.isVerifyPreBarriersEnabled()) ||
            zone->wasGCStarted();
 }
 
 bool
 JSRuntime::createAtomsAddedWhileSweepingTable()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(!atomsAddedWhileSweeping_);
 
     atomsAddedWhileSweeping_ = js_new<AtomSet>();
     if (!atomsAddedWhileSweeping_)
         return false;
 
     if (!atomsAddedWhileSweeping_->init()) {
         destroyAtomsAddedWhileSweepingTable();
@@ -791,17 +791,17 @@ JSRuntime::createAtomsAddedWhileSweeping
     }
 
     return true;
 }
 
 void
 JSRuntime::destroyAtomsAddedWhileSweepingTable()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(atomsAddedWhileSweeping_);
 
     js_delete(atomsAddedWhileSweeping_.ref());
     atomsAddedWhileSweeping_ = nullptr;
 }
 
 void
 JSRuntime::setUsedByHelperThread(Zone* zone)
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -714,17 +714,17 @@ struct JSRuntime : public js::MallocProv
     js::ExclusiveAccessLockOrGCTaskData<js::SymbolRegistry> symbolRegistry_;
 
   public:
     bool initializeAtoms(JSContext* cx);
     void finishAtoms();
     bool atomsAreFinished() const { return !atoms_; }
 
     js::AtomSet* atomsForSweeping() {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+        MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
         return atoms_;
     }
 
     js::AtomSet& atoms(js::AutoLockForExclusiveAccess& lock) {
         MOZ_ASSERT(atoms_);
         return *atoms_;
     }
     js::AtomSet& unsafeAtoms() {
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -4140,17 +4140,17 @@ TypeNewScript::trace(JSTracer* trc)
     TraceNullableEdge(trc, &templateObject_, "TypeNewScript_templateObject");
     TraceNullableEdge(trc, &initializedShape_, "TypeNewScript_initializedShape");
     TraceNullableEdge(trc, &initializedGroup_, "TypeNewScript_initializedGroup");
 }
 
 /* static */ void
 TypeNewScript::writeBarrierPre(TypeNewScript* newScript)
 {
-    if (JS::CurrentThreadIsHeapCollecting())
+    if (JS::RuntimeHeapIsCollecting())
         return;
 
     JS::Zone* zone = newScript->function()->zoneFromAnyThread();
     if (zone->needsIncrementalBarrier())
         newScript->trace(zone->barrierTracer());
 }
 
 void
@@ -4180,17 +4180,17 @@ TraceObjectKey(JSTracer* trc, TypeSet::O
 }
 
 void
 ConstraintTypeSet::trace(Zone* zone, JSTracer* trc)
 {
     checkMagic();
 
     // ConstraintTypeSets only hold strong references during minor collections.
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
 
     unsigned objectCount = baseObjectCount();
     if (objectCount >= 2) {
         unsigned oldCapacity = TypeHashSet::Capacity(objectCount);
         ObjectKey** oldArray = objectSet;
 
         MOZ_RELEASE_ASSERT(uintptr_t(oldArray[-1]) == oldCapacity);
 
@@ -4258,17 +4258,17 @@ ConstraintTypeSet::trace(Zone* zone, JST
 
 static inline void
 AssertGCStateForSweep(Zone* zone)
 {
     MOZ_ASSERT(zone->isGCSweepingOrCompacting());
 
     // IsAboutToBeFinalized doesn't work right on tenured objects when called
     // during a minor collection.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 }
 
 void
 ConstraintTypeSet::sweep(const AutoSweepBase& sweep, Zone* zone,
                          AutoClearTypeInferenceStateOnOOM& oom)
 {
     AssertGCStateForSweep(zone);