Bug 1452982 part 10 - Rename *ActiveCooperatingThread to *MainThread. r=jonco
author Jan de Mooij <jdemooij@mozilla.com>
Sun, 15 Apr 2018 13:18:46 +0200
changeset 467351 16fcaa62dee9272dde73b52ae640988f70ffd196
parent 467350 db377658f683c663faac4c808c433c043082895f
child 467352 b63e4dc6d08e1e9388ef4e328f229d251150f314
push id 9165
push user asasaki@mozilla.com
push date Thu, 26 Apr 2018 21:04:54 +0000
treeherder mozilla-beta@064c3804de2e
reviewers jonco
bugs 1452982
milestone 61.0a1
Bug 1452982 part 10 - Rename *ActiveCooperatingThread to *MainThread. r=jonco
js/public/HeapAPI.h
js/src/builtin/MapObject.cpp
js/src/builtin/Promise.cpp
js/src/builtin/TypedObject.cpp
js/src/builtin/intl/Collator.cpp
js/src/builtin/intl/DateTimeFormat.cpp
js/src/builtin/intl/NumberFormat.cpp
js/src/builtin/intl/PluralRules.cpp
js/src/builtin/intl/RelativeTimeFormat.cpp
js/src/gc/Allocator.cpp
js/src/gc/Barrier.cpp
js/src/gc/Cell.h
js/src/gc/FreeOp.h
js/src/gc/GC-inl.h
js/src/gc/GC.cpp
js/src/gc/GCParallelTask.h
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/PublicIterators.cpp
js/src/gc/StoreBuffer-inl.h
js/src/gc/StoreBuffer.cpp
js/src/gc/WeakMap.h
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jit/BaselineJIT.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/Ion.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonOptimizationLevels.cpp
js/src/jit/IonOptimizationLevels.h
js/src/jit/JitCompartment.h
js/src/jit/MIR.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/arm/Bailouts-arm.cpp
js/src/jit/mips32/Bailouts-mips32.cpp
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/x86/Bailouts-x86.cpp
js/src/jsfriendapi.cpp
js/src/proxy/Wrapper.cpp
js/src/vm/ArgumentsObject.cpp
js/src/vm/Debugger.cpp
js/src/vm/GeckoProfiler.cpp
js/src/vm/HelperThreads.cpp
js/src/vm/JSCompartment.cpp
js/src/vm/JSCompartment.h
js/src/vm/JSObject.cpp
js/src/vm/JSScript.cpp
js/src/vm/NativeObject-inl.h
js/src/vm/RegExpStatics.cpp
js/src/vm/SavedStacks.cpp
js/src/vm/Shape-inl.h
js/src/vm/StringType.cpp
js/src/vm/TypeInference.cpp
js/src/vm/TypedArrayObject.cpp
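
The patch is a mechanical rename with no behavior change: runtimeFromActiveCooperatingThread becomes runtimeFromMainThread, onActiveCooperatingThread becomes onMainThread, runFromActiveCooperatingThread becomes runFromMainThread, and so on. As a rough sketch of the resulting call pattern, the hypothetical finalizer below (not part of the patch) uses only the FreeOp and Cell accessors renamed in the hunks that follow:

    static void
    ExampleFinalize(js::FreeOp* fop, JSObject* obj)
    {
        // Spelled fop->onActiveCooperatingThread() before this patch.
        MOZ_ASSERT(fop->onMainThread());

        // Spelled obj->runtimeFromActiveCooperatingThread() before this patch;
        // the accessor still asserts CurrentThreadCanAccessRuntime() internally,
        // so it may only be called from the main thread.
        js::Nursery& nursery = obj->runtimeFromMainThread()->gc.nursery();
        MOZ_ASSERT(!nursery.isInside(obj));
    }
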
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -170,17 +170,17 @@ struct Zone
     }
 
     JSTracer* barrierTracer() {
         MOZ_ASSERT(needsIncrementalBarrier_);
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
         return barrierTracer_;
     }
 
-    JSRuntime* runtimeFromActiveCooperatingThread() const {
+    JSRuntime* runtimeFromMainThread() const {
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
         return runtime_;
     }
 
     // Note: Unrestricted access to the zone's runtime from an arbitrary
     // thread can easily lead to races. Use this method very carefully.
     JSRuntime* runtimeFromAnyThread() const {
         return runtime_;
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -257,17 +257,17 @@ MapIteratorObject::create(JSContext* cx,
     iterobj->setSlot(RangeSlot, PrivateValue(range));
 
     return iterobj;
 }
 
 void
 MapIteratorObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     MOZ_ASSERT(!IsInsideNursery(obj));
 
     auto range = MapIteratorObjectRange(&obj->as<NativeObject>());
     MOZ_ASSERT(!fop->runtime()->gc.nursery().isInside(range));
 
     fop->delete_(range);
 }
 
@@ -277,17 +277,17 @@ MapIteratorObject::objectMoved(JSObject*
     if (!IsInsideNursery(old))
         return 0;
 
     MapIteratorObject* iter = &obj->as<MapIteratorObject>();
     ValueMap::Range* range = MapIteratorObjectRange(iter);
     if (!range)
         return 0;
 
-    Nursery& nursery = iter->runtimeFromActiveCooperatingThread()->gc.nursery();
+    Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
     if (!nursery.isInside(range)) {
         nursery.removeMallocedBuffer(range);
         return 0;
     }
 
     AutoEnterOOMUnsafeRegion oomUnsafe;
     auto newRange = iter->zone()->pod_malloc<ValueMap::Range>();
     if (!newRange)
@@ -562,17 +562,17 @@ WriteBarrierPostImpl(ObjectT* obj, const
         return true;
 
     NurseryKeysVector* keys = GetNurseryKeys(obj);
     if (!keys) {
         keys = AllocNurseryKeys(obj);
         if (!keys)
             return false;
 
-        JSRuntime* rt = key->runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = key->runtimeFromMainThread();
         rt->gc.storeBuffer().putGeneric(OrderedHashTableRef<ObjectT>(obj));
     }
 
     if (!keys->append(key))
         return false;
 
     return true;
 }
@@ -653,17 +653,17 @@ MapObject::create(JSContext* cx, HandleO
     mapObj->initReservedSlot(NurseryKeysSlot, PrivateValue(nullptr));
     mapObj->initReservedSlot(HasNurseryMemorySlot, JS::BooleanValue(insideNursery));
     return mapObj;
 }
 
 void
 MapObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     if (ValueMap* map = obj->as<MapObject>().getData())
         fop->delete_(map);
 }
 
 /* static */ void
 MapObject::sweepAfterMinorGC(FreeOp* fop, MapObject* mapobj)
 {
     if (IsInsideNursery(mapobj) && !IsForwarded(mapobj)) {
@@ -1108,17 +1108,17 @@ SetIteratorObject::create(JSContext* cx,
     iterobj->setSlot(RangeSlot, PrivateValue(range));
 
     return iterobj;
 }
 
 void
 SetIteratorObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     MOZ_ASSERT(!IsInsideNursery(obj));
 
     auto range = SetIteratorObjectRange(&obj->as<NativeObject>());
     MOZ_ASSERT(!fop->runtime()->gc.nursery().isInside(range));
 
     fop->delete_(range);
 }
 
@@ -1128,17 +1128,17 @@ SetIteratorObject::objectMoved(JSObject*
     if (!IsInsideNursery(old))
         return 0;
 
     SetIteratorObject* iter = &obj->as<SetIteratorObject>();
     ValueSet::Range* range = SetIteratorObjectRange(iter);
     if (!range)
         return 0;
 
-    Nursery& nursery = iter->runtimeFromActiveCooperatingThread()->gc.nursery();
+    Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
     if (!nursery.isInside(range)) {
         nursery.removeMallocedBuffer(range);
         return 0;
     }
 
     AutoEnterOOMUnsafeRegion oomUnsafe;
     auto newRange = iter->zone()->pod_malloc<ValueSet::Range>();
     if (!newRange)
@@ -1346,17 +1346,17 @@ SetObject::trace(JSTracer* trc, JSObject
         for (ValueSet::Range r = set->all(); !r.empty(); r.popFront())
             TraceKey(r, r.front(), trc);
     }
 }
 
 void
 SetObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     SetObject* setobj = static_cast<SetObject*>(obj);
     if (ValueSet* set = setobj->getData())
         fop->delete_(set);
 }
 
 /* static */ void
 SetObject::sweepAfterMinorGC(FreeOp* fop, SetObject* setobj)
 {
--- a/js/src/builtin/Promise.cpp
+++ b/js/src/builtin/Promise.cpp
@@ -3479,17 +3479,17 @@ PromiseObject::onSettled(JSContext* cx, 
     Debugger::onPromiseSettled(cx, promise);
 }
 
 OffThreadPromiseTask::OffThreadPromiseTask(JSContext* cx, Handle<PromiseObject*> promise)
   : runtime_(cx->runtime()),
     promise_(cx, promise),
     registered_(false)
 {
-    MOZ_ASSERT(runtime_ == promise_->zone()->runtimeFromActiveCooperatingThread());
+    MOZ_ASSERT(runtime_ == promise_->zone()->runtimeFromMainThread());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
     MOZ_ASSERT(cx->runtime()->offThreadPromiseState.ref().initialized());
 }
 
 OffThreadPromiseTask::~OffThreadPromiseTask()
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
 
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -2138,17 +2138,17 @@ InlineTypedObject::obj_moved(JSObject* d
     // whether this object moved and where it was moved from.
     TypeDescr& descr = dst->as<InlineTypedObject>().typeDescr();
     if (descr.kind() == type::Array) {
         // The forwarding pointer can be direct as long as there is enough
         // space for it. Other objects might point into the object's buffer,
         // but they will not set any direct forwarding pointers.
         uint8_t* oldData = reinterpret_cast<uint8_t*>(src) + offsetOfDataStart();
         uint8_t* newData = dst->as<InlineTypedObject>().inlineTypedMem();
-        auto& nursery = dst->runtimeFromActiveCooperatingThread()->gc.nursery();
+        auto& nursery = dst->runtimeFromMainThread()->gc.nursery();
         bool direct = descr.size() >= sizeof(uintptr_t);
         nursery.setForwardingPointerWhileTenuring(oldData, newData, direct);
     }
 
     return 0;
 }
 
 ArrayBufferObject*
--- a/js/src/builtin/intl/Collator.cpp
+++ b/js/src/builtin/intl/Collator.cpp
@@ -129,17 +129,17 @@ js::intl_Collator(JSContext* cx, unsigne
     MOZ_ASSERT(!args.isConstructing());
 
     return Collator(cx, args);
 }
 
 void
 js::CollatorObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
 
     const Value& slot = obj->as<CollatorObject>().getReservedSlot(CollatorObject::UCOLLATOR_SLOT);
     if (UCollator* coll = static_cast<UCollator*>(slot.toPrivate()))
         ucol_close(coll);
 }
 
 JSObject*
 js::CreateCollatorPrototype(JSContext* cx, HandleObject Intl, Handle<GlobalObject*> global)
--- a/js/src/builtin/intl/DateTimeFormat.cpp
+++ b/js/src/builtin/intl/DateTimeFormat.cpp
@@ -152,17 +152,17 @@ js::intl_DateTimeFormat(JSContext* cx, u
     // cannot be used with "new", but it still has to be treated as a
     // constructor.
     return DateTimeFormat(cx, args, true, DateTimeFormatOptions::Standard);
 }
 
 void
 js::DateTimeFormatObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
 
     const Value& slot =
         obj->as<DateTimeFormatObject>().getReservedSlot(DateTimeFormatObject::UDATE_FORMAT_SLOT);
     if (UDateFormat* df = static_cast<UDateFormat*>(slot.toPrivate()))
         udat_close(df);
 }
 
 JSObject*
--- a/js/src/builtin/intl/NumberFormat.cpp
+++ b/js/src/builtin/intl/NumberFormat.cpp
@@ -139,17 +139,17 @@ js::intl_NumberFormat(JSContext* cx, uns
     // cannot be used with "new", but it still has to be treated as a
     // constructor.
     return NumberFormat(cx, args, true);
 }
 
 void
 js::NumberFormatObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
 
     const Value& slot =
         obj->as<NumberFormatObject>().getReservedSlot(NumberFormatObject::UNUMBER_FORMAT_SLOT);
     if (UNumberFormat* nf = static_cast<UNumberFormat*>(slot.toPrivate()))
         unum_close(nf);
 }
 
 JSObject*
--- a/js/src/builtin/intl/PluralRules.cpp
+++ b/js/src/builtin/intl/PluralRules.cpp
@@ -112,17 +112,17 @@ PluralRules(JSContext* cx, unsigned argc
 
     args.rval().setObject(*pluralRules);
     return true;
 }
 
 void
 js::PluralRulesObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
 
     PluralRulesObject* pluralRules = &obj->as<PluralRulesObject>();
 
     const Value& prslot = pluralRules->getReservedSlot(PluralRulesObject::UPLURAL_RULES_SLOT);
     UPluralRules* pr = static_cast<UPluralRules*>(prslot.toPrivate());
 
     const Value& nfslot = pluralRules->getReservedSlot(PluralRulesObject::UNUMBER_FORMAT_SLOT);
     UNumberFormat* nf = static_cast<UNumberFormat*>(nfslot.toPrivate());
--- a/js/src/builtin/intl/RelativeTimeFormat.cpp
+++ b/js/src/builtin/intl/RelativeTimeFormat.cpp
@@ -112,17 +112,17 @@ RelativeTimeFormat(JSContext* cx, unsign
 
     args.rval().setObject(*relativeTimeFormat);
     return true;
 }
 
 void
 js::RelativeTimeFormatObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
 
     constexpr auto RT_FORMAT_SLOT = RelativeTimeFormatObject::URELATIVE_TIME_FORMAT_SLOT;
     const Value& slot = obj->as<RelativeTimeFormatObject>().getReservedSlot(RT_FORMAT_SLOT);
     if (URelativeDateTimeFormatter* rtf = static_cast<URelativeDateTimeFormatter*>(slot.toPrivate()))
         ureldatefmt_close(rtf);
 }
 
 JSObject*
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -362,23 +362,23 @@ GCRuntime::startBackgroundAllocTaskIfIdl
 }
 
 /* static */ TenuredCell*
 GCRuntime::refillFreeListFromAnyThread(JSContext* cx, AllocKind thingKind)
 {
     cx->arenas()->checkEmptyFreeList(thingKind);
 
     if (!cx->helperThread())
-        return refillFreeListFromActiveCooperatingThread(cx, thingKind);
+        return refillFreeListFromMainThread(cx, thingKind);
 
     return refillFreeListFromHelperThread(cx, thingKind);
 }
 
 /* static */ TenuredCell*
-GCRuntime::refillFreeListFromActiveCooperatingThread(JSContext* cx, AllocKind thingKind)
+GCRuntime::refillFreeListFromMainThread(JSContext* cx, AllocKind thingKind)
 {
     // It should not be possible to allocate on the active thread while we are
     // inside a GC.
     Zone *zone = cx->zone();
     MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy(), "allocating while under GC");
 
     return cx->arenas()->allocateFromArena(zone, thingKind, ShouldCheckThresholds::CheckThresholds);
 }
@@ -397,17 +397,17 @@ GCRuntime::refillFreeListFromHelperThrea
 /* static */ TenuredCell*
 GCRuntime::refillFreeListInGC(Zone* zone, AllocKind thingKind)
 {
     /*
      * Called by compacting GC to refill a free list while we are in a GC.
      */
 
     zone->arenas.checkEmptyFreeList(thingKind);
-    mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromActiveCooperatingThread();
+    mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
     MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
     MOZ_ASSERT_IF(!JS::CurrentThreadIsHeapMinorCollecting(), !rt->gc.isBackgroundSweeping());
 
     return zone->arenas.allocateFromArena(zone, thingKind, ShouldCheckThresholds::DontCheckThresholds);
 }
 
 TenuredCell*
 ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -16,19 +16,19 @@
 #include "vm/JSObject.h"
 #include "vm/SharedArrayObject.h"
 #include "vm/SymbolType.h"
 #include "wasm/WasmJS.h"
 
 namespace js {
 
 bool
-RuntimeFromActiveCooperatingThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone)
+RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone)
 {
-    MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromActiveCooperatingThread()));
+    MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromMainThread()));
     return JS::CurrentThreadIsHeapMajorCollecting();
 }
 
 #ifdef DEBUG
 
 bool
 IsMarkedBlack(JSObject* obj)
 {
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -22,17 +22,17 @@ struct Zone;
 enum class TraceKind;
 } /* namespace JS */
 
 namespace js {
 
 class GenericPrinter;
 
 extern bool
-RuntimeFromActiveCooperatingThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone);
+RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone);
 
 #ifdef DEBUG
 
 // Barriers can't be triggered during backend Ion compilation, which may run on
 // a helper thread.
 extern bool
 CurrentThreadIsIonCompiling();
 #endif
@@ -55,17 +55,17 @@ struct Cell
     MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
     MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
     MOZ_ALWAYS_INLINE TenuredCell& asTenured();
 
     MOZ_ALWAYS_INLINE bool isMarkedAny() const;
     MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
     MOZ_ALWAYS_INLINE bool isMarkedGray() const;
 
-    inline JSRuntime* runtimeFromActiveCooperatingThread() const;
+    inline JSRuntime* runtimeFromMainThread() const;
 
     // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
     // thread can easily lead to races. Use this method very carefully.
     inline JSRuntime* runtimeFromAnyThread() const;
 
     // May be overridden by GC thing kinds that have a compartment pointer.
     inline JSCompartment* maybeCompartment() const { return nullptr; }
 
@@ -208,17 +208,17 @@ Cell::isMarkedBlack() const
 
 MOZ_ALWAYS_INLINE bool
 Cell::isMarkedGray() const
 {
     return isTenured() && asTenured().isMarkedGray();
 }
 
 inline JSRuntime*
-Cell::runtimeFromActiveCooperatingThread() const
+Cell::runtimeFromMainThread() const
 {
     JSRuntime* rt = chunk()->trailer.runtime;
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     return rt;
 }
 
 inline JSRuntime*
 Cell::runtimeFromAnyThread() const
@@ -379,26 +379,26 @@ TenuredCell::readBarrier(TenuredCell* th
     // read-barriered GC things after a moving GC.
     //
     // TODO: Fix this and assert we're not collecting if we're on the active
     // thread.
 
     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
     if (shadowZone->needsIncrementalBarrier()) {
         // Barriers are only enabled on the active thread and are disabled while collecting.
-        MOZ_ASSERT(!RuntimeFromActiveCooperatingThreadIsHeapMajorCollecting(shadowZone));
+        MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == thing);
     }
 
     if (thing->isMarkedGray()) {
         // There shouldn't be anything marked grey unless we're on the active thread.
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()));
-        if (!RuntimeFromActiveCooperatingThreadIsHeapMajorCollecting(shadowZone))
+        if (!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone))
             JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(thing, thing->getTraceKind()));
     }
 }
 
 void
 AssertSafeToSkipBarrier(TenuredCell* thing);
 
 /* static */ MOZ_ALWAYS_INLINE void
@@ -421,17 +421,17 @@ TenuredCell::writeBarrierPre(TenuredCell
     if (!CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread())) {
         AssertSafeToSkipBarrier(thing);
         return;
     }
 #endif
 
     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
     if (shadowZone->needsIncrementalBarrier()) {
-        MOZ_ASSERT(!RuntimeFromActiveCooperatingThreadIsHeapMajorCollecting(shadowZone));
+        MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "pre barrier");
         MOZ_ASSERT(tmp == thing);
     }
 }
 
 static MOZ_ALWAYS_INLINE void
 AssertValidToSkipBarrier(TenuredCell* thing)
--- a/js/src/gc/FreeOp.h
+++ b/js/src/gc/FreeOp.h
@@ -35,17 +35,17 @@ class FreeOp : public JSFreeOp
   public:
     static FreeOp* get(JSFreeOp* fop) {
         return static_cast<FreeOp*>(fop);
     }
 
     explicit FreeOp(JSRuntime* maybeRuntime);
     ~FreeOp();
 
-    bool onActiveCooperatingThread() const {
+    bool onMainThread() const {
         return runtime_ != nullptr;
     }
 
     bool maybeOnHelperThread() const {
         // Sometimes background finalization happens on the active thread so
         // runtime_ being null doesn't always mean we are off thread.
         return !runtime_;
     }
--- a/js/src/gc/GC-inl.h
+++ b/js/src/gc/GC-inl.h
@@ -203,17 +203,17 @@ class ZoneCellIter<TenuredCell> {
 
   protected:
     // For use when a subclass wants to insert some setup before init().
     ZoneCellIter() {}
 
     void init(JS::Zone* zone, AllocKind kind) {
         MOZ_ASSERT_IF(IsNurseryAllocable(kind),
                       (zone->isAtomsZone() ||
-                       zone->runtimeFromActiveCooperatingThread()->gc.nursery().isEmpty()));
+                       zone->runtimeFromMainThread()->gc.nursery().isEmpty()));
         initForTenuredIteration(zone, kind);
     }
 
     void initForTenuredIteration(JS::Zone* zone, AllocKind kind) {
         JSRuntime* rt = zone->runtimeFromAnyThread();
 
         // If called from outside a GC, ensure that the heap is in a state
         // that allows us to iterate.
@@ -233,17 +233,17 @@ class ZoneCellIter<TenuredCell> {
             cellIter.init(arenaIter.get(), CellIterMayNeedBarrier);
     }
 
   public:
     ZoneCellIter(JS::Zone* zone, AllocKind kind) {
         // If we are iterating a nursery-allocated kind then we need to
         // evict first so that we can see all things.
         if (IsNurseryAllocable(kind))
-            zone->runtimeFromActiveCooperatingThread()->gc.evictNursery();
+            zone->runtimeFromMainThread()->gc.evictNursery();
 
         init(zone, kind);
     }
 
     ZoneCellIter(JS::Zone* zone, AllocKind kind, const js::gc::AutoAssertEmptyNursery&) {
         // No need to evict the nursery. (This constructor is known statically
         // to not GC.)
         init(zone, kind);
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -628,22 +628,22 @@ FinalizeTypedArenas(FreeOp* fop,
                     Arena** src,
                     SortedArenaList& dest,
                     AllocKind thingKind,
                     SliceBudget& budget,
                     ArenaLists::KeepArenasEnum keepArenas)
 {
     // When operating in the foreground, take the lock at the top.
     Maybe<AutoLockGC> maybeLock;
-    if (fop->onActiveCooperatingThread())
+    if (fop->onMainThread())
         maybeLock.emplace(fop->runtime());
 
     // During background sweeping free arenas are released later on in
     // sweepBackgroundThings().
-    MOZ_ASSERT_IF(!fop->onActiveCooperatingThread(), keepArenas == ArenaLists::KEEP_ARENAS);
+    MOZ_ASSERT_IF(!fop->onMainThread(), keepArenas == ArenaLists::KEEP_ARENAS);
 
     size_t thingSize = Arena::thingSize(thingKind);
     size_t thingsPerArena = Arena::thingsPerArena(thingKind);
 
     while (Arena* arena = *src) {
         *src = arena->next;
         size_t nmarked = arena->finalize<T>(fop, thingKind, thingSize);
         size_t nfree = thingsPerArena - nmarked;
@@ -2454,17 +2454,17 @@ void MovingTracer::onScriptEdge(JSScript
 void MovingTracer::onLazyScriptEdge(LazyScript** lazyp) { updateEdge(lazyp); }
 void MovingTracer::onBaseShapeEdge(BaseShape** basep) { updateEdge(basep); }
 void MovingTracer::onScopeEdge(Scope** scopep) { updateEdge(scopep); }
 void MovingTracer::onRegExpSharedEdge(RegExpShared** sharedp) { updateEdge(sharedp); }
 
 void
 Zone::prepareForCompacting()
 {
-    FreeOp* fop = runtimeFromActiveCooperatingThread()->defaultFreeOp();
+    FreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
     discardJitCode(fop);
 }
 
 void
 GCRuntime::sweepTypesAfterCompacting(Zone* zone)
 {
     zone->beginSweepTypes(rt->gc.releaseObservedTypes && !zone->isPreservingCode());
 
@@ -2739,17 +2739,17 @@ GCRuntime::updateCellPointers(Zone* zone
 
         for (size_t i = 0; i < bgTaskCount && !bgArenas.done(); i++) {
             bgTasks[i].emplace(rt, &bgArenas, lock);
             startTask(*bgTasks[i], gcstats::PhaseKind::COMPACT_UPDATE_CELLS, lock);
             tasksStarted = i;
         }
     }
 
-    fgTask->runFromActiveCooperatingThread(rt);
+    fgTask->runFromMainThread(rt);
 
     {
         AutoLockHelperThreadState lock;
 
         for (size_t i = 0; i < tasksStarted; i++)
             joinTask(*bgTasks[i], gcstats::PhaseKind::COMPACT_UPDATE_CELLS, lock);
     }
 }
@@ -3413,17 +3413,17 @@ GCRuntime::startDecommit()
             }
         }
     }
     decommitTask.setChunksToScan(toDecommit);
 
     if (sweepOnBackgroundThread && decommitTask.start())
         return;
 
-    decommitTask.runFromActiveCooperatingThread(rt);
+    decommitTask.runFromMainThread(rt);
 }
 
 void
 js::gc::BackgroundDecommitTask::setChunksToScan(ChunkVector &chunks)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
     MOZ_ASSERT(!isRunning());
     MOZ_ASSERT(toDecommit.ref().empty());
@@ -3759,17 +3759,17 @@ Zone::destroy(FreeOp* fop)
  * |keepAtleastOne| true to prohibit sweepCompartments from deleting every
  * compartment. Instead, it preserves an arbitrary compartment in the zone.
  */
 void
 Zone::sweepCompartments(FreeOp* fop, bool keepAtleastOne, bool destroyingRuntime)
 {
     MOZ_ASSERT(!compartments().empty());
 
-    mozilla::DebugOnly<JSRuntime*> rt = runtimeFromActiveCooperatingThread();
+    mozilla::DebugOnly<JSRuntime*> rt = runtimeFromMainThread();
 
     JSCompartment** read = compartments().begin();
     JSCompartment** end = compartments().end();
     JSCompartment** write = read;
     bool foundOne = false;
     while (read < end) {
         JSCompartment* comp = *read++;
         MOZ_ASSERT(!rt->isAtomsCompartment(comp));
@@ -4089,17 +4089,17 @@ GCRuntime::checkForCompartmentMismatches
 #endif
 
 static void
 RelazifyFunctions(Zone* zone, AllocKind kind)
 {
     MOZ_ASSERT(kind == AllocKind::FUNCTION ||
                kind == AllocKind::FUNCTION_EXTENDED);
 
-    JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = zone->runtimeFromMainThread();
     AutoAssertEmptyNursery empty(rt->mainContextFromOwnThread());
 
     for (auto i = zone->cellIter<JSObject>(kind, empty); !i.done(); i.next()) {
         JSFunction* fun = &i->as<JSFunction>();
         if (fun->hasScript())
             fun->maybeRelazify(rt);
     }
 }
@@ -5427,17 +5427,17 @@ SweepUniqueIds(JSRuntime* runtime)
 }
 
 void
 GCRuntime::startTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked)
 {
     if (!task.startWithLockHeld(locked)) {
         AutoUnlockHelperThreadState unlock(locked);
         gcstats::AutoPhase ap(stats(), phase);
-        task.runFromActiveCooperatingThread(rt);
+        task.runFromMainThread(rt);
     }
 }
 
 void
 GCRuntime::joinTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked)
 {
     {
         gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::JOIN_PARALLEL_TASKS);
@@ -7122,17 +7122,17 @@ CheckZoneIsScheduled(Zone* zone, JS::gcr
     if (zone->isGCScheduled())
         return;
 
     fprintf(stderr,
             "CheckZoneIsScheduled: Zone %p not scheduled as expected in %s GC for %s trigger\n",
             zone,
             JS::gcreason::ExplainReason(reason),
             trigger);
-    JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = zone->runtimeFromMainThread();
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         fprintf(stderr,
                 "  Zone %p:%s%s\n",
                 zone.get(),
                 zone->isAtomsZone() ? " atoms" : "",
                 zone->isGCScheduled() ? " scheduled" : "");
     }
     fflush(stderr);
@@ -7856,17 +7856,17 @@ js::NewCompartment(JSContext* cx, JSPrin
 
     zoneHolder.forget();
     return compartment.forget();
 }
 
 void
 gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
 {
-    JSRuntime* rt = source->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = source->runtimeFromMainThread();
     rt->gc.mergeCompartments(source, target);
 
     AutoLockGC lock(rt);
     rt->gc.maybeAllocTriggerZoneGC(target->zone(), lock);
 }
 
 void
 GCRuntime::mergeCompartments(JSCompartment* source, JSCompartment* target)
@@ -8946,17 +8946,17 @@ js::gc::detail::CellIsMarkedGrayIfKnown(
     // to be collected and will not be marked any color.
 
     if (!CanCheckGrayBits(cell))
         return false;
 
     auto tc = &cell->asTenured();
     MOZ_ASSERT(!tc->zoneFromAnyThread()->usedByHelperThread());
 
-    auto rt = tc->runtimeFromActiveCooperatingThread();
+    auto rt = tc->runtimeFromMainThread();
     if (rt->gc.isIncrementalGCInProgress() && !tc->zone()->wasGCStarted())
         return false;
 
     return detail::CellIsMarkedGray(tc);
 }
 
 #ifdef DEBUG
 
--- a/js/src/gc/GCParallelTask.h
+++ b/js/src/gc/GCParallelTask.h
@@ -61,17 +61,17 @@ class GCParallelTask
     void join();
 
     // If multiple tasks are to be started or joined at once, it is more
     // efficient to take the helper thread lock once and use these methods.
     bool startWithLockHeld(AutoLockHelperThreadState& locked);
     void joinWithLockHeld(AutoLockHelperThreadState& locked);
 
     // Instead of dispatching to a helper, run the task on the current thread.
-    void runFromActiveCooperatingThread(JSRuntime* rt);
+    void runFromMainThread(JSRuntime* rt);
 
     // Dispatch a cancelation request.
     enum CancelMode { CancelNoWait, CancelAndWait};
     void cancel(CancelMode mode = CancelNoWait) {
         cancel_ = true;
         if (mode == CancelAndWait)
             join();
     }
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -504,18 +504,17 @@ class GCRuntime
 
     void arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena);
 
     // Allocator internals
     MOZ_MUST_USE bool gcIfNeededAtAllocation(JSContext* cx);
     template <typename T>
     static void checkIncrementalZoneState(JSContext* cx, T* t);
     static TenuredCell* refillFreeListFromAnyThread(JSContext* cx, AllocKind thingKind);
-    static TenuredCell* refillFreeListFromActiveCooperatingThread(JSContext* cx,
-                                                                  AllocKind thingKind);
+    static TenuredCell* refillFreeListFromMainThread(JSContext* cx, AllocKind thingKind);
     static TenuredCell* refillFreeListFromHelperThread(JSContext* cx, AllocKind thingKind);
 
     /*
      * Return the list of chunks that can be released outside the GC lock.
      * Must be called either during the GC or with the GC lock taken.
      */
     friend class BackgroundDecommitTask;
     ChunkPool expireEmptyChunkPool(const AutoLockGC& lock);
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -3619,17 +3619,17 @@ UnmarkGrayGCThing(JSRuntime* rt, JS::GCC
 }
 
 JS_FRIEND_API(bool)
 JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing)
 {
     MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
     MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
 
-    JSRuntime* rt = thing.asCell()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
     gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
     return UnmarkGrayGCThing(rt, thing);
 }
 
 bool
 js::UnmarkGrayShapeRecursively(Shape* shape)
 {
     return JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(shape));
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -977,17 +977,17 @@ js::Nursery::freeMallocedBuffers()
     {
         AutoLockHelperThreadState lock;
         freeMallocedBuffersTask->joinWithLockHeld(lock);
         freeMallocedBuffersTask->transferBuffersToFree(mallocedBuffers, lock);
         started = freeMallocedBuffersTask->startWithLockHeld(lock);
     }
 
     if (!started)
-        freeMallocedBuffersTask->runFromActiveCooperatingThread(runtime());
+        freeMallocedBuffersTask->runFromMainThread(runtime());
 
     MOZ_ASSERT(mallocedBuffers.empty());
 }
 
 void
 js::Nursery::waitBackgroundFreeEnd()
 {
     // We may finishRoots before nursery init if runtime init fails.
--- a/js/src/gc/PublicIterators.cpp
+++ b/js/src/gc/PublicIterators.cpp
@@ -118,17 +118,17 @@ js::IterateGrayObjects(Zone* zone, GCThi
     MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
     AutoPrepareForTracing prep(TlsContext.get());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
 
 void
 js::IterateGrayObjectsUnderCC(Zone* zone, GCThingCallback cellCallback, void* data)
 {
-    mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromActiveCooperatingThread();
+    mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
     MOZ_ASSERT(JS::CurrentThreadIsHeapCycleCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
 
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSContext* cx, void* data,
                        JSIterateCompartmentCallback compartmentCallback)
--- a/js/src/gc/StoreBuffer-inl.h
+++ b/js/src/gc/StoreBuffer-inl.h
@@ -53,17 +53,17 @@ ArenaCellSet::check() const
 {
 #ifdef DEBUG
     bool bitsZero = bits.isAllClear();
     MOZ_ASSERT(isEmpty() == bitsZero);
     MOZ_ASSERT(isEmpty() == !arena);
     if (!isEmpty()) {
         MOZ_ASSERT(IsCellPointerValid(arena));
         MOZ_ASSERT(arena->bufferedCells() == this);
-        JSRuntime* runtime = arena->zone->runtimeFromActiveCooperatingThread();
+        JSRuntime* runtime = arena->zone->runtimeFromMainThread();
         MOZ_ASSERT(runtime->gc.minorGCCount() == minorGCNumberAtCreation);
     }
 #endif
 }
 
 inline void
 StoreBuffer::WholeCellBuffer::put(const Cell* cell)
 {
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -122,28 +122,28 @@ ArenaCellSet::ArenaCellSet()
   , minorGCNumberAtCreation(0)
 #endif
 {}
 
 ArenaCellSet::ArenaCellSet(Arena* arena, ArenaCellSet* next)
   : arena(arena)
   , next(next)
 #ifdef DEBUG
-  , minorGCNumberAtCreation(arena->zone->runtimeFromActiveCooperatingThread()->gc.minorGCCount())
+  , minorGCNumberAtCreation(arena->zone->runtimeFromMainThread()->gc.minorGCCount())
 #endif
 {
     MOZ_ASSERT(arena);
     bits.clear(false);
 }
 
 ArenaCellSet*
 StoreBuffer::WholeCellBuffer::allocateCellSet(Arena* arena)
 {
     Zone* zone = arena->zone;
-    JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = zone->runtimeFromMainThread();
     if (!rt->gc.nursery().isEnabled())
         return nullptr;
 
     AutoEnterOOMUnsafeRegion oomUnsafe;
     auto cells = storage_->new_<ArenaCellSet>(arena, head_);
     if (!cells)
         oomUnsafe.crash("Failed to allocate ArenaCellSet");
 
--- a/js/src/gc/WeakMap.h
+++ b/js/src/gc/WeakMap.h
@@ -286,17 +286,17 @@ class WeakMap : public HashMap<Key, Valu
         JSWeakmapKeyDelegateOp op = key->getClass()->extWeakmapKeyDelegateOp();
         if (!op)
             return nullptr;
 
         JSObject* obj = op(key);
         if (!obj)
             return nullptr;
 
-        MOZ_ASSERT(obj->runtimeFromActiveCooperatingThread() == zone()->runtimeFromActiveCooperatingThread());
+        MOZ_ASSERT(obj->runtimeFromMainThread() == zone()->runtimeFromMainThread());
         return obj;
     }
 
     JSObject* getDelegate(JSScript* script) const {
         return nullptr;
     }
 
   private:
@@ -304,17 +304,17 @@ class WeakMap : public HashMap<Key, Valu
     void exposeGCThingToActiveJS(JSObject* obj) const { JS::ExposeObjectToActiveJS(obj); }
 
     bool keyNeedsMark(JSObject* key) const {
         JSObject* delegate = getDelegate(key);
         /*
          * Check if the delegate is marked with any color to properly handle
          * gray marking when the key's delegate is black and the map is gray.
          */
-        return delegate && gc::IsMarkedUnbarriered(zone()->runtimeFromActiveCooperatingThread(), &delegate);
+        return delegate && gc::IsMarkedUnbarriered(zone()->runtimeFromMainThread(), &delegate);
     }
 
     bool keyNeedsMark(JSScript* script) const {
         return false;
     }
 
     bool findZoneEdges() override {
         // This is overridden by ObjectValueMap.
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -267,17 +267,17 @@ JS::Zone::checkUniqueIdTableAfterMovingG
 }
 #endif
 
 uint64_t
 Zone::gcNumber()
 {
     // Zones in use by exclusive threads are not collected, and threads using
     // them cannot access the main runtime's gcNumber without racing.
-    return usedByHelperThread() ? 0 : runtimeFromActiveCooperatingThread()->gc.gcNumber();
+    return usedByHelperThread() ? 0 : runtimeFromMainThread()->gc.gcNumber();
 }
 
 js::jit::JitZone*
 Zone::createJitZone(JSContext* cx)
 {
     MOZ_ASSERT(!jitZone_);
 
     if (!cx->runtime()->getJitRuntime(cx))
@@ -312,17 +312,17 @@ Zone::canCollect()
     // Zones that will be or are currently used by other threads cannot be
     // collected.
     return !createdForHelperThread();
 }
 
 void
 Zone::notifyObservingDebuggers()
 {
-    JSRuntime* rt = runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = runtimeFromMainThread();
     JSContext* cx = rt->mainContextFromOwnThread();
 
     for (CompartmentsInZoneIter comps(this); !comps.done(); comps.next()) {
         RootedGlobalObject global(cx, comps->unsafeUnbarrieredMaybeGlobal());
         if (!global)
             continue;
 
         GlobalObject::DebuggerVector* dbgs = global->getDebuggers();
@@ -390,17 +390,17 @@ Zone::addTypeDescrObject(JSContext* cx, 
 void
 Zone::deleteEmptyCompartment(JSCompartment* comp)
 {
     MOZ_ASSERT(comp->zone() == this);
     MOZ_ASSERT(arenas.checkEmptyArenaLists());
     for (auto& i : compartments()) {
         if (i == comp) {
             compartments().erase(&i);
-            comp->destroy(runtimeFromActiveCooperatingThread()->defaultFreeOp());
+            comp->destroy(runtimeFromMainThread()->defaultFreeOp());
             return;
         }
     }
     MOZ_CRASH("Compartment not found");
 }
 
 void
 Zone::setHelperThreadOwnerContext(JSContext* cx)
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -171,17 +171,17 @@ struct Zone : public JS::shadow::Zone,
     js::gc::ZoneCellIter<T> cellIter(Args&&... args) {
         return js::gc::ZoneCellIter<T>(const_cast<Zone*>(this), mozilla::Forward<Args>(args)...);
     }
 
     MOZ_MUST_USE void* onOutOfMemory(js::AllocFunction allocFunc, size_t nbytes,
                                      void* reallocPtr = nullptr) {
         if (!js::CurrentThreadCanAccessRuntime(runtime_))
             return nullptr;
-        return runtimeFromActiveCooperatingThread()->onOutOfMemory(allocFunc, nbytes, reallocPtr);
+        return runtimeFromMainThread()->onOutOfMemory(allocFunc, nbytes, reallocPtr);
     }
     void reportAllocationOverflow() { js::ReportAllocationOverflow(nullptr); }
 
     void beginSweepTypes(bool releaseTypes);
 
     bool hasMarkedCompartments();
 
     void scheduleGC() { MOZ_ASSERT(!CurrentThreadIsHeapBusy()); gcScheduled_ = true; }
@@ -198,17 +198,17 @@ struct Zone : public JS::shadow::Zone,
     void changeGCState(GCState prev, GCState next) {
         MOZ_ASSERT(CurrentThreadIsHeapBusy());
         MOZ_ASSERT(gcState() == prev);
         MOZ_ASSERT_IF(next != NoGC, canCollect());
         gcState_ = next;
     }
 
     bool isCollecting() const {
-        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromActiveCooperatingThread()));
+        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromMainThread()));
         return isCollectingFromAnyThread();
     }
 
     bool isCollectingFromAnyThread() const {
         if (CurrentThreadIsHeapCollecting())
             return gcState_ != NoGC;
         else
             return needsIncrementalBarrier();
@@ -227,17 +227,17 @@ struct Zone : public JS::shadow::Zone,
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
     uint64_t gcNumber();
 
     bool compileBarriers() const { return compileBarriers(needsIncrementalBarrier()); }
     bool compileBarriers(bool needsIncrementalBarrier) const {
         return needsIncrementalBarrier ||
-               runtimeFromActiveCooperatingThread()->hasZealMode(js::gc::ZealMode::VerifierPre);
+               runtimeFromMainThread()->hasZealMode(js::gc::ZealMode::VerifierPre);
     }
 
     void setNeedsIncrementalBarrier(bool needs);
     const uint32_t* addressOfNeedsIncrementalBarrier() const { return &needsIncrementalBarrier_; }
 
     js::jit::JitZone* getJitZone(JSContext* cx) { return jitZone_ ? jitZone_ : createJitZone(cx); }
     js::jit::JitZone* jitZone() { return jitZone_; }
 
@@ -604,17 +604,17 @@ struct Zone : public JS::shadow::Zone,
         *uidp = js::gc::NextCellUniqueId(runtimeFromAnyThread());
         if (!uniqueIds().add(p, cell, *uidp))
             return false;
 
         // If the cell was in the nursery, hopefully unlikely, then we need to
         // tell the nursery about it so that it can sweep the uid if the thing
         // does not get tenured.
         if (IsInsideNursery(cell) &&
-            !runtimeFromActiveCooperatingThread()->gc.nursery().addedUniqueIdToCell(cell))
+            !runtimeFromMainThread()->gc.nursery().addedUniqueIdToCell(cell))
         {
             uniqueIds().remove(cell);
             return false;
         }
 
         return true;
     }
 
@@ -636,17 +636,17 @@ struct Zone : public JS::shadow::Zone,
         return uniqueIds().has(cell);
     }
 
     // Transfer an id from another cell. This must only be called on behalf of a
     // moving GC. This method is infallible.
     void transferUniqueId(js::gc::Cell* tgt, js::gc::Cell* src) {
         MOZ_ASSERT(src != tgt);
         MOZ_ASSERT(!IsInsideNursery(tgt));
-        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromActiveCooperatingThread()));
+        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromMainThread()));
         MOZ_ASSERT(js::CurrentThreadCanAccessZone(this));
         MOZ_ASSERT(!uniqueIds().has(tgt));
         uniqueIds().rekeyIfMoved(src, tgt);
     }
 
     // Remove any unique id associated with this Cell.
     void removeUniqueId(js::gc::Cell* cell) {
         MOZ_ASSERT(js::CurrentThreadCanAccessZone(this));
@@ -689,17 +689,17 @@ struct Zone : public JS::shadow::Zone,
         T* p = pod_calloc<T>(numElems);
         if (MOZ_LIKELY(!!p))
             return p;
         size_t bytes;
         if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(numElems, &bytes))) {
             reportAllocationOverflow();
             return nullptr;
         }
-        JSRuntime* rt = runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = runtimeFromMainThread();
         p = static_cast<T*>(rt->onOutOfMemoryCanGC(js::AllocFunction::Calloc, bytes));
         if (!p)
             return nullptr;
         updateMallocCounter(bytes);
         return p;
     }
 
   private:
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -50,17 +50,17 @@ PCMappingSlotInfo::ToSlotLocation(const 
 }
 
 void
 ICStubSpace::freeAllAfterMinorGC(Zone* zone)
 {
     if (zone->isAtomsZone())
         MOZ_ASSERT(allocator_.isEmpty());
     else
-        zone->runtimeFromActiveCooperatingThread()->gc.freeAllLifoBlocksAfterMinorGC(&allocator_);
+        zone->runtimeFromMainThread()->gc.freeAllLifoBlocksAfterMinorGC(&allocator_);
 }
 
 BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                                uint32_t profilerEnterToggleOffset,
                                uint32_t profilerExitToggleOffset,
                                uint32_t postDebugPrologueOffset)
   : method_(nullptr),
     templateEnv_(nullptr),
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -221,17 +221,17 @@ CompileZone::nurseryExists()
 {
     return zone()->runtimeFromAnyThread()->gc.nursery().exists();
 }
 
 void
 CompileZone::setMinorGCShouldCancelIonCompilations()
 {
     MOZ_ASSERT(CurrentThreadCanAccessZone(zone()));
-    JSRuntime* rt = zone()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = zone()->runtimeFromMainThread();
     rt->gc.storeBuffer().setShouldCancelIonCompilations();
 }
 
 JSCompartment*
 CompileCompartment::compartment()
 {
     return reinterpret_cast<JSCompartment*>(this);
 }
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -390,31 +390,31 @@ JitRuntime::ionLazyLinkList(JSRuntime* r
     return ionLazyLinkList_.ref();
 }
 
 void
 JitRuntime::ionLazyLinkListRemove(JSRuntime* rt, jit::IonBuilder* builder)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
                "Should only be mutated by the active thread.");
-    MOZ_ASSERT(rt == builder->script()->runtimeFromActiveCooperatingThread());
+    MOZ_ASSERT(rt == builder->script()->runtimeFromMainThread());
     MOZ_ASSERT(ionLazyLinkListSize_ > 0);
 
     builder->removeFrom(ionLazyLinkList(rt));
     ionLazyLinkListSize_--;
 
     MOZ_ASSERT(ionLazyLinkList(rt).isEmpty() == (ionLazyLinkListSize_ == 0));
 }
 
 void
 JitRuntime::ionLazyLinkListAdd(JSRuntime* rt, jit::IonBuilder* builder)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
                "Should only be mutated by the active thread.");
-    MOZ_ASSERT(rt == builder->script()->runtimeFromActiveCooperatingThread());
+    MOZ_ASSERT(rt == builder->script()->runtimeFromMainThread());
     ionLazyLinkList(rt).insertFront(builder);
     ionLazyLinkListSize_++;
 }
 
 uint8_t*
 JSContext::allocateOsrTempData(size_t size)
 {
     osrTempData_ = (uint8_t*)js_realloc(osrTempData_, size);
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -8642,17 +8642,17 @@ IonBuilder::addTypedArrayLengthAndData(M
     } else if (TemporaryTypeSet* types = obj->resultTypeSet()) {
         tarr = types->maybeSingleton();
     }
 
     if (tarr) {
         SharedMem<void*> data = tarr->as<TypedArrayObject>().viewDataEither();
         // Bug 979449 - Optimistically embed the elements and use TI to
         //              invalidate if we move them.
-        bool isTenured = !tarr->runtimeFromActiveCooperatingThread()->gc.nursery().isInside(data);
+        bool isTenured = !tarr->runtimeFromMainThread()->gc.nursery().isInside(data);
         if (isTenured && tarr->isSingleton()) {
             // The 'data' pointer of TypedArrayObject can change in rare circumstances
             // (ArrayBufferObject::changeContents).
             TypeSet::ObjectKey* tarrKey = TypeSet::ObjectKey::get(tarr);
             if (!tarrKey->unknownProperties()) {
                 if (tarr->is<TypedArrayObject>())
                     tarrKey->watchStateChangeForTypedArrayData(constraints());
 
--- a/js/src/jit/IonOptimizationLevels.cpp
+++ b/js/src/jit/IonOptimizationLevels.cpp
@@ -37,17 +37,17 @@ OptimizationInfo::initNormalOptimization
     gvn_ = true;
     rangeAnalysis_ = true;
     reordering_ = true;
     sincos_ = true;
     sink_ = true;
 
     registerAllocator_ = RegisterAllocator_Backtracking;
 
-    inlineMaxBytecodePerCallSiteActiveCooperatingThread_ = 550;
+    inlineMaxBytecodePerCallSiteMainThread_ = 550;
     inlineMaxBytecodePerCallSiteHelperThread_ = 1100;
     inlineMaxCalleeInlinedBytecodeLength_ = 3550;
     inlineMaxTotalBytecodeLength_ = 85000;
     inliningMaxCallerBytecodeLength_ = 1600;
     maxInlineDepth_ = 3;
     scalarReplacement_ = true;
     smallFunctionMaxInlineDepth_ = 10;
     compilerWarmUpThreshold_ = CompilerWarmupThreshold;
--- a/js/src/jit/IonOptimizationLevels.h
+++ b/js/src/jit/IonOptimizationLevels.h
@@ -94,17 +94,17 @@ class OptimizationInfo
 
     // Describes which register allocator to use.
     IonRegisterAllocator registerAllocator_;
 
     // The maximum total bytecode size of an inline call site. We use a lower
     // value if off-thread compilation is not available, to avoid stalling the
     // active thread.
     uint32_t inlineMaxBytecodePerCallSiteHelperThread_;
-    uint32_t inlineMaxBytecodePerCallSiteActiveCooperatingThread_;
+    uint32_t inlineMaxBytecodePerCallSiteMainThread_;
 
     // The maximum value we allow for baselineScript->inlinedBytecodeLength_
     // when inlining.
     uint16_t inlineMaxCalleeInlinedBytecodeLength_;
 
     // The maximum bytecode length we'll inline in a single compilation.
     uint32_t inlineMaxTotalBytecodeLength_;
 
@@ -242,17 +242,17 @@ class OptimizationInfo
 
     uint32_t maxInlineDepth() const {
         return maxInlineDepth_;
     }
 
     uint32_t inlineMaxBytecodePerCallSite(bool offThread) const {
         return (offThread || !JitOptions.limitScriptSize)
                ? inlineMaxBytecodePerCallSiteHelperThread_
-               : inlineMaxBytecodePerCallSiteActiveCooperatingThread_;
+               : inlineMaxBytecodePerCallSiteMainThread_;
     }
 
     uint16_t inlineMaxCalleeInlinedBytecodeLength() const {
         return inlineMaxCalleeInlinedBytecodeLength_;
     }
 
     uint32_t inlineMaxTotalBytecodeLength() const {
         return inlineMaxTotalBytecodeLength_;
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -649,17 +649,17 @@ class MOZ_STACK_CLASS AutoWritableJitCod
         rt_->toggleAutoWritableJitCodeActive(true);
         if (!ExecutableAllocator::makeWritable(addr_, size_))
             MOZ_CRASH();
     }
     AutoWritableJitCode(void* addr, size_t size)
       : AutoWritableJitCode(TlsContext.get()->runtime(), addr, size)
     {}
     explicit AutoWritableJitCode(JitCode* code)
-      : AutoWritableJitCode(code->runtimeFromActiveCooperatingThread(), code->raw(), code->bufferSize())
+      : AutoWritableJitCode(code->runtimeFromMainThread(), code->raw(), code->bufferSize())
     {}
     ~AutoWritableJitCode() {
         if (!ExecutableAllocator::makeExecutable(addr_, size_))
             MOZ_CRASH();
         rt_->toggleAutoWritableJitCodeActive(false);
     }
 };
 
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -957,17 +957,17 @@ jit::IonCompilationCanUseNurseryPointers
     // thread but might actually be on the active thread, check the flag set on
     // the JSContext by AutoEnterIonCompilation.
     if (CurrentThreadIsIonCompiling())
         return !CurrentThreadIsIonCompilingSafeForMinorGC();
 
     // Otherwise, we must be on the active thread during MIR construction. The
     // store buffer must have been notified that minor GCs must cancel pending
     // or in progress Ion compilations.
-    JSRuntime* rt = TlsContext.get()->zone()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = TlsContext.get()->zone()->runtimeFromMainThread();
     return rt->gc.storeBuffer().cancelIonCompilations();
 }
 
 #endif // DEBUG
 
 MConstant::MConstant(TempAllocator& alloc, const js::Value& vp, CompilerConstraintList* constraints)
   : MNullaryInstruction(classOpcode)
 {
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1326,17 +1326,17 @@ void
 AssertValidObjectPtr(JSContext* cx, JSObject* obj)
 {
     AutoUnsafeCallWithABI unsafe;
 #ifdef DEBUG
     // Check what we can, so that we'll hopefully assert/crash if we get a
     // bogus object (pointer).
     MOZ_ASSERT(obj->compartment() == cx->compartment());
     MOZ_ASSERT(obj->zoneFromAnyThread() == cx->zone());
-    MOZ_ASSERT(obj->runtimeFromActiveCooperatingThread() == cx->runtime());
+    MOZ_ASSERT(obj->runtimeFromMainThread() == cx->runtime());
 
     MOZ_ASSERT_IF(!obj->hasLazyGroup() && obj->maybeShape(),
                   obj->group()->clasp() == obj->maybeShape()->getObjectClass());
 
     if (obj->isTenured()) {
         MOZ_ASSERT(obj->isAligned());
         gc::AllocKind kind = obj->asTenured().getAllocKind();
         MOZ_ASSERT(gc::IsObjectAllocKind(kind));
--- a/js/src/jit/arm/Bailouts-arm.cpp
+++ b/js/src/jit/arm/Bailouts-arm.cpp
@@ -83,17 +83,17 @@ BailoutFrameInfo::BailoutFrameInfo(const
     attachOnJitActivation(activations);
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
-    JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = activation->compartment()->runtimeFromMainThread();
     TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
 #ifdef DEBUG
     uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
 #endif
     uintptr_t tableOffset = bailout->tableOffset();
     uintptr_t tableStart = reinterpret_cast<uintptr_t>(Assembler::BailoutTableStart(code.value));
 
     MOZ_ASSERT(tableOffset >= tableStart &&
--- a/js/src/jit/mips32/Bailouts-mips32.cpp
+++ b/js/src/jit/mips32/Bailouts-mips32.cpp
@@ -27,17 +27,17 @@ BailoutFrameInfo::BailoutFrameInfo(const
     attachOnJitActivation(activations);
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
-    JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = activation->compartment()->runtimeFromMainThread();
     TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
 #ifdef DEBUG
     uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
 #endif
     uintptr_t tableOffset = bailout->tableOffset();
     uintptr_t tableStart = reinterpret_cast<uintptr_t>(code.value);
 
     MOZ_ASSERT(tableOffset >= tableStart &&
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -1057,17 +1057,17 @@ CodeGeneratorShared::verifyCompactTracke
             MOZ_ASSERT(startOffset == entry.startOffset.offset());
             MOZ_ASSERT(endOffset == entry.endOffset.offset());
             MOZ_ASSERT(index == unique.indexOf(entry.optimizations));
 
             // Assert that the type info and attempts vectors are correctly
             // decoded. This is disabled for now if the types table might
             // contain nursery pointers, in which case the types might not
             // match, see bug 1175761.
-            JSRuntime* rt = code->runtimeFromActiveCooperatingThread();
+            JSRuntime* rt = code->runtimeFromMainThread();
             if (!rt->gc.storeBuffer().cancelIonCompilations()) {
                 IonTrackedOptimizationsTypeInfo typeInfo = typesTable->entry(index);
                 TempOptimizationTypeInfoVector tvec(alloc());
                 ReadTempTypeInfoVectorOp top(alloc(), &tvec);
                 typeInfo.forEach(top, allTypes);
                 MOZ_ASSERT_IF(!top.oom(), entry.optimizations->matchTypes(tvec));
             }
 
--- a/js/src/jit/x86/Bailouts-x86.cpp
+++ b/js/src/jit/x86/Bailouts-x86.cpp
@@ -79,17 +79,17 @@ BailoutFrameInfo::BailoutFrameInfo(const
     attachOnJitActivation(activations);
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
-    JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = activation->compartment()->runtimeFromMainThread();
     TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
 #ifdef DEBUG
     uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
 #endif
     uintptr_t tableOffset = bailout->tableOffset();
     uintptr_t tableStart = reinterpret_cast<uintptr_t>(code.value);
 
     MOZ_ASSERT(tableOffset >= tableStart &&
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -162,17 +162,17 @@ JS_FRIEND_API(void)
 JS_SetCompartmentPrincipals(JSCompartment* compartment, JSPrincipals* principals)
 {
     // Short circuit if there's no change.
     if (principals == compartment->principals())
         return;
 
     // Any compartment with the trusted principals -- and there can be
     // multiple -- is a system compartment.
-    const JSPrincipals* trusted = compartment->runtimeFromActiveCooperatingThread()->trustedPrincipals();
+    const JSPrincipals* trusted = compartment->runtimeFromMainThread()->trustedPrincipals();
     bool isSystem = principals && principals == trusted;
 
     // Clear out the old principals, if any.
     if (compartment->principals()) {
         JS_DropPrincipals(TlsContext.get(), compartment->principals());
         compartment->setPrincipals(nullptr);
         // We'd like to assert that our new principals is always same-origin
         // with the old one, but JSPrincipals doesn't give us a way to do that.
@@ -393,17 +393,17 @@ js::AssertSameCompartment(JSObject* objA
 }
 #endif
 
 JS_FRIEND_API(void)
 js::NotifyAnimationActivity(JSObject* obj)
 {
     int64_t timeNow = PRMJ_Now();
     obj->compartment()->lastAnimationTime = timeNow;
-    obj->runtimeFromActiveCooperatingThread()->lastAnimationTime = timeNow;
+    obj->runtimeFromMainThread()->lastAnimationTime = timeNow;
 }
 
 JS_FRIEND_API(uint32_t)
 js::GetObjectSlotSpan(JSObject* obj)
 {
     return obj->as<NativeObject>().slotSpan();
 }
 
--- a/js/src/proxy/Wrapper.cpp
+++ b/js/src/proxy/Wrapper.cpp
@@ -30,17 +30,17 @@ Wrapper::finalizeInBackground(const Valu
 
     /*
      * Make the 'background-finalized-ness' of the wrapper the same as the
      * wrapped object, to allow transplanting between them.
      */
     JSObject* wrapped = MaybeForwarded(&priv.toObject());
     gc::AllocKind wrappedKind;
     if (IsInsideNursery(wrapped)) {
-        JSRuntime *rt = wrapped->runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = wrapped->runtimeFromMainThread();
         wrappedKind = wrapped->allocKindForTenure(rt->gc.nursery());
     } else {
         wrappedKind = wrapped->asTenured().getAllocKind();
     }
     return IsBackgroundFinalized(wrappedKind);
 }
 
 bool
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -863,17 +863,17 @@ ArgumentsObject::objectMoved(JSObject* d
 {
     ArgumentsObject* ndst = &dst->as<ArgumentsObject>();
     const ArgumentsObject* nsrc = &src->as<ArgumentsObject>();
     MOZ_ASSERT(ndst->data() == nsrc->data());
 
     if (!IsInsideNursery(src))
         return 0;
 
-    Nursery& nursery = dst->runtimeFromActiveCooperatingThread()->gc.nursery();
+    Nursery& nursery = dst->runtimeFromMainThread()->gc.nursery();
 
     size_t nbytesTotal = 0;
     if (!nursery.isInside(nsrc->data())) {
         nursery.removeMallocedBuffer(nsrc->data());
     } else {
         AutoEnterOOMUnsafeRegion oomUnsafe;
         uint32_t nbytes = ArgumentsData::bytesRequired(nsrc->data()->numArgs);
         uint8_t* data = nsrc->zone()->pod_malloc<uint8_t>(nbytes);
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -3262,17 +3262,17 @@ Debugger::detachAllDebuggersFromGlobal(F
 Debugger::findZoneEdges(Zone* zone, js::gc::ZoneComponentFinder& finder)
 {
     /*
      * For debugger cross compartment wrappers, add edges in the opposite
      * direction to those already added by JSCompartment::findOutgoingEdges.
     * This ensures that debuggers and their debuggees are finalized in the same
      * group.
      */
-    JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = zone->runtimeFromMainThread();
     for (Debugger* dbg : rt->debuggerList()) {
         Zone* w = dbg->object->zone();
         if (w == zone || !w->isGCMarking())
             continue;
         if (dbg->debuggeeZones.has(zone) ||
             dbg->scripts.hasKeyInZone(zone) ||
             dbg->sources.hasKeyInZone(zone) ||
             dbg->objects.hasKeyInZone(zone) ||
--- a/js/src/vm/GeckoProfiler.cpp
+++ b/js/src/vm/GeckoProfiler.cpp
@@ -230,17 +230,17 @@ void
 GeckoProfilerThread::exit(JSScript* script, JSFunction* maybeFun)
 {
     pseudoStack_->pop();
 
 #ifdef DEBUG
     /* Sanity check to make sure push/pop balanced */
     uint32_t sp = pseudoStack_->stackPointer;
     if (sp < pseudoStack_->stackCapacity()) {
-        JSRuntime* rt = script->runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = script->runtimeFromMainThread();
         const char* dynamicString = rt->geckoProfiler().profileString(script, maybeFun);
         /* Can't fail lookup because we should already be in the set */
         MOZ_ASSERT(dynamicString);
 
         // Bug 822041
         if (!pseudoStack_->entries[sp].isJs()) {
             fprintf(stderr, "--- ABOUT TO FAIL ASSERTION ---\n");
             fprintf(stderr, " entries=%p size=%u/%u\n",
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -212,19 +212,19 @@ FinishOffThreadIonCompile(jit::IonBuilde
     builder->script()->runtimeFromAnyThread()->jitRuntime()->numFinishedBuildersRef(lock)++;
 }
 
 static JSRuntime*
 GetSelectorRuntime(const CompilationSelector& selector)
 {
     struct Matcher
     {
-        JSRuntime* match(JSScript* script)    { return script->runtimeFromActiveCooperatingThread(); }
-        JSRuntime* match(JSCompartment* comp) { return comp->runtimeFromActiveCooperatingThread(); }
-        JSRuntime* match(Zone* zone)          { return zone->runtimeFromActiveCooperatingThread(); }
+        JSRuntime* match(JSScript* script)    { return script->runtimeFromMainThread(); }
+        JSRuntime* match(JSCompartment* comp) { return comp->runtimeFromMainThread(); }
+        JSRuntime* match(Zone* zone)          { return zone->runtimeFromMainThread(); }
         JSRuntime* match(ZonesInState zbs)    { return zbs.runtime; }
         JSRuntime* match(JSRuntime* runtime)  { return runtime; }
         JSRuntime* match(AllCompilations all) { return nullptr; }
         JSRuntime* match(CompilationsUsingNursery cun) { return cun.runtime; }
     };
 
     return selector.match(Matcher());
 }
@@ -363,17 +363,17 @@ js::HasOffThreadIonCompile(JSCompartment
 
     GlobalHelperThreadState::IonBuilderVector& finished = HelperThreadState().ionFinishedList(lock);
     for (size_t i = 0; i < finished.length(); i++) {
         jit::IonBuilder* builder = finished[i];
         if (builder->script()->compartment() == comp)
             return true;
     }
 
-    JSRuntime* rt = comp->runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = comp->runtimeFromMainThread();
     jit::IonBuilder* builder = rt->jitRuntime()->ionLazyLinkList(rt).getFirst();
     while (builder) {
         if (builder->script()->compartment() == comp)
             return true;
         builder = builder->getNext();
     }
 
     return false;
@@ -1441,17 +1441,17 @@ TimeSince(TimeStamp prev)
     // Sadly this happens sometimes.
     MOZ_ASSERT(now >= prev);
     if (now < prev)
         now = prev;
     return now - prev;
 }
 
 void
-js::GCParallelTask::runFromActiveCooperatingThread(JSRuntime* rt)
+js::GCParallelTask::runFromMainThread(JSRuntime* rt)
 {
     MOZ_ASSERT(state == NotStarted);
     MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(rt));
     TimeStamp timeStart = TimeStamp::Now();
     run();
     duration_ = TimeSince(timeStart);
 }
 
--- a/js/src/vm/JSCompartment.cpp
+++ b/js/src/vm/JSCompartment.cpp
@@ -66,17 +66,17 @@ JSCompartment::JSCompartment(Zone* zone,
     arraySpeciesLookup(),
     globalWriteBarriered(0),
     detachedTypedObjects(0),
     objectMetadataState(ImmediateMetadata()),
     selfHostingScriptSource(nullptr),
     objectMetadataTable(nullptr),
     innerViews(zone),
     lazyArrayBuffers(nullptr),
-    wasm(zone->runtimeFromActiveCooperatingThread()),
+    wasm(zone->runtimeFromMainThread()),
     nonSyntacticLexicalEnvironments_(nullptr),
     gcIncomingGrayPointers(nullptr),
     debugModeBits(0),
     validAccessPtr(nullptr),
     randomKeyGenerator_(runtime_->forkRandomKeyGenerator()),
     scriptCountsMap(nullptr),
     scriptNameMap(nullptr),
     debugScriptMap(nullptr),
@@ -94,17 +94,17 @@ JSCompartment::JSCompartment(Zone* zone,
     runtime_->numCompartments++;
     MOZ_ASSERT_IF(creationOptions_.mergeable(),
                   creationOptions_.invisibleToDebugger());
 }
 
 JSCompartment::~JSCompartment()
 {
     // Write the code coverage information in a file.
-    JSRuntime* rt = runtimeFromActiveCooperatingThread();
+    JSRuntime* rt = runtimeFromMainThread();
     if (rt->lcovOutput().isEnabled())
         rt->lcovOutput().writeLCovResult(lcovOutput);
 
     js_delete(jitCompartment_);
     js_delete(scriptCountsMap);
     js_delete(scriptNameMap);
     js_delete(debugScriptMap);
     js_delete(debugEnvs);
@@ -1161,17 +1161,17 @@ void
 JSCompartment::updateDebuggerObservesFlag(unsigned flag)
 {
     MOZ_ASSERT(isDebuggee());
     MOZ_ASSERT(flag == DebuggerObservesAllExecution ||
                flag == DebuggerObservesCoverage ||
                flag == DebuggerObservesAsmJS ||
                flag == DebuggerObservesBinarySource);
 
-    GlobalObject* global = zone()->runtimeFromActiveCooperatingThread()->gc.isForegroundSweeping()
+    GlobalObject* global = zone()->runtimeFromMainThread()->gc.isForegroundSweeping()
                            ? unsafeUnbarrieredMaybeGlobal()
                            : maybeGlobal();
     const GlobalObject::DebuggerVector* v = global->getDebuggers();
     for (auto p = v->begin(); p != v->end(); p++) {
         Debugger* dbg = *p;
         if (flag == DebuggerObservesAllExecution ? dbg->observesAllExecution() :
             flag == DebuggerObservesCoverage ? dbg->observesCoverage() :
             flag == DebuggerObservesAsmJS ? dbg->observesAsmJS() :
--- a/js/src/vm/JSCompartment.h
+++ b/js/src/vm/JSCompartment.h
@@ -648,17 +648,17 @@ struct JSCompartment
 
     JS::Zone* zone() { return zone_; }
     const JS::Zone* zone() const { return zone_; }
 
     const JS::CompartmentCreationOptions& creationOptions() const { return creationOptions_; }
     JS::CompartmentBehaviors& behaviors() { return behaviors_; }
     const JS::CompartmentBehaviors& behaviors() const { return behaviors_; }
 
-    JSRuntime* runtimeFromActiveCooperatingThread() const {
+    JSRuntime* runtimeFromMainThread() const {
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
         return runtime_;
     }
 
     // Note: Unrestricted access to the zone's runtime from an arbitrary
     // thread can easily lead to races. Use this method very carefully.
     JSRuntime* runtimeFromAnyThread() const {
         return runtime_;
--- a/js/src/vm/JSObject.cpp
+++ b/js/src/vm/JSObject.cpp
@@ -3883,17 +3883,17 @@ JSObject::addSizeOfExcludingThis(mozilla
 size_t
 JSObject::sizeOfIncludingThisInNursery() const
 {
     // This function doesn't concern itself yet with typed objects (bug 1133593)
     // nor unboxed objects (bug 1133592).
 
     MOZ_ASSERT(!isTenured());
 
-    const Nursery& nursery = runtimeFromActiveCooperatingThread()->gc.nursery();
+    const Nursery& nursery = runtimeFromMainThread()->gc.nursery();
     size_t size = Arena::thingSize(allocKindForTenure(nursery));
 
     if (is<NativeObject>()) {
         const NativeObject& native = as<NativeObject>();
 
         size += native.numFixedSlots() * sizeof(Value);
         size += native.numDynamicSlots() * sizeof(Value);
 
--- a/js/src/vm/JSScript.cpp
+++ b/js/src/vm/JSScript.cpp
@@ -1332,17 +1332,17 @@ JSScript::hasScriptName()
 
     auto p = compartment()->scriptNameMap->lookup(this);
     return p.found();
 }
 
 void
 ScriptSourceObject::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     ScriptSourceObject* sso = &obj->as<ScriptSourceObject>();
     sso->source()->decref();
 }
 
 static const ClassOps ScriptSourceObjectClassOps = {
     nullptr, /* addProperty */
     nullptr, /* delProperty */
     nullptr, /* enumerate */
--- a/js/src/vm/NativeObject-inl.h
+++ b/js/src/vm/NativeObject-inl.h
@@ -119,17 +119,17 @@ NativeObject::markDenseElementsNotPacked
 }
 
 inline void
 NativeObject::elementsRangeWriteBarrierPost(uint32_t start, uint32_t count)
 {
     for (size_t i = 0; i < count; i++) {
         const Value& v = elements_[start + i];
         if ((v.isObject() || v.isString()) && IsInsideNursery(v.toGCThing())) {
-            JSRuntime* rt = runtimeFromActiveCooperatingThread();
+            JSRuntime* rt = runtimeFromMainThread();
             rt->gc.storeBuffer().putSlot(this, HeapSlot::Element,
                                          unshiftedIndex(start + i),
                                          count - i);
             return;
         }
     }
 }
 
--- a/js/src/vm/RegExpStatics.cpp
+++ b/js/src/vm/RegExpStatics.cpp
@@ -18,17 +18,17 @@ using namespace js;
  * per-global and not leak, we create a js::Class to wrap the C++ instance and
  * provide an appropriate finalizer. We lazily create and store an instance of
  * that js::Class in a global reserved slot.
  */
 
 static void
 resc_finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     RegExpStatics* res = static_cast<RegExpStatics*>(obj->as<RegExpStaticsObject>().getPrivate());
     fop->delete_(res);
 }
 
 static void
 resc_trace(JSTracer* trc, JSObject* obj)
 {
     void* pdata = obj->as<RegExpStaticsObject>().getPrivate();
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -400,20 +400,20 @@ SavedFrame::protoAccessors[] = {
     JS_PSG("asyncParent", SavedFrame::asyncParentProperty, 0),
     JS_PSG("parent", SavedFrame::parentProperty, 0),
     JS_PS_END
 };
 
 /* static */ void
 SavedFrame::finalize(FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(fop->onActiveCooperatingThread());
+    MOZ_ASSERT(fop->onMainThread());
     JSPrincipals* p = obj->as<SavedFrame>().getPrincipals();
     if (p) {
-        JSRuntime* rt = obj->runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = obj->runtimeFromMainThread();
         JS_DropPrincipals(rt->mainContextFromOwnThread(), p);
     }
 }
 
 JSAtom*
 SavedFrame::getSource()
 {
     const Value& v = getReservedSlot(JSSLOT_SOURCE);
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -151,17 +151,17 @@ GetterSetterWriteBarrierPost(AccessorSha
     auto& nurseryShapes = shape->zone()->nurseryShapes();
 
     {
         AutoEnterOOMUnsafeRegion oomUnsafe;
         if (!nurseryShapes.append(shape))
             oomUnsafe.crash("GetterSetterWriteBarrierPost");
     }
 
-    auto& storeBuffer = shape->runtimeFromActiveCooperatingThread()->gc.storeBuffer();
+    auto& storeBuffer = shape->runtimeFromMainThread()->gc.storeBuffer();
     if (nurseryShapes.length() == 1) {
         storeBuffer.putGeneric(NurseryShapesRef(shape->zone()));
     } else if (nurseryShapes.length() == MaxShapeVectorLength) {
         storeBuffer.setAboutToOverflow(JS::gcreason::FULL_SHAPE_BUFFER);
     }
 }
 
 inline
--- a/js/src/vm/StringType.cpp
+++ b/js/src/vm/StringType.cpp
@@ -50,17 +50,17 @@ JSString::sizeOfExcludingThis(mozilla::M
 
     // JSDependentString: do nothing, we'll count the chars when we hit the base string.
     if (isDependent())
         return 0;
 
     // JSExternalString: Ask the embedding to tell us what's going on.  If it
     // doesn't want to say, don't count, the chars could be stored anywhere.
     if (isExternal()) {
-        if (auto* cb = runtimeFromActiveCooperatingThread()->externalStringSizeofCallback.ref()) {
+        if (auto* cb = runtimeFromMainThread()->externalStringSizeofCallback.ref()) {
             // Our callback isn't supposed to cause GC.
             JS::AutoSuppressGCAnalysis nogc;
             return cb(this, mallocSizeOf);
         }
         return 0;
     }
 
     MOZ_ASSERT(isFlat());
@@ -507,33 +507,33 @@ JSRope::flattenInternal(JSContext* maybe
             str->setNonInlineChars(wholeChars);
             pos = wholeChars + left.d.u1.length;
             if (IsSame<CharT, char16_t>::value)
                 left.d.u1.flags = DEPENDENT_FLAGS;
             else
                 left.d.u1.flags = DEPENDENT_FLAGS | LATIN1_CHARS_BIT;
             left.d.s.u3.base = (JSLinearString*)this;  /* will be true on exit */
             BarrierMethods<JSString*>::postBarrier((JSString**)&left.d.s.u3.base, nullptr, this);
-            Nursery& nursery = runtimeFromActiveCooperatingThread()->gc.nursery();
+            Nursery& nursery = runtimeFromMainThread()->gc.nursery();
             if (isTenured() && !left.isTenured())
                 nursery.removeMallocedBuffer(wholeChars);
             else if (!isTenured() && left.isTenured())
                 nursery.registerMallocedBuffer(wholeChars);
             goto visit_right_child;
         }
     }
 
     if (!AllocChars(this, wholeLength, &wholeChars, &wholeCapacity)) {
         if (maybecx)
             ReportOutOfMemory(maybecx);
         return nullptr;
     }
 
     if (!isTenured()) {
-        Nursery& nursery = runtimeFromActiveCooperatingThread()->gc.nursery();
+        Nursery& nursery = runtimeFromMainThread()->gc.nursery();
         if (!nursery.registerMallocedBuffer(wholeChars)) {
             js_free(wholeChars);
             if (maybecx)
                 ReportOutOfMemory(maybecx);
             return nullptr;
         }
     }
 
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -4542,17 +4542,17 @@ AutoClearTypeInferenceStateOnOOM::AutoCl
     zone->types.setSweepingTypes(true);
 }
 
 AutoClearTypeInferenceStateOnOOM::~AutoClearTypeInferenceStateOnOOM()
 {
     zone->types.setSweepingTypes(false);
 
     if (oom) {
-        JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
+        JSRuntime* rt = zone->runtimeFromMainThread();
         js::CancelOffThreadIonCompile(rt);
         zone->setPreservingCode(false);
         zone->discardJitCode(rt->defaultFreeOp(), /* discardBaselineCode = */ false);
         zone->types.clearAllNewScriptsOnOOM();
     }
 }
 
 #ifdef DEBUG
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -193,17 +193,17 @@ TypedArrayObject::objectMoved(JSObject* 
     if (!IsInsideNursery(old)) {
         // Update the data slot pointer if it points to the old JSObject.
         if (oldObj->hasInlineElements())
             newObj->setInlineElements();
 
         return 0;
     }
 
-    Nursery& nursery = obj->runtimeFromActiveCooperatingThread()->gc.nursery();
+    Nursery& nursery = obj->runtimeFromMainThread()->gc.nursery();
     void* buf = oldObj->elements();
 
     if (!nursery.isInside(buf)) {
         nursery.removeMallocedBuffer(buf);
         return 0;
     }
 
     // Determine if we can use inline data for the target array. If this is