Bug 1283855 part 28 - Make more GC APIs take JSContext instead of JSRuntime. r=terrence
authorJan de Mooij <jdemooij@mozilla.com>
Thu, 07 Jul 2016 09:56:09 +0200
changeset 331294 c521197a29b7e2f100420312788da9055ff2a8b5
parent 331293 d22e5cad510bc8c642702e9e8ebb7140cff9605e
child 331295 e36d8767d57e579cc6eab0570db2c8d869cd28c8
push id: unknown
push user: unknown
push date: unknown
reviewers: terrence
bugs: 1283855
milestone: 50.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1283855 part 28 - Make more GC APIs take JSContext instead of JSRuntime. r=terrence
dom/base/nsDOMWindowUtils.cpp
dom/workers/WorkerPrivate.cpp
js/public/GCAPI.h
js/public/MemoryMetrics.h
js/public/RootingAPI.h
js/src/builtin/TestingFunctions.cpp
js/src/gc/GCInternals.h
js/src/gc/Iteration.cpp
js/src/gc/Verifier.cpp
js/src/jsapi-tests/testGCAllocator.cpp
js/src/jsapi-tests/testGCMarking.cpp
js/src/jsapi-tests/testGCUniqueId.cpp
js/src/jsapi-tests/testPreserveJitCode.cpp
js/src/jsapi-tests/testWeakMap.cpp
js/src/jsapi.cpp
js/src/jsfriendapi.cpp
js/src/jsfun.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsopcode.cpp
js/src/jsscript.cpp
js/src/jsweakmap.h
js/src/shell/js.cpp
js/src/vm/Debugger.cpp
js/src/vm/HelperThreads.cpp
js/src/vm/HelperThreads.h
js/src/vm/MemoryMetrics.cpp
js/src/vm/Runtime.cpp
js/xpconnect/src/XPCJSRuntime.cpp
js/xpconnect/src/XPCWrappedNative.cpp
js/xpconnect/src/xpcprivate.h
xpcom/base/CycleCollectedJSRuntime.cpp
xpcom/base/nsCycleCollector.cpp
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -3070,17 +3070,17 @@ nsDOMWindowUtils::FlushPendingFileDeleti
   }
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
 {
-  *aResult = JS::IsIncrementalGCEnabled(JS_GetRuntime(cx));
+  *aResult = JS::IsIncrementalGCEnabled(cx);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
 {
   js::StartPCCountProfiling(cx);
   return NS_OK;
--- a/dom/workers/WorkerPrivate.cpp
+++ b/dom/workers/WorkerPrivate.cpp
@@ -4937,17 +4937,17 @@ WorkerPrivate::BlockAndCollectRuntimeSta
 
   bool succeeded = false;
 
   // If mMemoryReporter is still set then we can do the actual report. Otherwise
   // we're trying to shut down and we don't want to do anything but clean up.
   if (mMemoryReporter) {
     // Don't hold the lock while doing the actual report.
     MutexAutoUnlock unlock(mMutex);
-    succeeded = JS::CollectRuntimeStats(rt, aRtStats, nullptr, aAnonymize);
+    succeeded = JS::CollectRuntimeStats(mJSContext, aRtStats, nullptr, aAnonymize);
   }
 
   NS_ASSERTION(mMemoryReporterRunning, "This isn't possible!");
   NS_ASSERTION(mBlockedForMemoryReporter, "Somehow we got unblocked!");
 
   // Tell the worker that it can now continue its execution.
   mMemoryReporterRunning = false;
 
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -388,43 +388,43 @@ SetGCNurseryCollectionCallback(JSContext
 
 /**
  * Incremental GC defaults to enabled, but may be disabled for testing or in
  * embeddings that have not yet implemented barriers on their native classes.
  * There is not currently a way to re-enable incremental GC once it has been
  * disabled on the runtime.
  */
 extern JS_PUBLIC_API(void)
-DisableIncrementalGC(JSRuntime* rt);
+DisableIncrementalGC(JSContext* cx);
 
 /**
  * Returns true if incremental GC is enabled. Simply having incremental GC
  * enabled is not sufficient to ensure incremental collections are happening.
  * See the comment "Incremental GC" above for reasons why incremental GC may be
  * suppressed. Inspection of the "nonincremental reason" field of the
  * GCDescription returned by GCSliceCallback may help narrow down the cause if
  * collections are not happening incrementally when expected.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalGCEnabled(JSRuntime* rt);
+IsIncrementalGCEnabled(JSContext* cx);
 
 /**
  * Returns true while an incremental GC is ongoing, both when actively
  * collecting and between slices.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalGCInProgress(JSRuntime* rt);
+IsIncrementalGCInProgress(JSContext* cx);
 
 /*
  * Returns true when writes to GC things must call an incremental (pre) barrier.
  * This is generally only true when running mutator code in-between GC slices.
  * At other times, the barrier may be elided for performance.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalBarrierNeeded(JSRuntime* rt);
+IsIncrementalBarrierNeeded(JSContext* cx);
 
 /*
  * Notify the GC that a reference to a GC thing is about to be overwritten.
  * These methods must be called if IsIncrementalBarrierNeeded.
  */
 extern JS_PUBLIC_API(void)
 IncrementalReferenceBarrier(GCCellPtr thing);
 
@@ -433,17 +433,17 @@ IncrementalValueBarrier(const Value& v);
 
 extern JS_PUBLIC_API(void)
 IncrementalObjectBarrier(JSObject* obj);
 
 /**
  * Returns true if the most recent GC ran incrementally.
  */
 extern JS_PUBLIC_API(bool)
-WasIncrementalGC(JSRuntime* rt);
+WasIncrementalGC(JSContext* cx);
 
 /*
  * Generational GC:
  *
  * Note: Generational GC is not yet enabled by default. The following class
  *       is non-functional unless SpiderMonkey was configured with
  *       --enable-gcgenerational.
  */
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -878,33 +878,33 @@ class ObjectPrivateVisitor
     GetISupportsFun getISupports_;
 
     explicit ObjectPrivateVisitor(GetISupportsFun getISupports)
       : getISupports_(getISupports)
     {}
 };
 
 extern JS_PUBLIC_API(bool)
-CollectRuntimeStats(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVisitor* opv, bool anonymize);
+CollectRuntimeStats(JSContext* cx, RuntimeStats* rtStats, ObjectPrivateVisitor* opv, bool anonymize);
 
 extern JS_PUBLIC_API(size_t)
 SystemCompartmentCount(JSRuntime* rt);
 
 extern JS_PUBLIC_API(size_t)
 UserCompartmentCount(JSRuntime* rt);
 
 extern JS_PUBLIC_API(size_t)
 PeakSizeOfTemporary(const JSRuntime* rt);
 
 extern JS_PUBLIC_API(bool)
-AddSizeOfTab(JSRuntime* rt, JS::HandleObject obj, mozilla::MallocSizeOf mallocSizeOf,
+AddSizeOfTab(JSContext* cx, JS::HandleObject obj, mozilla::MallocSizeOf mallocSizeOf,
              ObjectPrivateVisitor* opv, TabSizes* sizes);
 
 extern JS_PUBLIC_API(bool)
-AddServoSizeOf(JSRuntime *rt, mozilla::MallocSizeOf mallocSizeOf,
+AddServoSizeOf(JSContext* cx, mozilla::MallocSizeOf mallocSizeOf,
                ObjectPrivateVisitor *opv, ServoSizes *sizes);
 
 } // namespace JS
 
 #undef DECL_SIZE
 #undef ZERO_SIZE
 #undef COPY_OTHER_SIZE
 #undef ADD_OTHER_SIZE
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -1084,27 +1084,23 @@ class JS_PUBLIC_API(ObjectPtr)
   public:
     ObjectPtr() : value(nullptr) {}
 
     explicit ObjectPtr(JSObject* obj) : value(obj) {}
 
     /* Always call finalize before the destructor. */
     ~ObjectPtr() { MOZ_ASSERT(!value); }
 
-    void finalize(JSRuntime* rt) {
-        if (IsIncrementalBarrierNeeded(rt))
-            IncrementalObjectBarrier(value);
-        value = nullptr;
-    }
+    void finalize(JSRuntime* rt);
 
     void init(JSObject* obj) { value = obj; }
 
     JSObject* get() const { return value; }
 
-    void writeBarrierPre(JSRuntime* rt) {
+    void writeBarrierPre(JSContext* cx) {
         IncrementalObjectBarrier(value);
     }
 
     void updateWeakPointerAfterGC();
 
     ObjectPtr& operator=(JSObject* obj) {
         IncrementalObjectBarrier(value);
         value = obj;
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -426,17 +426,17 @@ GCParameter(JSContext* cx, unsigned argc
         return false;
 
     if (d < 0 || d > UINT32_MAX) {
         JS_ReportError(cx, "Parameter value out of range");
         return false;
     }
 
     uint32_t value = floor(d);
-    if (param == JSGC_MARK_STACK_LIMIT && JS::IsIncrementalGCInProgress(cx->runtime())) {
+    if (param == JSGC_MARK_STACK_LIMIT && JS::IsIncrementalGCInProgress(cx)) {
         JS_ReportError(cx, "attempt to set markStackLimit while a GC is in progress");
         return false;
     }
 
     if (param == JSGC_MAX_BYTES) {
         uint32_t gcBytes = JS_GetGCParameter(cx->runtime(), JSGC_BYTES);
         if (value < gcBytes) {
             JS_ReportError(cx,
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -15,17 +15,17 @@
 
 #include "gc/Zone.h"
 #include "vm/HelperThreads.h"
 #include "vm/Runtime.h"
 
 namespace js {
 namespace gc {
 
-void FinishGC(JSRuntime* rt);
+void FinishGC(JSContext* cx);
 
 /*
  * This class should be used by any code that needs to exclusive access to the
  * heap in order to trace through it...
  */
 class MOZ_RAII AutoTraceSession
 {
   public:
@@ -45,17 +45,17 @@ class MOZ_RAII AutoTraceSession
     AutoSPSEntry pseudoFrame;
 };
 
 class MOZ_RAII AutoPrepareForTracing
 {
     mozilla::Maybe<AutoTraceSession> session_;
 
   public:
-    AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector);
+    AutoPrepareForTracing(JSContext* cx, ZoneSelector selector);
     AutoTraceSession& session() { return session_.ref(); }
 };
 
 class IncrementalSafety
 {
     const char* reason_;
 
     explicit IncrementalSafety(const char* reason) : reason_(reason) {}
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -19,17 +19,17 @@ using namespace js::gc;
 
 void
 js::TraceRuntime(JSTracer* trc)
 {
     MOZ_ASSERT(!trc->isMarkingTracer());
 
     JSRuntime* rt = trc->runtime();
     rt->gc.evictNursery();
-    AutoPrepareForTracing prep(rt, WithAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
     gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_TRACE_HEAP);
     rt->gc.markRuntime(trc, GCRuntime::TraceRuntime, prep.session().lock);
 }
 
 static void
 IterateCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
                                JSIterateCompartmentCallback compartmentCallback,
                                IterateArenaCallback arenaCallback,
@@ -47,82 +47,82 @@ IterateCompartmentsArenasCells(JSRuntime
             (*arenaCallback)(rt, data, arena, traceKind, thingSize);
             for (ArenaCellIterUnderGC iter(arena); !iter.done(); iter.next())
                 (*cellCallback)(rt, data, iter.getCell(), traceKind, thingSize);
         }
     }
 }
 
 void
-js::IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data,
+js::IterateZonesCompartmentsArenasCells(JSContext* cx, void* data,
                                         IterateZoneCallback zoneCallback,
                                         JSIterateCompartmentCallback compartmentCallback,
                                         IterateArenaCallback arenaCallback,
                                         IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt, WithAtoms);
+    AutoPrepareForTracing prop(cx, WithAtoms);
 
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        (*zoneCallback)(rt, data, zone);
-        IterateCompartmentsArenasCells(rt, zone, data,
+    for (ZonesIter zone(cx, WithAtoms); !zone.done(); zone.next()) {
+        (*zoneCallback)(cx, data, zone);
+        IterateCompartmentsArenasCells(cx, zone, data,
                                        compartmentCallback, arenaCallback, cellCallback);
     }
 }
 
 void
-js::IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
+js::IterateZoneCompartmentsArenasCells(JSContext* cx, Zone* zone, void* data,
                                        IterateZoneCallback zoneCallback,
                                        JSIterateCompartmentCallback compartmentCallback,
                                        IterateArenaCallback arenaCallback,
                                        IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt, WithAtoms);
+    AutoPrepareForTracing prop(cx, WithAtoms);
 
-    (*zoneCallback)(rt, data, zone);
-    IterateCompartmentsArenasCells(rt, zone, data,
+    (*zoneCallback)(cx, data, zone);
+    IterateCompartmentsArenasCells(cx, zone, data,
                                    compartmentCallback, arenaCallback, cellCallback);
 }
 
 void
-js::IterateChunks(JSRuntime* rt, void* data, IterateChunkCallback chunkCallback)
+js::IterateChunks(JSContext* cx, void* data, IterateChunkCallback chunkCallback)
 {
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(cx, SkipAtoms);
 
-    for (auto chunk = rt->gc.allNonEmptyChunks(); !chunk.done(); chunk.next())
-        chunkCallback(rt, data, chunk);
+    for (auto chunk = cx->gc.allNonEmptyChunks(); !chunk.done(); chunk.next())
+        chunkCallback(cx, data, chunk);
 }
 
 void
-js::IterateScripts(JSRuntime* rt, JSCompartment* compartment,
+js::IterateScripts(JSContext* cx, JSCompartment* compartment,
                    void* data, IterateScriptCallback scriptCallback)
 {
-    MOZ_ASSERT(!rt->mainThread.suppressGC);
-    AutoEmptyNursery empty(rt);
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    MOZ_ASSERT(!cx->mainThread().suppressGC);
+    AutoEmptyNursery empty(cx);
+    AutoPrepareForTracing prep(cx, SkipAtoms);
 
     if (compartment) {
         Zone* zone = compartment->zone();
         for (auto script = zone->cellIter<JSScript>(empty); !script.done(); script.next()) {
             if (script->compartment() == compartment)
-                scriptCallback(rt, data, script);
+                scriptCallback(cx, data, script);
         }
     } else {
-        for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
+        for (ZonesIter zone(cx, SkipAtoms); !zone.done(); zone.next()) {
             for (auto script = zone->cellIter<JSScript>(empty); !script.done(); script.next())
-                scriptCallback(rt, data, script);
+                scriptCallback(cx, data, script);
         }
     }
 }
 
 void
 js::IterateGrayObjects(Zone* zone, GCThingCallback cellCallback, void* data)
 {
     JSRuntime* rt = zone->runtimeFromMainThread();
     AutoEmptyNursery empty(rt);
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), SkipAtoms);
 
     for (auto thingKind : ObjectAllocKinds()) {
         for (auto obj = zone->cellIter<JSObject>(thingKind, empty); !obj.done(); obj.next()) {
             if (obj->asTenured().isMarked(GRAY))
                 cellCallback(data, JS::GCCellPtr(obj.get()));
         }
     }
 }
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -178,17 +178,17 @@ gc::GCRuntime::startVerifyPreBarriers()
         return;
 
     number++;
 
     VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
     if (!trc)
         return;
 
-    AutoPrepareForTracing prep(rt, WithAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
 
     for (auto chunk = allNonEmptyChunks(); !chunk.done(); chunk.next())
         chunk->bitmap.clear();
 
     gcstats::AutoPhase ap(stats, gcstats::PHASE_TRACE_HEAP);
 
     const size_t size = 64 * 1024 * 1024;
     trc->root = (VerifyNode*)js_malloc(size);
@@ -307,17 +307,17 @@ gc::GCRuntime::endVerifyPreBarriers()
 {
     VerifyPreTracer* trc = verifyPreData;
 
     if (!trc)
         return;
 
     MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));
 
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), SkipAtoms);
 
     bool compartmentCreated = false;
 
     /* We need to disable barriers before tracing, which may invoke barriers. */
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (!zone->needsIncrementalBarrier())
             compartmentCreated = true;
 
--- a/js/src/jsapi-tests/testGCAllocator.cpp
+++ b/js/src/jsapi-tests/testGCAllocator.cpp
@@ -43,17 +43,17 @@ BEGIN_TEST(testGCAllocator)
     return true;
 #elif defined(XP_UNIX)
     PageSize = size_t(sysconf(_SC_PAGESIZE));
 #else
     return true;
 #endif
 
     /* Finish any ongoing background free activity. */
-    js::gc::FinishGC(rt);
+    js::gc::FinishGC(cx);
 
     bool growUp;
     CHECK(addressesGrowUp(&growUp));
 
     if (growUp)
         return testGCAllocatorUp(PageSize);
     return testGCAllocatorDown(PageSize);
 }
--- a/js/src/jsapi-tests/testGCMarking.cpp
+++ b/js/src/jsapi-tests/testGCMarking.cpp
@@ -163,17 +163,17 @@ BEGIN_TEST(testIncrementalRoots)
     // of 1000, and the graph is about 3000 objects deep).
     js::SliceBudget budget(js::WorkBudget(1000));
     JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_INCREMENTAL);
     rt->gc.startDebugGC(GC_NORMAL, budget);
 
     // We'd better be between iGC slices now. There's always a risk that
     // something will decide that we need to do a full GC (such as gczeal, but
     // that is turned off.)
-    MOZ_ASSERT(JS::IsIncrementalGCInProgress(rt));
+    MOZ_ASSERT(JS::IsIncrementalGCInProgress(cx));
 
     // And assert that the mark bits are as we expect them to be.
     MOZ_ASSERT(vec[0]->asTenured().isMarked());
     MOZ_ASSERT(!leafHandle->asTenured().isMarked());
     MOZ_ASSERT(!leafOwnerHandle->asTenured().isMarked());
 
 #ifdef DEBUG
     // Remember the current GC number so we can assert that no GC occurs
--- a/js/src/jsapi-tests/testGCUniqueId.cpp
+++ b/js/src/jsapi-tests/testGCUniqueId.cpp
@@ -7,36 +7,36 @@
 
 #include "gc/GCInternals.h"
 #include "gc/Zone.h"
 #include "js/GCVector.h"
 
 #include "jsapi-tests/tests.h"
 
 static void
-MinimizeHeap(JSRuntime* rt)
+MinimizeHeap(JSContext* cx)
 {
     // The second collection is to force us to wait for the background
     // sweeping that the first GC started to finish.
-    JS_GC(JS_GetContext(rt));
-    JS_GC(JS_GetContext(rt));
-    js::gc::FinishGC(rt);
+    JS_GC(cx);
+    JS_GC(cx);
+    js::gc::FinishGC(cx);
 }
 
 BEGIN_TEST(testGCUID)
 {
 #ifdef JS_GC_ZEAL
     AutoLeaveZeal nozeal(cx);
 #endif /* JS_GC_ZEAL */
 
     uint64_t uid = 0;
     uint64_t tmp = 0;
 
     // Ensure the heap is as minimal as it can get.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
 
     JS::RootedObject obj(cx, JS_NewPlainObject(cx));
     uintptr_t nurseryAddr = uintptr_t(obj.get());
     CHECK(obj);
     CHECK(js::gc::IsInsideNursery(obj));
 
     // Do not start with an ID.
     CHECK(!obj->zone()->hasUniqueId(obj));
@@ -48,17 +48,17 @@ BEGIN_TEST(testGCUID)
     // We should now have an id.
     CHECK(obj->zone()->hasUniqueId(obj));
 
     // Calling again should get us the same thing.
     CHECK(obj->zone()->getUniqueId(obj, &tmp));
     CHECK(uid == tmp);
 
     // Tenure the thing and check that the UID moved with it.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     uintptr_t tenuredAddr = uintptr_t(obj.get());
     CHECK(tenuredAddr != nurseryAddr);
     CHECK(!js::gc::IsInsideNursery(obj));
     CHECK(obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));
     CHECK(uid == tmp);
 
     // Allocate a new nursery thing in the same location and check that we
@@ -66,19 +66,19 @@ BEGIN_TEST(testGCUID)
     obj = JS_NewPlainObject(cx);
     CHECK(obj);
     CHECK(uintptr_t(obj.get()) == nurseryAddr);
     CHECK(!obj->zone()->hasUniqueId(obj));
 
     // Try to get another tenured object in the same location and check that
     // the uid was removed correctly.
     obj = nullptr;
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     obj = JS_NewPlainObject(cx);
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     CHECK(uintptr_t(obj.get()) == tenuredAddr);
     CHECK(!obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));
     CHECK(uid != tmp);
     uid = tmp;
 
     // Allocate a few arenas worth of objects to ensure we get some compaction.
     const static size_t N = 2049;
@@ -86,38 +86,38 @@ BEGIN_TEST(testGCUID)
     JS::Rooted<ObjectVector> vec(cx, ObjectVector(cx));
     for (size_t i = 0; i < N; ++i) {
         obj = JS_NewPlainObject(cx);
         CHECK(obj);
         CHECK(vec.append(obj));
     }
 
     // Transfer our vector to tenured if it isn't there already.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
 
     // Tear holes in the heap by unrooting the even objects and collecting.
     JS::Rooted<ObjectVector> vec2(cx, ObjectVector(cx));
     for (size_t i = 0; i < N; ++i) {
         if (i % 2 == 1)
             vec2.append(vec[i]);
     }
     vec.clear();
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
 
     // Grab the last object in the vector as our object of interest.
     obj = vec2.back();
     CHECK(obj);
     tenuredAddr = uintptr_t(obj.get());
     CHECK(obj->zone()->getUniqueId(obj, &uid));
 
     // Force a compaction to move the object and check that the uid moved to
     // the new tenured heap location.
     JS::PrepareForFullGC(cx);
     JS::GCForReason(cx, GC_SHRINK, JS::gcreason::API);
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     CHECK(uintptr_t(obj.get()) != tenuredAddr);
     CHECK(obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));
     CHECK(uid == tmp);
 
     return true;
 }
 END_TEST(testGCUID)
--- a/js/src/jsapi-tests/testPreserveJitCode.cpp
+++ b/js/src/jsapi-tests/testPreserveJitCode.cpp
@@ -23,17 +23,17 @@ BEGIN_TEST(test_PreserveJitCode)
     CHECK(testPreserveJitCode(true, 1));
     return true;
 }
 
 unsigned
 countIonScripts(JSObject* global)
 {
     unsigned count = 0;
-    js::IterateScripts(rt, global->compartment(), &count, ScriptCallback);
+    js::IterateScripts(cx, global->compartment(), &count, ScriptCallback);
     return count;
 }
 
 bool
 testPreserveJitCode(bool preserveJitCode, unsigned remainingIonScripts)
 {
     cx->options().setBaseline(true);
     cx->options().setIon(true);
--- a/js/src/jsapi-tests/testWeakMap.cpp
+++ b/js/src/jsapi-tests/testWeakMap.cpp
@@ -94,33 +94,33 @@ BEGIN_TEST(testWeakMap_keyDelegates)
 
     /*
      * Perform an incremental GC, introducing an unmarked CCW to force the map
      * zone to finish marking before the delegate zone.
      */
     CHECK(newCCW(map, delegateRoot));
     js::SliceBudget budget(js::WorkBudget(1000000));
     rt->gc.startDebugGC(GC_NORMAL, budget);
-    while (JS::IsIncrementalGCInProgress(rt))
+    while (JS::IsIncrementalGCInProgress(cx))
         rt->gc.debugGCSlice(budget);
 #ifdef DEBUG
     CHECK(map->zone()->lastZoneGroupIndex() < delegateRoot->zone()->lastZoneGroupIndex());
 #endif
 
     /* Add our entry to the weakmap. */
     JS::RootedValue val(cx, JS::Int32Value(1));
     CHECK(SetWeakMapEntry(cx, map, key, val));
     CHECK(checkSize(map, 1));
 
     /* Check the delegate keeps the entry alive even if the key is not reachable. */
     key = nullptr;
     CHECK(newCCW(map, delegateRoot));
     budget = js::SliceBudget(js::WorkBudget(100000));
     rt->gc.startDebugGC(GC_NORMAL, budget);
-    while (JS::IsIncrementalGCInProgress(rt))
+    while (JS::IsIncrementalGCInProgress(cx))
         rt->gc.debugGCSlice(budget);
     CHECK(checkSize(map, 1));
 
     /*
      * Check that the zones finished marking at the same time, which is
      * necessary because of the presence of the delegate and the CCW.
      */
 #ifdef DEBUG
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -851,17 +851,17 @@ JS_TransplantObject(JSContext* cx, Handl
     MOZ_ASSERT(origobj != target);
     MOZ_ASSERT(!origobj->is<CrossCompartmentWrapperObject>());
     MOZ_ASSERT(!target->is<CrossCompartmentWrapperObject>());
 
     RootedValue origv(cx, ObjectValue(*origobj));
     RootedObject newIdentity(cx);
 
     // Don't allow a compacting GC to observe any intermediate state.
-    AutoDisableCompactingGC nocgc(cx->runtime());
+    AutoDisableCompactingGC nocgc(cx);
 
     AutoDisableProxyCheck adpc(cx->runtime());
 
     JSCompartment* destination = target->compartment();
 
     if (origobj->compartment() == destination) {
         // If the original object is in the same compartment as the
         // destination, then we know that we won't find a wrapper in the
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -1156,16 +1156,24 @@ JS_FRIEND_API(JSCompartment*)
 js::GetAnyCompartmentInZone(JS::Zone* zone)
 {
     CompartmentsInZoneIter comp(zone);
     MOZ_ASSERT(!comp.done());
     return comp.get();
 }
 
 void
+JS::ObjectPtr::finalize(JSRuntime* rt)
+{
+    if (IsIncrementalBarrierNeeded(rt->contextFromMainThread()))
+        IncrementalObjectBarrier(value);
+    value = nullptr;
+}
+
+void
 JS::ObjectPtr::updateWeakPointerAfterGC()
 {
     if (js::gc::IsAboutToBeFinalizedUnbarriered(value.unsafeGet()))
         value = nullptr;
 }
 
 void
 JS::ObjectPtr::trace(JSTracer* trc, const char* name)
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1473,17 +1473,17 @@ JSFunction::createScriptForLazilyInterpr
         // Lazy script caching is only supported for leaf functions. If a
         // script with inner functions was returned by the cache, those inner
         // functions would be delazified when deep cloning the script, even if
         // they have never executed.
         //
         // Additionally, the lazy script cache is not used during incremental
         // GCs, to avoid resurrecting dead scripts after incremental sweeping
         // has started.
-        if (canRelazify && !JS::IsIncrementalGCInProgress(cx->runtime())) {
+        if (canRelazify && !JS::IsIncrementalGCInProgress(cx)) {
             LazyScriptCache::Lookup lookup(cx, lazy);
             cx->caches.lazyScriptCache.lookup(lookup, script.address());
         }
 
         if (script) {
             RootedObject enclosingScope(cx, lazy->enclosingScope());
             RootedScript clonedScript(cx, CloneScriptIntoFunction(cx, enclosingScope, fun, script));
             if (!clonedScript)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1737,22 +1737,22 @@ GCRuntime::enableCompactingGC()
 
 bool
 GCRuntime::isCompactingGCEnabled() const
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     return compactingEnabled && compactingDisabledCount == 0;
 }
 
-AutoDisableCompactingGC::AutoDisableCompactingGC(JSRuntime* rt)
-  : gc(rt->gc)
+AutoDisableCompactingGC::AutoDisableCompactingGC(JSContext* cx)
+  : gc(cx->gc)
 {
     gc.disableCompactingGC();
     if (gc.isIncrementalGCInProgress() && gc.isCompactingGc())
-        FinishGC(rt);
+        FinishGC(cx);
 }
 
 AutoDisableCompactingGC::~AutoDisableCompactingGC()
 {
     gc.enableCompactingGC();
 }
 
 static bool
@@ -6599,30 +6599,31 @@ GCRuntime::gcIfRequested(JSContext* cx /
         else
             gcSlice(majorGCTriggerReason);
         return true;
     }
 
     return false;
 }
 
-void js::gc::FinishGC(JSRuntime* rt)
-{
-    if (JS::IsIncrementalGCInProgress(rt)) {
-        JS::PrepareForIncrementalGC(rt->contextFromMainThread());
-        JS::FinishIncrementalGC(rt->contextFromMainThread(), JS::gcreason::API);
-    }
-
-    rt->gc.nursery.waitBackgroundFreeEnd();
-}
-
-AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector)
-{
-    js::gc::FinishGC(rt);
-    session_.emplace(rt);
+void
+js::gc::FinishGC(JSContext* cx)
+{
+    if (JS::IsIncrementalGCInProgress(cx)) {
+        JS::PrepareForIncrementalGC(cx);
+        JS::FinishIncrementalGC(cx, JS::gcreason::API);
+    }
+
+    cx->gc.nursery.waitBackgroundFreeEnd();
+}
+
+AutoPrepareForTracing::AutoPrepareForTracing(JSContext* cx, ZoneSelector selector)
+{
+    js::gc::FinishGC(cx);
+    session_.emplace(cx);
 }
 
 JSCompartment*
 js::NewCompartment(JSContext* cx, Zone* zone, JSPrincipals* principals,
                    const JS::CompartmentOptions& options)
 {
     JSRuntime* rt = cx->runtime();
     JS_AbortIfWrongThread(cx);
@@ -6672,42 +6673,42 @@ gc::MergeCompartments(JSCompartment* sou
     // The source compartment must be specifically flagged as mergable.  This
     // also implies that the compartment is not visible to the debugger.
     MOZ_ASSERT(source->creationOptions_.mergeable());
     MOZ_ASSERT(source->creationOptions_.invisibleToDebugger());
 
     MOZ_ASSERT(source->creationOptions().addonIdOrNull() ==
                target->creationOptions().addonIdOrNull());
 
-    JSRuntime* rt = source->runtimeFromMainThread();
-
-    AutoPrepareForTracing prepare(rt, SkipAtoms);
+    JSContext* cx = source->contextFromMainThread();
+
+    AutoPrepareForTracing prepare(cx, SkipAtoms);
 
     // Cleanup tables and other state in the source compartment that will be
     // meaningless after merging into the target compartment.
 
     source->clearTables();
     source->unsetIsDebuggee();
 
     // The delazification flag indicates the presence of LazyScripts in a
     // compartment for the Debugger API, so if the source compartment created
     // LazyScripts, the flag must be propagated to the target compartment.
     if (source->needsDelazificationForDebugger())
         target->scheduleDelazificationForDebugger();
 
     // Release any relocated arenas which we may be holding on to as they might
     // be in the source zone
-    rt->gc.releaseHeldRelocatedArenas();
+    cx->gc.releaseHeldRelocatedArenas();
 
     // Fixup compartment pointers in source to refer to target, and make sure
     // type information generations are in sync.
 
     // Get the static global lexical scope of the target compartment. Static
     // scopes need to be fixed up below.
-    RootedObject targetStaticGlobalLexicalScope(rt);
+    RootedObject targetStaticGlobalLexicalScope(cx);
     targetStaticGlobalLexicalScope = &target->maybeGlobal()->lexicalScope().staticBlock();
 
     for (auto script = source->zone()->cellIter<JSScript>(); !script.done(); script.next()) {
         MOZ_ASSERT(script->compartment() == source);
         script->compartment_ = target;
         script->setTypesGeneration(target->zone()->types.generation);
 
         // If the script failed to compile, no need to fix up.
@@ -6776,17 +6777,17 @@ gc::MergeCompartments(JSCompartment* sou
         }
     }
 
     // The source should be the only compartment in its zone.
     for (CompartmentsInZoneIter c(source->zone()); !c.done(); c.next())
         MOZ_ASSERT(c.get() == source);
 
     // Merge the allocator, stats and UIDs in source's zone into target's zone.
-    target->zone()->arenas.adoptArenas(rt, &source->zone()->arenas);
+    target->zone()->arenas.adoptArenas(cx, &source->zone()->arenas);
     target->zone()->usage.adopt(source->zone()->usage);
     target->zone()->adoptUniqueIds(source->zone());
 
     // Merge other info in source's zone into target's zone.
     target->zone()->types.typeLifoAlloc.transferFrom(&source->zone()->types.typeLifoAlloc);
 }
 
 void
@@ -7321,37 +7322,37 @@ JS::SetGCSliceCallback(JSContext* cx, GC
 
 JS_PUBLIC_API(JS::GCNurseryCollectionCallback)
 JS::SetGCNurseryCollectionCallback(JSContext* cx, GCNurseryCollectionCallback callback)
 {
     return cx->gc.setNurseryCollectionCallback(callback);
 }
 
 JS_PUBLIC_API(void)
-JS::DisableIncrementalGC(JSRuntime* rt)
-{
-    rt->gc.disallowIncrementalGC();
+JS::DisableIncrementalGC(JSContext* cx)
+{
+    cx->gc.disallowIncrementalGC();
 }
 
 JS_PUBLIC_API(bool)
-JS::IsIncrementalGCEnabled(JSRuntime* rt)
-{
-    return rt->gc.isIncrementalGCEnabled();
+JS::IsIncrementalGCEnabled(JSContext* cx)
+{
+    return cx->gc.isIncrementalGCEnabled();
 }
 
 JS_PUBLIC_API(bool)
-JS::IsIncrementalGCInProgress(JSRuntime* rt)
-{
-    return rt->gc.isIncrementalGCInProgress() && !rt->gc.isVerifyPreBarriersEnabled();
+JS::IsIncrementalGCInProgress(JSContext* cx)
+{
+    return cx->gc.isIncrementalGCInProgress() && !cx->gc.isVerifyPreBarriersEnabled();
 }
 
 JS_PUBLIC_API(bool)
-JS::IsIncrementalBarrierNeeded(JSRuntime* rt)
-{
-    return rt->gc.state() == gc::MARK && !rt->isHeapBusy();
+JS::IsIncrementalBarrierNeeded(JSContext* cx)
+{
+    return cx->gc.state() == gc::MARK && !cx->isHeapBusy();
 }
 
 struct IncrementalReferenceBarrierFunctor {
     template <typename T> void operator()(T* t) { T::writeBarrierPre(t); }
 };
 
 JS_PUBLIC_API(void)
 JS::IncrementalReferenceBarrier(GCCellPtr thing)
@@ -7375,19 +7376,19 @@ JS::IncrementalObjectBarrier(JSObject* o
         return;
 
     MOZ_ASSERT(!obj->zone()->runtimeFromMainThread()->isHeapMajorCollecting());
 
     JSObject::writeBarrierPre(obj);
 }
 
 JS_PUBLIC_API(bool)
-JS::WasIncrementalGC(JSRuntime* rt)
-{
-    return rt->gc.isIncrementalGc();
+JS::WasIncrementalGC(JSContext* cx)
+{
+    return cx->gc.isIncrementalGc();
 }
 
 JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime* rt)
   : gc(&rt->gc)
 {
     gc->disableGenerationalGC();
 }
 
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -987,47 +987,47 @@ typedef void (*IterateCellCallback)(JSRu
                                     JS::TraceKind traceKind, size_t thingSize);
 
 /*
  * This function calls |zoneCallback| on every zone, |compartmentCallback| on
  * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
  * on every in-use cell in the GC heap.
  */
 extern void
-IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data,
+IterateZonesCompartmentsArenasCells(JSContext* cx, void* data,
                                     IterateZoneCallback zoneCallback,
                                     JSIterateCompartmentCallback compartmentCallback,
                                     IterateArenaCallback arenaCallback,
                                     IterateCellCallback cellCallback);
 
 /*
  * This function is like IterateZonesCompartmentsArenasCells, but does it for a
  * single zone.
  */
 extern void
-IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
+IterateZoneCompartmentsArenasCells(JSContext* cx, Zone* zone, void* data,
                                    IterateZoneCallback zoneCallback,
                                    JSIterateCompartmentCallback compartmentCallback,
                                    IterateArenaCallback arenaCallback,
                                    IterateCellCallback cellCallback);
 
 /*
  * Invoke chunkCallback on every in-use chunk.
  */
 extern void
-IterateChunks(JSRuntime* rt, void* data, IterateChunkCallback chunkCallback);
+IterateChunks(JSContext* cx, void* data, IterateChunkCallback chunkCallback);
 
 typedef void (*IterateScriptCallback)(JSRuntime* rt, void* data, JSScript* script);
 
 /*
  * Invoke scriptCallback on every in-use script for
  * the given compartment or for all compartments if it is null.
  */
 extern void
-IterateScripts(JSRuntime* rt, JSCompartment* compartment,
+IterateScripts(JSContext* cx, JSCompartment* compartment,
                void* data, IterateScriptCallback scriptCallback);
 
 extern void
 FinalizeStringRT(JSRuntime* rt, JSString* str);
 
 JSCompartment*
 NewCompartment(JSContext* cx, JS::Zone* zone, JSPrincipals* principals,
                const JS::CompartmentOptions& options);
@@ -1438,17 +1438,17 @@ class MOZ_RAII AutoDisableProxyCheck
 struct MOZ_RAII AutoDisableProxyCheck
 {
     explicit AutoDisableProxyCheck(JSRuntime* rt) {}
 };
 #endif
 
 struct MOZ_RAII AutoDisableCompactingGC
 {
-    explicit AutoDisableCompactingGC(JSRuntime* rt);
+    explicit AutoDisableCompactingGC(JSContext* cx);
     ~AutoDisableCompactingGC();
 
   private:
     gc::GCRuntime& gc;
 };
 
 void
 PurgeJITCaches(JS::Zone* zone);
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -2016,17 +2016,17 @@ js::GetPCCountScriptContents(JSContext* 
 
 static bool
 GenerateLcovInfo(JSContext* cx, JSCompartment* comp, GenericPrinter& out)
 {
     JSRuntime* rt = cx->runtime();
 
     // Collect the list of scripts which are part of the current compartment.
     {
-        js::gc::AutoPrepareForTracing apft(rt, SkipAtoms);
+        js::gc::AutoPrepareForTracing apft(cx, SkipAtoms);
     }
     Rooted<ScriptVector> topScripts(cx, ScriptVector(cx));
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
             if (script->compartment() != comp ||
                 !script->isTopLevel() ||
                 !script->filename())
             {
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -2490,18 +2490,18 @@ SaveSharedScriptData(ExclusiveContext* c
 
     /*
      * During the IGC we need to ensure that bytecode is marked whenever it is
      * accessed even if the bytecode was already in the table: at this point
      * old scripts or exceptions pointing to the bytecode may no longer be
      * reachable. This is effectively a read barrier.
      */
     if (cx->isJSContext()) {
-        JSRuntime* rt = cx->asJSContext()->runtime();
-        if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFullGc())
+        JSContext* ncx = cx->asJSContext();
+        if (JS::IsIncrementalGCInProgress(ncx) && ncx->gc.isFullGc())
             ssd->marked = true;
     }
 
     script->setCode(ssd->data);
     script->atoms = ssd->atoms();
     return true;
 }
 
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -131,17 +131,18 @@ class WeakMap : public HashMap<Key, Valu
 
     explicit WeakMap(JSContext* cx, JSObject* memOf = nullptr)
         : Base(cx->runtime()), WeakMapBase(memOf, cx->compartment()->zone()) { }
 
     bool init(uint32_t len = 16) {
         if (!Base::init(len))
             return false;
         zone->gcWeakMapList.insertFront(this);
-        marked = JS::IsIncrementalGCInProgress(zone->runtimeFromMainThread());
+        JSRuntime* rt = zone->runtimeFromMainThread();
+        marked = JS::IsIncrementalGCInProgress(rt->contextFromMainThread());
         return true;
     }
 
     // Overwritten to add a read barrier to prevent an incorrectly gray value
     // from escaping the weak map. See the UnmarkGrayTracer::onChild comment in
     // gc/Marking.cpp.
     Ptr lookup(const Lookup& l) const {
         Ptr p = Base::lookup(l);
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -3912,17 +3912,17 @@ OffThreadCompileScript(JSContext* cx, un
 
 static bool
 runOffThreadScript(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     JSRuntime* rt = cx->runtime();
     if (OffThreadParsingMustWaitForGC(rt))
-        gc::FinishGC(rt);
+        gc::FinishGC(cx);
 
     void* token = offThreadState.waitUntilDone(cx, ScriptKind::Script);
     if (!token) {
         JS_ReportError(cx, "called runOffThreadScript when no compilation is pending");
         return false;
     }
 
     RootedScript script(cx, JS::FinishOffThreadScript(cx, rt, token));
@@ -3998,17 +3998,17 @@ OffThreadCompileModule(JSContext* cx, un
 
 static bool
 FinishOffThreadModule(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     JSRuntime* rt = cx->runtime();
     if (OffThreadParsingMustWaitForGC(rt))
-        gc::FinishGC(rt);
+        gc::FinishGC(cx);
 
     void* token = offThreadState.waitUntilDone(cx, ScriptKind::Module);
     if (!token) {
         JS_ReportError(cx, "called finishOffThreadModule when no compilation is pending");
         return false;
     }
 
     RootedObject module(cx, JS::FinishOffThreadModule(cx, rt, token));
@@ -7025,17 +7025,17 @@ static int
 Shell(JSContext* cx, OptionParser* op, char** envp)
 {
     Maybe<JS::AutoDisableGenerationalGC> noggc;
     if (op->getBoolOption("no-ggc"))
         noggc.emplace(cx->runtime());
 
     Maybe<AutoDisableCompactingGC> nocgc;
     if (op->getBoolOption("no-cgc"))
-        nocgc.emplace(cx->runtime());
+        nocgc.emplace(cx);
 
     JSAutoRequest ar(cx);
 
     if (op->getBoolOption("fuzzing-safe"))
         fuzzingSafe = true;
     else
         fuzzingSafe = (getenv("MOZ_FUZZING_SAFE") && getenv("MOZ_FUZZING_SAFE")[0] != '0');
 
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -4123,17 +4123,17 @@ class MOZ_STACK_CLASS Debugger::ScriptQu
 
         JSCompartment* singletonComp = nullptr;
         if (compartments.count() == 1)
             singletonComp = compartments.all().front();
 
         /* Search each compartment for debuggee scripts. */
         MOZ_ASSERT(vector.empty());
         oom = false;
-        IterateScripts(cx->runtime(), singletonComp, this, considerScript);
+        IterateScripts(cx, singletonComp, this, considerScript);
         if (oom) {
             ReportOutOfMemory(cx);
             return false;
         }
 
         /* We cannot touch the gray bits while isHeapBusy, so do this now. */
         for (JSScript** i = vector.begin(); i != vector.end(); ++i)
             JS::ExposeScriptToActiveJS(*i);
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -1152,17 +1152,17 @@ GlobalHelperThreadState::finishParseTask
     // Make sure we have all the constructors we need for the prototype
     // remapping below, since we can't GC while that's happening.
     Rooted<GlobalObject*> global(cx, &cx->global()->as<GlobalObject>());
     if (!EnsureParserCreatedClasses(cx, kind)) {
         LeaveParseTaskZone(rt, parseTask);
         return nullptr;
     }
 
-    mergeParseTaskCompartment(rt, parseTask, global, cx->compartment());
+    mergeParseTaskCompartment(cx, parseTask, global, cx->compartment());
 
     if (!parseTask->finish(cx))
         return nullptr;
 
     RootedScript script(rt, parseTask->script);
     releaseAssertSameCompartment(cx, script);
 
     // Report out of memory errors eagerly, or errors could be malformed.
@@ -1224,28 +1224,28 @@ GlobalHelperThreadState::finishModulePar
 JSObject*
 GlobalObject::getStarGeneratorFunctionPrototype()
 {
     const Value& v = getReservedSlot(STAR_GENERATOR_FUNCTION_PROTO);
     return v.isObject() ? &v.toObject() : nullptr;
 }
 
 void
-GlobalHelperThreadState::mergeParseTaskCompartment(JSRuntime* rt, ParseTask* parseTask,
+GlobalHelperThreadState::mergeParseTaskCompartment(JSContext* cx, ParseTask* parseTask,
                                                    Handle<GlobalObject*> global,
                                                    JSCompartment* dest)
 {
     // After we call LeaveParseTaskZone() it's not safe to GC until we have
     // finished merging the contents of the parse task's compartment into the
     // destination compartment.  Finish any ongoing incremental GC first and
     // assert that no allocation can occur.
-    gc::FinishGC(rt);
-    JS::AutoAssertNoAlloc noAlloc(rt);
+    gc::FinishGC(cx);
+    JS::AutoAssertNoAlloc noAlloc(cx);
 
-    LeaveParseTaskZone(rt, parseTask);
+    LeaveParseTaskZone(cx, parseTask);
 
     {
         // Generator functions don't have Function.prototype as prototype but a
         // different function object, so the IdentifyStandardPrototype trick
         // below won't work.  Just special-case it.
         GlobalObject* parseGlobal = &parseTask->exclusiveContextGlobal->as<GlobalObject>();
         JSObject* parseTaskStarGenFunctionProto = parseGlobal->getStarGeneratorFunctionPrototype();
 
--- a/js/src/vm/HelperThreads.h
+++ b/js/src/vm/HelperThreads.h
@@ -221,17 +221,17 @@ class GlobalHelperThreadState
         numWasmFailedJobs++;
     }
     bool wasmFailed() {
         MOZ_ASSERT(isLocked());
         return bool(numWasmFailedJobs);
     }
 
     JSScript* finishParseTask(JSContext* maybecx, JSRuntime* rt, ParseTaskKind kind, void* token);
-    void mergeParseTaskCompartment(JSRuntime* rt, ParseTask* parseTask,
+    void mergeParseTaskCompartment(JSContext* cx, ParseTask* parseTask,
                                    Handle<GlobalObject*> global,
                                    JSCompartment* dest);
 
   private:
     /*
      * Number of wasm jobs that encountered failure for the active module.
      * Their parent is logically the main thread, and this number serves for harvesting.
      */
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -763,39 +763,40 @@ FindNotableScriptSources(JS::RuntimeSize
     // Delete |allScriptSources| now, rather than waiting for zStats's
     // destruction, to reduce peak memory consumption during reporting.
     js_delete(runtime.allScriptSources);
     runtime.allScriptSources = nullptr;
     return true;
 }
 
 static bool
-CollectRuntimeStatsHelper(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVisitor* opv,
+CollectRuntimeStatsHelper(JSContext* cx, RuntimeStats* rtStats, ObjectPrivateVisitor* opv,
                           bool anonymize, IterateCellCallback statsCellCallback)
 {
+    JSRuntime* rt = cx;
     if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments))
         return false;
 
     if (!rtStats->zoneStatsVector.reserve(rt->gc.zones.length()))
         return false;
 
     rtStats->gcHeapChunkTotal =
         size_t(JS_GetGCParameter(rt, JSGC_TOTAL_CHUNKS)) * gc::ChunkSize;
 
     rtStats->gcHeapUnusedChunks =
         size_t(JS_GetGCParameter(rt, JSGC_UNUSED_CHUNKS)) * gc::ChunkSize;
 
-    IterateChunks(rt, &rtStats->gcHeapDecommittedArenas,
+    IterateChunks(cx, &rtStats->gcHeapDecommittedArenas,
                   DecommittedArenasChunkCallback);
 
     // Take the per-compartment measurements.
     StatsClosure closure(rtStats, opv, anonymize);
     if (!closure.init())
         return false;
-    IterateZonesCompartmentsArenasCells(rt, &closure,
+    IterateZonesCompartmentsArenasCells(cx, &closure,
                                         StatsZoneCallback,
                                         StatsCompartmentCallback,
                                         StatsArenaCallback,
                                         statsCellCallback);
 
     // Take the "explicit/js/runtime/" measurements.
     rt->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &rtStats->runtime);
 
@@ -860,20 +861,20 @@ CollectRuntimeStatsHelper(JSRuntime* rt,
                                   rtStats->zTotals.unusedGCThings.totalSize() -
                                   rtStats->gcHeapChunkAdmin -
                                   rtStats->zTotals.gcHeapArenaAdmin -
                                   rtStats->gcHeapGCThings;
     return true;
 }
 
 JS_PUBLIC_API(bool)
-JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisitor *opv,
+JS::CollectRuntimeStats(JSContext* cx, RuntimeStats *rtStats, ObjectPrivateVisitor *opv,
                         bool anonymize)
 {
-    return CollectRuntimeStatsHelper(rt, rtStats, opv, anonymize, StatsCellCallback<FineGrained>);
+    return CollectRuntimeStatsHelper(cx, rtStats, opv, anonymize, StatsCellCallback<FineGrained>);
 }
 
 JS_PUBLIC_API(size_t)
 JS::SystemCompartmentCount(JSRuntime* rt)
 {
     size_t n = 0;
     for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next()) {
         if (comp->isSystem())
@@ -913,17 +914,17 @@ class SimpleJSRuntimeStats : public JS::
     {}
 
     virtual void initExtraCompartmentStats(
         JSCompartment* c, JS::CompartmentStats* cStats) override
     {}
 };
 
 JS_PUBLIC_API(bool)
-AddSizeOfTab(JSRuntime* rt, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor* opv,
+AddSizeOfTab(JSContext* cx, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor* opv,
              TabSizes* sizes)
 {
     SimpleJSRuntimeStats rtStats(mallocSizeOf);
 
     JS::Zone* zone = GetObjectZone(obj);
 
     if (!rtStats.compartmentStatsVector.reserve(zone->compartments.length()))
         return false;
@@ -931,17 +932,17 @@ AddSizeOfTab(JSRuntime* rt, HandleObject
     if (!rtStats.zoneStatsVector.reserve(1))
         return false;
 
     // Take the per-compartment measurements. No need to anonymize because
     // these measurements will be aggregated.
     StatsClosure closure(&rtStats, opv, /* anonymize = */ false);
     if (!closure.init())
         return false;
-    IterateZoneCompartmentsArenasCells(rt, zone, &closure,
+    IterateZoneCompartmentsArenasCells(cx, zone, &closure,
                                        StatsZoneCallback,
                                        StatsCompartmentCallback,
                                        StatsArenaCallback,
                                        StatsCellCallback<CoarseGrained>);
 
     MOZ_ASSERT(rtStats.zoneStatsVector.length() == 1);
     rtStats.zTotals.addSizes(rtStats.zoneStatsVector[0]);
 
@@ -953,23 +954,23 @@ AddSizeOfTab(JSRuntime* rt, HandleObject
 
     rtStats.zTotals.addToTabSizes(sizes);
     rtStats.cTotals.addToTabSizes(sizes);
 
     return true;
 }
 
 JS_PUBLIC_API(bool)
-AddServoSizeOf(JSRuntime *rt, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor *opv,
+AddServoSizeOf(JSContext* cx, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor *opv,
                ServoSizes *sizes)
 {
     SimpleJSRuntimeStats rtStats(mallocSizeOf);
 
     // No need to anonymize because the results will be aggregated.
-    if (!CollectRuntimeStatsHelper(rt, &rtStats, opv, /* anonymize = */ false,
+    if (!CollectRuntimeStatsHelper(cx, &rtStats, opv, /* anonymize = */ false,
                                    StatsCellCallback<CoarseGrained>))
         return false;
 
 #ifdef DEBUG
     size_t gcHeapTotalOriginal = sizes->gcHeapUsed +
                                  sizes->gcHeapUnused +
                                  sizes->gcHeapAdmin +
                                  sizes->gcHeapDecommitted;
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -380,18 +380,19 @@ JSRuntime::destroyRuntime()
 
     fx.destroyInstance();
 
     if (gcInitialized) {
         /*
          * Finish any in-progress GCs first. This ensures the parseWaitingOnGC
          * list is empty in CancelOffThreadParses.
          */
-        if (JS::IsIncrementalGCInProgress(this))
-            FinishGC(this);
+        JSContext* cx = contextFromMainThread();
+        if (JS::IsIncrementalGCInProgress(cx))
+            FinishGC(cx);
 
         /* Free source hook early, as its destructor may want to delete roots. */
         sourceHook = nullptr;
 
         /*
          * Cancel any pending, in progress or completed Ion compilations and
          * parse tasks. Waiting for AsmJS and compression tasks is done
          * synchronously (on the main thread or during parse tasks), so no
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -2947,17 +2947,17 @@ JSReporter::CollectReports(WindowPaths* 
     if (XRE_IsParentProcess()) {
         // Only try to access the service from the main process.
         addonManager = do_GetService("@mozilla.org/addons/integration;1");
     }
     bool getLocations = !!addonManager;
     XPCJSRuntimeStats rtStats(windowPaths, topWindowPaths, getLocations,
                               anonymize);
     OrphanReporter orphanReporter(XPCConvert::GetISupportsFromJSObject);
-    if (!JS::CollectRuntimeStats(xpcrt->Runtime(), &rtStats, &orphanReporter,
+    if (!JS::CollectRuntimeStats(xpcrt->Context(), &rtStats, &orphanReporter,
                                  anonymize))
     {
         return NS_ERROR_FAILURE;
     }
 
     size_t xpcJSRuntimeSize = xpcrt->SizeOfIncludingThis(JSMallocSizeOf);
 
     size_t wrappedJSSize = xpcrt->GetMultiCompartmentWrappedJSMap()->SizeOfWrappedJS(JSMallocSizeOf);
@@ -3130,22 +3130,22 @@ JSReporter::CollectReports(WindowPaths* 
 
     return NS_OK;
 }
 
 static nsresult
 JSSizeOfTab(JSObject* objArg, size_t* jsObjectsSize, size_t* jsStringsSize,
             size_t* jsPrivateSize, size_t* jsOtherSize)
 {
-    JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->Runtime();
-    JS::RootedObject obj(rt, objArg);
+    JSContext* cx = nsXPConnect::GetRuntimeInstance()->Context();
+    JS::RootedObject obj(cx, objArg);
 
     TabSizes sizes;
     OrphanReporter orphanReporter(XPCConvert::GetISupportsFromJSObject);
-    NS_ENSURE_TRUE(JS::AddSizeOfTab(rt, obj, moz_malloc_size_of,
+    NS_ENSURE_TRUE(JS::AddSizeOfTab(cx, obj, moz_malloc_size_of,
                                     &orphanReporter, &sizes),
                    NS_ERROR_OUT_OF_MEMORY);
 
     *jsObjectsSize = sizes.objects;
     *jsStringsSize = sizes.strings;
     *jsPrivateSize = sizes.private_;
     *jsOtherSize   = sizes.other;
     return NS_OK;
--- a/js/xpconnect/src/XPCWrappedNative.cpp
+++ b/js/xpconnect/src/XPCWrappedNative.cpp
@@ -620,33 +620,33 @@ XPCWrappedNative::Destroy()
 }
 
 void
 XPCWrappedNative::UpdateScriptableInfo(XPCNativeScriptableInfo* si)
 {
     MOZ_ASSERT(mScriptableInfo, "UpdateScriptableInfo expects an existing scriptable info");
 
     // Write barrier for incremental GC.
-    JSRuntime* rt = GetRuntime()->Runtime();
-    if (IsIncrementalBarrierNeeded(rt))
+    JSContext* cx = GetRuntime()->Context();
+    if (IsIncrementalBarrierNeeded(cx))
         mScriptableInfo->Mark();
 
     mScriptableInfo = si;
 }
 
 void
 XPCWrappedNative::SetProto(XPCWrappedNativeProto* p)
 {
     MOZ_ASSERT(!IsWrapperExpired(), "bad ptr!");
 
     MOZ_ASSERT(HasProto());
 
     // Write barrier for incremental GC.
-    JSRuntime* rt = GetRuntime()->Runtime();
-    GetProto()->WriteBarrierPre(rt);
+    JSContext* cx = GetRuntime()->Context();
+    GetProto()->WriteBarrierPre(cx);
 
     mMaybeProto = p;
 }
 
 // This is factored out so that it can be called publicly
 // static
 void
 XPCWrappedNative::GatherProtoScriptableCreateInfo(nsIClassInfo* classInfo,
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -1730,20 +1730,20 @@ public:
         GetScope()->TraceSelf(trc);
     }
 
     void TraceJS(JSTracer* trc) {
         TraceSelf(trc);
         TraceInside(trc);
     }
 
-    void WriteBarrierPre(JSRuntime* rt)
+    void WriteBarrierPre(JSContext* cx)
     {
-        if (JS::IsIncrementalBarrierNeeded(rt) && mJSProtoObject)
-            mJSProtoObject.writeBarrierPre(rt);
+        if (JS::IsIncrementalBarrierNeeded(cx) && mJSProtoObject)
+            mJSProtoObject.writeBarrierPre(cx);
     }
 
     // NOP. This is just here to make the AutoMarkingPtr code compile.
     inline void AutoTrace(JSTracer* trc) {}
 
     // Yes, we *do* need to mark the mScriptableInfo in both cases.
     void Mark() const
         {mSet->Mark();
--- a/xpcom/base/CycleCollectedJSRuntime.cpp
+++ b/xpcom/base/CycleCollectedJSRuntime.cpp
@@ -1221,17 +1221,17 @@ CycleCollectedJSRuntime::UsefulToMergeZo
 {
   return false;
 }
 
 void
 CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
 {
   MOZ_ASSERT(mJSRuntime);
-  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
+  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSContext),
              "Don't call FixWeakMappingGrayBits during a GC.");
   FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
   fixer.FixAll();
 }
 
 bool
 CycleCollectedJSRuntime::AreGCGrayBitsValid() const
 {
@@ -1629,17 +1629,17 @@ CycleCollectedJSRuntime::OnGC(JSGCStatus
         AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
       }
       if (mLargeAllocationFailureState == OOMState::Reported) {
         AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Recovered);
       }
 #endif
 
       // Do any deferred finalization of native objects.
-      FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally :
+      FinalizeDeferredThings(JS::WasIncrementalGC(mJSContext) ? FinalizeIncrementally :
                                                                 FinalizeNow);
       break;
     }
     default:
       MOZ_CRASH();
   }
 
   CustomGCCallback(aStatus);
--- a/xpcom/base/nsCycleCollector.cpp
+++ b/xpcom/base/nsCycleCollector.cpp
@@ -3538,17 +3538,17 @@ nsCycleCollector::FixGrayBits(bool aForc
   mJSRuntime->GarbageCollect(aForceGC ? JS::gcreason::SHUTDOWN_CC :
                                         JS::gcreason::CC_FORCED);
   aTimeLog.Checkpoint("FixGrayBits GC");
 }
 
 bool
 nsCycleCollector::IsIncrementalGCInProgress()
 {
-  return mJSRuntime && JS::IsIncrementalGCInProgress(mJSRuntime->Runtime());
+  return mJSRuntime && JS::IsIncrementalGCInProgress(mJSRuntime->Context());
 }
 
 void
 nsCycleCollector::FinishAnyIncrementalGCInProgress()
 {
   if (IsIncrementalGCInProgress()) {
     NS_WARNING("Finishing incremental GC in progress during CC");
     JS::PrepareForIncrementalGC(mJSRuntime->Context());