Bug 1283855 part 26 - Make more GC APIs take JSContext instead of JSRuntime. r=terrence,mccr8
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 07 Jul 2016 09:55:41 +0200
changeset 304054 a064d2e7a3c00288972e4f26cba74878a3c63d04
parent 304053 3a36f96cf7d68a7dfa6009518aa247c406eedd52
child 304055 d22e5cad510bc8c642702e9e8ebb7140cff9605e
push id 19910
push user kwierso@gmail.com
push date Fri, 08 Jul 2016 00:37:20 +0000
treeherder fx-team@89cae7a45e23
reviewers terrence, mccr8
bugs 1283855
milestone 50.0a1
Bug 1283855 part 26 - Make more GC APIs take JSContext instead of JSRuntime. r=terrence,mccr8
dom/base/nsJSEnvironment.cpp
dom/workers/WorkerPrivate.cpp
js/public/GCAPI.h
js/src/builtin/TestingFunctions.cpp
js/src/gc/Allocator.cpp
js/src/jsapi-tests/testGCFinalizeCallback.cpp
js/src/jsapi-tests/testGCUniqueId.cpp
js/src/jsapi-tests/testPreserveJitCode.cpp
js/src/jsapi-tests/tests.h
js/src/jsgc.cpp
js/src/shell/js.cpp
js/src/vm/Runtime.cpp
js/xpconnect/src/XPCComponents.cpp
xpcom/base/CycleCollectedJSRuntime.cpp
xpcom/base/nsCycleCollector.cpp
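
For embedders, the visible effect of this part is that the prepare/collect entry points in JS:: now take the active JSContext rather than the JSRuntime, so call sites such as XPCComponents.cpp and the jsapi tests drop their JS_GetRuntime()/JS_GetContext() round-trips. Below is a minimal caller-side sketch of the post-patch signatures; the helper names and the assumption that cx is the main-thread context are illustrative, not part of the patch. It mirrors the patterns in XPCComponents.cpp and nsCycleCollector.cpp.

    #include "jsapi.h"
    #include "js/GCAPI.h"

    // Hypothetical embedder helper: force a full, shrinking, non-incremental GC
    // using the new signatures (PrepareForFullGC/GCForReason take JSContext).
    static void
    ForceShrinkingGC(JSContext* cx)
    {
        JS::PrepareForFullGC(cx);                           // schedule all zones
        JS::GCForReason(cx, GC_SHRINK, JS::gcreason::API);  // collect now
    }

    // Finish an in-progress incremental GC. Note that in this part only the
    // Prepare* entry points switch to JSContext; IsIncrementalGCInProgress,
    // FinishIncrementalGC, StartIncrementalGC and IncrementalGCSlice still
    // take the JSRuntime, as the diff below shows.
    static void
    FinishAnyIncrementalGCExample(JSContext* cx, JSRuntime* rt)
    {
        if (JS::IsIncrementalGCInProgress(rt)) {
            JS::PrepareForIncrementalGC(cx);
            JS::FinishIncrementalGC(rt, JS::gcreason::API);
        }
    }
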
--- a/dom/base/nsJSEnvironment.cpp
+++ b/dom/base/nsJSEnvironment.cpp
@@ -184,17 +184,18 @@ static bool sNeedsFullGC = false;
 static bool sNeedsGCAfterCC = false;
 static bool sIncrementalCC = false;
 static bool sDidPaintAfterPreviousICCSlice = false;
 
 static nsScriptNameSpaceManager *gNameSpaceManager;
 
 static PRTime sFirstCollectionTime;
 
-static JSRuntime *sRuntime;
+static JSRuntime* sRuntime;
+static JSContext* sContext;
 
 static bool sIsInitialized;
 static bool sDidShutdown;
 static bool sShuttingDown;
 static int32_t sContextCount;
 
 static nsIScriptSecurityManager *sSecurityManager;
 
@@ -1199,34 +1200,34 @@ nsJSContext::GarbageCollectNow(JS::gcrea
   sLoadingInProgress = false;
 
   if (!nsContentUtils::XPConnect() || !sRuntime) {
     return;
   }
 
   if (sCCLockedOut && aIncremental == IncrementalGC) {
     // We're in the middle of incremental GC. Do another slice.
-    JS::PrepareForIncrementalGC(sRuntime);
+    JS::PrepareForIncrementalGC(sContext);
     JS::IncrementalGCSlice(sRuntime, aReason, aSliceMillis);
     return;
   }
 
   JSGCInvocationKind gckind = aShrinking == ShrinkingGC ? GC_SHRINK : GC_NORMAL;
 
   if (sNeedsFullGC || aReason != JS::gcreason::CC_WAITING) {
     sNeedsFullGC = false;
-    JS::PrepareForFullGC(sRuntime);
+    JS::PrepareForFullGC(sContext);
   } else {
     CycleCollectedJSRuntime::Get()->PrepareWaitingZonesForGC();
   }
 
   if (aIncremental == IncrementalGC) {
     JS::StartIncrementalGC(sRuntime, gckind, aReason, aSliceMillis);
   } else {
-    JS::GCForReason(sRuntime, gckind, aReason);
+    JS::GCForReason(sContext, gckind, aReason);
   }
 }
 
 //static
 void
 nsJSContext::ShrinkGCBuffersNow()
 {
   PROFILER_LABEL("nsJSContext", "ShrinkGCBuffersNow",
@@ -1239,17 +1240,17 @@ nsJSContext::ShrinkGCBuffersNow()
 
 static void
 FinishAnyIncrementalGC()
 {
   PROFILER_LABEL_FUNC(js::ProfileEntry::Category::GC);
 
   if (sCCLockedOut) {
     // We're in the middle of an incremental GC, so finish it.
-    JS::PrepareForIncrementalGC(sRuntime);
+    JS::PrepareForIncrementalGC(sContext);
     JS::FinishIncrementalGC(sRuntime, JS::gcreason::CC_FORCED);
   }
 }
 
 static void
 FireForgetSkippable(uint32_t aSuspected, bool aRemoveChildless)
 {
   PRTime startTime = PR_Now();
@@ -2290,103 +2291,104 @@ mozilla::dom::StartupJSEnvironment()
   sCCollectedZonesWaitingForGC = 0;
   sLikelyShortLivingObjectsNeedingGC = 0;
   sPostGCEventsToConsole = false;
   sNeedsFullCC = false;
   sNeedsFullGC = false;
   sNeedsGCAfterCC = false;
   gNameSpaceManager = nullptr;
   sRuntime = nullptr;
+  sContext = nullptr;
   sIsInitialized = false;
   sDidShutdown = false;
   sShuttingDown = false;
   sContextCount = 0;
   sSecurityManager = nullptr;
   gCCStats.Init();
   sExpensiveCollectorPokes = 0;
 }
 
 static void
 SetMemoryHighWaterMarkPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   int32_t highwatermark = Preferences::GetInt(aPrefName, 128);
 
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_MAX_MALLOC_BYTES,
+  JS_SetGCParameter(sContext, JSGC_MAX_MALLOC_BYTES,
                     highwatermark * 1024L * 1024L);
 }
 
 static void
 SetMemoryMaxPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   int32_t pref = Preferences::GetInt(aPrefName, -1);
   // handle overflow and negative pref values
   uint32_t max = (pref <= 0 || pref >= 0x1000) ? -1 : (uint32_t)pref * 1024 * 1024;
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_MAX_BYTES, max);
+  JS_SetGCParameter(sContext, JSGC_MAX_BYTES, max);
 }
 
 static void
 SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment");
   bool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental");
   JSGCMode mode;
   if (enableIncrementalGC) {
     mode = JSGC_MODE_INCREMENTAL;
   } else if (enableCompartmentGC) {
     mode = JSGC_MODE_COMPARTMENT;
   } else {
     mode = JSGC_MODE_GLOBAL;
   }
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_MODE, mode);
+  JS_SetGCParameter(sContext, JSGC_MODE, mode);
 }
 
 static void
 SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   int32_t pref = Preferences::GetInt(aPrefName, -1);
   // handle overflow and negative pref values
   if (pref > 0 && pref < 100000)
-    JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_SLICE_TIME_BUDGET, pref);
+    JS_SetGCParameter(sContext, JSGC_SLICE_TIME_BUDGET, pref);
 }
 
 static void
 SetMemoryGCCompactingPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool pref = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_COMPACTING_ENABLED, pref);
+  JS_SetGCParameter(sContext, JSGC_COMPACTING_ENABLED, pref);
 }
 
 static void
 SetMemoryGCPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   int32_t pref = Preferences::GetInt(aPrefName, -1);
   // handle overflow and negative pref values
   if (pref >= 0 && pref < 10000)
-    JS_SetGCParameter(JS_GetContext(sRuntime), (JSGCParamKey)(intptr_t)aClosure, pref);
+    JS_SetGCParameter(sContext, (JSGCParamKey)(intptr_t)aClosure, pref);
 }
 
 static void
 SetMemoryGCDynamicHeapGrowthPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool pref = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_DYNAMIC_HEAP_GROWTH, pref);
+  JS_SetGCParameter(sContext, JSGC_DYNAMIC_HEAP_GROWTH, pref);
 }
 
 static void
 SetMemoryGCDynamicMarkSlicePrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool pref = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_DYNAMIC_MARK_SLICE, pref);
+  JS_SetGCParameter(sContext, JSGC_DYNAMIC_MARK_SLICE, pref);
 }
 
 static void
 SetMemoryGCRefreshFrameSlicesEnabledPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool pref = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(JS_GetContext(sRuntime), JSGC_REFRESH_FRAME_SLICES_ENABLED, pref);
+  JS_SetGCParameter(sContext, JSGC_REFRESH_FRAME_SLICES_ENABLED, pref);
 }
 
 
 static void
 SetIncrementalCCPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
   bool pref = Preferences::GetBool(aPrefName);
   sIncrementalCC = pref;
@@ -2437,29 +2439,31 @@ nsJSContext::EnsureStatics()
     MOZ_CRASH();
   }
 
   sRuntime = xpc::GetJSRuntime();
   if (!sRuntime) {
     MOZ_CRASH();
   }
 
+  sContext = JS_GetContext(sRuntime);
+
   // Let's make sure that our main thread is the same as the xpcom main thread.
   MOZ_ASSERT(NS_IsMainThread());
 
   sPrevGCSliceCallback = JS::SetGCSliceCallback(sRuntime, DOMGCSliceCallback);
 
   // Set up the asm.js cache callbacks
   static const JS::AsmJSCacheOps asmJSCacheOps = {
     AsmJSCacheOpenEntryForRead,
     asmjscache::CloseEntryForRead,
     AsmJSCacheOpenEntryForWrite,
     asmjscache::CloseEntryForWrite
   };
-  JS::SetAsmJSCacheOps(JS_GetContext(sRuntime), &asmJSCacheOps);
+  JS::SetAsmJSCacheOps(sContext, &asmJSCacheOps);
 
   // Set these global xpconnect options...
   Preferences::RegisterCallbackAndCall(SetMemoryHighWaterMarkPrefChangedCallback,
                                        "javascript.options.mem.high_water_mark");
 
   Preferences::RegisterCallbackAndCall(SetMemoryMaxPrefChangedCallback,
                                        "javascript.options.mem.max");
 
--- a/dom/workers/WorkerPrivate.cpp
+++ b/dom/workers/WorkerPrivate.cpp
@@ -6392,28 +6392,27 @@ WorkerPrivate::GarbageCollectInternal(JS
   AssertIsOnWorkerThread();
 
   if (!GlobalScope()) {
     // We haven't compiled anything yet. Just bail out.
     return;
   }
 
   if (aShrinking || aCollectChildren) {
-    JSRuntime* rt = JS_GetRuntime(aCx);
-    JS::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(aCx);
 
     if (aShrinking) {
-      JS::GCForReason(rt, GC_SHRINK, JS::gcreason::DOM_WORKER);
+      JS::GCForReason(aCx, GC_SHRINK, JS::gcreason::DOM_WORKER);
 
       if (!aCollectChildren) {
         LOG(WorkerLog(), ("Worker %p collected idle garbage\n", this));
       }
     }
     else {
-      JS::GCForReason(rt, GC_NORMAL, JS::gcreason::DOM_WORKER);
+      JS::GCForReason(aCx, GC_NORMAL, JS::gcreason::DOM_WORKER);
       LOG(WorkerLog(), ("Worker %p collected garbage\n", this));
     }
   }
   else {
     JS_MaybeGC(aCx);
     LOG(WorkerLog(), ("Worker %p collected periodic garbage\n", this));
   }
 
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -156,32 +156,32 @@ ExplainReason(JS::gcreason::Reason reaso
  */
 extern JS_PUBLIC_API(void)
 PrepareZoneForGC(Zone* zone);
 
 /**
  * Schedule all zones to be collected in the next GC.
  */
 extern JS_PUBLIC_API(void)
-PrepareForFullGC(JSRuntime* rt);
+PrepareForFullGC(JSContext* cx);
 
 /**
  * When performing an incremental GC, the zones that were selected for the
  * previous incremental slice must be selected in subsequent slices as well.
  * This function selects those slices automatically.
  */
 extern JS_PUBLIC_API(void)
-PrepareForIncrementalGC(JSRuntime* rt);
+PrepareForIncrementalGC(JSContext* cx);
 
 /**
  * Returns true if any zone in the system has been scheduled for GC with one of
  * the functions above or by the JS engine.
  */
 extern JS_PUBLIC_API(bool)
-IsGCScheduled(JSRuntime* rt);
+IsGCScheduled(JSContext* cx);
 
 /**
  * Undoes the effect of the Prepare methods above. The given zone will not be
  * collected in the next GC.
  */
 extern JS_PUBLIC_API(void)
 SkipZoneForGC(Zone* zone);
 
@@ -196,17 +196,17 @@ SkipZoneForGC(Zone* zone);
  *
  * If the gckind argument is GC_NORMAL, then some objects that are unreachable
  * from the program may still be alive afterwards because of internal
  * references; if GC_SHRINK is passed then caches and other temporary references
  * to objects will be cleared and all unreferenced objects will be removed from
  * the system.
  */
 extern JS_PUBLIC_API(void)
-GCForReason(JSRuntime* rt, JSGCInvocationKind gckind, gcreason::Reason reason);
+GCForReason(JSContext* cx, JSGCInvocationKind gckind, gcreason::Reason reason);
 
 /*
  * Incremental GC:
  *
  * Incremental GC divides the full mark-and-sweep collection into multiple
  * slices, allowing client JavaScript code to run between each slice. This
  * allows interactive apps to avoid long collection pauses. Incremental GC does
  * not make collection take less time, it merely spreads that time out so that
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -304,20 +304,20 @@ GC(JSContext* cx, unsigned argc, Value* 
 
 #ifndef JS_MORE_DETERMINISTIC
     size_t preBytes = cx->runtime()->gc.usage.gcBytes();
 #endif
 
     if (compartment)
         PrepareForDebugGC(cx->runtime());
     else
-        JS::PrepareForFullGC(cx->runtime());
+        JS::PrepareForFullGC(cx);
 
     JSGCInvocationKind gckind = shrinking ? GC_SHRINK : GC_NORMAL;
-    JS::GCForReason(cx->runtime(), gckind, JS::gcreason::API);
+    JS::GCForReason(cx, gckind, JS::gcreason::API);
 
     char buf[256] = { '\0' };
 #ifndef JS_MORE_DETERMINISTIC
     JS_snprintf(buf, sizeof(buf), "before %" PRIuSIZE ", after %" PRIuSIZE "\n",
                 preBytes, cx->runtime()->gc.usage.gcBytes());
 #endif
     JSString* str = JS_NewStringCopyZ(cx, buf);
     if (!str)
@@ -3276,18 +3276,18 @@ static void
 majorGC(JSRuntime* rt, JSGCStatus status, void* data)
 {
     auto info = static_cast<MajorGC*>(data);
     if (!(info->phases & (1 << status)))
         return;
 
     if (info->depth > 0) {
         info->depth--;
-        JS::PrepareForFullGC(rt);
-        JS::GCForReason(rt, GC_NORMAL, JS::gcreason::API);
+        JS::PrepareForFullGC(rt->contextFromMainThread());
+        JS::GCForReason(rt->contextFromMainThread(), GC_NORMAL, JS::gcreason::API);
         info->depth++;
     }
 }
 
 struct MinorGC {
     int32_t phases;
     bool active;
 };
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -160,21 +160,20 @@ GCRuntime::tryNewTenuredThing(ExclusiveC
         // acquire a new arena, which will lock the chunk list. If there are no
         // chunks available it may also allocate new memory directly.
         t = reinterpret_cast<T*>(refillFreeListFromAnyThread(cx, kind, thingSize));
 
         if (MOZ_UNLIKELY(!t && allowGC && cx->isJSContext())) {
             // We have no memory available for a new chunk; perform an
             // all-compartments, non-incremental, shrinking GC and wait for
             // sweeping to finish.
-            JSRuntime *rt = cx->asJSContext()->runtime();
-            JS::PrepareForFullGC(rt);
+            JS::PrepareForFullGC(cx->asJSContext());
             AutoKeepAtoms keepAtoms(cx->perThreadData);
-            rt->gc.gc(GC_SHRINK, JS::gcreason::LAST_DITCH);
-            rt->gc.waitBackgroundSweepOrAllocEnd();
+            cx->asJSContext()->gc.gc(GC_SHRINK, JS::gcreason::LAST_DITCH);
+            cx->asJSContext()->gc.waitBackgroundSweepOrAllocEnd();
 
             t = tryNewTenuredThing<T, NoGC>(cx, kind, thingSize);
             if (!t)
                 ReportOutOfMemory(cx);
         }
     }
 
     checkIncrementalZoneState(cx, t);
--- a/js/src/jsapi-tests/testGCFinalizeCallback.cpp
+++ b/js/src/jsapi-tests/testGCFinalizeCallback.cpp
@@ -18,20 +18,20 @@ BEGIN_TEST(testGCFinalizeCallback)
     JS_GC(cx);
     CHECK(rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     /* Full GC, incremental. */
     FinalizeCalls = 0;
-    JS::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(cx);
     JS::StartIncrementalGC(rt, GC_NORMAL, JS::gcreason::API, 1000000);
     while (rt->gc.isIncrementalGCInProgress()) {
-        JS::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(cx);
         JS::IncrementalGCSlice(rt, JS::gcreason::API, 1000000);
     }
     CHECK(!rt->gc.isIncrementalGCInProgress());
     CHECK(rt->gc.isFullGc());
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
@@ -40,28 +40,28 @@ BEGIN_TEST(testGCFinalizeCallback)
     JS::RootedObject global3(cx, createTestGlobal());
     CHECK(global1);
     CHECK(global2);
     CHECK(global3);
 
     /* Compartment GC, non-incremental, single compartment. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
-    JS::GCForReason(rt, GC_NORMAL, JS::gcreason::API);
+    JS::GCForReason(cx, GC_NORMAL, JS::gcreason::API);
     CHECK(!rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, non-incremental, multiple compartments. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::PrepareZoneForGC(global2->zone());
     JS::PrepareZoneForGC(global3->zone());
-    JS::GCForReason(rt, GC_NORMAL, JS::gcreason::API);
+    JS::GCForReason(cx, GC_NORMAL, JS::gcreason::API);
     CHECK(!rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, single compartment. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
@@ -95,17 +95,17 @@ BEGIN_TEST(testGCFinalizeCallback)
     CHECK(checkFinalizeIsCompartmentGC(true));
 
 #ifdef JS_GC_ZEAL
 
     /* Full GC with reset due to new compartment, becoming compartment GC. */
 
     FinalizeCalls = 0;
     JS_SetGCZeal(cx, 9, 1000000);
-    JS::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(cx);
     js::SliceBudget budget(js::WorkBudget(1));
     rt->gc.startDebugGC(GC_NORMAL, budget);
     CHECK(rt->gc.state() == js::gc::MARK);
     CHECK(rt->gc.isFullGc());
 
     JS::RootedObject global4(cx, createTestGlobal());
     budget = js::SliceBudget(js::WorkBudget(1));
     rt->gc.debugGCSlice(budget);
--- a/js/src/jsapi-tests/testGCUniqueId.cpp
+++ b/js/src/jsapi-tests/testGCUniqueId.cpp
@@ -105,18 +105,18 @@ BEGIN_TEST(testGCUID)
     // Grab the last object in the vector as our object of interest.
     obj = vec2.back();
     CHECK(obj);
     tenuredAddr = uintptr_t(obj.get());
     CHECK(obj->zone()->getUniqueId(obj, &uid));
 
     // Force a compaction to move the object and check that the uid moved to
     // the new tenured heap location.
-    JS::PrepareForFullGC(rt);
-    JS::GCForReason(rt, GC_SHRINK, JS::gcreason::API);
+    JS::PrepareForFullGC(cx);
+    JS::GCForReason(cx, GC_SHRINK, JS::gcreason::API);
     MinimizeHeap(rt);
     CHECK(uintptr_t(obj.get()) != tenuredAddr);
     CHECK(obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));
     CHECK(uid == tmp);
 
     return true;
 }
--- a/js/src/jsapi-tests/testPreserveJitCode.cpp
+++ b/js/src/jsapi-tests/testPreserveJitCode.cpp
@@ -72,20 +72,20 @@ testPreserveJitCode(bool preserveJitCode
 			      source, length, &fun));
 
     RootedValue value(cx);
     for (unsigned i = 0; i < 1500; ++i)
         CHECK(JS_CallFunction(cx, global, fun, JS::HandleValueArray::empty(), &value));
     CHECK_EQUAL(value.toInt32(), 45);
     CHECK_EQUAL(countIonScripts(global), 1u);
 
-    GCForReason(rt, GC_NORMAL, gcreason::API);
+    GCForReason(cx, GC_NORMAL, gcreason::API);
     CHECK_EQUAL(countIonScripts(global), remainingIonScripts);
 
-    GCForReason(rt, GC_SHRINK, gcreason::API);
+    GCForReason(cx, GC_SHRINK, gcreason::API);
     CHECK_EQUAL(countIonScripts(global), 0u);
 
     return true;
 }
 
 JSObject*
 createTestGlobal(bool preserveJitCode)
 {
--- a/js/src/jsapi-tests/tests.h
+++ b/js/src/jsapi-tests/tests.h
@@ -432,18 +432,18 @@ class AutoLeaveZeal
     uint32_t zealBits_;
     uint32_t frequency_;
 
   public:
     explicit AutoLeaveZeal(JSContext* cx) : cx_(cx) {
         uint32_t dummy;
         JS_GetGCZealBits(cx_, &zealBits_, &frequency_, &dummy);
         JS_SetGCZeal(cx_, 0, 0);
-        JS::PrepareForFullGC(JS_GetRuntime(cx_));
-        JS::GCForReason(JS_GetRuntime(cx_), GC_SHRINK, JS::gcreason::DEBUG_GC);
+        JS::PrepareForFullGC(cx_);
+        JS::GCForReason(cx_, GC_SHRINK, JS::gcreason::DEBUG_GC);
     }
     ~AutoLeaveZeal() {
         for (size_t i = 0; i < sizeof(zealBits_) * 8; i++) {
             if (zealBits_ & (1 << i))
                 JS_SetGCZeal(cx_, i, frequency_);
         }
 
 #ifdef DEBUG
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2978,17 +2978,17 @@ GCRuntime::triggerGC(JS::gcreason::Reaso
      */
     if (!CurrentThreadCanAccessRuntime(rt))
         return false;
 
     /* GC is already running. */
     if (rt->isHeapCollecting())
         return false;
 
-    JS::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(rt->contextFromMainThread());
     requestMajorGC(reason);
     return true;
 }
 
 void
 GCRuntime::maybeAllocTriggerZoneGC(Zone* zone, const AutoLockGC& lock)
 {
     size_t usedBytes = zone->usage.gcBytes();
@@ -3059,17 +3059,17 @@ GCRuntime::triggerZoneGC(Zone* zone, JS:
 
 bool
 GCRuntime::maybeGC(Zone* zone)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
 #ifdef JS_GC_ZEAL
     if (hasZealMode(ZealMode::Alloc) || hasZealMode(ZealMode::Poke)) {
-        JS::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt->contextFromMainThread());
         gc(GC_NORMAL, JS::gcreason::DEBUG_GC);
         return true;
     }
 #endif
 
     if (gcIfRequested())
         return true;
 
@@ -3099,17 +3099,17 @@ GCRuntime::maybePeriodicFullGC()
      * tolerate this.
      */
 #ifndef JS_MORE_DETERMINISTIC
     int64_t now = PRMJ_Now();
     if (nextFullGCTime && nextFullGCTime <= now && !isIncrementalGCInProgress()) {
         if (chunkAllocationSinceLastGC ||
             numArenasFreeCommitted > decommitThreshold)
         {
-            JS::PrepareForFullGC(rt);
+            JS::PrepareForFullGC(rt->contextFromMainThread());
             startGC(GC_SHRINK, JS::gcreason::PERIODIC_FULL_GC);
         } else {
             nextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 #endif
 }
 
@@ -6287,17 +6287,17 @@ GCRuntime::collect(bool nonincrementalBy
 
     bool repeat = false;
     do {
         poked = false;
         bool wasReset = gcCycle(nonincrementalByAPI, budget, reason);
 
         /* Need to re-schedule all zones for GC. */
         if (poked && cleanUpEverything)
-            JS::PrepareForFullGC(rt);
+            JS::PrepareForFullGC(rt->contextFromMainThread());
 
         /*
          * This code makes an extra effort to collect compartments that we
          * thought were dead at the start of the GC. See the large comment in
          * beginMarkPhase.
          */
         bool repeatForDeadZone = false;
         if (!nonincrementalByAPI && !isIncrementalGCInProgress()) {
@@ -6414,24 +6414,24 @@ GCRuntime::notifyDidPaint()
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
 #ifdef JS_GC_ZEAL
     if (hasZealMode(ZealMode::FrameVerifierPre))
         verifyPreBarriers();
 
     if (hasZealMode(ZealMode::FrameGC)) {
-        JS::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt->contextFromMainThread());
         gc(GC_NORMAL, JS::gcreason::REFRESH_FRAME);
         return;
     }
 #endif
 
     if (isIncrementalGCInProgress() && !interFrameGC && tunables.areRefreshFrameSlicesEnabled()) {
-        JS::PrepareForIncrementalGC(rt);
+        JS::PrepareForIncrementalGC(rt->contextFromMainThread());
         gcSlice(JS::gcreason::REFRESH_FRAME);
     }
 
     interFrameGC = false;
 }
 
 static bool
 ZonesSelected(JSRuntime* rt)
@@ -6443,36 +6443,36 @@ ZonesSelected(JSRuntime* rt)
     return false;
 }
 
 void
 GCRuntime::startDebugGC(JSGCInvocationKind gckind, SliceBudget& budget)
 {
     MOZ_ASSERT(!isIncrementalGCInProgress());
     if (!ZonesSelected(rt))
-        JS::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt->contextFromMainThread());
     invocationKind = gckind;
     collect(false, budget, JS::gcreason::DEBUG_GC);
 }
 
 void
 GCRuntime::debugGCSlice(SliceBudget& budget)
 {
     MOZ_ASSERT(isIncrementalGCInProgress());
     if (!ZonesSelected(rt))
-        JS::PrepareForIncrementalGC(rt);
+        JS::PrepareForIncrementalGC(rt->contextFromMainThread());
     collect(false, budget, JS::gcreason::DEBUG_GC);
 }
 
 /* Schedule a full GC unless a zone will already be collected. */
 void
 js::PrepareForDebugGC(JSRuntime* rt)
 {
     if (!ZonesSelected(rt))
-        JS::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt->contextFromMainThread());
 }
 
 JS_PUBLIC_API(void)
 JS::ShrinkGCBuffers(JSRuntime* rt)
 {
     MOZ_ASSERT(!rt->isHeapBusy());
     rt->gc.shrinkBuffers();
 }
@@ -6602,17 +6602,17 @@ GCRuntime::gcIfRequested(JSContext* cx /
     }
 
     return false;
 }
 
 void js::gc::FinishGC(JSRuntime* rt)
 {
     if (JS::IsIncrementalGCInProgress(rt)) {
-        JS::PrepareForIncrementalGC(rt);
+        JS::PrepareForIncrementalGC(rt->contextFromMainThread());
         JS::FinishIncrementalGC(rt, JS::gcreason::API);
     }
 
     rt->gc.nursery.waitBackgroundFreeEnd();
 }
 
 AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector)
 {
@@ -7190,56 +7190,56 @@ js::gc::CheckHashTablesAfterMovingGC(JSR
 
 JS_PUBLIC_API(void)
 JS::PrepareZoneForGC(Zone* zone)
 {
     zone->scheduleGC();
 }
 
 JS_PUBLIC_API(void)
-JS::PrepareForFullGC(JSRuntime* rt)
-{
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
+JS::PrepareForFullGC(JSContext* cx)
+{
+    for (ZonesIter zone(cx, WithAtoms); !zone.done(); zone.next())
         zone->scheduleGC();
 }
 
 JS_PUBLIC_API(void)
-JS::PrepareForIncrementalGC(JSRuntime* rt)
-{
-    if (!JS::IsIncrementalGCInProgress(rt))
+JS::PrepareForIncrementalGC(JSContext* cx)
+{
+    if (!JS::IsIncrementalGCInProgress(cx))
         return;
 
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+    for (ZonesIter zone(cx, WithAtoms); !zone.done(); zone.next()) {
         if (zone->wasGCStarted())
             PrepareZoneForGC(zone);
     }
 }
 
 JS_PUBLIC_API(bool)
-JS::IsGCScheduled(JSRuntime* rt)
-{
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+JS::IsGCScheduled(JSContext* cx)
+{
+    for (ZonesIter zone(cx, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCScheduled())
             return true;
     }
 
     return false;
 }
 
 JS_PUBLIC_API(void)
 JS::SkipZoneForGC(Zone* zone)
 {
     zone->unscheduleGC();
 }
 
 JS_PUBLIC_API(void)
-JS::GCForReason(JSRuntime* rt, JSGCInvocationKind gckind, gcreason::Reason reason)
+JS::GCForReason(JSContext* cx, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
     MOZ_ASSERT(gckind == GC_NORMAL || gckind == GC_SHRINK);
-    rt->gc.gc(gckind, reason);
+    cx->gc.gc(gckind, reason);
 }
 
 JS_PUBLIC_API(void)
 JS::StartIncrementalGC(JSRuntime* rt, JSGCInvocationKind gckind, gcreason::Reason reason, int64_t millis)
 {
     MOZ_ASSERT(gckind == GC_NORMAL || gckind == GC_SHRINK);
     rt->gc.startGC(gckind, reason, millis);
 }
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1213,17 +1213,17 @@ my_LargeAllocFailCallback(void* data)
     JSRuntime* rt = cx->runtime();
 
     if (!cx->isJSContext())
         return;
 
     MOZ_ASSERT(!rt->isHeapBusy());
     MOZ_ASSERT(!rt->currentThreadHasExclusiveAccess());
 
-    JS::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(cx);
     AutoKeepAtoms keepAtoms(cx->perThreadData);
     rt->gc.gc(GC_NORMAL, JS::gcreason::SHARED_MEMORY_LIMIT);
 }
 
 static const uint32_t CacheEntry_SOURCE = 0;
 static const uint32_t CacheEntry_BYTECODE = 1;
 
 static const JSClass CacheEntry_class = {
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -427,17 +427,17 @@ JSRuntime::destroyRuntime()
         beingDestroyed_ = true;
 
         /* Allow the GC to release scripts that were being profiled. */
         profilingScripts = false;
 
         /* Set the profiler sampler buffer generation to invalid. */
         profilerSampleBufferGen_ = UINT32_MAX;
 
-        JS::PrepareForFullGC(this);
+        JS::PrepareForFullGC(contextFromMainThread());
         gc.gc(GC_NORMAL, JS::gcreason::DESTROY_RUNTIME);
     }
 
     MOZ_ASSERT(ionLazyLinkListSize_ == 0);
     MOZ_ASSERT(ionLazyLinkList_.isEmpty());
 
     /*
      * Clear the self-hosted global and delete self-hosted classes *after*
--- a/js/xpconnect/src/XPCComponents.cpp
+++ b/js/xpconnect/src/XPCComponents.cpp
@@ -2565,19 +2565,19 @@ nsXPCComponents_Utils::GetWeakReference(
     NS_ENSURE_SUCCESS(rv, rv);
     ref.forget(_retval);
     return NS_OK;
 }
 
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceGC()
 {
-    JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->Runtime();
-    PrepareForFullGC(rt);
-    GCForReason(rt, GC_NORMAL, gcreason::COMPONENT_UTILS);
+    JSContext* cx = nsXPConnect::GetRuntimeInstance()->Context();
+    PrepareForFullGC(cx);
+    GCForReason(cx, GC_NORMAL, gcreason::COMPONENT_UTILS);
     return NS_OK;
 }
 
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceCC(nsICycleCollectorListener* listener)
 {
     nsJSContext::CycleCollectNow(listener);
     return NS_OK;
@@ -2609,19 +2609,19 @@ nsXPCComponents_Utils::ClearMaxCCTime()
 {
     nsJSContext::ClearMaxCCSliceTime();
     return NS_OK;
 }
 
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceShrinkingGC()
 {
-    JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->Runtime();
-    PrepareForFullGC(rt);
-    GCForReason(rt, GC_SHRINK, gcreason::COMPONENT_UTILS);
+    JSContext* cx = nsXPConnect::GetRuntimeInstance()->Context();
+    PrepareForFullGC(cx);
+    GCForReason(cx, GC_SHRINK, gcreason::COMPONENT_UTILS);
     return NS_OK;
 }
 
 class PreciseGCRunnable : public Runnable
 {
   public:
     PreciseGCRunnable(ScheduledGCCallback* aCallback, bool aShrinking)
     : mCallback(aCallback), mShrinking(aShrinking) {}
--- a/xpcom/base/CycleCollectedJSRuntime.cpp
+++ b/xpcom/base/CycleCollectedJSRuntime.cpp
@@ -1242,18 +1242,18 @@ CycleCollectedJSRuntime::AreGCGrayBitsVa
 void
 CycleCollectedJSRuntime::GarbageCollect(uint32_t aReason) const
 {
   MOZ_ASSERT(mJSRuntime);
 
   MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
   JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);
 
-  JS::PrepareForFullGC(mJSRuntime);
-  JS::GCForReason(mJSRuntime, GC_NORMAL, gcreason);
+  JS::PrepareForFullGC(mJSContext);
+  JS::GCForReason(mJSContext, GC_NORMAL, gcreason);
 }
 
 void
 CycleCollectedJSRuntime::JSObjectsTenured()
 {
   MOZ_ASSERT(mJSRuntime);
 
   for (auto iter = mNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
@@ -1664,17 +1664,17 @@ CycleCollectedJSRuntime::OnLargeAllocati
   CustomLargeAllocationFailureCallback();
   AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Reported);
 }
 
 void
 CycleCollectedJSRuntime::PrepareWaitingZonesForGC()
 {
   if (mZonesWaitingForGC.Count() == 0) {
-    JS::PrepareForFullGC(Runtime());
+    JS::PrepareForFullGC(Context());
   } else {
     for (auto iter = mZonesWaitingForGC.Iter(); !iter.Done(); iter.Next()) {
       JS::PrepareZoneForGC(iter.Get()->GetKey());
     }
     mZonesWaitingForGC.Clear();
   }
 }
 
--- a/xpcom/base/nsCycleCollector.cpp
+++ b/xpcom/base/nsCycleCollector.cpp
@@ -3546,17 +3546,17 @@ nsCycleCollector::IsIncrementalGCInProgr
   return mJSRuntime && JS::IsIncrementalGCInProgress(mJSRuntime->Runtime());
 }
 
 void
 nsCycleCollector::FinishAnyIncrementalGCInProgress()
 {
   if (IsIncrementalGCInProgress()) {
     NS_WARNING("Finishing incremental GC in progress during CC");
-    JS::PrepareForIncrementalGC(mJSRuntime->Runtime());
+    JS::PrepareForIncrementalGC(mJSRuntime->Context());
     JS::FinishIncrementalGC(mJSRuntime->Runtime(), JS::gcreason::CC_FORCED);
   }
 }
 
 void
 nsCycleCollector::CleanupAfterCollection()
 {
   TimeLog timeLog;