Bug 1359245 - Remove CycleCollectedJSRuntime::mJSContext (r=mccr8,sfink)
author Bill McCloskey <billm@mozilla.com>
Sun, 23 Apr 2017 19:30:06 -0700
changeset 403605 fd29fdf5c245fcaecc5cc93f95fecb9e87488320
parent 403604 a0ad80b18a0b97a3e65137fc53d559e2db1f430e
child 403606 22e729c4596ee963ecd5ae7cd909cfe87994d9da
push id 7391
push user mtabara@mozilla.com
push date Mon, 12 Jun 2017 13:08:53 +0000
reviewers mccr8, sfink
bugs 1359245
milestone 55.0a1
Bug 1359245 - Remove CycleCollectedJSRuntime::mJSContext (r=mccr8,sfink)

This patch eliminates a field where we assume that there is one CycleCollectedJSContext per runtime.

MozReview-Commit-ID: 5cEL5Ml6Y9v
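As a rough sketch of the calling pattern this patch enables (not code from the changeset itself; MyRuntimeState and its methods are hypothetical embedder names), a consumer can capture the JSRuntime once via the new JS_GetRuntime(cx) accessor and then use the runtime-taking overloads added here, such as JS::IsIncrementalGCInProgress(rt) and JS::WasIncrementalGC(rt), without holding on to a JSContext:

    // Hypothetical embedder class illustrating the new pattern; only the
    // JSAPI calls (JS_GetRuntime, the JSRuntime* GC queries) come from this patch.
    #include "jsapi.h"
    #include "js/GCAPI.h"

    class MyRuntimeState {
     public:
      explicit MyRuntimeState(JSContext* aCx)
        : mJSRuntime(JS_GetRuntime(aCx))   // new accessor added to jsapi.h by this patch
      {
      }

      // Runtime-level GC queries no longer need a JSContext.
      bool GCInProgress() const {
        return JS::IsIncrementalGCInProgress(mJSRuntime);
      }

      bool LastGCWasIncremental() const {
        return JS::WasIncrementalGC(mJSRuntime);
      }

     private:
      JSRuntime* mJSRuntime;  // stored instead of a JSContext, mirroring CycleCollectedJSRuntime::mJSRuntime
    };

This mirrors what CycleCollectedJSRuntime itself does below: its constructor still takes a JSContext for the context-scoped callback setup, but it stores only the JSRuntime and performs runtime-scoped work (gray-bit checks, weak map tracing, persistent rooting) against that.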
js/public/GCAPI.h
js/public/RootingAPI.h
js/src/gc/RootMarking.cpp
js/src/gc/Verifier.cpp
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jsfriendapi.cpp
js/src/jsfriendapi.h
js/src/jsgc.cpp
js/src/jswatchpoint.cpp
js/src/jsweakmap.cpp
xpcom/base/CycleCollectedJSRuntime.cpp
xpcom/base/CycleCollectedJSRuntime.h
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -419,16 +419,23 @@ IsIncrementalGCEnabled(JSContext* cx);
 
 /**
  * Returns true while an incremental GC is ongoing, both when actively
  * collecting and between slices.
  */
 extern JS_PUBLIC_API(bool)
 IsIncrementalGCInProgress(JSContext* cx);
 
+/**
+ * Returns true while an incremental GC is ongoing, both when actively
+ * collecting and between slices.
+ */
+extern JS_PUBLIC_API(bool)
+IsIncrementalGCInProgress(JSRuntime* rt);
+
 /*
  * Returns true when writes to GC thing pointers (and reads from weak pointers)
  * must call an incremental barrier. This is generally only true when running
  * mutator code in-between GC slices. At other times, the barrier may be elided
  * for performance.
  */
 extern JS_PUBLIC_API(bool)
 IsIncrementalBarrierNeeded(JSContext* cx);
@@ -446,17 +453,17 @@ IncrementalPreWriteBarrier(JSObject* obj
  */
 extern JS_PUBLIC_API(void)
 IncrementalReadBarrier(GCCellPtr thing);
 
 /**
  * Returns true if the most recent GC ran incrementally.
  */
 extern JS_PUBLIC_API(bool)
-WasIncrementalGC(JSContext* cx);
+WasIncrementalGC(JSRuntime* rt);
 
 /*
  * Generational GC:
  *
  * Note: Generational GC is not yet enabled by default. The following class
  *       is non-functional unless SpiderMonkey was configured with
  *       --enable-gcgenerational.
  */
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -1050,16 +1050,19 @@ MutableHandle<T>::MutableHandle(Persiste
     static_assert(sizeof(MutableHandle<T>) == sizeof(T*),
                   "MutableHandle must be binary compatible with T*.");
     ptr = root->address();
 }
 
 JS_PUBLIC_API(void)
 AddPersistentRoot(RootingContext* cx, RootKind kind, PersistentRooted<void*>* root);
 
+JS_PUBLIC_API(void)
+AddPersistentRoot(JSRuntime* rt, RootKind kind, PersistentRooted<void*>* root);
+
 /**
  * A copyable, assignable global GC root type with arbitrary lifetime, an
  * infallible constructor, and automatic unrooting on destruction.
  *
  * These roots can be used in heap-allocated data structures, so they are not
  * associated with any particular JSContext or stack. They are registered with
  * the JSRuntime itself, without locking, so they require a full JSContext to be
  * initialized, not one of its more restricted superclasses. Initialization may
@@ -1099,16 +1102,22 @@ class PersistentRooted : public js::Root
     friend class mozilla::LinkedListElement<PersistentRooted>;
 
     void registerWithRootLists(RootingContext* cx) {
         MOZ_ASSERT(!initialized());
         JS::RootKind kind = JS::MapTypeToRootKind<T>::kind;
         AddPersistentRoot(cx, kind, reinterpret_cast<JS::PersistentRooted<void*>*>(this));
     }
 
+    void registerWithRootLists(JSRuntime* rt) {
+        MOZ_ASSERT(!initialized());
+        JS::RootKind kind = JS::MapTypeToRootKind<T>::kind;
+        AddPersistentRoot(rt, kind, reinterpret_cast<JS::PersistentRooted<void*>*>(this));
+    }
+
   public:
     using ElementType = T;
 
     PersistentRooted() : ptr(GCPolicy<T>::initial()) {}
 
     explicit PersistentRooted(RootingContext* cx)
       : ptr(GCPolicy<T>::initial())
     {
@@ -1130,16 +1139,29 @@ class PersistentRooted : public js::Root
 
     template <typename U>
     PersistentRooted(JSContext* cx, U&& initial)
       : ptr(mozilla::Forward<U>(initial))
     {
         registerWithRootLists(RootingContext::get(cx));
     }
 
+    explicit PersistentRooted(JSRuntime* rt)
+      : ptr(GCPolicy<T>::initial())
+    {
+        registerWithRootLists(rt);
+    }
+
+    template <typename U>
+    PersistentRooted(JSRuntime* rt, U&& initial)
+      : ptr(mozilla::Forward<U>(initial))
+    {
+        registerWithRootLists(rt);
+    }
+
     PersistentRooted(const PersistentRooted& rhs)
       : mozilla::LinkedListElement<PersistentRooted<T>>(),
         ptr(rhs.ptr)
     {
         /*
          * Copy construction takes advantage of the fact that the original
          * is already inserted, and simply adds itself to whatever list the
          * original was on - no JSRuntime pointer needed.
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -545,8 +545,14 @@ GCRuntime::resetBufferedGrayRoots() cons
         zone->gcGrayRoots().clearAndFree();
 }
 
 JS_PUBLIC_API(void)
 JS::AddPersistentRoot(JS::RootingContext* cx, RootKind kind, PersistentRooted<void*>* root)
 {
     static_cast<JSContext*>(cx)->runtime()->heapRoots.ref()[kind].insertBack(root);
 }
+
+JS_PUBLIC_API(void)
+JS::AddPersistentRoot(JSRuntime* rt, RootKind kind, PersistentRooted<void*>* root)
+{
+    rt->heapRoots.ref()[kind].insertBack(root);
+}
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -668,19 +668,18 @@ CheckGrayMarkingTracer::check(AutoLockFo
 {
     if (!traceHeap(lock))
         return true; // Ignore failure.
 
     return failures == 0;
 }
 
 JS_FRIEND_API(bool)
-js::CheckGrayMarkingState(JSContext* cx)
+js::CheckGrayMarkingState(JSRuntime* rt)
 {
-    JSRuntime* rt = cx->runtime();
     MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     if (!rt->gc.areGrayBitsValid())
         return true;
 
     gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
     AutoTraceSession session(rt, JS::HeapState::Tracing);
     CheckGrayMarkingTracer tracer(rt);
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -561,16 +561,22 @@ JS_EndRequest(JSContext* cx)
 }
 
 JS_PUBLIC_API(JSRuntime*)
 JS_GetParentRuntime(JSContext* cx)
 {
     return cx->runtime()->parentRuntime ? cx->runtime()->parentRuntime : cx->runtime();
 }
 
+JS_PUBLIC_API(JSRuntime*)
+JS_GetRuntime(JSContext* cx)
+{
+    return cx->runtime();
+}
+
 JS_PUBLIC_API(void)
 JS::SetSingleThreadedExecutionCallbacks(JSContext* cx,
                                         BeginSingleThreadedExecutionCallback begin,
                                         EndSingleThreadedExecutionCallback end)
 {
     cx->runtime()->beginSingleThreadedExecutionCallback = begin;
     cx->runtime()->endSingleThreadedExecutionCallback = end;
 }
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1038,16 +1038,19 @@ JS_PUBLIC_API(void*)
 JS_GetContextPrivate(JSContext* cx);
 
 JS_PUBLIC_API(void)
 JS_SetContextPrivate(JSContext* cx, void* data);
 
 extern JS_PUBLIC_API(JSRuntime*)
 JS_GetParentRuntime(JSContext* cx);
 
+extern JS_PUBLIC_API(JSRuntime*)
+JS_GetRuntime(JSContext* cx);
+
 extern JS_PUBLIC_API(void)
 JS_BeginRequest(JSContext* cx);
 
 extern JS_PUBLIC_API(void)
 JS_EndRequest(JSContext* cx);
 
 extern JS_PUBLIC_API(void)
 JS_SetFutexCanWait(JSContext* cx);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -602,19 +602,19 @@ JS_IsDeadWrapper(JSObject* obj)
 void
 js::TraceWeakMaps(WeakMapTracer* trc)
 {
     WeakMapBase::traceAllMappings(trc);
     WatchpointMap::traceAll(trc);
 }
 
 extern JS_FRIEND_API(bool)
-js::AreGCGrayBitsValid(JSContext* cx)
+js::AreGCGrayBitsValid(JSRuntime* rt)
 {
-    return cx->runtime()->gc.areGrayBitsValid();
+    return rt->gc.areGrayBitsValid();
 }
 
 JS_FRIEND_API(bool)
 js::ZoneGlobalsAreAllGray(JS::Zone* zone)
 {
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         JSObject* obj = comp->unsafeUnbarrieredMaybeGlobal();
         if (!obj || !JS::ObjectIsMarkedGray(obj))
@@ -1097,17 +1097,17 @@ JS::IsGCPoisoning()
 
 struct DumpHeapTracer : public JS::CallbackTracer, public WeakMapTracer
 {
     const char* prefix;
     FILE* output;
 
     DumpHeapTracer(FILE* fp, JSContext* cx)
       : JS::CallbackTracer(cx, DoNotTraceWeakMaps),
-        js::WeakMapTracer(cx), prefix(""), output(fp)
+        js::WeakMapTracer(cx->runtime()), prefix(""), output(fp)
     {}
 
   private:
     void trace(JSObject* map, JS::GCCellPtr key, JS::GCCellPtr value) override {
         JSObject* kdelegate = nullptr;
         if (key.is<JSObject>())
             kdelegate = js::GetWeakmapKeyDelegate(&key.as<JSObject>());
 
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -446,34 +446,34 @@ IsSystemZone(JS::Zone* zone);
 extern JS_FRIEND_API(bool)
 IsAtomsCompartment(JSCompartment* comp);
 
 extern JS_FRIEND_API(bool)
 IsAtomsZone(JS::Zone* zone);
 
 struct WeakMapTracer
 {
-    JSContext* context;
-
-    explicit WeakMapTracer(JSContext* cx) : context(cx) {}
+    JSRuntime* runtime;
+
+    explicit WeakMapTracer(JSRuntime* rt) : runtime(rt) {}
 
     // Weak map tracer callback, called once for every binding of every
     // weak map that was live at the time of the last garbage collection.
     //
     // m will be nullptr if the weak map is not contained in a JS Object.
     //
     // The callback should not GC (and will assert in a debug build if it does so.)
     virtual void trace(JSObject* m, JS::GCCellPtr key, JS::GCCellPtr value) = 0;
 };
 
 extern JS_FRIEND_API(void)
 TraceWeakMaps(WeakMapTracer* trc);
 
 extern JS_FRIEND_API(bool)
-AreGCGrayBitsValid(JSContext* cx);
+AreGCGrayBitsValid(JSRuntime* rt);
 
 extern JS_FRIEND_API(bool)
 ZoneGlobalsAreAllGray(JS::Zone* zone);
 
 extern JS_FRIEND_API(bool)
 IsObjectZoneSweepingOrCompacting(JSObject* obj);
 
 typedef void
@@ -500,17 +500,17 @@ IterateGrayObjectsUnderCC(JS::Zone* zone
 
 #ifdef DEBUG
 // Trace the heap and check there are no black to gray edges. These are
 // not allowed since the cycle collector could throw away the gray thing and
 // leave a dangling pointer.
 //
 // This doesn't trace weak maps as these are handled separately.
 extern JS_FRIEND_API(bool)
-CheckGrayMarkingState(JSContext* cx);
+CheckGrayMarkingState(JSRuntime* rt);
 #endif
 
 #ifdef JS_HAS_CTYPES
 extern JS_FRIEND_API(size_t)
 SizeOfDataIfCDataObject(mozilla::MallocSizeOf mallocSizeOf, JSObject* obj);
 #endif
 
 extern JS_FRIEND_API(JSCompartment*)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -7659,16 +7659,22 @@ JS::IsIncrementalGCEnabled(JSContext* cx
 
 JS_PUBLIC_API(bool)
 JS::IsIncrementalGCInProgress(JSContext* cx)
 {
     return cx->runtime()->gc.isIncrementalGCInProgress() && !cx->runtime()->gc.isVerifyPreBarriersEnabled();
 }
 
 JS_PUBLIC_API(bool)
+JS::IsIncrementalGCInProgress(JSRuntime* rt)
+{
+    return rt->gc.isIncrementalGCInProgress() && !rt->gc.isVerifyPreBarriersEnabled();
+}
+
+JS_PUBLIC_API(bool)
 JS::IsIncrementalBarrierNeeded(JSContext* cx)
 {
     if (JS::CurrentThreadIsHeapBusy())
         return false;
 
     auto state = cx->runtime()->gc.state();
     return state != gc::State::NotActive && state <= gc::State::Sweep;
 }
@@ -7693,19 +7699,19 @@ JS::IncrementalReadBarrier(GCCellPtr thi
     if (!thing)
         return;
 
     MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
     DispatchTyped(IncrementalReadBarrierFunctor(), thing);
 }
 
 JS_PUBLIC_API(bool)
-JS::WasIncrementalGC(JSContext* cx)
-{
-    return cx->runtime()->gc.isIncrementalGc();
+JS::WasIncrementalGC(JSRuntime* rt)
+{
+    return rt->gc.isIncrementalGc();
 }
 
 uint64_t
 js::gc::NextCellUniqueId(JSRuntime* rt)
 {
     return rt->gc.nextCellUniqueId();
 }
 
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -212,17 +212,17 @@ WatchpointMap::sweep()
             e.rekeyFront(WatchKey(obj, entry.key().id));
         }
     }
 }
 
 void
 WatchpointMap::traceAll(WeakMapTracer* trc)
 {
-    JSRuntime* rt = trc->context->runtime();
+    JSRuntime* rt = trc->runtime;
     for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
         if (WatchpointMap* wpmap = comp->watchpointMap)
             wpmap->trace(trc);
     }
 }
 
 void
 WatchpointMap::trace(WeakMapTracer* trc)
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -92,17 +92,17 @@ WeakMapBase::sweepZone(JS::Zone* zone)
     for (WeakMapBase* m : zone->gcWeakMapList())
         MOZ_ASSERT(m->isInList() && m->marked);
 #endif
 }
 
 void
 WeakMapBase::traceAllMappings(WeakMapTracer* tracer)
 {
-    JSRuntime* rt = tracer->context->runtime();
+    JSRuntime* rt = tracer->runtime;
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         for (WeakMapBase* m : zone->gcWeakMapList()) {
             // The WeakMapTracer callback is not allowed to GC.
             JS::AutoSuppressGCAnalysis nogc;
             m->traceMappings(tracer);
         }
     }
 }
--- a/xpcom/base/CycleCollectedJSRuntime.cpp
+++ b/xpcom/base/CycleCollectedJSRuntime.cpp
@@ -127,19 +127,19 @@ public:
 
   NS_DECL_NSIRUNNABLE
 };
 
 } // namespace mozilla
 
 struct NoteWeakMapChildrenTracer : public JS::CallbackTracer
 {
-  NoteWeakMapChildrenTracer(JSContext* aCx,
+  NoteWeakMapChildrenTracer(JSRuntime* aRt,
                             nsCycleCollectionNoteRootCallback& aCb)
-    : JS::CallbackTracer(aCx), mCb(aCb), mTracedAny(false), mMap(nullptr),
+    : JS::CallbackTracer(aRt), mCb(aCb), mTracedAny(false), mMap(nullptr),
       mKey(nullptr), mKeyDelegate(nullptr)
   {
   }
   void onChild(const JS::GCCellPtr& aThing) override;
   nsCycleCollectionNoteRootCallback& mCb;
   bool mTracedAny;
   JSObject* mMap;
   JS::GCCellPtr mKey;
@@ -162,18 +162,18 @@ NoteWeakMapChildrenTracer::onChild(const
     mTracedAny = true;
   } else {
     JS::TraceChildren(this, aThing);
   }
 }
 
 struct NoteWeakMapsTracer : public js::WeakMapTracer
 {
-  NoteWeakMapsTracer(JSContext* aCx, nsCycleCollectionNoteRootCallback& aCccb)
-    : js::WeakMapTracer(aCx), mCb(aCccb), mChildTracer(aCx, aCccb)
+  NoteWeakMapsTracer(JSRuntime* aRt, nsCycleCollectionNoteRootCallback& aCccb)
+    : js::WeakMapTracer(aRt), mCb(aCccb), mChildTracer(aRt, aCccb)
   {
   }
   void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override;
   nsCycleCollectionNoteRootCallback& mCb;
   NoteWeakMapChildrenTracer mChildTracer;
 };
 
 void
@@ -263,18 +263,18 @@ ShouldWeakMappingEntryBeBlack(JSObject* 
       (!aMap || !JS::ObjectIsMarkedGray(aMap)) &&
       aValue.kind() != JS::TraceKind::Shape) {
     *aValueShouldBeBlack = true;
   }
 }
 
 struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
 {
-  explicit FixWeakMappingGrayBitsTracer(JSContext* aCx)
-    : js::WeakMapTracer(aCx)
+  explicit FixWeakMappingGrayBitsTracer(JSRuntime* aRt)
+    : js::WeakMapTracer(aRt)
   {
   }
 
   void
   FixAll()
   {
     do {
       mAnyMarked = false;
@@ -299,25 +299,25 @@ struct FixWeakMappingGrayBitsTracer : pu
 
   MOZ_INIT_OUTSIDE_CTOR bool mAnyMarked;
 };
 
 #ifdef DEBUG
 // Check whether weak maps are marked correctly according to the logic above.
 struct CheckWeakMappingGrayBitsTracer : public js::WeakMapTracer
 {
-  explicit CheckWeakMappingGrayBitsTracer(JSContext* aCx)
-    : js::WeakMapTracer(aCx), mFailed(false)
+  explicit CheckWeakMappingGrayBitsTracer(JSRuntime* aRt)
+    : js::WeakMapTracer(aRt), mFailed(false)
   {
   }
 
   static bool
-  Check(JSContext* aCx)
+  Check(JSRuntime* aRt)
   {
-    CheckWeakMappingGrayBitsTracer tracer(aCx);
+    CheckWeakMappingGrayBitsTracer tracer(aRt);
     js::TraceWeakMaps(&tracer);
     return !tracer.mFailed;
   }
 
   void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override
   {
     bool keyShouldBeBlack;
     bool valueShouldBeBlack;
@@ -385,18 +385,18 @@ JSZoneParticipant::TraverseNative(void* 
   JS::Zone* zone = static_cast<JS::Zone*>(aPtr);
 
   runtime->TraverseZone(zone, aCb);
   return NS_OK;
 }
 
 struct TraversalTracer : public JS::CallbackTracer
 {
-  TraversalTracer(JSContext* aCx, nsCycleCollectionTraversalCallback& aCb)
-    : JS::CallbackTracer(aCx, DoNotTraceWeakMaps), mCb(aCb)
+  TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
+    : JS::CallbackTracer(aRt, DoNotTraceWeakMaps), mCb(aCb)
   {
   }
   void onChild(const JS::GCCellPtr& aThing) override;
   nsCycleCollectionTraversalCallback& mCb;
 };
 
 void
 TraversalTracer::onChild(const JS::GCCellPtr& aThing)
@@ -496,79 +496,75 @@ mozilla::GetBuildId(JS::BuildIdCharVecto
 }
 
 static void
 MozCrashWarningReporter(JSContext*, JSErrorReport*)
 {
   MOZ_CRASH("Why is someone touching JSAPI without an AutoJSAPI?");
 }
 
-CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSContext* aMainContext)
+CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSContext* aCx)
   : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal)
   , mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal)
-  , mJSContext(aMainContext)
+  , mJSRuntime(JS_GetRuntime(aCx))
   , mPrevGCSliceCallback(nullptr)
   , mPrevGCNurseryCollectionCallback(nullptr)
   , mJSHolders(256)
   , mOutOfMemoryState(OOMState::OK)
   , mLargeAllocationFailureState(OOMState::OK)
 {
-  if (!JS_AddExtraGCRootsTracer(mJSContext, TraceBlackJS, this)) {
+  MOZ_ASSERT(aCx);
+  MOZ_ASSERT(mJSRuntime);
+
+  if (!JS_AddExtraGCRootsTracer(aCx, TraceBlackJS, this)) {
     MOZ_CRASH("JS_AddExtraGCRootsTracer failed");
   }
-  JS_SetGrayGCRootsTracer(mJSContext, TraceGrayJS, this);
-  JS_SetGCCallback(mJSContext, GCCallback, this);
-  mPrevGCSliceCallback = JS::SetGCSliceCallback(mJSContext, GCSliceCallback);
+  JS_SetGrayGCRootsTracer(aCx, TraceGrayJS, this);
+  JS_SetGCCallback(aCx, GCCallback, this);
+  mPrevGCSliceCallback = JS::SetGCSliceCallback(aCx, GCSliceCallback);
 
   if (NS_IsMainThread()) {
     // We would like to support all threads here, but the way timeline consumers
     // are set up currently, you can either add a marker for one specific
     // docshell, or for every consumer globally. We would like to add a marker
     // for every consumer observing anything on this thread, but that is not
     // currently possible. For now, add global markers only when we are on the
     // main thread, since the UI for this tracing data only displays data
     // relevant to the main-thread.
     mPrevGCNurseryCollectionCallback = JS::SetGCNurseryCollectionCallback(
-      mJSContext, GCNurseryCollectionCallback);
+      aCx, GCNurseryCollectionCallback);
   }
 
-  JS_SetObjectsTenuredCallback(mJSContext, JSObjectsTenuredCb, this);
-  JS::SetOutOfMemoryCallback(mJSContext, OutOfMemoryCallback, this);
-  JS_SetExternalStringSizeofCallback(mJSContext, SizeofExternalStringCallback);
-  JS::SetBuildIdOp(mJSContext, GetBuildId);
-  JS::SetWarningReporter(mJSContext, MozCrashWarningReporter);
+  JS_SetObjectsTenuredCallback(aCx, JSObjectsTenuredCb, this);
+  JS::SetOutOfMemoryCallback(aCx, OutOfMemoryCallback, this);
+  JS_SetExternalStringSizeofCallback(aCx, SizeofExternalStringCallback);
+  JS::SetBuildIdOp(aCx, GetBuildId);
+  JS::SetWarningReporter(aCx, MozCrashWarningReporter);
 #ifdef MOZ_CRASHREPORTER
     js::AutoEnterOOMUnsafeRegion::setAnnotateOOMAllocationSizeCallback(
             CrashReporter::AnnotateOOMAllocationSize);
 #endif
 
   static js::DOMCallbacks DOMcallbacks = {
     InstanceClassHasProtoAtDepth
   };
-  SetDOMCallbacks(mJSContext, &DOMcallbacks);
-  js::SetScriptEnvironmentPreparer(mJSContext, &mEnvironmentPreparer);
+  SetDOMCallbacks(aCx, &DOMcallbacks);
+  js::SetScriptEnvironmentPreparer(aCx, &mEnvironmentPreparer);
 
-  JS::dbg::SetDebuggerMallocSizeOf(mJSContext, moz_malloc_size_of);
+  JS::dbg::SetDebuggerMallocSizeOf(aCx, moz_malloc_size_of);
 }
 
 void
 CycleCollectedJSRuntime::Shutdown(JSContext* cx)
 {
 }
 
 CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
 {
-  // If the allocation failed, here we are.
-  if (!mJSContext) {
-    return;
-  }
-
   MOZ_ASSERT(!mDeferredFinalizerTable.Count());
-
-  mJSContext = nullptr;
 }
 
 size_t
 CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
 {
   size_t n = 0;
 
   // We're deliberately not measuring anything hanging off the entries in
@@ -629,18 +625,17 @@ CycleCollectedJSRuntime::DescribeGCThing
   // Disable printing global for objects while we figure out ObjShrink fallout.
   aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
 }
 
 void
 CycleCollectedJSRuntime::NoteGCThingJSChildren(JS::GCCellPtr aThing,
                                                nsCycleCollectionTraversalCallback& aCb) const
 {
-  MOZ_ASSERT(mJSContext);
-  TraversalTracer trc(mJSContext, aCb);
+  TraversalTracer trc(mJSRuntime, aCb);
   JS::TraceChildren(&trc, aThing);
 }
 
 void
 CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp,
                                                   JSObject* aObj,
                                                   nsCycleCollectionTraversalCallback& aCb) const
 {
@@ -708,18 +703,16 @@ struct TraverseObjectShimClosure
   nsCycleCollectionTraversalCallback& cb;
   CycleCollectedJSRuntime* self;
 };
 
 void
 CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
                                       nsCycleCollectionTraversalCallback& aCb)
 {
-  MOZ_ASSERT(mJSContext);
-
   /*
    * We treat the zone as being gray. We handle non-gray GCthings in the
    * zone by not reporting their children to the CC. The black-gray invariant
    * ensures that any JS children will also be non-gray, and thus don't need to be
    * added to the graph. For C++ children, not representing the edge from the
    * non-gray JS GCthings to the C++ object will keep the child alive.
    *
    * We don't allow zone merging in a WantAllTraces CC, because then these
@@ -730,17 +723,17 @@ CycleCollectedJSRuntime::TraverseZone(JS
   /*
    * Every JS child of everything in the zone is either in the zone
    * or is a cross-compartment wrapper. In the former case, we don't need to
    * represent these edges in the CC graph because JS objects are not ref counted.
    * In the latter case, the JS engine keeps a map of these wrappers, which we
    * iterate over. Edges between compartments in the same zone will add
    * unnecessary loop edges to the graph (bug 842137).
    */
-  TraversalTracer trc(mJSContext, aCb);
+  TraversalTracer trc(mJSRuntime, aCb);
   js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);
 
   /*
    * To find C++ children of things in the zone, we scan every JS Object in
    * the zone. Only JS Objects can have C++ children.
    */
   TraverseObjectShimClosure closure = { aCb, this };
   js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
@@ -987,33 +980,30 @@ mozilla::TraceScriptHolder(nsISupports* 
   nsXPCOMCycleCollectionParticipant* participant = nullptr;
   CallQueryInterface(aHolder, &participant);
   participant->Trace(aHolder, JsGcTracer(), aTracer);
 }
 
 void
 CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
 {
-  MOZ_ASSERT(mJSContext);
-
   // NB: This is here just to preserve the existing XPConnect order. I doubt it
   // would hurt to do this after the JS holders.
   TraceAdditionalNativeGrayRoots(aTracer);
 
   for (auto iter = mJSHolders.Iter(); !iter.Done(); iter.Next()) {
     void* holder = iter.Key();
     nsScriptObjectTracer*& tracer = iter.Data();
     tracer->Trace(holder, JsGcTracer(), aTracer);
   }
 }
 
 void
 CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
 {
-  MOZ_ASSERT(mJSContext);
   mJSHolders.Put(aHolder, aTracer);
 }
 
 struct ClearJSHolder : public TraceCallbacks
 {
   virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const override
   {
     aPtr->setUndefined();
@@ -1054,136 +1044,121 @@ struct ClearJSHolder : public TraceCallb
   {
     *aPtr = nullptr;
   }
 };
 
 void
 CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
 {
-  MOZ_ASSERT(mJSContext);
-
   nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
   if (!tracer) {
     return;
   }
   tracer->Trace(aHolder, ClearJSHolder(), nullptr);
   mJSHolders.Remove(aHolder);
 }
 
 #ifdef DEBUG
 bool
 CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
 {
-  MOZ_ASSERT(mJSContext);
   return mJSHolders.Get(aHolder, nullptr);
 }
 
 static void
 AssertNoGcThing(JS::GCCellPtr aGCThing, const char* aName, void* aClosure)
 {
   MOZ_ASSERT(!aGCThing);
 }
 
 void
 CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
 {
-  MOZ_ASSERT(mJSContext);
-
   nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
   if (tracer) {
     tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
   }
 }
 #endif
 
 nsCycleCollectionParticipant*
 CycleCollectedJSRuntime::GCThingParticipant()
 {
-  MOZ_ASSERT(mJSContext);
   return &mGCThingCycleCollectorGlobal;
 }
 
 nsCycleCollectionParticipant*
 CycleCollectedJSRuntime::ZoneParticipant()
 {
-  MOZ_ASSERT(mJSContext);
   return &mJSZoneCycleCollectorGlobal;
 }
 
 nsresult
 CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback& aCb)
 {
-  MOZ_ASSERT(mJSContext);
-
   TraverseNativeRoots(aCb);
 
-  NoteWeakMapsTracer trc(mJSContext, aCb);
+  NoteWeakMapsTracer trc(mJSRuntime, aCb);
   js::TraceWeakMaps(&trc);
 
   return NS_OK;
 }
 
 bool
 CycleCollectedJSRuntime::UsefulToMergeZones() const
 {
   return false;
 }
 
 void
 CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
 {
-  MOZ_ASSERT(mJSContext);
-  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSContext),
+  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
              "Don't call FixWeakMappingGrayBits during a GC.");
-  FixWeakMappingGrayBitsTracer fixer(mJSContext);
+  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
   fixer.FixAll();
 }
 
 void
 CycleCollectedJSRuntime::CheckGrayBits() const
 {
-  MOZ_ASSERT(mJSContext);
-  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSContext),
+  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
              "Don't call CheckGrayBits during a GC.");
 
 #ifndef ANDROID
   // Bug 1346874 - The gray state check is expensive. Android tests are already
   // slow enough that this check can easily push them over the threshold to a
   // timeout.
 
-  MOZ_ASSERT(js::CheckGrayMarkingState(mJSContext));
-  MOZ_ASSERT(CheckWeakMappingGrayBitsTracer::Check(mJSContext));
+  MOZ_ASSERT(js::CheckGrayMarkingState(mJSRuntime));
+  MOZ_ASSERT(CheckWeakMappingGrayBitsTracer::Check(mJSRuntime));
 #endif
 }
 
 bool
 CycleCollectedJSRuntime::AreGCGrayBitsValid() const
 {
-  MOZ_ASSERT(mJSContext);
-  return js::AreGCGrayBitsValid(mJSContext);
+  return js::AreGCGrayBitsValid(mJSRuntime);
 }
 
 void
 CycleCollectedJSRuntime::GarbageCollect(uint32_t aReason) const
 {
-  MOZ_ASSERT(mJSContext);
-
   MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
   JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);
 
-  JS::PrepareForFullGC(mJSContext);
-  JS::GCForReason(mJSContext, GC_NORMAL, gcreason);
+  JSContext* cx = CycleCollectedJSContext::Get()->Context();
+  JS::PrepareForFullGC(cx);
+  JS::GCForReason(cx, GC_NORMAL, gcreason);
 }
 
 void
 CycleCollectedJSRuntime::JSObjectsTenured()
 {
-  MOZ_ASSERT(mJSContext);
-
   for (auto iter = mNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
     nsWrapperCache* cache = iter.Get();
     JSObject* wrapper = cache->GetWrapperMaybeDead();
     MOZ_DIAGNOSTIC_ASSERT(wrapper);
     if (!JS::ObjectIsTenured(wrapper)) {
       MOZ_ASSERT(!cache->PreservingWrapper());
       const JSClass* jsClass = js::GetObjectJSClass(wrapper);
       jsClass->doFinalize(nullptr, wrapper);
@@ -1198,62 +1173,56 @@ for (auto iter = mPreservedNurseryObject
 
   mNurseryObjects.Clear();
   mPreservedNurseryObjects.Clear();
 }
 
 void
 CycleCollectedJSRuntime::NurseryWrapperAdded(nsWrapperCache* aCache)
 {
-  MOZ_ASSERT(mJSContext);
   MOZ_ASSERT(aCache);
   MOZ_ASSERT(aCache->GetWrapperMaybeDead());
   MOZ_ASSERT(!JS::ObjectIsTenured(aCache->GetWrapperMaybeDead()));
   mNurseryObjects.InfallibleAppend(aCache);
 }
 
 void
 CycleCollectedJSRuntime::NurseryWrapperPreserved(JSObject* aWrapper)
 {
-  MOZ_ASSERT(mJSContext);
-
   mPreservedNurseryObjects.InfallibleAppend(
-    JS::PersistentRooted<JSObject*>(mJSContext, aWrapper));
+    JS::PersistentRooted<JSObject*>(mJSRuntime, aWrapper));
 }
 
 void
 CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
                                           DeferredFinalizeFunction aFunc,
                                           void* aThing)
 {
-  MOZ_ASSERT(mJSContext);
-
   void* thingArray = nullptr;
   bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray);
 
   thingArray = aAppendFunc(thingArray, aThing);
   if (!hadThingArray) {
     mDeferredFinalizerTable.Put(aFunc, thingArray);
   }
 }
 
 void
 CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
 {
-  MOZ_ASSERT(mJSContext);
-
   typedef DeferredFinalizerImpl<nsISupports> Impl;
   DeferredFinalize(Impl::AppendDeferredFinalizePointer, Impl::DeferredFinalize,
                    aSupports);
 }
 
 void
 CycleCollectedJSRuntime::DumpJSHeap(FILE* aFile)
 {
-  js::DumpHeap(mJSContext, aFile, js::CollectNurseryBeforeDump);
+  JSContext* cx = CycleCollectedJSContext::Get()->Context();
+  js::DumpHeap(cx, aFile, js::CollectNurseryBeforeDump);
 }
 
 IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                                                          DeferredFinalizerTable& aFinalizers)
   : Runnable("IncrementalFinalizeRunnable")
   , mRuntime(aRt)
   , mFinalizeFunctionToRun(0)
   , mReleasing(false)
@@ -1351,18 +1320,16 @@ IncrementalFinalizeRunnable::Run()
   Telemetry::Accumulate(Telemetry::DEFERRED_FINALIZE_ASYNC, duration);
 
   return NS_OK;
 }
 
 void
 CycleCollectedJSRuntime::FinalizeDeferredThings(CycleCollectedJSContext::DeferredFinalizeType aType)
 {
-  MOZ_ASSERT(mJSContext);
-
   /*
    * If the previous GC created a runnable to finalize objects
    * incrementally, and if it hasn't finished yet, finish it now. We
    * don't want these to build up. We also don't want to allow any
    * existing incremental finalize runnables to run after a
    * non-incremental GC, since they are often used to detect leaks.
    */
   if (mFinalizeRunnable) {
@@ -1391,87 +1358,80 @@ CycleCollectedJSRuntime::FinalizeDeferre
     MOZ_ASSERT(!mFinalizeRunnable);
   }
 }
 
 void
 CycleCollectedJSRuntime::AnnotateAndSetOutOfMemory(OOMState* aStatePtr,
                                                    OOMState aNewState)
 {
-  MOZ_ASSERT(mJSContext);
-
   *aStatePtr = aNewState;
 #ifdef MOZ_CRASHREPORTER
   CrashReporter::AnnotateCrashReport(aStatePtr == &mOutOfMemoryState
                                      ? NS_LITERAL_CSTRING("JSOutOfMemory")
                                      : NS_LITERAL_CSTRING("JSLargeAllocationFailure"),
                                      aNewState == OOMState::Reporting
                                      ? NS_LITERAL_CSTRING("Reporting")
                                      : aNewState == OOMState::Reported
                                      ? NS_LITERAL_CSTRING("Reported")
                                      : NS_LITERAL_CSTRING("Recovered"));
 #endif
 }
 
 void
 CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
 {
-  MOZ_ASSERT(mJSContext);
-
   switch (aStatus) {
     case JSGC_BEGIN:
       nsCycleCollector_prepareForGarbageCollection();
       mZonesWaitingForGC.Clear();
       break;
     case JSGC_END: {
 #ifdef MOZ_CRASHREPORTER
       if (mOutOfMemoryState == OOMState::Reported) {
         AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
       }
       if (mLargeAllocationFailureState == OOMState::Reported) {
         AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Recovered);
       }
 #endif
 
       // Do any deferred finalization of native objects.
-      FinalizeDeferredThings(JS::WasIncrementalGC(mJSContext)
+      FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime)
                              ? CycleCollectedJSContext::FinalizeIncrementally
                              : CycleCollectedJSContext::FinalizeNow);
       break;
     }
     default:
       MOZ_CRASH();
   }
 
   CustomGCCallback(aStatus);
 }
 
 void
 CycleCollectedJSRuntime::OnOutOfMemory()
 {
-  MOZ_ASSERT(mJSContext);
-
   AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reporting);
   CustomOutOfMemoryCallback();
   AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reported);
 }
 
 void
 CycleCollectedJSRuntime::SetLargeAllocationFailure(OOMState aNewState)
 {
-  MOZ_ASSERT(mJSContext);
-
   AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, aNewState);
 }
 
 void
 CycleCollectedJSRuntime::PrepareWaitingZonesForGC()
 {
+  JSContext* cx = CycleCollectedJSContext::Get()->Context();
   if (mZonesWaitingForGC.Count() == 0) {
-    JS::PrepareForFullGC(mJSContext);
+    JS::PrepareForFullGC(cx);
   } else {
     for (auto iter = mZonesWaitingForGC.Iter(); !iter.Done(); iter.Next()) {
       JS::PrepareZoneForGC(iter.Get()->GetKey());
     }
     mZonesWaitingForGC.Clear();
   }
 }
 
--- a/xpcom/base/CycleCollectedJSRuntime.h
+++ b/xpcom/base/CycleCollectedJSRuntime.h
@@ -279,17 +279,17 @@ public:
   // isn't one.
   static CycleCollectedJSRuntime* Get();
 
 private:
   JSGCThingParticipant mGCThingCycleCollectorGlobal;
 
   JSZoneParticipant mJSZoneCycleCollectorGlobal;
 
-  JSContext* mJSContext;
+  JSRuntime* mJSRuntime;
 
   JS::GCSliceCallback mPrevGCSliceCallback;
   JS::GCNurseryCollectionCallback mPrevGCNurseryCollectionCallback;
 
   nsDataHashtable<nsPtrHashKey<void>, nsScriptObjectTracer*> mJSHolders;
 
   typedef nsDataHashtable<nsFuncPtrHashKey<DeferredFinalizeFunction>, void*>
     DeferredFinalizerTable;