Bug 988486 - Make more GCRuntime members private and add necessary accessors r=terrence
☠☠ backed out by 6932ea846a28 ☠☠
authorJon Coppeard <jcoppeard@mozilla.com>
Sat, 07 Jun 2014 10:34:57 +0100
changeset 187384 14a4a906225384565d6c23c796fa57d185af228c
parent 187383 03eccac81e158aabf1e1fba832e13fb647d361b6
child 187385 f56234ba7ec78819ad274cf738583ee3e150bdc6
push id7176
push userryanvm@gmail.com
push dateSat, 07 Jun 2014 18:15:31 +0000
treeherderfx-team@a2f0e0619332 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersterrence
bugs988486
milestone32.0a1
Bug 988486 - Make more GCRuntime members private and add necessary accessors r=terrence
js/public/GCAPI.h
js/src/builtin/TestingFunctions.cpp
js/src/gc/GCRuntime.h
js/src/gc/Nursery.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/jit/CompileWrappers.cpp
js/src/jsapi.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsobj.cpp
js/src/vm/Runtime-inl.h
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -370,17 +370,17 @@ ShrinkGCBuffers(JSRuntime *rt);
 
 /*
  * Assert if a GC occurs while this class is live. This class does not disable
  * the static rooting hazard analysis.
  */
 class JS_PUBLIC_API(AutoAssertOnGC)
 {
 #ifdef DEBUG
-    JSRuntime *runtime;
+    js::gc::GCRuntime *gc;
     size_t gcNumber;
 
   public:
     AutoAssertOnGC();
     explicit AutoAssertOnGC(JSRuntime *rt);
     ~AutoAssertOnGC();
 
     static void VerifyIsSafeToGC(JSRuntime *rt);
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -515,17 +515,17 @@ SelectForGC(JSContext *cx, unsigned argc
      * start to detect missing pre-barriers. It is invalid for nursery things
      * to be in the set, so evict the nursery before adding items.
      */
     JSRuntime *rt = cx->runtime();
     MinorGC(rt, JS::gcreason::EVICT_NURSERY);
 
     for (unsigned i = 0; i < args.length(); i++) {
         if (args[i].isObject()) {
-            if (!rt->gc.selectedForMarking.append(&args[i].toObject()))
+            if (!rt->gc.selectForMarking(&args[i].toObject()))
                 return false;
         }
     }
 
     args.rval().setUndefined();
     return true;
 }
 
@@ -594,17 +594,17 @@ DeterministicGC(JSContext *cx, unsigned 
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    gc::SetDeterministicGC(cx, ToBoolean(args[0]));
+    cx->runtime()->gc.setDeterministic(ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 #endif /* JS_GC_ZEAL */
 
 static bool
 GCSlice(JSContext *cx, unsigned argc, Value *vp)
 {
@@ -636,33 +636,33 @@ ValidateGC(JSContext *cx, unsigned argc,
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    gc::SetValidateGC(cx, ToBoolean(args[0]));
+    cx->runtime()->gc.setValidate(ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 FullCompartmentChecks(JSContext *cx, unsigned argc, jsval *vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    gc::SetFullCompartmentChecks(cx, ToBoolean(args[0]));
+    cx->runtime()->gc.setFullCompartmentChecks(ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 NondeterministicGetWeakMapKeys(JSContext *cx, unsigned argc, jsval *vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -91,17 +91,20 @@ class CallbackVector : public Vector<Cal
 
 class GCRuntime
 {
   public:
     explicit GCRuntime(JSRuntime *rt);
     bool init(uint32_t maxbytes);
     void finish();
 
-    void setGCZeal(uint8_t zeal, uint32_t frequency);
+    inline int zeal();
+    inline bool upcomingZealousGC();
+    inline bool needZealousGC();
+
     template <typename T> bool addRoot(T *rp, const char *name, JSGCRootType rootType);
     void removeRoot(void *rp);
     void setMarkStackLimit(size_t limit);
 
     bool isHeapBusy() { return heapState != js::Idle; }
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
     bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
@@ -111,24 +114,31 @@ class GCRuntime
     void maybeGC(Zone *zone);
     void minorGC(JS::gcreason::Reason reason);
     void minorGC(JSContext *cx, JS::gcreason::Reason reason);
     void gcIfNeeded(JSContext *cx);
     void collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis);
     void runDebugGC();
+    inline void poke();
 
     void markRuntime(JSTracer *trc, bool useSavedRoots = false);
 
 #ifdef JS_GC_ZEAL
+    const void *addressOfZealMode() { return &zealMode; }
+    void setZeal(uint8_t zeal, uint32_t frequency);
+    void setNextScheduled(uint32_t count);
     void verifyPreBarriers();
     void verifyPostBarriers();
     void maybeVerifyPreBarriers(bool always);
     void maybeVerifyPostBarriers(bool always);
+    bool selectForMarking(JSObject *object);
+    void clearSelectedForMarking();
+    void setDeterministic(bool enable);
 #endif
 
   public:
     // Internal public interface
     void recordNativeStackTop();
 #ifdef JS_THREADSAFE
     void notifyRequestEnd() { conservativeGC.updateForRequestEnd(); }
 #endif
@@ -179,40 +189,65 @@ class GCRuntime
 
 #ifdef DEBUG
     bool isAllocAllowed() { return noGCOrAllocationCheck == 0; }
     void disallowAlloc() { ++noGCOrAllocationCheck; }
     void allowAlloc() {
         JS_ASSERT(!isAllocAllowed());
         --noGCOrAllocationCheck;
     }
+
+    bool isInsideUnsafeRegion() { return inUnsafeRegion != 0; }
+    void enterUnsafeRegion() { ++inUnsafeRegion; }
+    void leaveUnsafeRegion() {
+        JS_ASSERT(inUnsafeRegion > 0);
+        --inUnsafeRegion;
+    }
 #endif
 
     void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
 
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
+    void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
+    bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
+    void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
+
+    void setMaxMallocBytes(size_t value);
+    void resetMallocBytes();
+    bool isTooMuchMalloc() const { return mallocBytes <= 0; }
+    void updateMallocCounter(JS::Zone *zone, size_t nbytes);
+    void onTooMuchMalloc();
+
+    void setGCCallback(JSGCCallback callback, void *data);
+    bool addFinalizeCallback(JSFinalizeCallback callback, void *data);
+    void removeFinalizeCallback(JSFinalizeCallback func);
+    JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
+
+    void setValidate(bool enable);
+    void setFullCompartmentChecks(bool enable);
+
 #ifdef JS_GC_ZEAL
     void startVerifyPreBarriers();
     bool endVerifyPreBarriers();
     void startVerifyPostBarriers();
     bool endVerifyPostBarriers();
     void finishVerifier();
 #endif
 
   private:
     // For ArenaLists::allocateFromArenaInline()
     friend class ArenaLists;
     Chunk *pickChunk(Zone *zone, AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation);
 
     inline bool wantBackgroundAllocation() const;
 
-    bool initGCZeal();
+    bool initZeal();
     void requestInterrupt(JS::gcreason::Reason reason);
     bool gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void budgetIncrementalGC(int64_t *budget);
     void resetIncrementalGC(const char *reason);
     void incrementalCollectSlice(int64_t budget, JS::gcreason::Reason reason,
                                  JSGCInvocationKind gckind);
     void pushZealSelectedObjects();
@@ -435,25 +470,26 @@ class GCRuntime
      * zone with no incoming cross-compartment pointers. Typically if
      * this happens it signals that an incremental GC is marking too much
      * stuff. At various times we check this counter and, if it has changed, we
      * run an immediate, non-incremental GC to clean up the dead
      * zones. This should happen very rarely.
      */
     unsigned              objectsMarkedInDeadZones;
 
-    bool                  poke;
+    bool                  poked;
 
     volatile js::HeapState heapState;
 
 #ifdef JSGC_GENERATIONAL
     js::Nursery           nursery;
     js::gc::StoreBuffer   storeBuffer;
 #endif
 
+  private:
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are   nextScheduled and gcDebugCompartmentGC. At every allocation,
      *   nextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on   debugCompartmentGC.
      *
      * At this point, if   zeal_ is one of the types that trigger periodic
      * collection, then   nextScheduled is reset to the value of
@@ -481,79 +517,97 @@ class GCRuntime
     int                   incrementalLimit;
 
     js::Vector<JSObject *, 0, js::SystemAllocPolicy>   selectedForMarking;
 #endif
 
     bool                  validate;
     bool                  fullCompartmentChecks;
 
-    JSGCCallback          gcCallback;
-    void                  *gcCallbackData;
-
-    JS::GCSliceCallback   sliceCallback;
+    Callback<JSGCCallback>  gcCallback;
     CallbackVector<JSFinalizeCallback> finalizeCallbacks;
 
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from   maxMallocBytes down to zero.
      */
     mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire>   mallocBytes;
 
     /*
-     * Whether a GC has been triggered as a result of   mallocBytes falling
+     * Whether a GC has been triggered as a result of mallocBytes falling
      * below zero.
      */
     mozilla::Atomic<bool, mozilla::ReleaseAcquire>   mallocGCTriggered;
 
     /*
      * The trace operations to trace embedding-specific GC roots. One is for
      * tracing through black roots and the other is for tracing through gray
      * roots. The black/gray distinction is only relevant to the cycle
      * collector.
      */
     CallbackVector<JSTraceDataOp> blackRootTracers;
     Callback<JSTraceDataOp> grayRootTracer;
 
-    /*
-     * The GC can only safely decommit memory when the page size of the
-     * running process matches the compiled arena size.
-     */
-    size_t                systemPageSize;
-
-    /* The OS allocation granularity may not match the page size. */
-    size_t                systemAllocGranularity;
-
 #ifdef DEBUG
     /*
      * Some regions of code are hard for the static rooting hazard analysis to
      * understand. In those cases, we trade the static analysis for a dynamic
      * analysis. When this is non-zero, we should assert if we trigger, or
      * might trigger, a GC.
      */
     int inUnsafeRegion;
 #endif
 
-  private:
     /* Always preserve JIT code during GCs, for testing. */
     bool                  alwaysPreserveCode;
 
 #ifdef DEBUG
     size_t                noGCOrAllocationCheck;
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperState. */
     PRLock                *lock;
     mozilla::DebugOnly<PRThread *>   lockOwner;
 
     GCHelperState helperState;
 
     ConservativeGCData conservativeGC;
 
-    //friend class js::gc::Chunk; // todo: remove
     friend class js::GCHelperState;
     friend class js::gc::MarkingValidator;
 };
 
+#ifdef JS_GC_ZEAL
+inline int
+GCRuntime::zeal() {
+    return zealMode;
+}
+
+inline bool
+GCRuntime::upcomingZealousGC() {
+    return nextScheduled == 1;
+}
+
+inline bool
+GCRuntime::needZealousGC() {
+    if (nextScheduled > 0 && --nextScheduled == 0) {
+        if (zealMode == ZealAllocValue ||
+            zealMode == ZealGenerationalGCValue ||
+            (zealMode >= ZealIncrementalRootsThenFinish &&
+             zealMode <= ZealIncrementalMultipleSlices))
+        {
+            nextScheduled = zealFrequency;
+        }
+        return true;
+    }
+    return false;
+}
+#else
+inline int GCRuntime::zeal() { return 0; }
+inline bool GCRuntime::upcomingZealousGC() { return false; }
+inline bool GCRuntime::needZealousGC() { return false; }
+#endif
+
+
 } /* namespace gc */
 } /* namespace js */
 
 #endif
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -102,17 +102,17 @@ js::Nursery::enable()
 {
     JS_ASSERT(isEmpty());
     if (isEnabled())
         return;
     numActiveChunks_ = 1;
     setCurrentChunk(0);
     currentStart_ = position();
 #ifdef JS_GC_ZEAL
-    if (runtime()->gc.zealMode == ZealGenerationalGCValue)
+    if (runtime()->gcZeal() == ZealGenerationalGCValue)
         enterZealMode();
 #endif
 }
 
 void
 js::Nursery::disable()
 {
     JS_ASSERT(isEmpty());
@@ -124,17 +124,17 @@ js::Nursery::disable()
 }
 
 bool
 js::Nursery::isEmpty() const
 {
     JS_ASSERT(runtime_);
     if (!isEnabled())
         return true;
-    JS_ASSERT_IF(runtime_->gc.zealMode != ZealGenerationalGCValue, currentStart_ == start());
+    JS_ASSERT_IF(runtime_->gcZeal() != ZealGenerationalGCValue, currentStart_ == start());
     return position() == currentStart_;
 }
 
 #ifdef JS_GC_ZEAL
 void
 js::Nursery::enterZealMode() {
     if (isEnabled())
         numActiveChunks_ = NumNurseryChunks;
@@ -925,17 +925,17 @@ void
 js::Nursery::sweep()
 {
 #ifdef JS_GC_ZEAL
     /* Poison the nursery contents so touching a freed object will crash. */
     JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, NurserySize);
     for (int i = 0; i < NumNurseryChunks; ++i)
         initChunk(i);
 
-    if (runtime()->gc.zealMode == ZealGenerationalGCValue) {
+    if (runtime()->gcZeal() == ZealGenerationalGCValue) {
         MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks);
 
         /* Only reset the alloc point when we are close to the end. */
         if (currentChunk_ + 1 == NumNurseryChunks)
             setCurrentChunk(0);
     } else
 #endif
     {
@@ -950,26 +950,26 @@ js::Nursery::sweep()
     /* Set current start position for isEmpty checks. */
     currentStart_ = position();
 }
 
 void
 js::Nursery::growAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
-    MOZ_ASSERT_IF(runtime()->gc.zealMode == ZealGenerationalGCValue,
+    MOZ_ASSERT_IF(runtime()->gcZeal() == ZealGenerationalGCValue,
                   numActiveChunks_ == NumNurseryChunks);
 #endif
     numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks);
 }
 
 void
 js::Nursery::shrinkAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
-    if (runtime()->gc.zealMode == ZealGenerationalGCValue)
+    if (runtime()->gcZeal() == ZealGenerationalGCValue)
         return;
 #endif
     numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
     updateDecommittedRegion();
 }
 
 #endif /* JSGC_GENERATIONAL */
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -441,17 +441,18 @@ Statistics::Statistics(JSRuntime *rt)
     fp(nullptr),
     fullFormat(false),
     gcDepth(0),
     collectedCount(0),
     zoneCount(0),
     compartmentCount(0),
     nonincrementalReason(nullptr),
     preBytes(0),
-    phaseNestingDepth(0)
+    phaseNestingDepth(0),
+    sliceCallback(nullptr)
 {
     PodArrayZero(phaseTotals);
     PodArrayZero(counts);
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
         fp = nullptr;
         return;
@@ -484,16 +485,23 @@ Statistics::~Statistics()
             }
         }
 
         if (fp != stdout && fp != stderr)
             fclose(fp);
     }
 }
 
+JS::GCSliceCallback
+Statistics::setSliceCallback(JS::GCSliceCallback newCallback) {
+    JS::GCSliceCallback oldCallback = sliceCallback;
+    sliceCallback = newCallback;
+    return oldCallback;
+}
+
 void
 Statistics::printStats()
 {
     if (fullFormat) {
         StatisticsSerializer ss(StatisticsSerializer::AsText);
         formatData(ss, 0);
         char *msg = ss.finishCString();
         if (msg) {
@@ -576,19 +584,19 @@ Statistics::beginSlice(int collectedCoun
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
 
     // Slice callbacks should only fire for the outermost level
     if (++gcDepth == 1) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
-            (*cb)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
-                  JS::GCDescription(!wasFullGC));
+        if (sliceCallback)
+            (*sliceCallback)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
+                             JS::GCDescription(!wasFullGC));
     }
 }
 
 void
 Statistics::endSlice()
 {
     slices.back().end = PRMJ_Now();
     slices.back().endFaults = SystemPageAllocator::GetPageFaultCount();
@@ -600,19 +608,19 @@ Statistics::endSlice()
 
     bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
-            (*cb)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
-                  JS::GCDescription(!wasFullGC));
+        if (sliceCallback)
+            (*sliceCallback)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
+                             JS::GCDescription(!wasFullGC));
     }
 
     /* Do this after the slice callback since it uses these values. */
     if (last)
         PodArrayZero(counts);
 }
 
 void
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -93,16 +93,18 @@ struct Statistics {
     }
 
     int64_t beginSCC();
     void endSCC(unsigned scc, int64_t start);
 
     jschar *formatMessage();
     jschar *formatJSON(uint64_t timestamp);
 
+    JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
+
   private:
     JSRuntime *runtime;
 
     int64_t startupTime;
 
     FILE *fp;
     bool fullFormat;
 
@@ -155,16 +157,18 @@ struct Statistics {
     static const size_t MAX_NESTING = 8;
     Phase phaseNesting[MAX_NESTING];
 #endif
     mozilla::DebugOnly<size_t> phaseNestingDepth;
 
     /* Sweep times for SCCs of compartments. */
     Vector<int64_t, 0, SystemAllocPolicy> sccTimes;
 
+    JS::GCSliceCallback sliceCallback;
+
     void beginGC();
     void endGC();
 
     void gcDuration(int64_t *total, int64_t *maxPause);
     void sccDurations(int64_t *total, int64_t *maxPause);
     void printStats();
     bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
 
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -63,17 +63,17 @@ CompileRuntime::addressOfLastCachedNativ
 {
     return &runtime()->nativeIterCache.last;
 }
 
 #ifdef JS_GC_ZEAL
 const void *
 CompileRuntime::addressOfGCZeal()
 {
-    return &runtime()->gc.zealMode;
+    return runtime()->gc.addressOfZealMode();
 }
 #endif
 
 const void *
 CompileRuntime::addressOfInterrupt()
 {
     return &runtime()->interrupt;
 }
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -1610,30 +1610,23 @@ JS::RemoveScriptRootRT(JSRuntime *rt, JS
 {
     RemoveRoot(rt, (void *)rp);
     *rp = nullptr;
 }
 
 JS_PUBLIC_API(bool)
 JS_AddExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    AssertHeapIsIdle(rt);
-    return !!rt->gc.blackRootTracers.append(Callback<JSTraceDataOp>(traceOp, data));
+    return rt->gc.addBlackRootsTracer(traceOp, data);
 }
 
 JS_PUBLIC_API(void)
 JS_RemoveExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) {
-        Callback<JSTraceDataOp> *e = &rt->gc.blackRootTracers[i];
-        if (e->op == traceOp && e->data == data) {
-            rt->gc.blackRootTracers.erase(e);
-            break;
-        }
-    }
+    return rt->gc.removeBlackRootsTracer(traceOp, data);
 }
 
 #ifdef DEBUG
 
 typedef struct JSHeapDumpNode JSHeapDumpNode;
 
 struct JSHeapDumpNode {
     void            *thing;
@@ -1894,38 +1887,31 @@ JS_MaybeGC(JSContext *cx)
 {
     MaybeGC(cx);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb, void *data)
 {
     AssertHeapIsIdle(rt);
-    rt->gc.gcCallback = cb;
-    rt->gc.gcCallbackData = data;
+    rt->gc.setGCCallback(cb, data);
 }
 
 JS_PUBLIC_API(bool)
 JS_AddFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb, void *data)
 {
     AssertHeapIsIdle(rt);
-    return rt->gc.finalizeCallbacks.append(Callback<JSFinalizeCallback>(cb, data));
+    return rt->gc.addFinalizeCallback(cb, data);
 }
 
 JS_PUBLIC_API(void)
 JS_RemoveFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb)
 {
-    for (Callback<JSFinalizeCallback> *p = rt->gc.finalizeCallbacks.begin();
-         p < rt->gc.finalizeCallbacks.end(); p++)
-    {
-        if (p->op == cb) {
-            rt->gc.finalizeCallbacks.erase(p);
-            break;
-        }
-    }
+    AssertHeapIsIdle(rt);
+    rt->gc.removeFinalizeCallback(cb);
 }
 
 JS_PUBLIC_API(bool)
 JS_IsAboutToBeFinalized(JS::Heap<JSObject *> *objp)
 {
     return IsObjectAboutToBeFinalized(objp->unsafeGet());
 }
 
@@ -1940,17 +1926,17 @@ JS_SetGCParameter(JSRuntime *rt, JSGCPar
 {
     switch (key) {
       case JSGC_MAX_BYTES: {
         JS_ASSERT(value >= rt->gc.bytes);
         rt->gc.maxBytes = value;
         break;
       }
       case JSGC_MAX_MALLOC_BYTES:
-        rt->setGCMaxMallocBytes(value);
+        rt->gc.setMaxMallocBytes(value);
         break;
       case JSGC_SLICE_TIME_BUDGET:
         rt->gc.sliceBudget = SliceBudget::TimeBudget(value);
         break;
       case JSGC_MARK_STACK_LIMIT:
         js::SetMarkStackLimit(rt, value);
         break;
       case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
@@ -6215,23 +6201,23 @@ JS_AbortIfWrongThread(JSRuntime *rt)
     if (!js::TlsPerThreadData.get()->associatedWith(rt))
         MOZ_CRASH();
 }
 
 #ifdef JS_GC_ZEAL
 JS_PUBLIC_API(void)
 JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency)
 {
-    SetGCZeal(cx->runtime(), zeal, frequency);
+    cx->runtime()->gc.setZeal(zeal, frequency);
 }
 
 JS_PUBLIC_API(void)
 JS_ScheduleGC(JSContext *cx, uint32_t count)
 {
-    cx->runtime()->gc.nextScheduled = count;
+    cx->runtime()->gc.setNextScheduled(count);
 }
 #endif
 
 JS_PUBLIC_API(void)
 JS_SetParallelParsingEnabled(JSRuntime *rt, bool enabled)
 {
 #ifdef JS_ION
     rt->setParallelParsingEnabled(enabled);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -58,18 +58,17 @@ JS_FRIEND_API(SourceHook *)
 js::ForgetSourceHook(JSRuntime *rt)
 {
     return rt->sourceHook.forget();
 }
 
 JS_FRIEND_API(void)
 JS_SetGrayGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    rt->gc.grayRootTracer.op = traceOp;
-    rt->gc.grayRootTracer.data = data;
+    rt->gc.setGrayRootsTracer(traceOp, data);
 }
 
 JS_FRIEND_API(JSString *)
 JS_GetAnonymousString(JSRuntime *rt)
 {
     JS_ASSERT(rt->hasContexts());
     return rt->commonNames->anonymous;
 }
@@ -859,19 +858,17 @@ JS_FRIEND_API(bool)
 js::IsContextRunningJS(JSContext *cx)
 {
     return cx->currentlyRunning();
 }
 
 JS_FRIEND_API(JS::GCSliceCallback)
 JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
 {
-    JS::GCSliceCallback old = rt->gc.sliceCallback;
-    rt->gc.sliceCallback = callback;
-    return old;
+    return rt->gc.setSliceCallback(callback);
 }
 
 JS_FRIEND_API(bool)
 JS::WasIncrementalGC(JSRuntime *rt)
 {
     return rt->gc.isIncremental;
 }
 
@@ -1021,17 +1018,17 @@ JS_FRIEND_API(void)
 JS::IncrementalValueBarrier(const Value &v)
 {
     js::HeapValue::writeBarrierPre(v);
 }
 
 JS_FRIEND_API(void)
 JS::PokeGC(JSRuntime *rt)
 {
-    rt->gc.poke = true;
+    rt->gc.poke();
 }
 
 JS_FRIEND_API(JSCompartment *)
 js::GetAnyCompartmentInZone(JS::Zone *zone)
 {
     CompartmentsInZoneIter comp(zone);
     JS_ASSERT(!comp.done());
     return comp.get();
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1090,33 +1090,31 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     markingValidator(nullptr),
 #endif
     interFrameGC(0),
     sliceBudget(SliceBudget::Unlimited),
     incrementalEnabled(true),
     generationalDisabled(0),
     manipulatingDeadZones(false),
     objectsMarkedInDeadZones(0),
-    poke(false),
+    poked(false),
     heapState(Idle),
 #ifdef JSGC_GENERATIONAL
     nursery(rt),
     storeBuffer(rt, nursery),
 #endif
 #ifdef JS_GC_ZEAL
     zealMode(0),
     zealFrequency(0),
     nextScheduled(0),
     deterministicOnly(false),
     incrementalLimit(0),
 #endif
     validate(true),
     fullCompartmentChecks(false),
-    gcCallback(nullptr),
-    sliceCallback(nullptr),
     mallocBytes(0),
     mallocGCTriggered(false),
 #ifdef DEBUG
     inUnsafeRegion(0),
 #endif
     alwaysPreserveCode(false),
 #ifdef DEBUG
     noGCOrAllocationCheck(0),
@@ -1124,24 +1122,18 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     lock(nullptr),
     lockOwner(nullptr),
     helperState(rt)
 {
 }
 
 #ifdef JS_GC_ZEAL
 
-extern void
-js::SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency)
-{
-    rt->gc.setGCZeal(zeal, frequency);
-}
-
 void
-GCRuntime::setGCZeal(uint8_t zeal, uint32_t frequency)
+GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
 {
     if (verifyPreData)
         VerifyBarriers(rt, PreBarrierVerifier);
     if (verifyPostData)
         VerifyBarriers(rt, PostBarrierVerifier);
 
 #ifdef JSGC_GENERATIONAL
     if (zealMode == ZealGenerationalGCValue) {
@@ -1154,18 +1146,24 @@ GCRuntime::setGCZeal(uint8_t zeal, uint3
 #endif
 
     bool schedule = zeal >= js::gc::ZealAllocValue;
     zealMode = zeal;
     zealFrequency = frequency;
     nextScheduled = schedule ? frequency : 0;
 }
 
+void
+GCRuntime::setNextScheduled(uint32_t count)
+{
+    nextScheduled = count;
+}
+
 bool
-GCRuntime::initGCZeal()
+GCRuntime::initZeal()
 {
     const char *env = getenv("JS_GC_ZEAL");
     if (!env)
         return true;
 
     int zeal = -1;
     int frequency = JS_DEFAULT_ZEAL_FREQ;
     if (strcmp(env, "help") != 0) {
@@ -1191,17 +1189,17 @@ GCRuntime::initGCZeal()
                 "  9: Incremental GC in two slices: 1) mark all 2) new marking and finish\n"
                 " 10: Incremental GC in multiple slices\n"
                 " 11: Verify post write barriers between instructions\n"
                 " 12: Verify post write barriers between paints\n"
                 " 13: Purge analysis state every F allocations (default: 100)\n");
         return false;
     }
 
-    setGCZeal(zeal, frequency);
+    setZeal(zeal, frequency);
     return true;
 }
 
 #endif
 
 /* Lifetime for type sets attached to scripts containing observed types. */
 static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
 
@@ -1223,32 +1221,32 @@ GCRuntime::init(uint32_t maxbytes)
     if (!helperState.init())
         return false;
 
     /*
      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
      * for default backward API compatibility.
      */
     maxBytes = maxbytes;
-    rt->setGCMaxMallocBytes(maxbytes);
+    setMaxMallocBytes(maxbytes);
 
 #ifndef JS_MORE_DETERMINISTIC
     jitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
 #endif
 
 #ifdef JSGC_GENERATIONAL
     if (!nursery.init())
         return false;
 
     if (!storeBuffer.enable())
         return false;
 #endif
 
 #ifdef JS_GC_ZEAL
-    if (!initGCZeal())
+    if (!initZeal())
         return false;
 #endif
 
     if (!marker.init(mode))
         return false;
 
     return true;
 }
@@ -1323,16 +1321,74 @@ js::gc::FinishPersistentRootedChains(JSR
     rt->stringPersistentRooteds.clear();
     rt->valuePersistentRooteds.clear();
 }
 
 template <typename T> struct BarrierOwner {};
 template <typename T> struct BarrierOwner<T *> { typedef T result; };
 template <> struct BarrierOwner<Value> { typedef HeapValue result; };
 
+bool
+GCRuntime::addBlackRootsTracer(JSTraceDataOp traceOp, void *data)
+{
+    AssertHeapIsIdle(rt);
+    return !!blackRootTracers.append(Callback<JSTraceDataOp>(traceOp, data));
+}
+
+void
+GCRuntime::removeBlackRootsTracer(JSTraceDataOp traceOp, void *data)
+{
+    // Can be called from finalizers
+    for (size_t i = 0; i < blackRootTracers.length(); i++) {
+        Callback<JSTraceDataOp> *e = &blackRootTracers[i];
+        if (e->op == traceOp && e->data == data) {
+            // Stop after the first match: erase() shifts later elements down,
+            // so continuing with i++ would skip the element now at index i.
+            blackRootTracers.erase(e);
+            break;
+        }
+    }
+}
+
+void
+GCRuntime::setGrayRootsTracer(JSTraceDataOp traceOp, void *data)
+{
+    AssertHeapIsIdle(rt);
+    grayRootTracer.op = traceOp;
+    grayRootTracer.data = data;
+}
+
+void
+GCRuntime::setGCCallback(JSGCCallback callback, void *data)
+{
+    gcCallback.op = callback;
+    gcCallback.data = data;
+}
+
+bool
+GCRuntime::addFinalizeCallback(JSFinalizeCallback callback, void *data)
+{
+    return finalizeCallbacks.append(Callback<JSFinalizeCallback>(callback, data));
+}
+
+void
+GCRuntime::removeFinalizeCallback(JSFinalizeCallback callback)
+{
+    for (Callback<JSFinalizeCallback> *p = finalizeCallbacks.begin();
+         p < finalizeCallbacks.end(); p++) {
+        if (p->op == callback) {
+            finalizeCallbacks.erase(p);
+            break;
+        }
+    }
+}
+
+JS::GCSliceCallback
+GCRuntime::setSliceCallback(JS::GCSliceCallback callback) {
+    return stats.setSliceCallback(callback);
+}
+
 template <typename T>
 bool
 GCRuntime::addRoot(T *rp, const char *name, JSGCRootType rootType)
 {
     /*
      * Sometimes Firefox will hold weak references to objects and then convert
      * them to strong references by calling AddRoot (e.g., via PreserveWrapper,
      * or ModifyBusyCount in workers). We need a read barrier to cover these
@@ -1343,17 +1398,17 @@ GCRuntime::addRoot(T *rp, const char *na
 
     return rt->gc.rootsHash.put((void *)rp, RootInfo(name, rootType));
 }
 
 void
 GCRuntime::removeRoot(void *rp)
 {
     rootsHash.remove(rp);
-    poke = true;
+    poke();
 }
 
 template <typename T>
 static bool
 AddRoot(JSRuntime *rt, T *rp, const char *name, JSGCRootType rootType)
 {
     return rt->gc.addRoot(rp, name, rootType);
 }
@@ -1417,19 +1472,52 @@ js::RemoveRawValueRoot(JSContext *cx, Va
 }
 
 void
 js::RemoveRoot(JSRuntime *rt, void *rp)
 {
     rt->gc.removeRoot(rp);
 }
 
-typedef RootedValueMap::Range RootRange;
-typedef RootedValueMap::Entry RootEntry;
-typedef RootedValueMap::Enum RootEnum;
+void
+GCRuntime::setMaxMallocBytes(size_t value)
+{
+    /*
+     * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
+     * mean that value.
+     */
+    maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
+    resetMallocBytes();
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
+        zone->setGCMaxMallocBytes(value);
+}
+
+void
+GCRuntime::resetMallocBytes()
+{
+    mallocBytes = ptrdiff_t(maxMallocBytes);
+    mallocGCTriggered = false;
+}
+
+void
+GCRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes)
+{
+    mallocBytes -= ptrdiff_t(nbytes);
+    if (MOZ_UNLIKELY(isTooMuchMalloc()))
+        onTooMuchMalloc();
+    else if (zone)
+        zone->updateMallocCounter(nbytes);
+}
+
+void
+GCRuntime::onTooMuchMalloc()
+{
+    if (!mallocGCTriggered)
+        mallocGCTriggered = triggerGC(JS::gcreason::TOO_MUCH_MALLOC);
+}
 
 static size_t
 ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
 {
     size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gc.allocationThreshold);
     double trigger = double(base) * zone->gcHeapGrowthFactor;
     return size_t(Min(double(maxBytes), trigger));
 }
@@ -4348,26 +4436,26 @@ AutoGCSession::~AutoGCSession()
 #ifndef JS_MORE_DETERMINISTIC
     gc->nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
 #endif
 
     gc->chunkAllocationSinceLastGC = false;
 
 #ifdef JS_GC_ZEAL
     /* Keeping these around after a GC is dangerous. */
-    gc->selectedForMarking.clearAndFree();
+    gc->clearSelectedForMarking();
 #endif
 
     /* Clear gcMallocBytes for all compartments */
     for (ZonesIter zone(gc->rt, WithAtoms); !zone.done(); zone.next()) {
         zone->resetGCMallocBytes();
         zone->unscheduleGC();
     }
 
-    gc->rt->resetGCMallocBytes();
+    gc->resetMallocBytes();
 }
 
 AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector)
   : runtime(rt),
     selector(selector)
 {
     for (ZonesIter zone(rt, selector); !zone.done(); zone.next())
         zone->allocator.arenas.copyFreeListsToArenas();
@@ -4690,17 +4778,17 @@ GCRuntime::budgetIncrementalGC(int64_t *
 
     if (mode != JSGC_MODE_INCREMENTAL) {
         resetIncrementalGC("GC mode change");
         *budget = SliceBudget::Unlimited;
         stats.nonincremental("GC mode");
         return;
     }
 
-    if (rt->isTooMuchMalloc()) {
+    if (isTooMuchMalloc()) {
         *budget = SliceBudget::Unlimited;
         stats.nonincremental("malloc bytes trigger");
     }
 
     bool reset = false;
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->gcBytes >= zone->gcTriggerBytes) {
             *budget = SliceBudget::Unlimited;
@@ -4889,31 +4977,31 @@ GCRuntime::collect(bool incremental, int
         gcstats::AutoGCSlice agc(stats, collectedCount, zoneCount, compartmentCount, reason);
 
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context). Invoke the callback regardless.
          */
         if (incrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_GC_BEGIN);
-            if (gcCallback)
-                gcCallback(rt, JSGC_BEGIN, gcCallbackData);
+            if (gcCallback.op)
+                gcCallback.op(rt, JSGC_BEGIN, gcCallback.data);
         }
 
-        poke = false;
+        poked = false;
         bool wasReset = gcCycle(incremental, budget, gckind, reason);
 
         if (incrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_GC_END);
-            if (gcCallback)
-                gcCallback(rt, JSGC_END, gcCallbackData);
+            if (gcCallback.op)
+                gcCallback.op(rt, JSGC_END, gcCallback.data);
         }
 
         /* Need to re-schedule all zones for GC. */
-        if (poke && shouldCleanUpEverything)
+        if (poked && shouldCleanUpEverything)
             JS::PrepareForFullGC(rt);
 
         /*
          * This code makes an extra effort to collect compartments that we
          * thought were dead at the start of the GC. See the large comment in
          * beginMarkPhase.
          */
         bool repeatForDeadZone = false;
@@ -4929,17 +5017,17 @@ GCRuntime::collect(bool incremental, int
         }
 
         /*
          * If we reset an existing GC, we need to start a new one. Also, we
          * repeat GCs that happen during shutdown (the gcShouldCleanUpEverything
          * case) until we can be sure that no additional garbage is created
          * (which typically happens if roots are dropped during finalizers).
          */
-        repeat = (poke && shouldCleanUpEverything) || wasReset || repeatForDeadZone;
+        repeat = (poked && shouldCleanUpEverything) || wasReset || repeatForDeadZone;
     } while (repeat);
 
     if (incrementalState == NO_INCREMENTAL) {
 #ifdef JS_THREADSAFE
         EnqueuePendingParseTasksAfterGC(rt);
 #endif
     }
 }
@@ -5289,37 +5377,50 @@ GCRuntime::runDebugGC()
     } else {
         collect(false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC);
     }
 
 #endif
 }
 
 void
-gc::SetDeterministicGC(JSContext *cx, bool enabled)
-{
-#ifdef JS_GC_ZEAL
-    JSRuntime *rt = cx->runtime();
-    rt->gc.deterministicOnly = enabled;
-#endif
+GCRuntime::setValidate(bool enabled)
+{
+    JS_ASSERT(!isHeapMajorCollecting());
+    validate = enabled;
 }
 
 void
-gc::SetValidateGC(JSContext *cx, bool enabled)
-{
-    JSRuntime *rt = cx->runtime();
-    rt->gc.validate = enabled;
+GCRuntime::setFullCompartmentChecks(bool enabled)
+{
+    JS_ASSERT(!isHeapMajorCollecting());
+    fullCompartmentChecks = enabled;
+}
+
+#ifdef JS_GC_ZEAL
+bool
+GCRuntime::selectForMarking(JSObject *object)
+{
+    JS_ASSERT(!isHeapMajorCollecting());
+    return selectedForMarking.append(object);
 }
 
 void
-gc::SetFullCompartmentChecks(JSContext *cx, bool enabled)
-{
-    JSRuntime *rt = cx->runtime();
-    rt->gc.fullCompartmentChecks = enabled;
-}
+GCRuntime::clearSelectedForMarking()
+{
+    selectedForMarking.clearAndFree();
+}
+
+void
+GCRuntime::setDeterministic(bool enabled)
+{
+    JS_ASSERT(!isHeapMajorCollecting());
+    deterministicOnly = enabled;
+}
+#endif
 
 #ifdef DEBUG
 
 /* Should only be called manually under gdb */
 void PreventGCDuringInteractiveDebug()
 {
     TlsPerThreadData.get()->suppressGC++;
 }
@@ -5496,74 +5597,72 @@ JS::AssertGCThingMustBeTenured(JSObject 
 {
     JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
               obj->isTenured());
 }
 
 JS_FRIEND_API(void)
 js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
 {
-#ifdef DEBUG
     JS_ASSERT(cell);
     if (IsInsideNursery(cell))
         JS_ASSERT(kind == JSTRACE_OBJECT);
     else
         JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
-#endif
 }
 
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
     return rt->gc.number;
 }
 #endif
 
 #ifdef DEBUG
 JS::AutoAssertOnGC::AutoAssertOnGC()
-  : runtime(nullptr), gcNumber(0)
+  : gc(nullptr), gcNumber(0)
 {
     js::PerThreadData *data = js::TlsPerThreadData.get();
     if (data) {
         /*
          * GC's from off-thread will always assert, so off-thread is implicitly
          * AutoAssertOnGC. We still need to allow AutoAssertOnGC to be used in
          * code that works from both threads, however. We also use this to
          * annotate the off thread run loops.
          */
-        runtime = data->runtimeIfOnOwnerThread();
+        JSRuntime *runtime = data->runtimeIfOnOwnerThread();
         if (runtime) {
-            gcNumber = runtime->gc.number;
-            ++runtime->gc.inUnsafeRegion;
+            gc = &runtime->gc;
+            gcNumber = gc->number;
+            gc->enterUnsafeRegion();
         }
     }
 }
 
 JS::AutoAssertOnGC::AutoAssertOnGC(JSRuntime *rt)
-  : runtime(rt), gcNumber(rt->gc.number)
-{
-    ++rt->gc.inUnsafeRegion;
+  : gc(&rt->gc), gcNumber(rt->gc.number)
+{
+    gc->enterUnsafeRegion();
 }
 
 JS::AutoAssertOnGC::~AutoAssertOnGC()
 {
-    if (runtime) {
-        --runtime->gc.inUnsafeRegion;
-        MOZ_ASSERT(runtime->gc.inUnsafeRegion >= 0);
+    if (gc) {
+        gc->leaveUnsafeRegion();
 
         /*
          * The following backstop assertion should never fire: if we bumped the
          * gcNumber, we should have asserted because inUnsafeRegion was true.
          */
-        MOZ_ASSERT(gcNumber == runtime->gc.number, "GC ran inside an AutoAssertOnGC scope.");
+        MOZ_ASSERT(gcNumber == gc->number, "GC ran inside an AutoAssertOnGC scope.");
     }
 }
 
 /* static */ void
 JS::AutoAssertOnGC::VerifyIsSafeToGC(JSRuntime *rt)
 {
-    if (rt->gc.inUnsafeRegion > 0)
+    if (rt->gc.isInsideUnsafeRegion())
         MOZ_CRASH("[AutoAssertOnGC] possible GC in GC-unsafe region");
 }
 #endif
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -895,21 +895,16 @@ extern void
 PrepareForDebugGC(JSRuntime *rt);
 
 extern void
 MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);
 
 extern void
 MinorGC(JSContext *cx, JS::gcreason::Reason reason);
 
-#ifdef JS_GC_ZEAL
-extern void
-SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
-#endif
-
 /* Functions for managing cross compartment gray pointers. */
 
 extern void
 DelayCrossCompartmentGrayMarking(JSObject *src);
 
 extern void
 NotifyGCNukeWrapper(JSObject *o);
 
@@ -1156,25 +1151,16 @@ namespace gc {
 
 extern void
 GCIfNeeded(JSContext *cx);
 
 /* Tries to run a GC no matter what (used for GC zeal). */
 void
 RunDebugGC(JSContext *cx);
 
-void
-SetDeterministicGC(JSContext *cx, bool enabled);
-
-void
-SetValidateGC(JSContext *cx, bool enabled);
-
-void
-SetFullCompartmentChecks(JSContext *cx, bool enabled);
-
 /* Wait for the background thread to finish sweeping if it is running. */
 void
 FinishBackgroundFinalize(JSRuntime *rt);
 
 /*
  * Merge all contents of source into target. This can only be used if source is
  * the only compartment in its zone.
  */
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -71,25 +71,25 @@ GetGCThingTraceKind(const void *thing)
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
     if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
 #endif
     return MapAllocToTraceKind(cell->tenuredGetAllocKind());
 }
 
-static inline void
-GCPoke(JSRuntime *rt)
+inline void
+GCRuntime::poke()
 {
-    rt->gc.poke = true;
+    poked = true;
 
 #ifdef JS_GC_ZEAL
     /* Schedule a GC to happen "soon" after a GC poke. */
-    if (rt->gcZeal() == js::gc::ZealPokeValue)
-        rt->gc.nextScheduled = 1;
+    if (zealMode == ZealPokeValue)
+        nextScheduled = 1;
 #endif
 }
 
 class ArenaIter
 {
     ArenaHeader *aheader;
     ArenaHeader *remainingHeader;
 
@@ -484,17 +484,17 @@ CheckAllocatorState(ThreadSafeContext *c
     // For testing out of memory conditions
     if (!PossiblyFail()) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
     if (allowGC) {
 #ifdef JS_GC_ZEAL
-        if (rt->needZealousGC())
+        if (rt->gc.needZealousGC())
             js::gc::RunDebugGC(ncx);
 #endif
 
         if (rt->interrupt) {
             // Invoking the interrupt callback can fail and we can't usefully
             // handle that here. Just check in case we need to collect instead.
             js::gc::GCIfNeeded(ncx);
         }
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -5269,17 +5269,17 @@ baseops::DeleteGeneric(JSContext *cx, Ha
     if (!shape || proto != obj) {
         /*
          * If no property, or the property comes from a prototype, call the
          * class's delProperty hook, passing succeeded as the result parameter.
          */
         return CallJSDeletePropertyOp(cx, obj->getClass()->delProperty, obj, id, succeeded);
     }
 
-    GCPoke(cx->runtime());
+    cx->runtime()->gc.poke();
 
     if (IsImplicitDenseOrTypedArrayElement(shape)) {
         if (obj->is<TypedArrayObject>()) {
             // Don't delete elements from typed arrays.
             *succeeded = false;
             return true;
         }
 
--- a/js/src/vm/Runtime-inl.h
+++ b/js/src/vm/Runtime-inl.h
@@ -51,17 +51,17 @@ NewObjectCache::newObjectFromHit(JSConte
 
     // Do an end run around JSObject::type() to avoid doing AutoUnprotectCell
     // on the templateObj, which is not a GC thing and can't use runtimeFromAnyThread.
     types::TypeObject *type = templateObj->type_;
 
     if (type->shouldPreTenure())
         heap = gc::TenuredHeap;
 
-    if (cx->runtime()->upcomingZealousGC())
+    if (cx->runtime()->gc.upcomingZealousGC())
         return nullptr;
 
     // Trigger an identical allocation to the one that notified us of OOM
     // so that we trigger the right kind of GC automatically.
     if (allowGC) {
         mozilla::DebugOnly<JSObject *> obj =
             js::gc::AllocateObjectForCacheHit<allowGC>(cx, entry->kind, heap);
         JS_ASSERT(!obj);
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -691,53 +691,33 @@ JSRuntime::triggerActivityCallback(bool 
      * property and ensures that it remains true in the future.
      */
     AutoSuppressGC suppress(this);
 
     activityCallback(activityCallbackArg, active);
 }
 
 void
-JSRuntime::setGCMaxMallocBytes(size_t value)
-{
-    /*
-     * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
-     * mean that value.
-     */
-    gc.maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
-    resetGCMallocBytes();
-    for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next())
-        zone->setGCMaxMallocBytes(value);
-}
-
-void
 JSRuntime::updateMallocCounter(size_t nbytes)
 {
     updateMallocCounter(nullptr, nbytes);
 }
 
 void
 JSRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes)
 {
-    /* We tolerate any thread races when updating gcMallocBytes. */
-    gc.mallocBytes -= ptrdiff_t(nbytes);
-    if (MOZ_UNLIKELY(gc.mallocBytes <= 0))
-        onTooMuchMalloc();
-    else if (zone)
-        zone->updateMallocCounter(nbytes);
+    gc.updateMallocCounter(zone, nbytes);
 }
 
 JS_FRIEND_API(void)
 JSRuntime::onTooMuchMalloc()
 {
     if (!CurrentThreadCanAccessRuntime(this))
         return;
-
-    if (!gc.mallocGCTriggered)
-        gc.mallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC);
+    gc.onTooMuchMalloc();
 }
 
 JS_FRIEND_API(void *)
 JSRuntime::onOutOfMemory(void *p, size_t nbytes)
 {
     return onOutOfMemory(p, nbytes, nullptr);
 }
 
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -950,41 +950,17 @@ struct JSRuntime : public JS::shadow::Ru
         gc.marker.setGCMode(mode);
     }
 
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
 
-#ifdef JS_GC_ZEAL
-    int gcZeal() { return gc.zealMode; }
-
-    bool upcomingZealousGC() {
-        return gc.nextScheduled == 1;
-    }
-
-    bool needZealousGC() {
-        if (gc.nextScheduled > 0 && --gc.nextScheduled == 0) {
-            if (gcZeal() == js::gc::ZealAllocValue ||
-                gcZeal() == js::gc::ZealGenerationalGCValue ||
-                (gcZeal() >= js::gc::ZealIncrementalRootsThenFinish &&
-                 gcZeal() <= js::gc::ZealIncrementalMultipleSlices))
-            {
-                gc.nextScheduled = gc.zealFrequency;
-            }
-            return true;
-        }
-        return false;
-    }
-#else
-    int gcZeal() { return 0; }
-    bool upcomingZealousGC() { return false; }
-    bool needZealousGC() { return false; }
-#endif
+    int gcZeal() { return gc.zeal(); }
 
     void lockGC() {
         assertCanLock(js::GCLock);
         gc.lockGC();
     }
 
     void unlockGC() {
         gc.unlockGC();
@@ -1276,40 +1252,29 @@ struct JSRuntime : public JS::shadow::Ru
 
     JSRuntime(JSRuntime *parentRuntime);
     ~JSRuntime();
 
     bool init(uint32_t maxbytes);
 
     JSRuntime *thisFromCtor() { return this; }
 
-    void setGCMaxMallocBytes(size_t value);
-
-    void resetGCMallocBytes() {
-        gc.mallocBytes = ptrdiff_t(gc.maxMallocBytes);
-        gc.mallocGCTriggered = false;
-    }
-
     /*
      * Call this after allocating memory held by GC things, to update memory
      * pressure counters or report the OOM error if necessary. If oomError and
      * cx is not null the function also reports OOM error.
      *
      * The function must be called outside the GC lock and in case of OOM error
      * the caller must ensure that no deadlock possible during OOM reporting.
      */
     void updateMallocCounter(size_t nbytes);
     void updateMallocCounter(JS::Zone *zone, size_t nbytes);
 
     void reportAllocationOverflow() { js_ReportAllocationOverflow(nullptr); }
 
-    bool isTooMuchMalloc() const {
-        return gc.mallocBytes <= 0;
-    }
-
     /*
      * The function must be called outside the GC lock.
      */
     JS_FRIEND_API(void) onTooMuchMalloc();
 
     /*
      * This should be called after system malloc/realloc returns nullptr to try
      * to recove some memory or to report an error. Failures in malloc and