Bug 988486 - Make more GCRuntime state private and add necessary accessors r=terrence
author Jon Coppeard <jcoppeard@mozilla.com>
Thu, 26 Jun 2014 10:07:35 +0100
changeset 191011 e599c809f50dfbd93bebe2fbb28edd025a8c8e9b
parent 191010 c3d4833a1735a190213ce28ac17c6ba3a8c664ce
child 191012 8b7ede108fafd2ff1bd0a0e82803b5a853ad5870
push id 8417
push user ryanvm@gmail.com
push date Thu, 26 Jun 2014 21:15:47 +0000
treeherder b2g-inbound@3bef42144aab
reviewers terrence
bugs 988486
milestone 33.0a1
Bug 988486 - Make more GCRuntime state private and add necessary accessors r=terrence
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/Statistics.cpp
js/src/gc/Tracer.cpp
js/src/gc/Tracer.h
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/jit/Ion.cpp
js/src/jit/IonFrames.cpp
js/src/jit/PcScriptCache.h
js/src/jsapi-tests/testGCFinalizeCallback.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsobj.cpp
js/src/jsproxy.cpp
js/src/jsscript.cpp
js/src/vm/ForkJoin.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
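
The common thread in this patch is encapsulation: raw GCRuntime fields become private and callers go through narrow accessors. A minimal sketch of the pattern, with illustrative names rather than the real GCRuntime layout:

    // Minimal sketch of the encapsulation pattern this patch applies; the
    // class and field names here are illustrative, not the real GCRuntime.
    #include <cassert>
    #include <cstdint>

    class GCRuntimeSketch
    {
      public:
        // Read-only accessor: callers can observe the GC number...
        uint64_t gcNumber() const { return number; }
        // ...but can only change it through an operation with a clear meaning.
        void incGcNumber() { ++number; }

      private:
        uint64_t number = 0;   // formerly a public field poked from many files
    };

    int main()
    {
        GCRuntimeSketch gc;
        gc.incGcNumber();
        assert(gc.gcNumber() == 1);
    }
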
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -212,22 +212,32 @@ class GCRuntime
     }
 
     bool isInsideUnsafeRegion() { return inUnsafeRegion != 0; }
     void enterUnsafeRegion() { ++inUnsafeRegion; }
     void leaveUnsafeRegion() {
         JS_ASSERT(inUnsafeRegion > 0);
         --inUnsafeRegion;
     }
+
+    bool isStrictProxyCheckingEnabled() { return disableStrictProxyCheckingCount == 0; }
+    void disableStrictProxyChecking() { ++disableStrictProxyCheckingCount; }
+    void enableStrictProxyChecking() {
+        JS_ASSERT(disableStrictProxyCheckingCount > 0);
+        --disableStrictProxyCheckingCount;
+    }
 #endif
 
     void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
 
-    bool isIncrementalGCEnabled() { return incrementalEnabled; }
-    void disableIncrementalGC() { incrementalEnabled = false; }
+    bool isIncrementalGCAllowed() { return incrementalAllowed; }
+    void disallowIncrementalGC() { incrementalAllowed = false; }
+
+    bool isIncrementalGCEnabled() { return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed; }
+    bool isIncrementalGCInProgress() { return state() != gc::NO_INCREMENTAL && !verifyPreData; }
 
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
     void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
     bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
     void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
@@ -252,16 +262,43 @@ class GCRuntime
     void incObjectsMarkedInDeadZone() {
         JS_ASSERT(manipulatingDeadZones);
         ++objectsMarkedInDeadZones;
     }
 
     JS::Zone *getCurrentZoneGroup() { return currentZoneGroup; }
     void setFoundBlackGrayEdges() { foundBlackGrayEdges = true; }
 
+    uint64_t gcNumber() { return number; }
+    void incGcNumber() { ++number; }
+
+    bool isIncrementalGc() { return isIncremental; }
+    bool isFullGc() { return isFull; }
+
+    bool shouldCleanUpEverything() { return cleanUpEverything; }
+
+    bool areGrayBitsValid() { return grayBitsValid; }
+    void setGrayBitsInvalid() { grayBitsValid = false; }
+
+    bool isGcNeeded() { return isNeeded; }
+
+    double computeHeapGrowthFactor(size_t lastBytes);
+    size_t computeTriggerBytes(double growthFactor, size_t lastBytes, JSGCInvocationKind gckind);
+    size_t allocationThreshold() { return allocThreshold; }
+
+    JSGCMode gcMode() const { return mode; }
+    void setGCMode(JSGCMode m) {
+        mode = m;
+        marker.setGCMode(mode);
+    }
+
+    inline void updateOnChunkFree(const ChunkInfo &info);
+    inline void updateOnFreeArenaAlloc(const ChunkInfo &info);
+    inline void updateOnArenaFree(const ChunkInfo &info);
+
 #ifdef JS_GC_ZEAL
     void startVerifyPreBarriers();
     bool endVerifyPreBarriers();
     void startVerifyPostBarriers();
     bool endVerifyPostBarriers();
     void finishVerifier();
 #endif
 
@@ -348,102 +385,93 @@ class GCRuntime
     js::gc::Chunk         *userAvailableChunkListHead;
     js::gc::ChunkPool     chunkPool;
 
 #ifdef JSGC_GENERATIONAL
     js::Nursery           nursery;
     js::gc::StoreBuffer   storeBuffer;
 #endif
 
+    js::gcstats::Statistics stats;
+
+    js::GCMarker          marker;
+
     js::RootedValueMap    rootsHash;
 
     /* This is updated by both the main and GC helper threads. */
     mozilla::Atomic<size_t, mozilla::ReleaseAcquire>   bytes;
 
     size_t                maxBytes;
     size_t                maxMallocBytes;
 
+  private:
     /*
     * Number of committed arenas in all GC chunks, including empty chunks.
      */
     mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire>   numArenasFreeCommitted;
-    js::GCMarker          marker;
     void                  *verifyPreData;
     void                  *verifyPostData;
     bool                  chunkAllocationSinceLastGC;
     int64_t               nextFullGCTime;
     int64_t               lastGCTime;
     int64_t               jitReleaseTime;
 
     JSGCMode              mode;
 
-    size_t                allocationThreshold;
+    size_t                allocThreshold;
     bool                  highFrequencyGC;
     uint64_t              highFrequencyTimeThreshold;
     uint64_t              highFrequencyLowLimitBytes;
     uint64_t              highFrequencyHighLimitBytes;
     double                highFrequencyHeapGrowthMax;
     double                highFrequencyHeapGrowthMin;
     double                lowFrequencyHeapGrowth;
     bool                  dynamicHeapGrowth;
     bool                  dynamicMarkSlice;
     uint64_t              decommitThreshold;
 
     /* During shutdown, the GC needs to clean up every possible object. */
-    bool                  shouldCleanUpEverything;
+    bool                  cleanUpEverything;
 
     /*
      * The gray bits can become invalid if UnmarkGray overflows the stack. A
      * full GC will reset this bit, since it fills in all the gray bits.
      */
     bool                  grayBitsValid;
 
     /*
      * These flags must be kept separate so that a thread requesting a
      * compartment GC doesn't cancel another thread's concurrent request for a
      * full GC.
      */
     volatile uintptr_t    isNeeded;
 
-    js::gcstats::Statistics stats;
-
     /* Incremented on every GC slice. */
     uint64_t              number;
 
-    /* The   number at the time of the most recent GC's first slice. */
+    /* The number at the time of the most recent GC's first slice. */
     uint64_t              startNumber;
 
     /* Whether the currently running GC can finish in multiple slices. */
     bool                  isIncremental;
 
     /* Whether all compartments are being collected in first GC slice. */
     bool                  isFull;
 
     /* The reason that an interrupt-triggered GC should be called. */
     JS::gcreason::Reason  triggerReason;
 
     /*
-     * If this is true, all marked objects must belong to a compartment being
-     * GCed. This is used to look for compartment bugs.
-     */
-    bool                  strictCompartmentChecking;
-
-#ifdef DEBUG
-    /*
      * If this is 0, all cross-compartment proxies must be registered in the
      * wrapper map. This checking must be disabled temporarily while creating
      * new wrappers. When non-zero, this records the recursion depth of wrapper
      * creation.
      */
-    uintptr_t             disableStrictProxyCheckingCount;
-#else
-    uintptr_t             unused1;
-#endif
+    mozilla::DebugOnly<uintptr_t>  disableStrictProxyCheckingCount;
 
-  private:
     /*
      * The current incremental GC phase. This is also used internally in
      * non-incremental GC.
      */
     js::gc::State         incrementalState;
 
     /* Indicates that the last incremental slice exhausted the mark stack. */
     bool                  lastMarkSlice;
@@ -488,17 +516,17 @@ class GCRuntime
 
     /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
     int64_t               sliceBudget;
 
     /*
      * We disable incremental GC if we encounter a js::Class with a trace hook
      * that does not implement write barriers.
      */
-    bool                  incrementalEnabled;
+    bool                  incrementalAllowed;
 
     /*
      * GGC can be enabled from the command line while testing.
      */
     unsigned              generationalDisabled;
 
     /*
      * This is true if we are in the middle of a brain transplant (e.g.,
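
The strict proxy checking accessors above follow a re-entrant disable-counter idiom: checking counts as enabled only while the counter is zero, so nested disables (recursive wrapper creation) compose correctly. A hedged sketch of the idiom, with simplified names and without the mozilla::DebugOnly wrapper the real field uses:

    // Sketch of the re-entrant disable counter behind
    // disableStrictProxyChecking()/enableStrictProxyChecking().
    #include <cassert>
    #include <cstdint>

    class CheckingToggle
    {
      public:
        bool isEnabled() const { return disableCount == 0; }
        void disable() { ++disableCount; }    // nests safely
        void enable() {
            assert(disableCount > 0);         // must pair with a disable()
            --disableCount;
        }
      private:
        uintptr_t disableCount = 0;
    };

    int main()
    {
        CheckingToggle t;
        t.disable();
        t.disable();          // nested disable, e.g. recursive wrapper creation
        t.enable();
        assert(!t.isEnabled());
        t.enable();
        assert(t.isEnabled());
    }
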
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -200,36 +200,38 @@ CheckMarkedThing(JSTracer *trc, T **thin
         return;
 
     JS_ASSERT(thing->zone());
     JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime());
     JS_ASSERT(trc->hasTracingDetails());
 
     DebugOnly<JSRuntime *> rt = trc->runtime();
 
-    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.isManipulatingDeadZones(),
+    bool isGcMarkingTracer = IS_GC_MARKING_TRACER(trc);
+    JS_ASSERT_IF(isGcMarkingTracer && rt->gc.isManipulatingDeadZones(),
                  !thing->zone()->scheduledForDestruction);
 
     JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
-    JS_ASSERT_IF(thing->zone()->requireGCTracer(),
-                 IS_GC_MARKING_TRACER(trc));
+    JS_ASSERT_IF(thing->zone()->requireGCTracer(), isGcMarkingTracer);
 
     JS_ASSERT(thing->isAligned());
 
     JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
 
-    JS_ASSERT_IF(rt->gc.strictCompartmentChecking,
-                 thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));
+    if (isGcMarkingTracer) {
+        GCMarker *gcMarker = static_cast<GCMarker *>(trc);
+        JS_ASSERT_IF(gcMarker->shouldCheckCompartments(),
+                     thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));
 
-    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
-                 !thing->zone()->isGCMarkingBlack() || rt->isAtomsZone(thing->zone()));
+        JS_ASSERT_IF(gcMarker->getMarkColor() == GRAY,
+                     !thing->zone()->isGCMarkingBlack() || rt->isAtomsZone(thing->zone()));
 
-    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),
-                 !(thing->zone()->isGCSweeping() || thing->zone()->isGCFinished()));
+        JS_ASSERT(!(thing->zone()->isGCSweeping() || thing->zone()->isGCFinished()));
+    }
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
      * ArenaHeader may not be synced with the real one in ArenaLists.
      */
     JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
@@ -1686,18 +1688,17 @@ GCMarker::processMarkStackTop(SliceBudge
         PushMarkStack(this, shape);
 
         /* Call the trace hook if necessary. */
         const Class *clasp = type->clasp();
         if (clasp->trace) {
             // Global objects all have the same trace hook. That hook is safe without barriers
            // if the global has no custom trace hook of its own, or has been moved to a different
             // compartment, and so can't have one.
-            JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
-                         runtime()->gc.isIncrementalGCEnabled() &&
+            JS_ASSERT_IF(runtime()->gc.isIncrementalGCEnabled() &&
                          !(clasp->trace == JS_GlobalObjectTraceHook &&
                            (!obj->compartment()->options().getTrace() ||
                             !obj->isOwnGlobal())),
                          clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
             clasp->trace(this, obj);
         }
 
         if (!shape->isNative())
@@ -1728,26 +1729,24 @@ GCMarker::processMarkStackTop(SliceBudge
         goto scan_value_array;
     }
 }
 
 bool
 GCMarker::drainMarkStack(SliceBudget &budget)
 {
 #ifdef DEBUG
-    JSRuntime *rt = runtime();
-
     struct AutoCheckCompartment {
-        JSRuntime *runtime;
-        explicit AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
-            JS_ASSERT(!rt->gc.strictCompartmentChecking);
-            runtime->gc.strictCompartmentChecking = true;
+        bool &flag;
+        explicit AutoCheckCompartment(bool &compartmentCheckFlag) : flag(compartmentCheckFlag) {
+            JS_ASSERT(!flag);
+            flag = true;
         }
-        ~AutoCheckCompartment() { runtime->gc.strictCompartmentChecking = false; }
-    } acc(rt);
+        ~AutoCheckCompartment() { flag = false; }
+    } acc(strictCompartmentChecking);
 #endif
 
     if (budget.isOverBudget())
         return false;
 
     for (;;) {
         while (!stack.isEmpty()) {
             processMarkStackTop(budget);
@@ -1890,17 +1889,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
 {
     void *thing = *thingp;
     int stackDummy;
     if (!JS_CHECK_STACK_SIZE(trc->runtime()->mainThread.nativeStackLimit[StackForSystemCode], &stackDummy)) {
         /*
          * If we run out of stack, we take a more drastic measure: require that
          * we GC again before the next CC.
          */
-        trc->runtime()->gc.grayBitsValid = false;
+        trc->runtime()->gc.setGrayBitsInvalid();
         return;
     }
 
     UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
     if (!IsInsideNursery(static_cast<Cell *>(thing))) {
         if (!JS::GCThingIsMarkedGray(thing))
             return;
 
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -328,17 +328,17 @@ class MinorCollectionTracer : public JST
         nursery(nursery),
         session(rt, MinorCollecting),
         tenuredSize(0),
         head(nullptr),
         tail(&head),
         savedRuntimeNeedBarrier(rt->needsBarrier()),
         disableStrictProxyChecking(rt)
     {
-        rt->gc.number++;
+        rt->gc.incGcNumber();
 
         /*
          * We disable the runtime needsBarrier() check so that pre-barriers do
          * not fire on objects that have been relocated. The pre-barrier's
          * call to obj->zone() will try to look through shape_, which is now
          * the relocation magic and will crash. However, zone->needsBarrier()
          * must still be set correctly so that allocations we make in minor
          * GCs between incremental slices will allocate their objects marked.
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -552,17 +552,17 @@ Statistics::endGC()
         (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == zoneCount ? 0 : 1);
         (*cb)(JS_TELEMETRY_GC_MS, t(total));
         (*cb)(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
         (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
         (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
         (*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS]));
         (*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY]));
         (*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
-        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCEnabled());
+        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCAllowed());
         (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
         (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
 
         double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
         (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -457,17 +457,18 @@ MarkStack::sizeOfExcludingThis(mozilla::
  */
 GCMarker::GCMarker(JSRuntime *rt)
   : JSTracer(rt, nullptr, DoNotTraceWeakMaps),
     stack(size_t(-1)),
     color(BLACK),
     unmarkedArenaStackTop(nullptr),
     markLaterArenas(0),
     grayBufferState(GRAY_BUFFER_UNUSED),
-    started(false)
+    started(false),
+    strictCompartmentChecking(false)
 {
 }
 
 bool
 GCMarker::init(JSGCMode gcMode)
 {
     return stack.init(gcMode);
 }
@@ -685,13 +686,11 @@ GCMarker::sizeOfExcludingThis(mozilla::M
     for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
         size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
     return size;
 }
 
 void
 js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
 {
-    JS_ASSERT(!rt->isHeapBusy());
-    AutoStopVerifyingBarriers pauseVerification(rt, false);
-    rt->gc.marker.setMaxCapacity(limit);
+    rt->gc.setMarkStackLimit(limit);
 }
 
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -204,16 +204,20 @@ class GCMarker : public JSTracer
     void markBufferedGrayRoots(JS::Zone *zone);
 
     static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
 
     void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
+#ifdef DEBUG
+    bool shouldCheckCompartments() { return strictCompartmentChecking; }
+#endif
+
     /* This is public exclusively for ScanRope. */
     MarkStack stack;
 
   private:
 #ifdef DEBUG
     void checkZone(void *p);
 #else
     void checkZone(void *p) {}
@@ -286,16 +290,22 @@ class GCMarker : public JSTracer
         GRAY_BUFFER_UNUSED,
         GRAY_BUFFER_OK,
         GRAY_BUFFER_FAILED
     };
     GrayBufferState grayBufferState;
 
     /* Assert that start and stop are called with correct ordering. */
     mozilla::DebugOnly<bool> started;
+
+    /*
+     * If this is true, all marked objects must belong to a compartment being
+     * GCed. This is used to look for compartment bugs.
+     */
+    mozilla::DebugOnly<bool> strictCompartmentChecking;
 };
 
 void
 SetMarkStackLimit(JSRuntime *rt, size_t limit);
 
 } /* namespace js */
 
 /*
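
Both started and the relocated strictCompartmentChecking are mozilla::DebugOnly<bool>, so the value is kept in DEBUG builds and compiled away otherwise. A rough, simplified stand-in for the idea (not the actual mfbt/DebugOnly.h implementation):

    // Rough stand-in for mozilla::DebugOnly<T>: the stored value exists only
    // in DEBUG builds, so release builds pay nothing for the bookkeeping.
    template <typename T>
    class DebugOnlySketch
    {
      public:
    #ifdef DEBUG
        T value;
        DebugOnlySketch(const T &v) : value(v) {}
        DebugOnlySketch &operator=(const T &v) { value = v; return *this; }
        operator const T &() const { return value; }
    #else
        DebugOnlySketch(const T &) {}
        DebugOnlySketch &operator=(const T &) { return *this; }
    #endif
    };

    int main()
    {
        DebugOnlySketch<bool> strict(false);
        strict = true;   // stores the value in DEBUG builds, no-op otherwise
    }
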
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -98,17 +98,17 @@ struct VerifyPreTracer : JSTracer
     /* This graph represents the initial GC "snapshot". */
     VerifyNode *curnode;
     VerifyNode *root;
     char *edgeptr;
     char *term;
     NodeMap nodemap;
 
     VerifyPreTracer(JSRuntime *rt, JSTraceCallback callback)
-      : JSTracer(rt, callback), noggc(rt), number(rt->gc.number), count(0), root(nullptr)
+      : JSTracer(rt, callback), noggc(rt), number(rt->gc.gcNumber()), count(0), root(nullptr)
     {}
 
     ~VerifyPreTracer() {
         js_free(root);
     }
 };
 
 /*
@@ -385,17 +385,17 @@ struct VerifyPostTracer : JSTracer
     /* This counts up to gcZealFrequency to decide whether to verify. */
     int count;
 
     /* The set of edges in the StoreBuffer at the end of verification. */
     typedef HashSet<void **, PointerHasher<void **, 3>, SystemAllocPolicy> EdgeSet;
     EdgeSet *edges;
 
     VerifyPostTracer(JSRuntime *rt, JSTraceCallback callback)
-      : JSTracer(rt, callback), number(rt->gc.number), count(0)
+      : JSTracer(rt, callback), number(rt->gc.gcNumber()), count(0)
     {}
 };
 
 /*
  * The post-barrier verifier runs the full store buffer and a fake nursery when
 * running; when it stops, it walks the full heap to ensure that all the
 * important edges were inserted into the store buffer.
  */
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -218,17 +218,17 @@ Zone::discardJitCode(FreeOp *fop)
 #endif
 }
 
 uint64_t
 Zone::gcNumber()
 {
     // Zones in use by exclusive threads are not collected, and threads using
     // them cannot access the main runtime's gcNumber without racing.
-    return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gc.number;
+    return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gc.gcNumber();
 }
 
 #ifdef JS_ION
 js::jit::JitZone *
 Zone::createJitZone(JSContext *cx)
 {
     MOZ_ASSERT(!jitZone_);
 
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -612,17 +612,17 @@ JitCompartment::mark(JSTracer *trc, JSCo
         for (ScriptSet::Enum e(*activeParallelEntryScripts_); !e.empty(); e.popFront()) {
             JSScript *script = e.front();
 
             // If the script has since been invalidated or was attached by an
             // off-thread helper too late (i.e., the ForkJoin finished with
             // warmup doing all the work), remove it.
             if (!script->hasParallelIonScript() ||
                 !script->parallelIonScript()->isParallelEntryScript() ||
-                trc->runtime()->gc.shouldCleanUpEverything)
+                trc->runtime()->gc.shouldCleanUpEverything())
             {
                 e.removeFront();
                 continue;
             }
 
             // Check and increment the age. If the script is below the max
             // age, mark it.
             //
--- a/js/src/jit/IonFrames.cpp
+++ b/js/src/jit/IonFrames.cpp
@@ -1332,17 +1332,17 @@ GetPcScript(JSContext *cx, JSScript **sc
     uint8_t *retAddr = it.returnAddress();
     uint32_t hash = PcScriptCache::Hash(retAddr);
     JS_ASSERT(retAddr != nullptr);
 
     // Lazily initialize the cache. The allocation may safely fail and will not GC.
     if (MOZ_UNLIKELY(rt->ionPcScriptCache == nullptr)) {
         rt->ionPcScriptCache = (PcScriptCache *)js_malloc(sizeof(struct PcScriptCache));
         if (rt->ionPcScriptCache)
-            rt->ionPcScriptCache->clear(rt->gc.number);
+            rt->ionPcScriptCache->clear(rt->gc.gcNumber());
     }
 
     // Attempt to lookup address in cache.
     if (rt->ionPcScriptCache && rt->ionPcScriptCache->get(rt, hash, retAddr, scriptRes, pcRes))
         return;
 
     // Lookup failed: undertake expensive process to recover the innermost inlined frame.
     ++it; // Skip exit frame.
--- a/js/src/jit/PcScriptCache.h
+++ b/js/src/jit/PcScriptCache.h
@@ -40,18 +40,18 @@ struct PcScriptCache
         this->gcNumber = gcNumber;
     }
 
     // Get a value from the cache. May perform lazy allocation.
     bool get(JSRuntime *rt, uint32_t hash, uint8_t *addr,
              JSScript **scriptRes, jsbytecode **pcRes)
     {
         // If a GC occurred, lazily clear the cache now.
-        if (gcNumber != rt->gc.number) {
-            clear(rt->gc.number);
+        if (gcNumber != rt->gc.gcNumber()) {
+            clear(rt->gc.gcNumber());
             return false;
         }
 
         if (entries[hash].returnAddress != addr)
             return false;
 
         *scriptRes = entries[hash].script;
         if (pcRes)
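
PcScriptCache illustrates why gcNumber() needs to be readable from outside GCRuntime: the cache tags itself with the GC number at its last clear and lazily flushes on the first lookup after any GC. A reduced sketch with placeholder types:

    // Reduced sketch of the gcNumber-based lazy invalidation in
    // PcScriptCache; the map and value types are placeholders.
    #include <cassert>
    #include <cstdint>
    #include <unordered_map>

    struct GcEpochCache
    {
        uint64_t gcNumber = 0;
        std::unordered_map<uint64_t, int> entries;

        void clear(uint64_t currentGcNumber) {
            entries.clear();
            gcNumber = currentGcNumber;
        }

        bool get(uint64_t currentGcNumber, uint64_t key, int *out) {
            if (gcNumber != currentGcNumber) {  // a GC ran: entries may be stale
                clear(currentGcNumber);
                return false;
            }
            auto it = entries.find(key);
            if (it == entries.end())
                return false;
            *out = it->second;
            return true;
        }
    };

    int main()
    {
        GcEpochCache cache;
        cache.entries[7] = 42;
        int v;
        assert(cache.get(/* gc number */ 0, 7, &v) && v == 42);
        assert(!cache.get(/* after a GC */ 1, 7, &v));  // lazily flushed
    }
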
--- a/js/src/jsapi-tests/testGCFinalizeCallback.cpp
+++ b/js/src/jsapi-tests/testGCFinalizeCallback.cpp
@@ -12,95 +12,95 @@ static bool IsCompartmentGCBuffer[Buffer
 BEGIN_TEST(testGCFinalizeCallback)
 {
     JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
     JS_AddFinalizeCallback(rt, FinalizeCallback, nullptr);
 
     /* Full GC, non-incremental. */
     FinalizeCalls = 0;
     JS_GC(rt);
-    CHECK(rt->gc.isFull);
+    CHECK(rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     /* Full GC, incremental. */
     FinalizeCalls = 0;
     JS::PrepareForFullGC(rt);
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
-    CHECK(rt->gc.isFull);
+    CHECK(rt->gc.isFullGc());
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     JS::RootedObject global1(cx, createGlobal());
     JS::RootedObject global2(cx, createGlobal());
     JS::RootedObject global3(cx, createGlobal());
     CHECK(global1);
     CHECK(global2);
     CHECK(global3);
 
     /* Compartment GC, non-incremental, single compartment. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::GCForReason(rt, JS::gcreason::API);
-    CHECK(!rt->gc.isFull);
+    CHECK(!rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, non-incremental, multiple compartments. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::PrepareZoneForGC(global2->zone());
     JS::PrepareZoneForGC(global3->zone());
     JS::GCForReason(rt, JS::gcreason::API);
-    CHECK(!rt->gc.isFull);
+    CHECK(!rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, single compartment. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
-    CHECK(!rt->gc.isFull);
+    CHECK(!rt->gc.isFullGc());
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, multiple compartments. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::PrepareZoneForGC(global2->zone());
     JS::PrepareZoneForGC(global3->zone());
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
-    CHECK(!rt->gc.isFull);
+    CHECK(!rt->gc.isFullGc());
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
 #ifdef JS_GC_ZEAL
 
     /* Full GC with reset due to new compartment, becoming compartment GC. */
 
     FinalizeCalls = 0;
     JS_SetGCZeal(cx, 9, 1000000);
     JS::PrepareForFullGC(rt);
     js::GCDebugSlice(rt, true, 1);
     CHECK(rt->gc.state() == js::gc::MARK);
-    CHECK(rt->gc.isFull);
+    CHECK(rt->gc.isFullGc());
 
     JS::RootedObject global4(cx, createGlobal());
     js::GCDebugSlice(rt, true, 1);
     CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
-    CHECK(!rt->gc.isFull);
+    CHECK(!rt->gc.isFullGc());
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
 
     for (unsigned i = 0; i < FinalizeCalls - 1; ++i)
         CHECK(!IsCompartmentGCBuffer[i]);
     CHECK(IsCompartmentGCBuffer[FinalizeCalls - 1]);
 
     JS_SetGCZeal(cx, 0, 0);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -628,17 +628,17 @@ js::TraceWeakMaps(WeakMapTracer *trc)
 {
     WeakMapBase::traceAllMappings(trc);
     WatchpointMap::traceAll(trc);
 }
 
 extern JS_FRIEND_API(bool)
 js::AreGCGrayBitsValid(JSRuntime *rt)
 {
-    return rt->gc.grayBitsValid;
+    return rt->gc.areGrayBitsValid();
 }
 
 JS_FRIEND_API(bool)
 js::ZoneGlobalsAreAllGray(JS::Zone *zone)
 {
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         JSObject *obj = comp->maybeGlobal();
         if (!obj || !JS::GCThingIsMarkedGray(obj))
@@ -858,17 +858,17 @@ JS_FRIEND_API(JS::GCSliceCallback)
 JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
 {
     return rt->gc.setSliceCallback(callback);
 }
 
 JS_FRIEND_API(bool)
 JS::WasIncrementalGC(JSRuntime *rt)
 {
-    return rt->gc.isIncremental;
+    return rt->gc.isIncrementalGc();
 }
 
 jschar *
 GCDescription::formatMessage(JSRuntime *rt) const
 {
     return rt->gc.stats.formatMessage();
 }
 
@@ -882,29 +882,29 @@ JS_FRIEND_API(void)
 JS::NotifyDidPaint(JSRuntime *rt)
 {
     rt->gc.notifyDidPaint();
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalGCEnabled(JSRuntime *rt)
 {
-    return rt->gc.isIncrementalGCEnabled() && rt->gcMode() == JSGC_MODE_INCREMENTAL;
+    return rt->gc.isIncrementalGCEnabled();
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalGCInProgress(JSRuntime *rt)
 {
-    return rt->gc.state() != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
+    return rt->gc.isIncrementalGCInProgress();
 }
 
 JS_FRIEND_API(void)
 JS::DisableIncrementalGC(JSRuntime *rt)
 {
-    rt->gc.disableIncrementalGC();
+    rt->gc.disallowIncrementalGC();
 }
 
 JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
   : gc(&rt->gc)
 #if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
   , restartVerifier(false)
 #endif
 {
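
The friend API change above relies on the new allowed/enabled split: "allowed" is a one-way latch cleared when a barrier-less trace hook is encountered, while "enabled" additionally requires JSGC_MODE_INCREMENTAL. A sketch of that distinction:

    // Sketch of the allowed/enabled split introduced in this patch.
    #include <cassert>

    enum Mode { Global, Incremental };

    struct IncrementalPolicy
    {
        Mode mode = Global;
        bool allowed = true;                 // permanently cleared, never re-set

        void disallow() { allowed = false; }
        bool isAllowed() const { return allowed; }
        bool isEnabled() const { return mode == Incremental && allowed; }
    };

    int main()
    {
        IncrementalPolicy p;
        assert(!p.isEnabled());              // allowed, but mode not incremental
        p.mode = Incremental;
        assert(p.isEnabled());
        p.disallow();                        // e.g. trace hook lacks barriers
        assert(!p.isEnabled() && !p.isAllowed());
    }
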
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -727,22 +727,27 @@ Chunk::allocate(JSRuntime *rt)
 Chunk::release(JSRuntime *rt, Chunk *chunk)
 {
     JS_ASSERT(chunk);
     chunk->prepareToBeFreed(rt);
     FreeChunk(rt, chunk);
 }
 
 inline void
+GCRuntime::updateOnChunkFree(const ChunkInfo &info)
+{
+    JS_ASSERT(numArenasFreeCommitted >= info.numArenasFreeCommitted);
+    numArenasFreeCommitted -= info.numArenasFreeCommitted;
+    stats.count(gcstats::STAT_DESTROY_CHUNK);
+}
+
+inline void
 Chunk::prepareToBeFreed(JSRuntime *rt)
 {
-    JS_ASSERT(rt->gc.numArenasFreeCommitted >= info.numArenasFreeCommitted);
-    rt->gc.numArenasFreeCommitted -= info.numArenasFreeCommitted;
-    rt->gc.stats.count(gcstats::STAT_DESTROY_CHUNK);
-
+    rt->gc.updateOnChunkFree(info);
 #ifdef DEBUG
     /*
      * Let FreeChunkList detect a missing prepareToBeFreed call before it
      * frees chunk.
      */
     info.numArenasFreeCommitted = 0;
 #endif
 }
@@ -860,28 +865,34 @@ Chunk::fetchNextDecommittedArena()
 
     Arena *arena = &arenas[offset];
     info.trailer.runtime->gc.pageAllocator.markPagesInUse(arena, ArenaSize);
     arena->aheader.setAsNotAllocated();
 
     return &arena->aheader;
 }
 
+inline void
+GCRuntime::updateOnFreeArenaAlloc(const ChunkInfo &info)
+{
+    JS_ASSERT(info.numArenasFreeCommitted <= numArenasFreeCommitted);
+    --numArenasFreeCommitted;
+}
+
 inline ArenaHeader *
 Chunk::fetchNextFreeArena(JSRuntime *rt)
 {
     JS_ASSERT(info.numArenasFreeCommitted > 0);
     JS_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree);
-    JS_ASSERT(info.numArenasFreeCommitted <= rt->gc.numArenasFreeCommitted);
 
     ArenaHeader *aheader = info.freeArenasHead;
     info.freeArenasHead = aheader->next;
     --info.numArenasFreeCommitted;
     --info.numArenasFree;
-    --rt->gc.numArenasFreeCommitted;
+    rt->gc.updateOnFreeArenaAlloc(info);
 
     return aheader;
 }
 
 ArenaHeader *
 Chunk::allocateArena(Zone *zone, AllocKind thingKind)
 {
     JS_ASSERT(hasAvailableArenas());
@@ -913,24 +924,30 @@ Chunk::allocateArena(Zone *zone, AllocKi
         AutoUnlockGC unlock(rt);
         TriggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER);
     }
 
     return aheader;
 }
 
 inline void
+GCRuntime::updateOnArenaFree(const ChunkInfo &info)
+{
+    ++numArenasFreeCommitted;
+}
+
+inline void
 Chunk::addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader)
 {
     JS_ASSERT(!aheader->allocated());
     aheader->next = info.freeArenasHead;
     info.freeArenasHead = aheader;
     ++info.numArenasFreeCommitted;
     ++info.numArenasFree;
-    ++rt->gc.numArenasFreeCommitted;
+    rt->gc.updateOnArenaFree(info);
 }
 
 void
 Chunk::recycleArena(ArenaHeader *aheader, ArenaList &dest, AllocKind thingKind)
 {
     aheader->getArena()->setAsFullyUnused(thingKind);
     dest.insertAtCursor(aheader);
 }
@@ -1057,47 +1074,46 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     rt(rt),
     systemZone(nullptr),
     systemAvailableChunkListHead(nullptr),
     userAvailableChunkListHead(nullptr),
 #ifdef JSGC_GENERATIONAL
     nursery(rt),
     storeBuffer(rt, nursery),
 #endif
+    stats(rt),
+    marker(rt),
     bytes(0),
     maxBytes(0),
     maxMallocBytes(0),
     numArenasFreeCommitted(0),
-    marker(rt),
     verifyPreData(nullptr),
     verifyPostData(nullptr),
     chunkAllocationSinceLastGC(false),
     nextFullGCTime(0),
     lastGCTime(0),
     jitReleaseTime(0),
-    allocationThreshold(30 * 1024 * 1024),
+    allocThreshold(30 * 1024 * 1024),
     highFrequencyGC(false),
     highFrequencyTimeThreshold(1000),
     highFrequencyLowLimitBytes(100 * 1024 * 1024),
     highFrequencyHighLimitBytes(500 * 1024 * 1024),
     highFrequencyHeapGrowthMax(3.0),
     highFrequencyHeapGrowthMin(1.5),
     lowFrequencyHeapGrowth(1.5),
     dynamicHeapGrowth(false),
     dynamicMarkSlice(false),
     decommitThreshold(32 * 1024 * 1024),
-    shouldCleanUpEverything(false),
+    cleanUpEverything(false),
     grayBitsValid(false),
     isNeeded(0),
-    stats(rt),
     number(0),
     startNumber(0),
     isFull(false),
     triggerReason(JS::gcreason::NO_REASON),
-    strictCompartmentChecking(false),
 #ifdef DEBUG
     disableStrictProxyCheckingCount(0),
 #endif
     incrementalState(gc::NO_INCREMENTAL),
     lastMarkSlice(false),
     sweepOnBackgroundThread(false),
     foundBlackGrayEdges(false),
     sweepingZones(nullptr),
@@ -1108,17 +1124,17 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     sweepKindIndex(0),
     abortSweepAfterCurrentGroup(false),
     arenasAllocatedDuringSweep(nullptr),
 #ifdef JS_GC_MARKING_VALIDATION
     markingValidator(nullptr),
 #endif
     interFrameGC(0),
     sliceBudget(SliceBudget::Unlimited),
-    incrementalEnabled(true),
+    incrementalAllowed(true),
     generationalDisabled(0),
     manipulatingDeadZones(false),
     objectsMarkedInDeadZones(0),
     poked(false),
     heapState(Idle),
 #ifdef JS_GC_ZEAL
     zealMode(0),
     zealFrequency(0),
@@ -1136,16 +1152,17 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     alwaysPreserveCode(false),
 #ifdef DEBUG
     noGCOrAllocationCheck(0),
 #endif
     lock(nullptr),
     lockOwner(nullptr),
     helperState(rt)
 {
+    setGCMode(JSGC_MODE_GLOBAL);
 }
 
 #ifdef JS_GC_ZEAL
 
 void
 GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
 {
     if (verifyPreData)
@@ -1351,17 +1368,17 @@ GCRuntime::setParameter(JSGCParamKey key
       }
       case JSGC_MAX_MALLOC_BYTES:
         setMaxMallocBytes(value);
         break;
       case JSGC_SLICE_TIME_BUDGET:
         sliceBudget = SliceBudget::TimeBudget(value);
         break;
       case JSGC_MARK_STACK_LIMIT:
-        js::SetMarkStackLimit(rt, value);
+        setMarkStackLimit(value);
         break;
       case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
         highFrequencyTimeThreshold = value;
         break;
       case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
         highFrequencyLowLimitBytes = value * 1024 * 1024;
         break;
       case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
@@ -1381,17 +1398,17 @@ GCRuntime::setParameter(JSGCParamKey key
         break;
       case JSGC_DYNAMIC_HEAP_GROWTH:
         dynamicHeapGrowth = value;
         break;
       case JSGC_DYNAMIC_MARK_SLICE:
         dynamicMarkSlice = value;
         break;
       case JSGC_ALLOCATION_THRESHOLD:
-        allocationThreshold = value * 1024 * 1024;
+        allocThreshold = value * 1024 * 1024;
         break;
       case JSGC_DECOMMIT_THRESHOLD:
         decommitThreshold = value * 1024 * 1024;
         break;
       default:
         JS_ASSERT(key == JSGC_MODE);
         mode = JSGCMode(value);
         JS_ASSERT(mode == JSGC_MODE_GLOBAL ||
@@ -1407,17 +1424,17 @@ GCRuntime::getParameter(JSGCParamKey key
     switch (key) {
       case JSGC_MAX_BYTES:
         return uint32_t(maxBytes);
       case JSGC_MAX_MALLOC_BYTES:
         return maxMallocBytes;
       case JSGC_BYTES:
         return uint32_t(bytes);
       case JSGC_MODE:
-        return uint32_t(rt->gcMode());
+        return uint32_t(mode);
       case JSGC_UNUSED_CHUNKS:
         return uint32_t(chunkPool.getEmptyCount());
       case JSGC_TOTAL_CHUNKS:
         return uint32_t(chunkSet.count() + chunkPool.getEmptyCount());
       case JSGC_SLICE_TIME_BUDGET:
         return uint32_t(sliceBudget > 0 ? sliceBudget / PRMJ_USEC_PER_MSEC : 0);
       case JSGC_MARK_STACK_LIMIT:
         return marker.maxCapacity();
@@ -1433,23 +1450,31 @@ GCRuntime::getParameter(JSGCParamKey key
         return uint32_t(highFrequencyHeapGrowthMin * 100);
       case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
         return uint32_t(lowFrequencyHeapGrowth * 100);
       case JSGC_DYNAMIC_HEAP_GROWTH:
         return dynamicHeapGrowth;
       case JSGC_DYNAMIC_MARK_SLICE:
         return dynamicMarkSlice;
       case JSGC_ALLOCATION_THRESHOLD:
-        return allocationThreshold / 1024 / 1024;
+        return allocThreshold / 1024 / 1024;
       default:
         JS_ASSERT(key == JSGC_NUMBER);
         return uint32_t(number);
     }
 }
 
+void
+GCRuntime::setMarkStackLimit(size_t limit)
+{
+    JS_ASSERT(!isHeapBusy());
+    AutoStopVerifyingBarriers pauseVerification(rt, false);
+    marker.setMaxCapacity(limit);
+}
+
 template <typename T> struct BarrierOwner {};
 template <typename T> struct BarrierOwner<T *> { typedef T result; };
 template <> struct BarrierOwner<Value> { typedef HeapValue result; };
 
 bool
 GCRuntime::addBlackRootsTracer(JSTraceDataOp traceOp, void *data)
 {
     AssertHeapIsIdle(rt);
@@ -1632,72 +1657,82 @@ GCRuntime::updateMallocCounter(JS::Zone 
 
 void
 GCRuntime::onTooMuchMalloc()
 {
     if (!mallocGCTriggered)
         mallocGCTriggered = triggerGC(JS::gcreason::TOO_MUCH_MALLOC);
 }
 
-static size_t
-ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
-{
-    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gc.allocationThreshold);
-    double trigger = double(base) * zone->gcHeapGrowthFactor;
+size_t
+GCRuntime::computeTriggerBytes(double growthFactor, size_t lastBytes, JSGCInvocationKind gckind)
+{
+    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, allocThreshold);
+    double trigger = double(base) * growthFactor;
     return size_t(Min(double(maxBytes), trigger));
 }
 
-void
-Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
+double
+GCRuntime::computeHeapGrowthFactor(size_t lastBytes)
 {
     /*
      * The heap growth factor depends on the heap size after a GC and the GC frequency.
      * For low frequency GCs (more than 1sec between GCs) we let the heap grow to 150%.
      * For high frequency GCs we let the heap grow depending on the heap size:
      *   lastBytes < highFrequencyLowLimit: 300%
      *   lastBytes > highFrequencyHighLimit: 150%
      *   otherwise: linear interpolation between 150% and 300% based on lastBytes
      */
-    JSRuntime *rt = runtimeFromMainThread();
-
-    if (!rt->gc.dynamicHeapGrowth) {
-        gcHeapGrowthFactor = 3.0;
+
+    double factor;
+    if (!dynamicHeapGrowth) {
+        factor = 3.0;
     } else if (lastBytes < 1 * 1024 * 1024) {
-        gcHeapGrowthFactor = rt->gc.lowFrequencyHeapGrowth;
+        factor = lowFrequencyHeapGrowth;
     } else {
-        JS_ASSERT(rt->gc.highFrequencyHighLimitBytes > rt->gc.highFrequencyLowLimitBytes);
+        JS_ASSERT(highFrequencyHighLimitBytes > highFrequencyLowLimitBytes);
         uint64_t now = PRMJ_Now();
-        if (rt->gc.lastGCTime && rt->gc.lastGCTime + rt->gc.highFrequencyTimeThreshold * PRMJ_USEC_PER_MSEC > now) {
-            if (lastBytes <= rt->gc.highFrequencyLowLimitBytes) {
-                gcHeapGrowthFactor = rt->gc.highFrequencyHeapGrowthMax;
-            } else if (lastBytes >= rt->gc.highFrequencyHighLimitBytes) {
-                gcHeapGrowthFactor = rt->gc.highFrequencyHeapGrowthMin;
+        if (lastGCTime && lastGCTime + highFrequencyTimeThreshold * PRMJ_USEC_PER_MSEC > now) {
+            if (lastBytes <= highFrequencyLowLimitBytes) {
+                factor = highFrequencyHeapGrowthMax;
+            } else if (lastBytes >= highFrequencyHighLimitBytes) {
+                factor = highFrequencyHeapGrowthMin;
             } else {
-                double k = (rt->gc.highFrequencyHeapGrowthMin - rt->gc.highFrequencyHeapGrowthMax)
-                           / (double)(rt->gc.highFrequencyHighLimitBytes - rt->gc.highFrequencyLowLimitBytes);
-                gcHeapGrowthFactor = (k * (lastBytes - rt->gc.highFrequencyLowLimitBytes)
-                                     + rt->gc.highFrequencyHeapGrowthMax);
-                JS_ASSERT(gcHeapGrowthFactor <= rt->gc.highFrequencyHeapGrowthMax
-                          && gcHeapGrowthFactor >= rt->gc.highFrequencyHeapGrowthMin);
+                double k = (highFrequencyHeapGrowthMin - highFrequencyHeapGrowthMax)
+                           / (double)(highFrequencyHighLimitBytes - highFrequencyLowLimitBytes);
+                factor = (k * (lastBytes - highFrequencyLowLimitBytes)
+                          + highFrequencyHeapGrowthMax);
+                JS_ASSERT(factor <= highFrequencyHeapGrowthMax
+                          && factor >= highFrequencyHeapGrowthMin);
             }
-            rt->gc.highFrequencyGC = true;
+            highFrequencyGC = true;
         } else {
-            gcHeapGrowthFactor = rt->gc.lowFrequencyHeapGrowth;
-            rt->gc.highFrequencyGC = false;
+            factor = lowFrequencyHeapGrowth;
+            highFrequencyGC = false;
         }
     }
-    gcTriggerBytes = ComputeTriggerBytes(this, lastBytes, rt->gc.maxBytes, gckind);
+
+    return factor;
+}
+
+void
+Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
+{
+    GCRuntime &gc = runtimeFromMainThread()->gc;
+    gcHeapGrowthFactor = gc.computeHeapGrowthFactor(lastBytes);
+    gcTriggerBytes = gc.computeTriggerBytes(gcHeapGrowthFactor, lastBytes, gckind);
 }
 
 void
 Zone::reduceGCTriggerBytes(size_t amount)
 {
     JS_ASSERT(amount > 0);
     JS_ASSERT(gcTriggerBytes >= amount);
-    if (gcTriggerBytes - amount < runtimeFromAnyThread()->gc.allocationThreshold * gcHeapGrowthFactor)
+    GCRuntime &gc = runtimeFromAnyThread()->gc;
+    if (gcTriggerBytes - amount < gc.allocationThreshold() * gcHeapGrowthFactor)
         return;
     gcTriggerBytes -= amount;
 }
 
 Allocator::Allocator(Zone *zone)
   : zone_(zone)
 {}
 
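For the high-frequency branch of computeHeapGrowthFactor, the factor is a linear interpolation between the max and min growth factors. With the constructor defaults above (low limit 100 MiB, high limit 500 MiB, growth 3.0 down to 1.5), a hypothetical 300 MiB heap midway between the limits should land midway between the factors, at 2.25. A quick check of that arithmetic:

    // Worked check of the linear interpolation in computeHeapGrowthFactor,
    // using the defaults from the GCRuntime constructor above.
    #include <cassert>

    int main()
    {
        const double growthMax = 3.0, growthMin = 1.5;
        const double lowLimit = 100.0 * 1024 * 1024;   // highFrequencyLowLimitBytes
        const double highLimit = 500.0 * 1024 * 1024;  // highFrequencyHighLimitBytes
        const double lastBytes = 300.0 * 1024 * 1024;  // hypothetical heap size

        double k = (growthMin - growthMax) / (highLimit - lowLimit);
        double factor = k * (lastBytes - lowLimit) + growthMax;

        // Midway between the limits lands midway between the factors.
        assert(factor > 2.2499 && factor < 2.2501);
    }
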
@@ -2962,17 +2997,17 @@ PurgeRuntime(JSRuntime *rt)
 
     if (!rt->hasActiveCompilations())
         rt->parseMapPool().purgeAll();
 }
 
 bool
 GCRuntime::shouldPreserveJITCode(JSCompartment *comp, int64_t currentTime)
 {
-    if (shouldCleanUpEverything)
+    if (cleanUpEverything)
         return false;
 
     if (alwaysPreserveCode)
         return true;
     if (comp->lastAnimationTime + PRMJ_USEC_PER_SEC >= currentTime)
         return true;
 
 #ifdef JS_ION
@@ -3120,17 +3155,17 @@ GCRuntime::beginMarkPhase()
 
     for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next()) {
         JS_ASSERT(c->gcLiveArrayBuffers.empty());
         c->marked = false;
         if (shouldPreserveJITCode(c, currentTime))
             c->zone()->setPreservingCode(true);
     }
 
-    if (!rt->gc.shouldCleanUpEverything) {
+    if (!rt->gc.cleanUpEverything) {
 #ifdef JS_ION
         if (JSCompartment *comp = jit::TopmostIonActivationCompartment(rt))
             comp->zone()->setPreservingCode(true);
 #endif
     }
 
     /*
      * Atoms are not in the cross-compartment map. So if there are any
@@ -4573,17 +4608,17 @@ GCRuntime::resetIncrementalGC(const char
             zone->setNeedsBarrier(false, Zone::UpdateIon);
             zone->setGCState(Zone::NoGC);
         }
         rt->setNeedsBarrier(false);
         AssertNeedsBarrierFlagsConsistent(rt);
 
         incrementalState = NO_INCREMENTAL;
 
-        JS_ASSERT(!strictCompartmentChecking);
+        JS_ASSERT(!marker.shouldCheckCompartments());
 
         break;
       }
 
       case SWEEP:
         marker.reset();
 
         for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
@@ -4820,17 +4855,17 @@ GCRuntime::incrementalCollectSlice(int64
 IncrementalSafety
 gc::IsIncrementalGCSafe(JSRuntime *rt)
 {
     JS_ASSERT(!rt->mainThread.suppressGC);
 
     if (rt->keepAtoms())
         return IncrementalSafety::Unsafe("keepAtoms set");
 
-    if (!rt->gc.isIncrementalGCEnabled())
+    if (!rt->gc.isIncrementalGCAllowed())
         return IncrementalSafety::Unsafe("incremental permanently disabled");
 
     return IncrementalSafety::Safe();
 }
 
 void
 GCRuntime::budgetIncrementalGC(int64_t *budget)
 {
@@ -5056,17 +5091,17 @@ GCRuntime::collect(bool incremental, int
         zoneCount++;
         if (zone->isGCScheduled())
             collectedCount++;
     }
 
     for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
         compartmentCount++;
 
-    shouldCleanUpEverything = ShouldCleanUpEverything(reason, gckind);
+    cleanUpEverything = ShouldCleanUpEverything(reason, gckind);
 
     bool repeat = false;
     do {
         minorGC(reason);
 
         /*
          * Marking can trigger many incidental post barriers, some of them for
          * objects which are not going to be live after the GC.
@@ -5090,26 +5125,26 @@ GCRuntime::collect(bool incremental, int
 
         if (incrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_GC_END);
             if (gcCallback.op)
                 gcCallback.op(rt, JSGC_END, gcCallback.data);
         }
 
         /* Need to re-schedule all zones for GC. */
-        if (poked && shouldCleanUpEverything)
+        if (poked && cleanUpEverything)
             JS::PrepareForFullGC(rt);
 
         /*
          * If we reset an existing GC, we need to start a new one. Also, we
         * repeat GCs that happen during shutdown (the cleanUpEverything
          * case) until we can be sure that no additional garbage is created
          * (which typically happens if roots are dropped during finalizers).
          */
-        repeat = (poked && shouldCleanUpEverything) || wasReset;
+        repeat = (poked && cleanUpEverything) || wasReset;
     } while (repeat);
 
     if (incrementalState == NO_INCREMENTAL) {
 #ifdef JS_THREADSAFE
         EnqueuePendingParseTasksAfterGC(rt);
 #endif
     }
 }
@@ -5728,20 +5763,25 @@ bool
 js::UninlinedIsInsideNursery(const gc::Cell *cell)
 {
     return IsInsideNursery(cell);
 }
 
 #ifdef DEBUG
 AutoDisableProxyCheck::AutoDisableProxyCheck(JSRuntime *rt
                                              MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-  : count(rt->gc.disableStrictProxyCheckingCount)
+  : gc(rt->gc)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-    count++;
+    gc.disableStrictProxyChecking();
+}
+
+AutoDisableProxyCheck::~AutoDisableProxyCheck()
+{
+    gc.enableStrictProxyChecking();
 }
 
 JS_FRIEND_API(void)
 JS::AssertGCThingMustBeTenured(JSObject *obj)
 {
     JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
               obj->isTenured());
 }
@@ -5757,17 +5797,17 @@ js::gc::AssertGCThingHasType(js::gc::Cel
 }
 
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
-    return rt->gc.number;
+    return rt->gc.gcNumber();
 }
 #endif
 
 #ifdef DEBUG
 JS::AutoAssertOnGC::AutoAssertOnGC()
   : gc(nullptr), gcNumber(0)
 {
     js::PerThreadData *data = js::TlsPerThreadData.get();
@@ -5776,38 +5816,38 @@ JS::AutoAssertOnGC::AutoAssertOnGC()
          * GC's from off-thread will always assert, so off-thread is implicitly
          * AutoAssertOnGC. We still need to allow AutoAssertOnGC to be used in
          * code that works from both threads, however. We also use this to
          * annotate the off thread run loops.
          */
         JSRuntime *runtime = data->runtimeIfOnOwnerThread();
         if (runtime) {
             gc = &runtime->gc;
-            gcNumber = gc->number;
+            gcNumber = gc->gcNumber();
             gc->enterUnsafeRegion();
         }
     }
 }
 
 JS::AutoAssertOnGC::AutoAssertOnGC(JSRuntime *rt)
-  : gc(&rt->gc), gcNumber(rt->gc.number)
+  : gc(&rt->gc), gcNumber(rt->gc.gcNumber())
 {
     gc->enterUnsafeRegion();
 }
 
 JS::AutoAssertOnGC::~AutoAssertOnGC()
 {
     if (gc) {
         gc->leaveUnsafeRegion();
 
         /*
          * The following backstop assertion should never fire: if we bumped the
          * gcNumber, we should have asserted because inUnsafeRegion was true.
          */
-        MOZ_ASSERT(gcNumber == gc->number, "GC ran inside an AutoAssertOnGC scope.");
+        MOZ_ASSERT(gcNumber == gc->gcNumber(), "GC ran inside an AutoAssertOnGC scope.");
     }
 }
 
 /* static */ void
 JS::AutoAssertOnGC::VerifyIsSafeToGC(JSRuntime *rt)
 {
     if (rt->gc.isInsideUnsafeRegion())
         MOZ_CRASH("[AutoAssertOnGC] possible GC in GC-unsafe region");
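
AutoAssertOnGC combines the unsafe-region counter with a gcNumber() snapshot; the destructor comparison is a backstop that only fires if a GC somehow ran without tripping the region assertion. A self-contained sketch of the pattern, with GCStub standing in for GCRuntime:

    // Sketch of the AutoAssertOnGC pattern: an unsafe-region counter plus a
    // GC-number snapshot as a backstop.
    #include <cassert>
    #include <cstdint>

    struct GCStub
    {
        uint64_t number = 0;
        unsigned inUnsafeRegion = 0;

        void enterUnsafeRegion() { ++inUnsafeRegion; }
        void leaveUnsafeRegion() { assert(inUnsafeRegion > 0); --inUnsafeRegion; }
        void collect() { assert(inUnsafeRegion == 0); ++number; }  // GC forbidden in region
    };

    struct AutoAssertOnGCSketch
    {
        GCStub &gc;
        uint64_t gcNumberAtStart;

        explicit AutoAssertOnGCSketch(GCStub &g) : gc(g), gcNumberAtStart(g.number) {
            gc.enterUnsafeRegion();
        }
        ~AutoAssertOnGCSketch() {
            gc.leaveUnsafeRegion();
            // Backstop: if the number moved, a GC ran inside the scope.
            assert(gcNumberAtStart == gc.number);
        }
    };

    int main()
    {
        GCStub gc;
        gc.collect();                       // fine outside the scope
        {
            AutoAssertOnGCSketch guard(gc); // gc.collect() would assert here
        }
        gc.collect();                       // fine again
    }
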
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1310,25 +1310,22 @@ IsInsideGGCNursery(const gc::Cell *cell)
 
 } /* namespace gc */
 
 #ifdef DEBUG
 /* Use this to avoid assertions when manipulating the wrapper map. */
 class AutoDisableProxyCheck
 {
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
-    uintptr_t &count;
+    gc::GCRuntime &gc;
 
   public:
     explicit AutoDisableProxyCheck(JSRuntime *rt
                                    MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-
-    ~AutoDisableProxyCheck() {
-        count--;
-    }
+    ~AutoDisableProxyCheck();
 };
 #else
 struct AutoDisableProxyCheck
 {
     explicit AutoDisableProxyCheck(JSRuntime *rt) {}
 };
 #endif
 
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1425,20 +1425,20 @@ NewObject(ExclusiveContext *cx, types::T
                                     !cx->compartment()->options().getTrace();
     if (clasp->trace &&
         !globalWithoutCustomTrace &&
         !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
     {
         if (!cx->shouldBeJSContext())
             return nullptr;
         JSRuntime *rt = cx->asJSContext()->runtime();
-        rt->gc.disableIncrementalGC();
+        rt->gc.disallowIncrementalGC();
 
 #ifdef DEBUG
-        if (rt->gcMode() == JSGC_MODE_INCREMENTAL) {
+        if (rt->gc.gcMode() == JSGC_MODE_INCREMENTAL) {
             fprintf(stderr,
                     "The class %s has a trace hook but does not declare the\n"
                     "JSCLASS_IMPLEMENTS_BARRIERS flag. Please ensure that it correctly\n"
                     "implements write barriers and then set the flag.\n",
                     clasp->name);
             MOZ_CRASH();
         }
 #endif
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -2724,17 +2724,17 @@ js::proxy_Trace(JSTracer *trc, JSObject 
 }
 
 /* static */ void
 ProxyObject::trace(JSTracer *trc, JSObject *obj)
 {
     ProxyObject *proxy = &obj->as<ProxyObject>();
 
 #ifdef DEBUG
-    if (!trc->runtime()->gc.disableStrictProxyCheckingCount && proxy->is<WrapperObject>()) {
+    if (trc->runtime()->gc.isStrictProxyCheckingEnabled() && proxy->is<WrapperObject>()) {
         JSObject *referent = &proxy->private_().toObject();
         if (referent->compartment() != proxy->compartment()) {
             /*
              * Assert that this proxy is tracked in the wrapper map. We maintain
              * the invariant that the wrapped object is the key in the wrapper map.
              */
             Value key = ObjectValue(*referent);
             WrapperMap::Ptr p = proxy->compartment()->lookupWrapper(key);
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -2153,17 +2153,17 @@ SaveSharedScriptData(ExclusiveContext *c
     /*
      * During the IGC we need to ensure that bytecode is marked whenever it is
      * accessed even if the bytecode was already in the table: at this point
      * old scripts or exceptions pointing to the bytecode may no longer be
      * reachable. This is effectively a read barrier.
      */
     if (cx->isJSContext()) {
         JSRuntime *rt = cx->asJSContext()->runtime();
-        if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFull)
+        if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFullGc())
             ssd->marked = true;
     }
 #endif
 
     script->setCode(ssd->data);
     script->atoms = ssd->atoms();
     return true;
 }
@@ -2171,35 +2171,35 @@ SaveSharedScriptData(ExclusiveContext *c
 static inline void
 MarkScriptData(JSRuntime *rt, const jsbytecode *bytecode)
 {
     /*
      * As an invariant, a ScriptBytecodeEntry should not be 'marked' outside of
      * a GC. Since SweepScriptBytecodes is only called during a full gc,
      * to preserve this invariant, only mark during a full gc.
      */
-    if (rt->gc.isFull)
+    if (rt->gc.isFullGc())
         SharedScriptData::fromBytecode(bytecode)->marked = true;
 }
 
 void
 js::UnmarkScriptData(JSRuntime *rt)
 {
-    JS_ASSERT(rt->gc.isFull);
+    JS_ASSERT(rt->gc.isFullGc());
     ScriptDataTable &table = rt->scriptDataTable();
     for (ScriptDataTable::Enum e(table); !e.empty(); e.popFront()) {
         SharedScriptData *entry = e.front();
         entry->marked = false;
     }
 }
 
 void
 js::SweepScriptData(JSRuntime *rt)
 {
-    JS_ASSERT(rt->gc.isFull);
+    JS_ASSERT(rt->gc.isFullGc());
     ScriptDataTable &table = rt->scriptDataTable();
 
     if (rt->keepAtoms())
         return;
 
     for (ScriptDataTable::Enum e(table); !e.empty(); e.popFront()) {
         SharedScriptData *entry = e.front();
         if (!entry->marked) {
@@ -3394,17 +3394,19 @@ JSScript::clearTraps(FreeOp *fop)
 void
 JSScript::markChildren(JSTracer *trc)
 {
     // NOTE: this JSScript may be partially initialized at this point.  E.g. we
     // may have created it and partially initialized it with
     // JSScript::Create(), but not yet finished initializing it with
     // fullyInitFromEmitter() or fullyInitTrivial().
 
-    JS_ASSERT_IF(trc->runtime()->gc.strictCompartmentChecking, zone()->isCollecting());
+    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) &&
+                 static_cast<GCMarker *>(trc)->shouldCheckCompartments(),
+                 zone()->isCollecting());
 
     for (uint32_t i = 0; i < natoms(); ++i) {
         if (atoms[i])
             MarkString(trc, &atoms[i], "atom");
     }
 
     if (hasObjects()) {
         ObjectArray *objarray = objects();
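
The isFullGc() test in SaveSharedScriptData implements a read barrier: while a full incremental GC is in progress, shared bytecode is marked whenever it is reached through the table, since older references to it may no longer be visible to the marker. A sketch of the idea with hypothetical types:

    // Sketch of the read-barrier idea in SaveSharedScriptData: during a full
    // incremental GC, data is marked whenever it is looked up.
    #include <cassert>

    struct SharedData { bool marked = false; };

    struct GCView { bool incrementalInProgress; bool fullGc; };

    // Mirrors the IsIncrementalGCInProgress() && isFullGc() test above.
    void noteAccess(SharedData &d, const GCView &gc)
    {
        if (gc.incrementalInProgress && gc.fullGc)
            d.marked = true;   // read barrier: keep the data alive this cycle
    }

    int main()
    {
        SharedData d;
        noteAccess(d, GCView{false, false});
        assert(!d.marked);                   // no GC running: nothing to do
        noteAccess(d, GCView{true, true});
        assert(d.marked);                    // kept alive during a full IGC
    }
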
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -1719,17 +1719,17 @@ ForkJoinShared::executePortion(PerThread
 
 void
 ForkJoinShared::setAbortFlagDueToInterrupt(ForkJoinContext &cx)
 {
     JS_ASSERT(cx_->runtime()->interruptPar);
     // The GC Needed flag should not be set during parallel
     // execution.  Instead, one of the requestGC() or
     // requestZoneGC() methods should be invoked.
-    JS_ASSERT(!cx_->runtime()->gc.isNeeded);
+    JS_ASSERT(!cx_->runtime()->gc.isGcNeeded());
 
     if (!abort_) {
         cx.bailoutRecord->joinCause(ParallelBailoutInterrupt);
         setAbortFlagAndRequestInterrupt(false);
     }
 }
 
 void
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -225,18 +225,16 @@ JSRuntime::JSRuntime(JSRuntime *parentRu
 #ifdef DEBUG
     enteredPolicy(nullptr),
 #endif
     largeAllocationFailureCallback(nullptr),
     oomCallback(nullptr)
 {
     liveRuntimesCount++;
 
-    setGCMode(JSGC_MODE_GLOBAL);
-
     /* Initialize infallibly first, so we can goto bad and JS_DestroyRuntime. */
     JS_INIT_CLIST(&onNewGlobalObjectWatchers);
 
     PodZero(&debugHooks);
     PodArrayZero(nativeStackQuota);
     PodZero(&asmJSCacheOps);
 }
 
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -955,22 +955,16 @@ struct JSRuntime : public JS::shadow::Ru
 #endif
 
     /* Garbage collector state, used by jsgc.c. */
     js::gc::GCRuntime   gc;
 
     /* Garbage collector state has been successfully initialized. */
     bool                gcInitialized;
 
-    JSGCMode gcMode() const { return gc.mode; }
-    void setGCMode(JSGCMode mode) {
-        gc.mode = mode;
-        gc.marker.setGCMode(mode);
-    }
-
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
 
     bool isFJMinorCollecting() { return gc.isFJMinorCollecting(); }
 
     int gcZeal() { return gc.zeal(); }