Bug 988486 - Make more GCRuntime state private and add accessors r=terrence
author: Jon Coppeard <jcoppeard@mozilla.com>
Mon, 16 Jun 2014 10:40:44 +0100
changeset 209695 39acaa2e399b9843fb4ebeceba4d3d352aa36b07
parent 209694 c2a8504bf1091e745bd09766aa40b57e654e4c0e
child 209696 9ab3b097f304834155bc66840632781af66d610e
push id: 3857
push user: raliiev@mozilla.com
push date: Tue, 02 Sep 2014 16:39:23 +0000
treeherder: mozilla-beta@5638b907b505
reviewers: terrence
bugs: 988486
milestone: 33.0a1
js/src/builtin/TestingFunctions.cpp
js/src/gc/ForkJoinNursery.cpp
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Tracer.cpp
js/src/jsapi-tests/testGCFinalizeCallback.cpp
js/src/jsapi.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jsobj.cpp
js/src/vm/Runtime.h
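The shape of the change, condensed: fields that callers used to read and write directly move into GCRuntime's private section, and small public accessors take their place. An illustrative sketch of the pattern (names taken from the patch; the real class is in js/src/gc/GCRuntime.h):

    class GCRuntime
    {
      public:
        // Read-only view for callers that previously read the field directly.
        js::gc::State state() { return incrementalState; }

        bool isIncrementalGCEnabled() { return incrementalEnabled; }
        void disableIncrementalGC() { incrementalEnabled = false; }

      private:
        js::gc::State incrementalState;
        bool incrementalEnabled;
    };

Call sites change accordingly, e.g. rt->gc.incrementalState becomes rt->gc.state(), as the hunks below show.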
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -566,17 +566,17 @@ GCState(JSContext *cx, unsigned argc, js
 
     if (args.length() != 0) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Too many arguments");
         return false;
     }
 
     const char *state;
-    gc::State globalState = cx->runtime()->gc.incrementalState;
+    gc::State globalState = cx->runtime()->gc.state();
     if (globalState == gc::NO_INCREMENTAL)
         state = "none";
     else if (globalState == gc::MARK)
         state = "mark";
     else if (globalState == gc::SWEEP)
         state = "sweep";
     else
         MOZ_ASSUME_UNREACHABLE("Unobservable global GC state");
--- a/js/src/gc/ForkJoinNursery.cpp
+++ b/js/src/gc/ForkJoinNursery.cpp
@@ -227,17 +227,17 @@ ForkJoinNursery::pjsCollection(int op)
     JSRuntime *const rt = shared_->runtime();
     const unsigned currentNumActiveChunks_ = numActiveChunks_;
     const char *msg = "";
 
     JS_ASSERT(!rt->needsBarrier());
 
     TIME_START(pjsCollection);
 
-    rt->incFJMinorCollecting();
+    rt->gc.incFJMinorCollecting();
     if (evacuate) {
         isEvacuating_ = true;
         evacuationZone_ = shared_->zone();
     }
 
     flip();
     if (recreate) {
         initNewspace();
@@ -260,17 +260,17 @@ ForkJoinNursery::pjsCollection(int op)
     JS_ASSERT_IF(isEvacuating_, hugeSlots[hugeSlotsNew].empty());
 
     isEvacuating_ = false;
     evacuationZone_ = nullptr;
     head_ = nullptr;
     tail_ = &head_;
     movedSize_ = 0;
 
-    rt->decFJMinorCollecting();
+    rt->gc.decFJMinorCollecting();
 
     TIME_END(pjsCollection);
 
     // Note, the spew is awk-friendly, non-underlined words serve as markers:
     //   FJGC _tag_ us _value_ copied _value_ size _value_ _message-word_ ...
     shared_->spewGC("FJGC %s us %5" PRId64 "  copied %7" PRIu64 "  size %" PRIu64 "  %s",
                     (evacuate ? "evacuate " : "collect  "),
                     TIME_TOTAL(pjsCollection),
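pjsCollection brackets the collection with matching inc/dec calls on the runtime's GC. A hypothetical RAII guard could make that pairing automatic on every exit path (not part of this patch; AutoFJMinorCollection is an invented name):

    class MOZ_STACK_CLASS AutoFJMinorCollection
    {
        js::gc::GCRuntime &gc_;
      public:
        explicit AutoFJMinorCollection(js::gc::GCRuntime &gc) : gc_(gc) {
            gc_.incFJMinorCollecting();   // counter up on entry
        }
        ~AutoFJMinorCollection() {
            gc_.decFJMinorCollecting();   // counter down on every exit path
        }
    };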
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -27,16 +27,17 @@
 namespace js {
 
 namespace gc {
 
 typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
 
 class MarkingValidator;
 class AutoPrepareForTracing;
+class AutoTraceSession;
 
 struct ConservativeGCData
 {
     /*
      * The GC scans conservatively between ThreadData::nativeStackBase and
      * nativeStackTop unless the latter is nullptr.
      */
     uintptr_t           *nativeStackTop;
@@ -99,58 +100,73 @@ class GCRuntime
     inline int zeal();
     inline bool upcomingZealousGC();
     inline bool needZealousGC();
 
     template <typename T> bool addRoot(T *rp, const char *name, JSGCRootType rootType);
     void removeRoot(void *rp);
     void setMarkStackLimit(size_t limit);
 
+    void setParameter(JSGCParamKey key, uint32_t value);
+    uint32_t getParameter(JSGCParamKey key);
+
     bool isHeapBusy() { return heapState != js::Idle; }
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
     bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
 
+    // Performance note: if isFJMinorCollecting turns out to be slow because
+    // reading the counter is slow then we may be able to augment the counter
+    // with a volatile flag that is set iff the counter is greater than
+    // zero. (It will require some care to make sure the two variables stay in
+    // sync.)
+    bool isFJMinorCollecting() { return fjCollectionCounter > 0; }
+    void incFJMinorCollecting() { fjCollectionCounter++; }
+    void decFJMinorCollecting() { fjCollectionCounter--; }
+
     bool triggerGC(JS::gcreason::Reason reason);
     bool triggerZoneGC(Zone *zone, JS::gcreason::Reason reason);
     void maybeGC(Zone *zone);
     void minorGC(JS::gcreason::Reason reason);
     void minorGC(JSContext *cx, JS::gcreason::Reason reason);
     void gcIfNeeded(JSContext *cx);
     void collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
-    void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis);
+    void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);
     void runDebugGC();
     inline void poke();
 
     void markRuntime(JSTracer *trc, bool useSavedRoots = false);
 
+    void notifyDidPaint();
+    void shrinkBuffers();
+
 #ifdef JS_GC_ZEAL
     const void *addressOfZealMode() { return &zealMode; }
     void setZeal(uint8_t zeal, uint32_t frequency);
     void setNextScheduled(uint32_t count);
     void verifyPreBarriers();
     void verifyPostBarriers();
     void maybeVerifyPreBarriers(bool always);
     void maybeVerifyPostBarriers(bool always);
     bool selectForMarking(JSObject *object);
     void clearSelectedForMarking();
     void setDeterministic(bool enable);
 #endif
 
   public:
     // Internal public interface
+    js::gc::State state() { return incrementalState; }
     void recordNativeStackTop();
 #ifdef JS_THREADSAFE
     void notifyRequestEnd() { conservativeGC.updateForRequestEnd(); }
 #endif
     bool isBackgroundSweeping() { return helperState.isBackgroundSweeping(); }
     void waitBackgroundSweepEnd() { helperState.waitBackgroundSweepEnd(); }
     void waitBackgroundSweepOrAllocEnd() { helperState.waitBackgroundSweepOrAllocEnd(); }
-    void startBackgroundShrink() { helperState.startBackgroundShrink(); }
     void startBackgroundAllocationIfIdle() { helperState.startBackgroundAllocationIfIdle(); }
     void freeLater(void *p) { helperState.freeLater(p); }
 
 #ifdef DEBUG
 
     bool onBackgroundThread() { return helperState.onBackgroundThread(); }
 
     bool currentThreadOwnsGCLock() {
@@ -200,16 +216,19 @@ class GCRuntime
     void leaveUnsafeRegion() {
         JS_ASSERT(inUnsafeRegion > 0);
         --inUnsafeRegion;
     }
 #endif
 
     void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
 
+    bool isIncrementalGCEnabled() { return incrementalEnabled; }
+    void disableIncrementalGC() { incrementalEnabled = false; }
+
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
     void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
     bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
     void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
 
@@ -222,28 +241,40 @@ class GCRuntime
     void setGCCallback(JSGCCallback callback, void *data);
     bool addFinalizeCallback(JSFinalizeCallback callback, void *data);
     void removeFinalizeCallback(JSFinalizeCallback func);
     JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
 
     void setValidate(bool enable);
     void setFullCompartmentChecks(bool enable);
 
+    bool isManipulatingDeadZones() { return manipulatingDeadZones; }
+    void setManipulatingDeadZones(bool value) { manipulatingDeadZones = value; }
+    unsigned objectsMarkedInDeadZonesCount() { return objectsMarkedInDeadZones; }
+    void incObjectsMarkedInDeadZone() {
+        JS_ASSERT(manipulatingDeadZones);
+        ++objectsMarkedInDeadZones;
+    }
+
+    JS::Zone *getCurrentZoneGroup() { return currentZoneGroup; }
+    void setFoundBlackGrayEdges() { foundBlackGrayEdges = true; }
+
 #ifdef JS_GC_ZEAL
     void startVerifyPreBarriers();
     bool endVerifyPreBarriers();
     void startVerifyPostBarriers();
     bool endVerifyPostBarriers();
     void finishVerifier();
 #endif
 
   private:
     // For ArenaLists::allocateFromArenaInline()
     friend class ArenaLists;
     Chunk *pickChunk(Zone *zone, AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation);
+    inline void arenaAllocatedDuringGC(JS::Zone *zone, ArenaHeader *arena);
 
     inline bool wantBackgroundAllocation() const;
 
     bool initZeal();
     void requestInterrupt(JS::gcreason::Reason reason);
     bool gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void budgetIncrementalGC(int64_t *budget);
@@ -265,16 +296,21 @@ class GCRuntime
     void getNextZoneGroup();
     void endMarkingZoneGroup();
     void beginSweepingZoneGroup();
     bool releaseObservedTypes();
     void endSweepingZoneGroup();
     bool sweepPhase(SliceBudget &sliceBudget);
     void endSweepPhase(JSGCInvocationKind gckind, bool lastGC);
     void sweepZones(FreeOp *fop, bool lastGC);
+    void decommitArenasFromAvailableList(Chunk **availableListHeadp);
+    void decommitArenas();
+    void expireChunksAndArenas(bool shouldShrink);
+    void sweepBackgroundThings(bool onBackgroundThread);
+    void assertBackgroundSweepingFinished();
 
     void computeNonIncrementalMarkingForValidation();
     void validateIncrementalMarking();
     void finishMarkingValidation();
 
     void markConservativeStackRoots(JSTracer *trc, bool useSavedRoots);
 
 #ifdef DEBUG
@@ -307,16 +343,21 @@ class GCRuntime
      * in the list head are taken, then the chunk is removed from the list.
      * During the GC when all arenas in a chunk become free, that chunk is
      * removed from the list and scheduled for release.
      */
     js::gc::Chunk         *systemAvailableChunkListHead;
     js::gc::Chunk         *userAvailableChunkListHead;
     js::gc::ChunkPool     chunkPool;
 
+#ifdef JSGC_GENERATIONAL
+    js::Nursery           nursery;
+    js::gc::StoreBuffer   storeBuffer;
+#endif
+
     js::RootedValueMap    rootsHash;
 
     /* This is updated by both the main and GC helper threads. */
     mozilla::Atomic<size_t, mozilla::ReleaseAcquire>   bytes;
 
     size_t                maxBytes;
     size_t                maxMallocBytes;
 
@@ -392,16 +433,17 @@ class GCRuntime
      * new wrappers. When non-zero, this records the recursion depth of wrapper
      * creation.
      */
     uintptr_t             disableStrictProxyCheckingCount;
 #else
     uintptr_t             unused1;
 #endif
 
+  private:
     /*
      * The current incremental GC phase. This is also used internally in
      * non-incremental GC.
      */
     js::gc::State         incrementalState;
 
     /* Indicates that the last incremental slice exhausted the mark stack. */
     bool                  lastMarkSlice;
@@ -474,32 +516,26 @@ class GCRuntime
      * zones. This should happen very rarely.
      */
     unsigned              objectsMarkedInDeadZones;
 
     bool                  poked;
 
     volatile js::HeapState heapState;
 
-#ifdef JSGC_GENERATIONAL
-    js::Nursery           nursery;
-    js::gc::StoreBuffer   storeBuffer;
-#endif
-
     /*
      * ForkJoin workers enter and leave GC independently; this counter
      * tracks the number that are currently in GC.
      *
      * Technically this should be #ifdef JSGC_FJGENERATIONAL but that
      * affects the observed size of JSRuntime in problematic ways, see
      * note in vm/ThreadPool.h.
      */
     mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> fjCollectionCounter;
 
-  private:
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are nextScheduled and gcDebugCompartmentGC. At every allocation,
      * nextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on debugCompartmentGC.
      *
      * At this point, if zeal_ is one of the types that trigger periodic
      * collection, then nextScheduled is reset to the value of
@@ -578,16 +614,17 @@ class GCRuntime
     mozilla::DebugOnly<PRThread *>   lockOwner;
 
     GCHelperState helperState;
 
     ConservativeGCData conservativeGC;
 
     friend class js::GCHelperState;
     friend class js::gc::MarkingValidator;
+    friend class js::gc::AutoTraceSession;
 };
 
 #ifdef JS_GC_ZEAL
 inline int
 GCRuntime::zeal() {
     return zealMode;
 }
 
@@ -611,13 +648,12 @@ GCRuntime::needZealousGC() {
     return false;
 }
 #else
 inline int GCRuntime::zeal() { return 0; }
 inline bool GCRuntime::upcomingZealousGC() { return false; }
 inline bool GCRuntime::needZealousGC() { return false; }
 #endif
 
-
 } /* namespace gc */
 } /* namespace js */
 
 #endif
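The performance note attached to isFJMinorCollecting sketches a fallback if reading the atomic counter ever proves slow: mirror it with a flag. A hypothetical version of that idea (not in this patch; as the note says, keeping the pair in sync needs care, and this naive form has a window where the flag lags the counter):

    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> fjCollectionCounter;
    volatile bool fjCollecting;     // intended invariant: fjCollectionCounter > 0

    void incFJMinorCollecting() {
        if (++fjCollectionCounter == 1)
            fjCollecting = true;    // first worker entering GC
    }
    void decFJMinorCollecting() {
        if (--fjCollectionCounter == 0)
            fjCollecting = false;   // last worker leaving GC
    }
    bool isFJMinorCollecting() { return fjCollecting; }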
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -193,17 +193,17 @@ CheckMarkedThing(JSTracer *trc, T **thin
         return;
 
     JS_ASSERT(thing->zone());
     JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime());
     JS_ASSERT(trc->hasTracingDetails());
 
     DebugOnly<JSRuntime *> rt = trc->runtime();
 
-    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.manipulatingDeadZones,
+    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.isManipulatingDeadZones(),
                  !thing->zone()->scheduledForDestruction);
 
     JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     JS_ASSERT_IF(thing->zone()->requireGCTracer(),
                  IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
@@ -280,18 +280,18 @@ MarkInternal(JSTracer *trc, T **thingp)
         trc->unsetTracingLocation();
     }
 
     trc->clearTracingDetails();
 }
 
 #define JS_ROOT_MARKING_ASSERT(trc)                                     \
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),                             \
-                 trc->runtime()->gc.incrementalState == NO_INCREMENTAL ||       \
-                 trc->runtime()->gc.incrementalState == MARK_ROOTS);
+                 trc->runtime()->gc.state() == NO_INCREMENTAL ||        \
+                 trc->runtime()->gc.state() == MARK_ROOTS);
 
 namespace js {
 namespace gc {
 
 template <typename T>
 void
 MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
 {
@@ -854,17 +854,17 @@ ShouldMarkCrossCompartment(JSTracer *trc
          * Having black->gray edges violates our promise to the cycle
          * collector. This can happen if we're collecting a compartment and it
          * has an edge to an uncollected compartment: it's possible that the
          * source and destination of the cross-compartment edge should be gray,
          * but the source was marked black by the conservative scanner.
          */
         if (cell->isMarked(GRAY)) {
             JS_ASSERT(!zone->isCollecting());
-            trc->runtime()->gc.foundBlackGrayEdges = true;
+            trc->runtime()->gc.setFoundBlackGrayEdges();
         }
         return zone->isGCMarking();
     } else {
         if (zone->isGCMarkingBlack()) {
             /*
              * The destination compartment is not being marked gray now,
              * but it will be later, so record the cell so it can be marked gray
              * at the appropriate time.
@@ -1606,17 +1606,17 @@ GCMarker::processMarkStackTop(SliceBudge
 
         /* Call the trace hook if necessary. */
         const Class *clasp = type->clasp();
         if (clasp->trace) {
             // Global objects all have the same trace hook. That hook is safe without barriers
             // if the global has no custom trace hook of its own, or has been moved to a different
             // compartment, and so can't have one.
             JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
-                         runtime()->gc.incrementalEnabled &&
+                         runtime()->gc.isIncrementalGCEnabled() &&
                          !(clasp->trace == JS_GlobalObjectTraceHook &&
                            (!obj->compartment()->options().getTrace() ||
                             !obj->isOwnGlobal())),
                          clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
             clasp->trace(this, obj);
         }
 
         if (!shape->isNative())
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -345,28 +345,28 @@ class MinorCollectionTracer : public JST
          */
         rt->setNeedsBarrier(false);
 
         /*
          * We use the live array buffer lists to track traced buffers so we can
          * sweep their dead views. Incremental collection also uses these lists,
          * so we may need to save and restore their contents here.
          */
-        if (rt->gc.incrementalState != NO_INCREMENTAL) {
+        if (rt->gc.state() != NO_INCREMENTAL) {
             for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
                 if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
                     CrashAtUnhandlableOOM("OOM while saving live array buffers");
                 ArrayBufferObject::resetArrayBufferList(c);
             }
         }
     }
 
     ~MinorCollectionTracer() {
         runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
-        if (runtime()->gc.incrementalState != NO_INCREMENTAL)
+        if (runtime()->gc.state() != NO_INCREMENTAL)
             ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
     }
 };
 
 } /* namespace gc */
 } /* namespace js */
 
 static AllocKind
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -261,17 +261,17 @@ MarkIfGCThingWord(JSTracer *trc, uintptr
     trc->setTracingName(nameBuf);
 #endif
     trc->setTracingLocation((void *)w);
     void *tmp = thing;
     MarkKind(trc, &tmp, traceKind);
     JS_ASSERT(tmp == thing);
 
 #ifdef DEBUG
-    if (trc->runtime()->gc.incrementalState == MARK_ROOTS)
+    if (trc->runtime()->gc.state() == MARK_ROOTS)
         trc->runtime()->mainThread.gcSavedRoots.append(
             PerThreadData::SavedGCRoot(thing, traceKind));
 #endif
 
     return CGCT_VALID;
 }
 
 static void
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -551,17 +551,17 @@ Statistics::endGC()
         (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == zoneCount ? 0 : 1);
         (*cb)(JS_TELEMETRY_GC_MS, t(total));
         (*cb)(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
         (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
         (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
         (*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS]));
         (*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY]));
         (*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
-        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.incrementalEnabled);
+        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCEnabled());
         (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
         (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
 
         double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
         (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
@@ -571,17 +571,17 @@ Statistics::endGC()
 void
 Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
                        JS::gcreason::Reason reason)
 {
     this->collectedCount = collectedCount;
     this->zoneCount = zoneCount;
     this->compartmentCount = compartmentCount;
 
-    bool first = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
+    bool first = runtime->gc.state() == gc::NO_INCREMENTAL;
     if (first)
         beginGC();
 
     SliceData data(reason, PRMJ_Now(), SystemPageAllocator::GetPageFaultCount());
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
@@ -601,17 +601,17 @@ Statistics::endSlice()
     slices.back().end = PRMJ_Now();
     slices.back().endFaults = SystemPageAllocator::GetPageFaultCount();
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
         (*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
         (*cb)(JS_TELEMETRY_GC_RESET, !!slices.back().resetReason);
     }
 
-    bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
+    bool last = runtime->gc.state() == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
         bool wasFullGC = collectedCount == zoneCount;
         if (sliceCallback)
             (*sliceCallback)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -526,17 +526,17 @@ GCMarker::markDelayedChildren(ArenaHeade
      * allocatedDuringIncremental flag if we continue marking.
      */
 }
 
 bool
 GCMarker::markDelayedChildren(SliceBudget &budget)
 {
     gcstats::MaybeAutoPhase ap;
-    if (runtime()->gc.incrementalState == MARK)
+    if (runtime()->gc.state() == MARK)
         ap.construct(runtime()->gc.stats, gcstats::PHASE_MARK_DELAYED);
 
     JS_ASSERT(unmarkedArenaStackTop);
     do {
         /*
          * If marking gets delayed at the same arena again, we must repeat
          * marking of its things. For that we pop arena from the stack and
          * clear its hasDelayedMarking flag before we begin the marking.
--- a/js/src/jsapi-tests/testGCFinalizeCallback.cpp
+++ b/js/src/jsapi-tests/testGCFinalizeCallback.cpp
@@ -21,17 +21,17 @@ BEGIN_TEST(testGCFinalizeCallback)
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     /* Full GC, incremental. */
     FinalizeCalls = 0;
     JS::PrepareForFullGC(rt);
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
-    CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+    CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
     CHECK(rt->gc.isFull);
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     JS::RootedObject global1(cx, createGlobal());
     JS::RootedObject global2(cx, createGlobal());
     JS::RootedObject global3(cx, createGlobal());
@@ -58,48 +58,48 @@ BEGIN_TEST(testGCFinalizeCallback)
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, single compartment. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
-    CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+    CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gc.isFull);
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, multiple compartments. */
     FinalizeCalls = 0;
     JS::PrepareZoneForGC(global1->zone());
     JS::PrepareZoneForGC(global2->zone());
     JS::PrepareZoneForGC(global3->zone());
     JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
-    CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+    CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gc.isFull);
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
 #ifdef JS_GC_ZEAL
 
     /* Full GC with reset due to new compartment, becoming compartment GC. */
 
     FinalizeCalls = 0;
     JS_SetGCZeal(cx, 9, 1000000);
     JS::PrepareForFullGC(rt);
     js::GCDebugSlice(rt, true, 1);
-    CHECK(rt->gc.incrementalState == js::gc::MARK);
+    CHECK(rt->gc.state() == js::gc::MARK);
     CHECK(rt->gc.isFull);
 
     JS::RootedObject global4(cx, createGlobal());
     js::GCDebugSlice(rt, true, 1);
-    CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+    CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gc.isFull);
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
 
     for (unsigned i = 0; i < FinalizeCalls - 1; ++i)
         CHECK(!IsCompartmentGCBuffer[i]);
     CHECK(IsCompartmentGCBuffer[FinalizeCalls - 1]);
 
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -149,17 +149,17 @@ JS_GetEmptyString(JSRuntime *rt)
     return rt->emptyString;
 }
 
 namespace js {
 
 void
 AssertHeapIsIdle(JSRuntime *rt)
 {
-    JS_ASSERT(rt->gc.heapState == js::Idle);
+    JS_ASSERT(!rt->isHeapBusy());
 }
 
 void
 AssertHeapIsIdle(JSContext *cx)
 {
     AssertHeapIsIdle(cx->runtime());
 }
 
@@ -1921,116 +1921,23 @@ JS_PUBLIC_API(bool)
 JS_IsAboutToBeFinalizedUnbarriered(JSObject **objp)
 {
     return IsObjectAboutToBeFinalized(objp);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
 {
-    switch (key) {
-      case JSGC_MAX_BYTES: {
-        JS_ASSERT(value >= rt->gc.bytes);
-        rt->gc.maxBytes = value;
-        break;
-      }
-      case JSGC_MAX_MALLOC_BYTES:
-        rt->gc.setMaxMallocBytes(value);
-        break;
-      case JSGC_SLICE_TIME_BUDGET:
-        rt->gc.sliceBudget = SliceBudget::TimeBudget(value);
-        break;
-      case JSGC_MARK_STACK_LIMIT:
-        js::SetMarkStackLimit(rt, value);
-        break;
-      case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
-        rt->gc.highFrequencyTimeThreshold = value;
-        break;
-      case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
-        rt->gc.highFrequencyLowLimitBytes = value * 1024 * 1024;
-        break;
-      case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
-        rt->gc.highFrequencyHighLimitBytes = value * 1024 * 1024;
-        break;
-      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
-        rt->gc.highFrequencyHeapGrowthMax = value / 100.0;
-        MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMax / 0.85 > 1.0);
-        break;
-      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
-        rt->gc.highFrequencyHeapGrowthMin = value / 100.0;
-        MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMin / 0.85 > 1.0);
-        break;
-      case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
-        rt->gc.lowFrequencyHeapGrowth = value / 100.0;
-        MOZ_ASSERT(rt->gc.lowFrequencyHeapGrowth / 0.9 > 1.0);
-        break;
-      case JSGC_DYNAMIC_HEAP_GROWTH:
-        rt->gc.dynamicHeapGrowth = value;
-        break;
-      case JSGC_DYNAMIC_MARK_SLICE:
-        rt->gc.dynamicMarkSlice = value;
-        break;
-      case JSGC_ALLOCATION_THRESHOLD:
-        rt->gc.allocationThreshold = value * 1024 * 1024;
-        break;
-      case JSGC_DECOMMIT_THRESHOLD:
-        rt->gc.decommitThreshold = value * 1024 * 1024;
-        break;
-      default:
-        JS_ASSERT(key == JSGC_MODE);
-        rt->setGCMode(JSGCMode(value));
-        JS_ASSERT(rt->gcMode() == JSGC_MODE_GLOBAL ||
-                  rt->gcMode() == JSGC_MODE_COMPARTMENT ||
-                  rt->gcMode() == JSGC_MODE_INCREMENTAL);
-        return;
-    }
+    rt->gc.setParameter(key, value);
 }
 
 JS_PUBLIC_API(uint32_t)
 JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
 {
-    switch (key) {
-      case JSGC_MAX_BYTES:
-        return uint32_t(rt->gc.maxBytes);
-      case JSGC_MAX_MALLOC_BYTES:
-        return rt->gc.maxMallocBytes;
-      case JSGC_BYTES:
-        return uint32_t(rt->gc.bytes);
-      case JSGC_MODE:
-        return uint32_t(rt->gcMode());
-      case JSGC_UNUSED_CHUNKS:
-        return uint32_t(rt->gc.chunkPool.getEmptyCount());
-      case JSGC_TOTAL_CHUNKS:
-        return uint32_t(rt->gc.chunkSet.count() + rt->gc.chunkPool.getEmptyCount());
-      case JSGC_SLICE_TIME_BUDGET:
-        return uint32_t(rt->gc.sliceBudget > 0 ? rt->gc.sliceBudget / PRMJ_USEC_PER_MSEC : 0);
-      case JSGC_MARK_STACK_LIMIT:
-        return rt->gc.marker.maxCapacity();
-      case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
-        return rt->gc.highFrequencyTimeThreshold;
-      case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
-        return rt->gc.highFrequencyLowLimitBytes / 1024 / 1024;
-      case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
-        return rt->gc.highFrequencyHighLimitBytes / 1024 / 1024;
-      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
-        return uint32_t(rt->gc.highFrequencyHeapGrowthMax * 100);
-      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
-        return uint32_t(rt->gc.highFrequencyHeapGrowthMin * 100);
-      case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
-        return uint32_t(rt->gc.lowFrequencyHeapGrowth * 100);
-      case JSGC_DYNAMIC_HEAP_GROWTH:
-        return rt->gc.dynamicHeapGrowth;
-      case JSGC_DYNAMIC_MARK_SLICE:
-        return rt->gc.dynamicMarkSlice;
-      case JSGC_ALLOCATION_THRESHOLD:
-        return rt->gc.allocationThreshold / 1024 / 1024;
-      default:
-        JS_ASSERT(key == JSGC_NUMBER);
-        return uint32_t(rt->gc.number);
-    }
+    return rt->gc.getParameter(key);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameterForThread(JSContext *cx, JSGCParamKey key, uint32_t value)
 {
     JS_ASSERT(key == JSGC_MAX_CODE_CACHE_BYTES);
 }
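Behaviour of the public API is unchanged by the move; the unit conventions that setParameter/getParameter encode (see jsgc.cpp below) are worth keeping in mind on the caller side:

    // Size limits are passed in megabytes and stored internally in bytes.
    JS_SetGCParameter(rt, JSGC_HIGH_FREQUENCY_LOW_LIMIT, 100);   // 100 MiB
    // Heap-growth factors are passed in percent and stored as doubles.
    JS_SetGCParameter(rt, JSGC_LOW_FREQUENCY_HEAP_GROWTH, 150);  // 150% -> 1.5
    uint32_t mode = JS_GetGCParameter(rt, JSGC_MODE);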
 
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -882,56 +882,35 @@ jschar *
 GCDescription::formatJSON(JSRuntime *rt, uint64_t timestamp) const
 {
     return rt->gc.stats.formatJSON(timestamp);
 }
 
 JS_FRIEND_API(void)
 JS::NotifyDidPaint(JSRuntime *rt)
 {
-    if (rt->gcZeal() == gc::ZealFrameVerifierPreValue) {
-        gc::VerifyBarriers(rt, gc::PreBarrierVerifier);
-        return;
-    }
-
-    if (rt->gcZeal() == gc::ZealFrameVerifierPostValue) {
-        gc::VerifyBarriers(rt, gc::PostBarrierVerifier);
-        return;
-    }
-
-    if (rt->gcZeal() == gc::ZealFrameGCValue) {
-        PrepareForFullGC(rt);
-        GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
-        return;
-    }
-
-    if (JS::IsIncrementalGCInProgress(rt) && !rt->gc.interFrameGC) {
-        JS::PrepareForIncrementalGC(rt);
-        GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
-    }
-
-    rt->gc.interFrameGC = false;
+    rt->gc.notifyDidPaint();
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalGCEnabled(JSRuntime *rt)
 {
-    return rt->gc.incrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL;
+    return rt->gc.isIncrementalGCEnabled() && rt->gcMode() == JSGC_MODE_INCREMENTAL;
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalGCInProgress(JSRuntime *rt)
 {
-    return rt->gc.incrementalState != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
+    return rt->gc.state() != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
 }
 
 JS_FRIEND_API(void)
 JS::DisableIncrementalGC(JSRuntime *rt)
 {
-    rt->gc.incrementalEnabled = false;
+    rt->gc.disableIncrementalGC();
 }
 
 JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
   : gc(&rt->gc)
 #if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
   , restartVerifier(false)
 #endif
 {
@@ -956,17 +935,17 @@ extern JS_FRIEND_API(bool)
 JS::IsGenerationalGCEnabled(JSRuntime *rt)
 {
     return rt->gc.isGenerationalGCEnabled();
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalBarrierNeeded(JSRuntime *rt)
 {
-    return rt->gc.incrementalState == gc::MARK && !rt->isHeapBusy();
+    return rt->gc.state() == gc::MARK && !rt->isHeapBusy();
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalBarrierNeeded(JSContext *cx)
 {
     return IsIncrementalBarrierNeeded(cx->runtime());
 }
 
@@ -1189,17 +1168,17 @@ js::UnsafeDefineElement(JSContext *cx, J
 }
 
 JS_FRIEND_API(bool)
 js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg,
                      JS::Handle<js::PropertyDescriptor> descriptor, bool *bp)
 {
     RootedObject obj(cx, objArg);
     RootedId id(cx, idArg);
-    JS_ASSERT(cx->runtime()->gc.heapState == js::Idle);
+    js::AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     assertSameCompartment(cx, obj, id, descriptor.value());
     if (descriptor.hasGetterObject())
         assertSameCompartment(cx, descriptor.getterObject());
     if (descriptor.hasSetterObject())
         assertSameCompartment(cx, descriptor.setterObject());
 
     return DefineOwnProperty(cx, HandleObject(obj), id, descriptor, bp);
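JS::NotifyDidPaint is now a one-line forward to GCRuntime::notifyDidPaint() (defined in jsgc.cpp below), so the embedder-facing contract is untouched. Illustrative caller (the hook name is invented):

    // Called by the embedder once per composited frame.
    static void OnFramePainted(JSRuntime *rt)
    {
        JS::NotifyDidPaint(rt);   // may run a zeal verifier or a GC slice
    }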
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1328,16 +1328,121 @@ js::gc::FinishPersistentRootedChains(JSR
     rt->functionPersistentRooteds.clear();
     rt->idPersistentRooteds.clear();
     rt->objectPersistentRooteds.clear();
     rt->scriptPersistentRooteds.clear();
     rt->stringPersistentRooteds.clear();
     rt->valuePersistentRooteds.clear();
 }
 
+void
+GCRuntime::setParameter(JSGCParamKey key, uint32_t value)
+{
+    switch (key) {
+      case JSGC_MAX_BYTES: {
+        JS_ASSERT(value >= bytes);
+        maxBytes = value;
+        break;
+      }
+      case JSGC_MAX_MALLOC_BYTES:
+        setMaxMallocBytes(value);
+        break;
+      case JSGC_SLICE_TIME_BUDGET:
+        sliceBudget = SliceBudget::TimeBudget(value);
+        break;
+      case JSGC_MARK_STACK_LIMIT:
+        js::SetMarkStackLimit(rt, value);
+        break;
+      case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
+        highFrequencyTimeThreshold = value;
+        break;
+      case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
+        highFrequencyLowLimitBytes = value * 1024 * 1024;
+        break;
+      case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
+        highFrequencyHighLimitBytes = value * 1024 * 1024;
+        break;
+      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
+        highFrequencyHeapGrowthMax = value / 100.0;
+        MOZ_ASSERT(highFrequencyHeapGrowthMax / 0.85 > 1.0);
+        break;
+      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
+        highFrequencyHeapGrowthMin = value / 100.0;
+        MOZ_ASSERT(highFrequencyHeapGrowthMin / 0.85 > 1.0);
+        break;
+      case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
+        lowFrequencyHeapGrowth = value / 100.0;
+        MOZ_ASSERT(lowFrequencyHeapGrowth / 0.9 > 1.0);
+        break;
+      case JSGC_DYNAMIC_HEAP_GROWTH:
+        dynamicHeapGrowth = value;
+        break;
+      case JSGC_DYNAMIC_MARK_SLICE:
+        dynamicMarkSlice = value;
+        break;
+      case JSGC_ALLOCATION_THRESHOLD:
+        allocationThreshold = value * 1024 * 1024;
+        break;
+      case JSGC_DECOMMIT_THRESHOLD:
+        decommitThreshold = value * 1024 * 1024;
+        break;
+      default:
+        JS_ASSERT(key == JSGC_MODE);
+        mode = JSGCMode(value);
+        JS_ASSERT(mode == JSGC_MODE_GLOBAL ||
+                  mode == JSGC_MODE_COMPARTMENT ||
+                  mode == JSGC_MODE_INCREMENTAL);
+        return;
+    }
+}
+
+uint32_t
+GCRuntime::getParameter(JSGCParamKey key)
+{
+    switch (key) {
+      case JSGC_MAX_BYTES:
+        return uint32_t(maxBytes);
+      case JSGC_MAX_MALLOC_BYTES:
+        return maxMallocBytes;
+      case JSGC_BYTES:
+        return uint32_t(bytes);
+      case JSGC_MODE:
+        return uint32_t(rt->gcMode());
+      case JSGC_UNUSED_CHUNKS:
+        return uint32_t(chunkPool.getEmptyCount());
+      case JSGC_TOTAL_CHUNKS:
+        return uint32_t(chunkSet.count() + chunkPool.getEmptyCount());
+      case JSGC_SLICE_TIME_BUDGET:
+        return uint32_t(sliceBudget > 0 ? sliceBudget / PRMJ_USEC_PER_MSEC : 0);
+      case JSGC_MARK_STACK_LIMIT:
+        return marker.maxCapacity();
+      case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
+        return highFrequencyTimeThreshold;
+      case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
+        return highFrequencyLowLimitBytes / 1024 / 1024;
+      case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
+        return highFrequencyHighLimitBytes / 1024 / 1024;
+      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
+        return uint32_t(highFrequencyHeapGrowthMax * 100);
+      case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
+        return uint32_t(highFrequencyHeapGrowthMin * 100);
+      case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
+        return uint32_t(lowFrequencyHeapGrowth * 100);
+      case JSGC_DYNAMIC_HEAP_GROWTH:
+        return dynamicHeapGrowth;
+      case JSGC_DYNAMIC_MARK_SLICE:
+        return dynamicMarkSlice;
+      case JSGC_ALLOCATION_THRESHOLD:
+        return allocationThreshold / 1024 / 1024;
+      default:
+        JS_ASSERT(key == JSGC_NUMBER);
+        return uint32_t(number);
+    }
+}
+
 template <typename T> struct BarrierOwner {};
 template <typename T> struct BarrierOwner<T *> { typedef T result; };
 template <> struct BarrierOwner<Value> { typedef HeapValue result; };
 
 bool
 GCRuntime::addBlackRootsTracer(JSTraceDataOp traceOp, void *data)
 {
     AssertHeapIsIdle(rt);
@@ -1617,21 +1722,26 @@ ArenaLists::prepareForIncrementalGC(JSRu
         if (!freeList->isEmpty()) {
             ArenaHeader *aheader = freeList->arenaHeader();
             aheader->allocatedDuringIncremental = true;
             rt->gc.marker.delayMarkingArena(aheader);
         }
     }
 }
 
-static inline void
-PushArenaAllocatedDuringSweep(JSRuntime *runtime, ArenaHeader *arena)
-{
-    arena->setNextAllocDuringSweep(runtime->gc.arenasAllocatedDuringSweep);
-    runtime->gc.arenasAllocatedDuringSweep = arena;
+inline void
+GCRuntime::arenaAllocatedDuringGC(JS::Zone *zone, ArenaHeader *arena)
+{
+    if (zone->needsBarrier()) {
+        arena->allocatedDuringIncremental = true;
+        marker.delayMarkingArena(arena);
+    } else if (zone->isGCSweeping()) {
+        arena->setNextAllocDuringSweep(arenasAllocatedDuringSweep);
+        arenasAllocatedDuringSweep = arena;
+    }
 }
 
 inline void *
 ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind,
                                     AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation)
 {
     /*
      * Parallel JS Note:
@@ -1682,24 +1792,18 @@ ArenaLists::allocateFromArenaInline(Zone
 
         /*
          * Move the free span stored in the arena to the free list and
          * allocate from it.
          */
         FreeSpan firstFreeSpan = aheader->getFirstFreeSpan();
         freeLists[thingKind].setHead(&firstFreeSpan);
         aheader->setAsFullyUsed();
-        if (MOZ_UNLIKELY(zone->wasGCStarted())) {
-            if (zone->needsBarrier()) {
-                aheader->allocatedDuringIncremental = true;
-                zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader);
-            } else if (zone->isGCSweeping()) {
-                PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
-            }
-        }
+        if (MOZ_UNLIKELY(zone->wasGCStarted()))
+            zone->runtimeFromMainThread()->gc.arenaAllocatedDuringGC(zone, aheader);
         void *thing = freeLists[thingKind].allocate(Arena::thingSize(thingKind));
         JS_ASSERT(thing);   // This allocation is infallible.
         return thing;
     }
 
     /* Make sure we hold the GC lock before we call pickChunk. */
     JSRuntime *rt = zone->runtimeFromAnyThread();
     if (!maybeLock.locked())
@@ -1717,24 +1821,18 @@ ArenaLists::allocateFromArenaInline(Zone
      * recently added arena will be used first for allocations. This improves
      * cache locality.
      */
     JS_ASSERT(al->isCursorAtEnd());
     aheader = chunk->allocateArena(zone, thingKind);
     if (!aheader)
         return nullptr;
 
-    if (MOZ_UNLIKELY(zone->wasGCStarted())) {
-        if (zone->needsBarrier()) {
-            aheader->allocatedDuringIncremental = true;
-            zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader);
-        } else if (zone->isGCSweeping()) {
-            PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
-        }
-    }
+    if (MOZ_UNLIKELY(zone->wasGCStarted()))
+        rt->gc.arenaAllocatedDuringGC(zone, aheader);
     al->insertAtStart(aheader);
 
     /*
      * Allocate from a newly allocated arena. The arena will have been set up
      * as fully used during the initialization so we have to re-mark it as
      * empty before allocating.
      */
     JS_ASSERT(!aheader->hasFreeThings());
@@ -2263,18 +2361,18 @@ GCRuntime::maybeGC(Zone *zone)
             GCSlice(rt, GC_SHRINK, JS::gcreason::MAYBEGC);
         } else {
             nextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 #endif
 }
 
-static void
-DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
+void
+GCRuntime::decommitArenasFromAvailableList(Chunk **availableListHeadp)
 {
     Chunk *chunk = *availableListHeadp;
     if (!chunk)
         return;
 
     /*
      * Decommit is expensive so we avoid holding the GC lock while calling it.
      *
@@ -2315,19 +2413,19 @@ DecommitArenasFromAvailableList(JSRuntim
             bool ok;
             {
                 /*
                  * If the main thread waits for the decommit to finish, skip
                  * potentially expensive unlock/lock pair on the contested
                  * lock.
                  */
                 Maybe<AutoUnlockGC> maybeUnlock;
-                if (!rt->isHeapBusy())
+                if (!isHeapBusy())
                     maybeUnlock.construct(rt);
-                ok = rt->gc.pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize);
+                ok = pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize);
             }
 
             if (ok) {
                 ++chunk->info.numArenasFree;
                 chunk->decommittedArenas.set(arenaIndex);
             } else {
                 chunk->addArenaToFreeList(rt, aheader);
             }
@@ -2347,17 +2445,17 @@ DecommitArenasFromAvailableList(JSRuntim
                     if (!prev->hasAvailableArenas())
                         insertPoint = availableListHeadp;
                 }
                 chunk->insertToAvailableList(insertPoint);
             } else {
                 JS_ASSERT(chunk->info.prevp);
             }
 
-            if (rt->gc.chunkAllocationSinceLastGC || !ok) {
+            if (chunkAllocationSinceLastGC || !ok) {
                 /*
                  * The allocator thread has started to get new chunks. We should stop
                  * to avoid decommitting arenas in just allocated chunks.
                  */
                 return;
             }
         }
 
@@ -2372,77 +2470,77 @@ DecommitArenasFromAvailableList(JSRuntim
         /*
          * prevp exists and is not the list head. It must point to the next
          * field of the previous chunk.
          */
         chunk = chunk->getPrevious();
     }
 }
 
-static void
-DecommitArenas(JSRuntime *rt)
-{
-    DecommitArenasFromAvailableList(rt, &rt->gc.systemAvailableChunkListHead);
-    DecommitArenasFromAvailableList(rt, &rt->gc.userAvailableChunkListHead);
+void
+GCRuntime::decommitArenas()
+{
+    decommitArenasFromAvailableList(&systemAvailableChunkListHead);
+    decommitArenasFromAvailableList(&userAvailableChunkListHead);
 }
 
 /* Must be called with the GC lock taken. */
-static void
-ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink)
+void
+GCRuntime::expireChunksAndArenas(bool shouldShrink)
 {
 #ifdef JSGC_FJGENERATIONAL
     rt->threadPool.pruneChunkCache();
 #endif
 
-    if (Chunk *toFree = rt->gc.chunkPool.expire(rt, shouldShrink)) {
+    if (Chunk *toFree = chunkPool.expire(rt, shouldShrink)) {
         AutoUnlockGC unlock(rt);
         FreeChunkList(rt, toFree);
     }
 
     if (shouldShrink)
-        DecommitArenas(rt);
-}
-
-static void
-SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread)
+        decommitArenas();
+}
+
+void
+GCRuntime::sweepBackgroundThings(bool onBackgroundThread)
 {
     /*
      * We must finalize in the correct order, see comments in
      * finalizeObjects.
      */
     FreeOp fop(rt, false);
     for (int phase = 0 ; phase < BackgroundPhaseCount ; ++phase) {
-        for (Zone *zone = rt->gc.sweepingZones; zone; zone = zone->gcNextGraphNode) {
+        for (Zone *zone = sweepingZones; zone; zone = zone->gcNextGraphNode) {
             for (int index = 0 ; index < BackgroundPhaseLength[phase] ; ++index) {
                 AllocKind kind = BackgroundPhases[phase][index];
                 ArenaHeader *arenas = zone->allocator.arenas.arenaListsToSweep[kind];
                 if (arenas)
                     ArenaLists::backgroundFinalize(&fop, arenas, onBackgroundThread);
             }
         }
     }
 
-    rt->gc.sweepingZones = nullptr;
-}
-
-#ifdef JS_THREADSAFE
-static void
-AssertBackgroundSweepingFinished(JSRuntime *rt)
-{
-#ifdef DEBUG
-    JS_ASSERT(!rt->gc.sweepingZones);
+    sweepingZones = nullptr;
+}
+
+void
+GCRuntime::assertBackgroundSweepingFinished()
+{
+#if defined(JS_THREADSAFE) && defined(DEBUG)
+    JS_ASSERT(!sweepingZones);
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
             JS_ASSERT(zone->allocator.arenas.doneBackgroundFinalize(AllocKind(i)));
         }
     }
 #endif
 }
 
+#ifdef JS_THREADSAFE
 unsigned
 js::GetCPUCount()
 {
     static unsigned ncpus = 0;
     if (ncpus == 0) {
 # ifdef XP_WIN
         SYSTEM_INFO sysinfo;
         GetSystemInfo(&sysinfo);
@@ -2636,31 +2734,31 @@ GCHelperState::startBackgroundShrink()
 void
 GCHelperState::waitBackgroundSweepEnd()
 {
 #ifdef JS_THREADSAFE
     AutoLockGC lock(rt);
     while (state() == SWEEPING)
         waitForBackgroundThread();
     if (rt->gc.incrementalState == NO_INCREMENTAL)
-        AssertBackgroundSweepingFinished(rt);
+        rt->gc.assertBackgroundSweepingFinished();
 #endif /* JS_THREADSAFE */
 }
 
 void
 GCHelperState::waitBackgroundSweepOrAllocEnd()
 {
 #ifdef JS_THREADSAFE
     AutoLockGC lock(rt);
     if (state() == ALLOCATING)
         setState(CANCEL_ALLOCATION);
     while (state() == SWEEPING || state() == CANCEL_ALLOCATION)
         waitForBackgroundThread();
     if (rt->gc.incrementalState == NO_INCREMENTAL)
-        AssertBackgroundSweepingFinished(rt);
+        rt->gc.assertBackgroundSweepingFinished();
 #endif /* JS_THREADSAFE */
 }
 
 /* Must be called with the GC lock taken. */
 inline void
 GCHelperState::startBackgroundAllocationIfIdle()
 {
 #ifdef JS_THREADSAFE
@@ -2692,17 +2790,17 @@ GCHelperState::replenishAndFreeLater(voi
 /* Must be called with the GC lock taken. */
 void
 GCHelperState::doSweep()
 {
     if (sweepFlag) {
         sweepFlag = false;
         AutoUnlockGC unlock(rt);
 
-        SweepBackgroundThings(rt, true);
+        rt->gc.sweepBackgroundThings(true);
 
         if (freeCursor) {
             void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
             freeElementsAndArray(array, freeCursor);
             freeCursor = freeCursorEnd = nullptr;
         } else {
             JS_ASSERT(!freeCursorEnd);
         }
@@ -2711,26 +2809,26 @@ GCHelperState::doSweep()
             freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
         }
         freeVector.resize(0);
 
         rt->freeLifoAlloc.freeAll();
     }
 
     bool shrinking = shrinkFlag;
-    ExpireChunksAndArenas(rt, shrinking);
+    rt->gc.expireChunksAndArenas(shrinking);
 
     /*
      * The main thread may have called ShrinkGCBuffers while
      * expireChunksAndArenas(false) was running, so we recheck the flag
      * afterwards.
      */
     if (!shrinking && shrinkFlag) {
         shrinkFlag = false;
-        ExpireChunksAndArenas(rt, true);
+        rt->gc.expireChunksAndArenas(true);
     }
 }
 #endif /* JS_THREADSAFE */
 
 bool
 GCHelperState::onBackgroundThread()
 {
 #ifdef JS_THREADSAFE
@@ -4271,17 +4369,17 @@ GCRuntime::endSweepPhase(JSGCInvocationK
         if (!sweepOnBackgroundThread) {
             /*
              * Destroy arenas after we finished the sweeping so finalizers can
              * safely use IsAboutToBeFinalized(). This is done on the
              * GCHelperState if possible. We acquire the lock only because
              * Expire needs to unlock it for other callers.
              */
             AutoLockGC lock(rt);
-            ExpireChunksAndArenas(rt, gckind == GC_SHRINK);
+            expireChunksAndArenas(gckind == GC_SHRINK);
         }
     }
 
     {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_FINALIZE_END);
 
         for (Callback<JSFinalizeCallback> *p = rt->gc.finalizeCallbacks.begin();
              p < rt->gc.finalizeCallbacks.end(); p++)
@@ -4300,17 +4398,17 @@ GCRuntime::endSweepPhase(JSGCInvocationK
         zone->gcNextGraphNode = sweepingZones;
         sweepingZones = zone;
     }
 
     /* If not sweeping on background thread then we must do it here. */
     if (!sweepOnBackgroundThread) {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_DESTROY);
 
-        SweepBackgroundThings(rt, false);
+        sweepBackgroundThings(false);
 
         rt->freeLifoAlloc.freeAll();
 
         /* Ensure the compartments get swept if it's the last GC. */
         if (lastGC)
             sweepZones(&fop, lastGC);
     }
 
@@ -4345,34 +4443,16 @@ GCRuntime::endSweepPhase(JSGCInvocationK
     }
 #endif
 
     finishMarkingValidation();
 
     lastGCTime = PRMJ_Now();
 }
 
-namespace {
-
-/* ...while this class is to be used only for garbage collection. */
-class AutoGCSession
-{
-    GCRuntime *gc;
-    AutoTraceSession session;
-    bool canceled;
-
-  public:
-    explicit AutoGCSession(GCRuntime *gc);
-    ~AutoGCSession();
-
-    void cancel() { canceled = true; }
-};
-
-} /* anonymous namespace */
-
 /* Start a new heap session. */
 AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
   : lock(rt),
     runtime(rt),
     prevState(rt->gc.heapState)
 {
     JS_ASSERT(rt->gc.isAllocAllowed());
     JS_ASSERT(rt->gc.heapState == Idle);
@@ -4415,59 +4495,16 @@ AutoTraceSession::~AutoTraceSession()
 #else
         MOZ_CRASH();
 #endif
     } else {
         runtime->gc.heapState = prevState;
     }
 }
 
-AutoGCSession::AutoGCSession(GCRuntime *gc)
-  : gc(gc),
-    session(gc->rt, MajorCollecting),
-    canceled(false)
-{
-    gc->isNeeded = false;
-    gc->interFrameGC = true;
-
-    gc->number++;
-
-    // It's ok if threads other than the main thread have suppressGC set, as
-    // they are operating on zones which will not be collected from here.
-    JS_ASSERT(!gc->rt->mainThread.suppressGC);
-
-    // Assert if this is a GC unsafe region.
-    JS::AutoAssertOnGC::VerifyIsSafeToGC(gc->rt);
-}
-
-AutoGCSession::~AutoGCSession()
-{
-    if (canceled)
-        return;
-
-#ifndef JS_MORE_DETERMINISTIC
-    gc->nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
-#endif
-
-    gc->chunkAllocationSinceLastGC = false;
-
-#ifdef JS_GC_ZEAL
-    /* Keeping these around after a GC is dangerous. */
-    gc->clearSelectedForMarking();
-#endif
-
-    /* Clear gcMallocBytes for all compartments */
-    for (ZonesIter zone(gc->rt, WithAtoms); !zone.done(); zone.next()) {
-        zone->resetGCMallocBytes();
-        zone->unscheduleGC();
-    }
-
-    gc->resetMallocBytes();
-}
-
 AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector)
   : runtime(rt),
     selector(selector)
 {
     for (ZonesIter zone(rt, selector); !zone.done(); zone.next())
         zone->allocator.arenas.copyFreeListsToArenas();
 }
 
@@ -4764,17 +4801,17 @@ GCRuntime::incrementalCollectSlice(int64
 IncrementalSafety
 gc::IsIncrementalGCSafe(JSRuntime *rt)
 {
     JS_ASSERT(!rt->mainThread.suppressGC);
 
     if (rt->keepAtoms())
         return IncrementalSafety::Unsafe("keepAtoms set");
 
-    if (!rt->gc.incrementalEnabled)
+    if (!rt->gc.isIncrementalGCEnabled())
         return IncrementalSafety::Unsafe("incremental permanently disabled");
 
     return IncrementalSafety::Safe();
 }
 
 void
 GCRuntime::budgetIncrementalGC(int64_t *budget)
 {
@@ -4829,17 +4866,29 @@ GCRuntime::budgetIncrementalGC(int64_t *
  *
  * Returns true if we "reset" an existing incremental GC, which would force us
  * to run another cycle.
  */
 MOZ_NEVER_INLINE bool
 GCRuntime::gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                    JS::gcreason::Reason reason)
 {
-    AutoGCSession gcsession(this);
+    AutoTraceSession session(rt, MajorCollecting);
+
+    isNeeded = false;
+    interFrameGC = true;
+
+    number++;
+
+    // It's ok if threads other than the main thread have suppressGC set, as
+    // they are operating on zones which will not be collected from here.
+    JS_ASSERT(!rt->mainThread.suppressGC);
+
+    // Assert if this is a GC unsafe region.
+    JS::AutoAssertOnGC::VerifyIsSafeToGC(rt);
 
     /*
      * As we are about to purge caches and clear the mark bits we must wait for
      * any background finalization to finish. We must also wait for the
      * background allocation to finish so we can avoid taking the GC lock
      * when manipulating the chunks during the GC.
      */
     {
@@ -4854,22 +4903,40 @@ GCRuntime::gcCycle(bool incremental, int
         resetIncrementalGC("requested");
         stats.nonincremental("requested");
         budget = SliceBudget::Unlimited;
     } else {
         budgetIncrementalGC(&budget);
     }
 
     /* The GC was reset, so we need a do-over. */
-    if (prevState != NO_INCREMENTAL && incrementalState == NO_INCREMENTAL) {
-        gcsession.cancel();
+    if (prevState != NO_INCREMENTAL && incrementalState == NO_INCREMENTAL)
         return true;
-    }
 
     incrementalCollectSlice(budget, reason, gckind);
+
+#ifndef JS_MORE_DETERMINISTIC
+    nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
+#endif
+
+    chunkAllocationSinceLastGC = false;
+
+#ifdef JS_GC_ZEAL
+    /* Keeping these around after a GC is dangerous. */
+    clearSelectedForMarking();
+#endif
+
+    /* Clear gcMallocBytes for all compartments */
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+        zone->resetGCMallocBytes();
+        zone->unscheduleGC();
+    }
+
+    resetMallocBytes();
+
     return false;
 }
 
 #ifdef JS_GC_ZEAL
 static bool
 IsDeterministicGCReason(JS::gcreason::Reason reason)
 {
     if (reason > JS::gcreason::DEBUG_GC &&
@@ -5052,16 +5119,45 @@ GCRuntime::gcSlice(JSGCInvocationKind gc
 }
 
 void
 js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     rt->gc.collect(true, SliceBudget::Unlimited, gckind, reason);
 }
 
+void
+GCRuntime::notifyDidPaint()
+{
+#ifdef JS_GC_ZEAL
+    if (zealMode == ZealFrameVerifierPreValue) {
+        verifyPreBarriers();
+        return;
+    }
+
+    if (zealMode == ZealFrameVerifierPostValue) {
+        verifyPostBarriers();
+        return;
+    }
+
+    if (zealMode == ZealFrameGCValue) {
+        JS::PrepareForFullGC(rt);
+        gcSlice(GC_NORMAL, JS::gcreason::REFRESH_FRAME);
+        return;
+    }
+#endif
+
+    if (JS::IsIncrementalGCInProgress(rt) && !interFrameGC) {
+        JS::PrepareForIncrementalGC(rt);
+        gcSlice(GC_NORMAL, JS::gcreason::REFRESH_FRAME);
+    }
+
+    interFrameGC = false;
+}
+
 static bool
 ZonesSelected(JSRuntime *rt)
 {
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCScheduled())
             return true;
     }
     return false;
@@ -5086,24 +5182,30 @@ js::PrepareForDebugGC(JSRuntime *rt)
 {
     if (!ZonesSelected(rt))
         JS::PrepareForFullGC(rt);
 }
 
 JS_FRIEND_API(void)
 JS::ShrinkGCBuffers(JSRuntime *rt)
 {
+    rt->gc.shrinkBuffers();
+}
+
+void
+GCRuntime::shrinkBuffers()
+{
     AutoLockHelperThreadState helperLock;
     AutoLockGC lock(rt);
     JS_ASSERT(!rt->isHeapBusy());
 
 #ifdef JS_THREADSAFE
-    rt->gc.startBackgroundShrink();
+    helperState.startBackgroundShrink();
 #else
-    ExpireChunksAndArenas(rt, true);
+    expireChunksAndArenas(true);
 #endif
 }
 
 void
 js::MinorGC(JSRuntime *rt, JS::gcreason::Reason reason)
 {
     rt->gc.minorGC(reason);
 }
@@ -5549,37 +5651,37 @@ ArenaLists::containsArena(JSRuntime *rt,
             return true;
     }
     return false;
 }
 
 
 AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSContext *cx)
   : runtime(cx->runtime()),
-    markCount(runtime->gc.objectsMarkedInDeadZones),
+    markCount(runtime->gc.objectsMarkedInDeadZonesCount()),
     inIncremental(JS::IsIncrementalGCInProgress(runtime)),
-    manipulatingDeadZones(runtime->gc.manipulatingDeadZones)
-{
-    runtime->gc.manipulatingDeadZones = true;
+    manipulatingDeadZones(runtime->gc.isManipulatingDeadZones())
+{
+    runtime->gc.setManipulatingDeadZones(true);
 }
 
 AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj)
   : runtime(obj->compartment()->runtimeFromMainThread()),
-    markCount(runtime->gc.objectsMarkedInDeadZones),
+    markCount(runtime->gc.objectsMarkedInDeadZonesCount()),
     inIncremental(JS::IsIncrementalGCInProgress(runtime)),
-    manipulatingDeadZones(runtime->gc.manipulatingDeadZones)
-{
-    runtime->gc.manipulatingDeadZones = true;
+    manipulatingDeadZones(runtime->gc.isManipulatingDeadZones())
+{
+    runtime->gc.setManipulatingDeadZones(true);
 }
 
 AutoMaybeTouchDeadZones::~AutoMaybeTouchDeadZones()
 {
-    runtime->gc.manipulatingDeadZones = manipulatingDeadZones;
-
-    if (inIncremental && runtime->gc.objectsMarkedInDeadZones != markCount) {
+    runtime->gc.setManipulatingDeadZones(manipulatingDeadZones);
+
+    if (inIncremental && runtime->gc.objectsMarkedInDeadZonesCount() != markCount) {
         JS::PrepareForFullGC(runtime);
         js::GC(runtime, GC_NORMAL, JS::gcreason::TRANSPLANT);
     }
 }
 
 AutoSuppressGC::AutoSuppressGC(ExclusiveContext *cx)
   : suppressGC_(cx->perThreadData->suppressGC)
 {
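With AutoGCSession deleted, its constructor and destructor bodies now live inline in gcCycle around the AutoTraceSession, and the old cancel() flag becomes a plain early return. The resulting control flow, condensed from the hunks above (a sketch, not complete code):

    MOZ_NEVER_INLINE bool
    GCRuntime::gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                       JS::gcreason::Reason reason)
    {
        AutoTraceSession session(rt, MajorCollecting);  // RAII heapState flip
        isNeeded = false;                               // former AutoGCSession ctor
        interFrameGC = true;
        number++;
        // ... wait for background work, purge, pick a budget ...
        if (prevState != NO_INCREMENTAL && incrementalState == NO_INCREMENTAL)
            return true;                                // reset: do-over, skip tail work
        incrementalCollectSlice(budget, reason, gckind);
        chunkAllocationSinceLastGC = false;             // former AutoGCSession dtor
        // ... clear per-zone GC malloc counters, resetMallocBytes() ...
        return false;
    }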
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -22,19 +22,19 @@ class Shape;
  * for more details.
  */
 struct AutoMarkInDeadZone
 {
     explicit AutoMarkInDeadZone(JS::Zone *zone)
       : zone(zone),
         scheduled(zone->scheduledForDestruction)
     {
-        JSRuntime *rt = zone->runtimeFromMainThread();
-        if (rt->gc.manipulatingDeadZones && zone->scheduledForDestruction) {
-            rt->gc.objectsMarkedInDeadZones++;
+        gc::GCRuntime &gc = zone->runtimeFromMainThread()->gc;
+        if (gc.isManipulatingDeadZones() && zone->scheduledForDestruction) {
+            gc.incObjectsMarkedInDeadZone();
             zone->scheduledForDestruction = false;
         }
     }
 
     ~AutoMarkInDeadZone() {
         zone->scheduledForDestruction = scheduled;
     }
 
@@ -448,17 +448,17 @@ typedef CompartmentsIterT<GCZonesIter> G
 /* Iterates over all zones in the current zone group. */
 class GCZoneGroupIter {
   private:
     JS::Zone *current;
 
   public:
     explicit GCZoneGroupIter(JSRuntime *rt) {
         JS_ASSERT(rt->isHeapBusy());
-        current = rt->gc.currentZoneGroup;
+        current = rt->gc.getCurrentZoneGroup();
     }
 
     bool done() const { return !current; }
 
     void next() {
         JS_ASSERT(!done());
         current = current->nextNodeInGroup();
     }
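GCZoneGroupIter now fetches the group head through getCurrentZoneGroup(); iteration is otherwise unchanged. A minimal loop using only the members shown above:

    for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
        // ... per-zone work for the current zone group ...
    }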
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1422,17 +1422,17 @@ NewObject(ExclusiveContext *cx, types::T
                                     !cx->compartment()->options().getTrace();
     if (clasp->trace &&
         !globalWithoutCustomTrace &&
         !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
     {
         if (!cx->shouldBeJSContext())
             return nullptr;
         JSRuntime *rt = cx->asJSContext()->runtime();
-        rt->gc.incrementalEnabled = false;
+        rt->gc.disableIncrementalGC();
 
 #ifdef DEBUG
         if (rt->gcMode() == JSGC_MODE_INCREMENTAL) {
             fprintf(stderr,
                     "The class %s has a trace hook but does not declare the\n"
                     "JSCLASS_IMPLEMENTS_BARRIERS flag. Please ensure that it correctly\n"
                     "implements write barriers and then set the flag.\n",
                     clasp->name);
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -953,24 +953,17 @@ struct JSRuntime : public JS::shadow::Ru
         gc.marker.setGCMode(mode);
     }
 
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
 
-    // Performance note: if isFJMinorCollecting turns out to be slow
-    // because reading the counter is slow then we may be able to
-    // augment the counter with a volatile flag that is set iff the
-    // counter is greater than zero.  (It will require some care to
-    // make sure the two variables stay in sync.)
-    bool isFJMinorCollecting() { return gc.fjCollectionCounter > 0; }
-    void incFJMinorCollecting() { gc.fjCollectionCounter++; }
-    void decFJMinorCollecting() { gc.fjCollectionCounter--; }
+    bool isFJMinorCollecting() { return gc.isFJMinorCollecting(); }
 
     int gcZeal() { return gc.zeal(); }
 
     void lockGC() {
         assertCanLock(js::GCLock);
         gc.lockGC();
     }