Bug 988486 - Make some GCRuntime members private and add necessary accessors r=terrence
author: Jon Coppeard <jcoppeard@mozilla.com>
date: Mon, 19 May 2014 11:09:55 +0100
changeset 202967 31984278765e18f3faa7bc5fa5966884e08eed17
parent 202966 2ee1b111863627ff72a81b222ecad5f0f0b4cb6e
child 202968 0ad2c38c664983201911bb4167b588121e51001a
push id: 3741
push user: asasaki@mozilla.com
push date: Mon, 21 Jul 2014 20:25:18 +0000
treeherder: mozilla-beta@4d6f46f5af68
reviewers: terrence
bugs: 988486
milestone: 32.0a1
js/src/builtin/TestingFunctions.cpp
js/src/gc/GCInternals.h
js/src/gc/GCRuntime.h
js/src/gc/Iteration.cpp
js/src/gc/Nursery.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Verifier.cpp
js/src/jsapi.cpp
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jsopcode.cpp
js/src/vm/ForkJoin.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -440,17 +440,17 @@ GCPreserveCode(JSContext *cx, unsigned a
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 0) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    cx->runtime()->gc.alwaysPreserveCode = true;
+    cx->runtime()->gc.setAlwaysPreserveCode();
 
     args.rval().setUndefined();
     return true;
 }
 
 #ifdef JS_GC_ZEAL
 static bool
 GCZeal(JSContext *cx, unsigned argc, Value *vp)
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -14,22 +14,16 @@
 #include "vm/Runtime.h"
 
 namespace js {
 namespace gc {
 
 void
 MarkPersistentRootedChains(JSTracer *trc);
 
-void
-MarkRuntime(JSTracer *trc, bool useSavedRoots = false);
-
-void
-BufferGrayRoots(GCMarker *gcmarker);
-
 class AutoCopyFreeListToArenas
 {
     JSRuntime *runtime;
     ZoneSelector selector;
 
   public:
     AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector);
     ~AutoCopyFreeListToArenas();
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -124,35 +124,85 @@ class GCRuntime
     void minorGC(JS::gcreason::Reason reason);
     void minorGC(JSContext *cx, JS::gcreason::Reason reason);
     void gcIfNeeded(JSContext *cx);
     void collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis);
     void runDebugGC();
 
+    void markRuntime(JSTracer *trc, bool useSavedRoots = false);
+
+  public:
+    // Internal public interface
+    void recordNativeStackTop();
+    void notifyRequestEnd() { conservativeGC.updateForRequestEnd(); }
+    bool isBackgroundSweeping() { return helperThread.sweeping(); }
+    void waitBackgroundSweepEnd() { helperThread.waitBackgroundSweepEnd(); }
+    void waitBackgroundSweepOrAllocEnd() { helperThread.waitBackgroundSweepOrAllocEnd(); }
+    void startBackgroundShrink() { helperThread.startBackgroundShrink(); }
+    void freeLater(void *p) { helperThread.freeLater(p); }
+#ifdef DEBUG
+    bool onBackgroundThread() { return helperThread.onBackgroundThread(); }
+#endif
+
+#ifdef JS_THREADSAFE
+    void assertCanLock() {
+        JS_ASSERT(lockOwner != PR_GetCurrentThread());
+    }
+#endif
+
+    void lockGC() {
+#ifdef JS_THREADSAFE
+        PR_Lock(lock);
+        JS_ASSERT(!lockOwner);
+#ifdef DEBUG
+        lockOwner = PR_GetCurrentThread();
+#endif
+#endif
+    }
+
+    void unlockGC() {
+#ifdef JS_THREADSAFE
+        JS_ASSERT(lockOwner == PR_GetCurrentThread());
+        lockOwner = nullptr;
+        PR_Unlock(lock);
+#endif
+    }
+
+#ifdef DEBUG
+    bool isAllocAllowed() { return noGCOrAllocationCheck == 0; }
+    void disallowAlloc() { ++noGCOrAllocationCheck; }
+    void allowAlloc() {
+        JS_ASSERT(!isAllocAllowed());
+        --noGCOrAllocationCheck;
+    }
+#endif
+
+    void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
+
   private:
     // For ArenaLists::allocateFromArenaInline()
     friend class ArenaLists;
     Chunk *pickChunk(Zone *zone);
 
     inline bool wantBackgroundAllocation() const;
 
     bool initGCZeal();
-    void recordNativeStackTopForGC();
     void requestInterrupt(JS::gcreason::Reason reason);
     bool gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void budgetIncrementalGC(int64_t *budget);
     void resetIncrementalGC(const char *reason);
     void incrementalCollectSlice(int64_t budget, JS::gcreason::Reason reason,
                                  JSGCInvocationKind gckind);
     void pushZealSelectedObjects();
     bool beginMarkPhase();
     bool shouldPreserveJITCode(JSCompartment *comp, int64_t currentTime);
+    void bufferGrayRoots();
     bool drainMarkStack(SliceBudget &sliceBudget, gcstats::Phase phase);
     template <class CompartmentIterT> void markWeakReferences(gcstats::Phase phase);
     void markWeakReferencesInCurrentGroup(gcstats::Phase phase);
     template <class ZoneIterT, class CompartmentIterT> void markGrayReferences();
     void markGrayReferencesInCurrentGroup();
     void beginSweepPhase(bool lastGC);
     void findZoneGroups();
     bool findZoneEdgesForWeakMaps();
@@ -164,16 +214,18 @@ class GCRuntime
     bool sweepPhase(SliceBudget &sliceBudget);
     void endSweepPhase(JSGCInvocationKind gckind, bool lastGC);
     void sweepZones(FreeOp *fop, bool lastGC);
 
     void computeNonIncrementalMarkingForValidation();
     void validateIncrementalMarking();
     void finishMarkingValidation();
 
+    void markConservativeStackRoots(JSTracer *trc, bool useSavedRoots);
+
 #ifdef DEBUG
     void checkForCompartmentMismatches();
     void markAllWeakReferences(gcstats::Phase phase);
     void markAllGrayReferences();
 #endif
 
   public:  // Internal state, public for now
     JSRuntime             *rt;
@@ -447,32 +499,33 @@ class GCRuntime
     size_t                systemPageSize;
 
     /* The OS allocation granularity may not match the page size. */
     size_t                systemAllocGranularity;
 
     /* Strong references on scripts held for PCCount profiling API. */
     js::ScriptAndCountsVector *scriptAndCountsVector;
 
+  private:
     /* Always preserve JIT code during GCs, for testing. */
     bool                  alwaysPreserveCode;
 
 #ifdef DEBUG
     size_t                noGCOrAllocationCheck;
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperThread. */
-    PRLock   *lock;
+    PRLock                *lock;
     mozilla::DebugOnly<PRThread *>   lockOwner;
 
     js::GCHelperThread helperThread;
 
     ConservativeGCData conservativeGC;
 
     friend class js::GCHelperThread;
-    friend class js::gc::AutoPrepareForTracing; /* For recordNativeStackTopForGC(). */
     friend class js::gc::MarkingValidator;
 };
 
 } /* namespace gc */
 } /* namespace js */
 
 #endif
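
With lock and lockOwner now private, all GC lock traffic funnels through the new lockGC()/unlockGC() accessors (JSRuntime::lockGC, changed later in this patch, simply delegates). A minimal RAII sketch of what a caller looks like after this change; AutoGCLockSketch is a hypothetical name standing in for the existing js::AutoLockGC helper:

    class AutoGCLockSketch
    {
        JSRuntime *rt;

      public:
        explicit AutoGCLockSketch(JSRuntime *rt) : rt(rt) {
            rt->gc.lockGC();    // PR_Lock plus debug-only lockOwner bookkeeping
        }
        ~AutoGCLockSketch() {
            rt->gc.unlockGC();  // asserts this thread owns the lock, then PR_Unlock
        }
    };
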
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -17,19 +17,20 @@
 using namespace js;
 using namespace js::gc;
 
 void
 js::TraceRuntime(JSTracer *trc)
 {
     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
 
-    MinorGC(trc->runtime(), JS::gcreason::EVICT_NURSERY);
-    AutoPrepareForTracing prep(trc->runtime(), WithAtoms);
-    MarkRuntime(trc);
+    JSRuntime *rt = trc->runtime();
+    MinorGC(rt, JS::gcreason::EVICT_NURSERY);
+    AutoPrepareForTracing prep(rt, WithAtoms);
+    rt->gc.markRuntime(trc);
 }
 
 static void
 IterateCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                JSIterateCompartmentCallback compartmentCallback,
                                IterateArenaCallback arenaCallback,
                                IterateCellCallback cellCallback)
 {
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -779,17 +779,17 @@ js::Nursery::collect(JSRuntime *rt, JS::
     sb.markGenericEntries(&trc);
     TIME_END(markGenericEntries);
 
     TIME_START(checkHashTables);
     CheckHashTablesAfterMovingGC(rt);
     TIME_END(checkHashTables);
 
     TIME_START(markRuntime);
-    MarkRuntime(&trc);
+    rt->gc.markRuntime(&trc);
     TIME_END(markRuntime);
 
     TIME_START(markDebugger);
     Debugger::markAll(&trc);
     TIME_END(markDebugger);
 
     TIME_START(clearNewObjectCache);
     rt->newObjectCache.clearNurseryObjects(rt);
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -278,58 +278,55 @@ MarkRangeConservativelyAndSkipIon(JSTrac
         i = jitEnd;
     }
 #endif
 
     // Mark everything after the most recent Ion activation.
     MarkRangeConservatively(trc, i, end);
 }
 
-static MOZ_NEVER_INLINE void
-MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
+MOZ_NEVER_INLINE void
+gc::GCRuntime::markConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
 {
-    JSRuntime *rt = trc->runtime();
-
 #ifdef DEBUG
     if (useSavedRoots) {
         for (PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
              root != rt->mainThread.gcSavedRoots.end();
              root++)
         {
             trc->setTracingName("cstack");
             MarkKind(trc, &root->thing, root->kind);
         }
         return;
     }
 
-    if (rt->gc.incrementalState == MARK_ROOTS)
+    if (incrementalState == MARK_ROOTS)
         rt->mainThread.gcSavedRoots.clearAndFree();
 #endif
 
-    ConservativeGCData *cgcd = &rt->gc.conservativeGC;
-    if (!cgcd->hasStackToScan()) {
+    if (!conservativeGC.hasStackToScan()) {
 #ifdef JS_THREADSAFE
         JS_ASSERT(!rt->requestDepth);
 #endif
         return;
     }
 
     uintptr_t *stackMin, *stackEnd;
 #if JS_STACK_GROWTH_DIRECTION > 0
     stackMin = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
-    stackEnd = cgcd->nativeStackTop;
+    stackEnd = conservativeGC.nativeStackTop;
 #else
-    stackMin = cgcd->nativeStackTop + 1;
+    stackMin = conservativeGC.nativeStackTop + 1;
     stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
 #endif
 
     JS_ASSERT(stackMin <= stackEnd);
     MarkRangeConservativelyAndSkipIon(trc, rt, stackMin, stackEnd);
-    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
-                            ArrayEnd(cgcd->registerSnapshot.words));
+    MarkRangeConservatively(trc, conservativeGC.registerSnapshot.words,
+                            ArrayEnd(conservativeGC.registerSnapshot.words));
 }
 
 void
 js::MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
 {
     const uintptr_t *begin = beginv->payloadUIntPtr();
     const uintptr_t *end = endv->payloadUIntPtr();
 #ifdef JS_NUNBOX32
@@ -656,19 +653,18 @@ js::gc::MarkPersistentRootedChains(JSTra
     // Mark the PersistentRooted chains of types that are never null.
     PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
                                                         "PersistentRooted<jsid>");
     PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
                                                             "PersistentRooted<Value>");
 }
 
 void
-js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
+js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
 {
-    JSRuntime *rt = trc->runtime();
     JS_ASSERT(trc->callback != GCMarker::GrayCallback);
 
     JS_ASSERT(!rt->mainThread.suppressGC);
 
     if (IS_GC_MARKING_TRACER(trc)) {
         for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
             if (!c->zone()->isCollecting())
                 c->markCrossCompartmentWrappers(trc);
@@ -677,22 +673,22 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
     }
 
     AutoGCRooter::traceAll(trc);
 
     if (!rt->isBeingDestroyed()) {
 #ifdef JSGC_USE_EXACT_ROOTING
         MarkExactStackRoots(trc);
 #else
-        MarkConservativeStackRoots(trc, useSavedRoots);
+        markConservativeStackRoots(trc, useSavedRoots);
 #endif
         rt->markSelfHostingGlobal(trc);
     }
 
-    for (RootRange r = rt->gc.rootsHash.all(); !r.empty(); r.popFront()) {
+    for (RootRange r = rootsHash.all(); !r.empty(); r.popFront()) {
         const RootEntry &entry = r.front();
         const char *name = entry.value().name ? entry.value().name : "root";
         JSGCRootType type = entry.value().type;
         void *key = entry.key();
         if (type == JS_GC_ROOT_VALUE_PTR) {
             MarkValueRoot(trc, reinterpret_cast<Value *>(key), name);
         } else if (*reinterpret_cast<void **>(key)) {
             if (type == JS_GC_ROOT_STRING_PTR)
@@ -703,18 +699,18 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
                 MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name);
             else
                 MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value");
         }
     }
 
     MarkPersistentRootedChains(trc);
 
-    if (rt->gc.scriptAndCountsVector) {
-        ScriptAndCountsVector &vec = *rt->gc.scriptAndCountsVector;
+    if (scriptAndCountsVector) {
+        ScriptAndCountsVector &vec = *scriptAndCountsVector;
         for (size_t i = 0; i < vec.length(); i++)
             MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
     }
 
     if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
         if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
             MarkPermanentAtoms(trc);
             MarkAtoms(trc);
@@ -727,17 +723,17 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
     for (ContextIter acx(rt); !acx.done(); acx.next())
         acx->mark(trc);
 
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
             continue;
 
         /* Do not discard scripts with counts while profiling. */
-        if (rt->profilingScripts && !rt->isHeapMinorCollecting()) {
+        if (rt->profilingScripts && !isHeapMinorCollecting()) {
             for (ZoneCellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
                 JSScript *script = i.get<JSScript>();
                 if (script->hasScriptCounts()) {
                     MarkScriptRoot(trc, &script, "profilingScripts");
                     JS_ASSERT(script == i.get<JSScript>());
                 }
             }
         }
@@ -763,46 +759,45 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
     }
 
     MarkInterpreterActivations(rt, trc);
 
 #ifdef JS_ION
     jit::MarkJitActivations(rt, trc);
 #endif
 
-    if (!rt->isHeapMinorCollecting()) {
+    if (!isHeapMinorCollecting()) {
         /*
          * All JSCompartment::mark does is mark the globals for compartments
          * which have been entered. Globals aren't nursery allocated so there's
          * no need to do this for minor GCs.
          */
         for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
             c->markRoots(trc);
 
         /*
          * The embedding can register additional roots here.
          *
          * We don't need to trace these in a minor GC because all pointers into
          * the nursery should be in the store buffer, and we want to avoid the
          * time taken to trace all these roots.
          */
-        for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) {
-            const Callback<JSTraceDataOp> &e = rt->gc.blackRootTracers[i];
+        for (size_t i = 0; i < blackRootTracers.length(); i++) {
+            const Callback<JSTraceDataOp> &e = blackRootTracers[i];
             (*e.op)(trc, e.data);
         }
 
         /* During GC, we don't mark gray roots at this stage. */
-        if (JSTraceDataOp op = rt->gc.grayRootTracer.op) {
+        if (JSTraceDataOp op = grayRootTracer.op) {
             if (!IS_GC_MARKING_TRACER(trc))
-                (*op)(trc, rt->gc.grayRootTracer.data);
+                (*op)(trc, grayRootTracer.data);
         }
     }
 }
 
 void
-js::gc::BufferGrayRoots(GCMarker *gcmarker)
+js::gc::GCRuntime::bufferGrayRoots()
 {
-    JSRuntime *rt = gcmarker->runtime();
-    gcmarker->startBufferingGrayRoots();
-    if (JSTraceDataOp op = rt->gc.grayRootTracer.op)
-        (*op)(gcmarker, rt->gc.grayRootTracer.data);
-    gcmarker->endBufferingGrayRoots();
+    marker.startBufferingGrayRoots();
+    if (JSTraceDataOp op = grayRootTracer.op)
+        (*op)(&marker, grayRootTracer.data);
+    marker.endBufferingGrayRoots();
 }
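
The blackRootTracers and grayRootTracer callbacks that markRuntime() and bufferGrayRoots() invoke above are registered by the embedding. A hedged sketch, assuming the public entry points of this era (JS_AddExtraGCRootsTracer in jsapi.h, JS_SetGrayGCRootsTracer in jsfriendapi.h); TraceEmbeddingRoots and RegisterEmbeddingRoots are hypothetical embedder functions:

    static void
    TraceEmbeddingRoots(JSTracer *trc, void *data)
    {
        // Mark embedder-held GC things here, e.g. via JS_CallObjectTracer.
    }

    bool
    RegisterEmbeddingRoots(JSRuntime *rt)
    {
        // Black roots: traced by markRuntime() on every GC.
        if (!JS_AddExtraGCRootsTracer(rt, TraceEmbeddingRoots, nullptr))
            return false;
        // Gray roots: buffered by bufferGrayRoots() for incremental GCs.
        JS_SetGrayGCRootsTracer(rt, TraceEmbeddingRoots, nullptr);
        return true;
    }
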
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -217,17 +217,17 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
 
     /* Create the root node. */
     trc->curnode = MakeNode(trc, nullptr, JSGCTraceKind(0));
 
     /* We want MarkRuntime to save the roots to gcSavedRoots. */
     rt->gc.incrementalState = MARK_ROOTS;
 
     /* Make all the roots be edges emanating from the root node. */
-    MarkRuntime(trc);
+    rt->gc.markRuntime(trc);
 
     VerifyNode *node;
     node = trc->curnode;
     if (trc->edgeptr == trc->term)
         goto oom;
 
     /* For each edge, make a node for it if one doesn't already exist. */
     while ((char *)node < trc->edgeptr) {
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -711,17 +711,17 @@ StopRequest(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime();
     JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     JS_ASSERT(rt->requestDepth != 0);
     if (rt->requestDepth != 1) {
         rt->requestDepth--;
     } else {
-        rt->gc.conservativeGC.updateForRequestEnd();
+        rt->gc.notifyRequestEnd();
         rt->requestDepth = 0;
         rt->triggerActivityCallback(false);
     }
 }
 #endif /* JS_THREADSAFE */
 
 JS_PUBLIC_API(void)
 JS_BeginRequest(JSContext *cx)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -404,17 +404,17 @@ ArenaHeader::checkSynchronizedWithFreeLi
      */
     JS_ASSERT(allocated());
 
     /*
      * We can be called from the background finalization thread when the free
      * list in the zone can mutate at any moment. We cannot do any
      * checks in this case.
      */
-    if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gc.helperThread.onBackgroundThread())
+    if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gc.onBackgroundThread())
         return;
 
     FreeSpan firstSpan = firstFreeSpan.decompact(arenaAddress());
     if (firstSpan.isEmpty())
         return;
     const FreeList *freeList = zone->allocator.arenas.getFreeList(getAllocKind());
     if (freeList->isEmpty() || firstSpan.arenaAddress() != freeList->arenaAddress())
         return;
@@ -919,22 +919,22 @@ Chunk::recycleArena(ArenaHeader *aheader
 void
 Chunk::releaseArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
     JS_ASSERT(!aheader->hasDelayedMarking);
     Zone *zone = aheader->zone;
     JSRuntime *rt = zone->runtimeFromAnyThread();
     AutoLockGC maybeLock;
-    if (rt->gc.helperThread.sweeping())
+    if (rt->gc.isBackgroundSweeping())
         maybeLock.lock(rt);
 
     JS_ASSERT(rt->gc.bytes >= ArenaSize);
     JS_ASSERT(zone->gcBytes >= ArenaSize);
-    if (rt->gc.helperThread.sweeping())
+    if (rt->gc.isBackgroundSweeping())
         zone->reduceGCTriggerBytes(zone->gcHeapGrowthFactor * ArenaSize);
     rt->gc.bytes -= ArenaSize;
     zone->gcBytes -= ArenaSize;
 
     aheader->setAsNotAllocated();
     addArenaToFreeList(rt, aheader);
 
     if (info.numArenasFree == 1) {
@@ -1174,16 +1174,20 @@ GCRuntime::initGCZeal()
 #endif
 
 /* Lifetime for type sets attached to scripts containing observed types. */
 static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
 
 bool
 GCRuntime::init(uint32_t maxbytes)
 {
+    lock = PR_NewLock();
+    if (!lock)
+        return false;
+
     if (!chunkSet.init(INITIAL_CHUNK_CAPACITY))
         return false;
 
     if (!rootsHash.init(256))
         return false;
 
     if (!helperThread.init())
         return false;
@@ -1214,17 +1218,17 @@ GCRuntime::init(uint32_t maxbytes)
 
     if (!marker.init(mode))
         return false;
 
     return true;
 }
 
 void
-GCRuntime::recordNativeStackTopForGC()
+GCRuntime::recordNativeStackTop()
 {
 #ifdef JS_THREADSAFE
     /* Record the stack top here only if we are called from a request. */
     if (!rt->requestDepth)
         return;
 #endif
     conservativeGC.recordStackTop();
 }
@@ -1263,16 +1267,23 @@ GCRuntime::finish()
     }
 
     chunkPool.expireAndFree(rt, true);
 
     if (rootsHash.initialized())
         rootsHash.clear();
 
     FinishPersistentRootedChains(rt);
+
+#ifdef JS_THREADSAFE
+    if (lock) {
+        PR_DestroyLock(lock);
+        lock = nullptr;
+    }
+#endif
 }
 
 void
 js::gc::FinishPersistentRootedChains(JSRuntime *rt)
 {
     /* The lists of persistent roots are stored on the shadow runtime. */
     rt->functionPersistentRooteds.clear();
     rt->idPersistentRooteds.clear();
@@ -2973,19 +2984,19 @@ GCRuntime::beginMarkPhase()
     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
         /* Unmark all weak maps in the compartments being collected. */
         WeakMapBase::unmarkCompartment(c);
     }
 
     if (isFull)
         UnmarkScriptData(rt);
 
-    MarkRuntime(gcmarker);
+    markRuntime(gcmarker);
     if (isIncremental)
-        BufferGrayRoots(gcmarker);
+        bufferGrayRoots();
 
     /*
      * This code ensures that if a zone is "dead", then it will be
      * collected in this GC. A zone is considered dead if its maybeAlive
      * flag is false. The maybeAlive flag is set if:
      *   (1) the zone has incoming cross-compartment edges, or
      *   (2) an object in the zone was marked during root marking, either
      *       as a black root or a gray root.
@@ -3208,17 +3219,17 @@ js::gc::MarkingValidator::nonIncremental
     gcmarker->reset();
 
     for (GCChunkSet::Range r(gc->chunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
     {
         gcstats::AutoPhase ap1(gc->stats, gcstats::PHASE_MARK);
         gcstats::AutoPhase ap2(gc->stats, gcstats::PHASE_MARK_ROOTS);
-        MarkRuntime(gcmarker, true);
+        gc->markRuntime(gcmarker, true);
     }
 
     {
         gcstats::AutoPhase ap1(gc->stats, gcstats::PHASE_MARK);
         SliceBudget budget;
         gc->incrementalState = MARK;
         gc->marker.drainMarkStack(budget);
     }
@@ -4227,17 +4238,17 @@ class AutoGCSession
 } /* anonymous namespace */
 
 /* Start a new heap session. */
 AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
   : lock(rt),
     runtime(rt),
     prevState(rt->gc.heapState)
 {
-    JS_ASSERT(!rt->gc.noGCOrAllocationCheck);
+    JS_ASSERT(rt->gc.isAllocAllowed());
     JS_ASSERT(rt->gc.heapState == Idle);
     JS_ASSERT(heapState != Idle);
 #ifdef JSGC_GENERATIONAL
     JS_ASSERT_IF(heapState == MajorCollecting, rt->gc.nursery.isEmpty());
 #endif
 
     // Threads with an exclusive context can hit refillFreeList while holding
     // the exclusive access lock. To avoid deadlocking when we try to acquire
@@ -4801,17 +4812,17 @@ GCRuntime::collect(bool incremental, int
         return;
 #endif
 
     JS_ASSERT_IF(!incremental || budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
 
     AutoStopVerifyingBarriers av(rt, reason == JS::gcreason::SHUTDOWN_CC ||
                                      reason == JS::gcreason::DESTROY_RUNTIME);
 
-    recordNativeStackTopForGC();
+    recordNativeStackTop();
 
     int zoneCount = 0;
     int compartmentCount = 0;
     int collectedCount = 0;
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (mode == JSGC_MODE_GLOBAL)
             zone->scheduleGC();
 
@@ -4947,17 +4958,17 @@ JS_FRIEND_API(void)
 JS::ShrinkGCBuffers(JSRuntime *rt)
 {
     AutoLockGC lock(rt);
     JS_ASSERT(!rt->isHeapBusy());
 
     if (!rt->useHelperThreads())
         ExpireChunksAndArenas(rt, true);
     else
-        rt->gc.helperThread.startBackgroundShrink();
+        rt->gc.startBackgroundShrink();
 }
 
 void
 js::MinorGC(JSRuntime *rt, JS::gcreason::Reason reason)
 {
     rt->gc.minorGC(reason);
 }
 
@@ -5016,17 +5027,17 @@ GCRuntime::gcIfNeeded(JSContext *cx)
 
     if (isNeeded)
         gcSlice(GC_NORMAL, rt->gc.triggerReason, 0);
 }
 
 void
 js::gc::FinishBackgroundFinalize(JSRuntime *rt)
 {
-    rt->gc.helperThread.waitBackgroundSweepEnd();
+    rt->gc.waitBackgroundSweepEnd();
 }
 
 AutoFinishGC::AutoFinishGC(JSRuntime *rt)
 {
     if (JS::IsIncrementalGCInProgress(rt)) {
         JS::PrepareForIncrementalGC(rt);
         JS::FinishIncrementalGC(rt, JS::gcreason::API);
     }
@@ -5034,17 +5045,17 @@ AutoFinishGC::AutoFinishGC(JSRuntime *rt
     gc::FinishBackgroundFinalize(rt);
 }
 
 AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime *rt, ZoneSelector selector)
   : finish(rt),
     session(rt),
     copy(rt, selector)
 {
-    rt->gc.recordNativeStackTopForGC();
+    rt->gc.recordNativeStackTop();
 }
 
 JSCompartment *
 js::NewCompartment(JSContext *cx, Zone *zone, JSPrincipals *principals,
                    const JS::CompartmentOptions &options)
 {
     JSRuntime *rt = cx->runtime();
     JS_AbortIfWrongThread(rt);
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -306,23 +306,49 @@ class ZoneCellIterUnderGC : public ZoneC
 #ifdef JSGC_GENERATIONAL
         JS_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty());
 #endif
         JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
         init(zone, kind);
     }
 };
 
+/* In debug builds, assert that no allocation occurs while it is live. */
+class AutoAssertNoAlloc
+{
+#ifdef DEBUG
+    GCRuntime *gc;
+
+  public:
+    AutoAssertNoAlloc() : gc(nullptr) {}
+    AutoAssertNoAlloc(JSRuntime *rt) : gc(nullptr) {
+        disallowAlloc(rt);
+    }
+    void disallowAlloc(JSRuntime *rt) {
+        JS_ASSERT(!gc);
+        gc = &rt->gc;
+        gc->disallowAlloc();
+    }
+    ~AutoAssertNoAlloc() {
+        if (gc)
+            gc->allowAlloc();
+    }
+#else
+  public:
+    AutoAssertNoAlloc() {}
+    AutoAssertNoAlloc(JSRuntime *) {}
+#endif
+};
+
 class ZoneCellIter : public ZoneCellIterImpl
 {
+    AutoAssertNoAlloc noAlloc;
     ArenaLists *lists;
     AllocKind kind;
-#ifdef DEBUG
-    size_t *counter;
-#endif
+
   public:
     ZoneCellIter(JS::Zone *zone, AllocKind kind)
       : lists(&zone->allocator.arenas),
         kind(kind)
     {
         /*
          * We have a single-threaded runtime, so there's no need to protect
          * against other threads iterating or allocating. However, we do have
@@ -344,30 +370,23 @@ class ZoneCellIter : public ZoneCellIter
 
         if (lists->isSynchronizedFreeList(kind)) {
             lists = nullptr;
         } else {
             JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
             lists->copyFreeListToArena(kind);
         }
 
-#ifdef DEBUG
         /* Assert that no GCs can occur while a ZoneCellIter is live. */
-        counter = &zone->runtimeFromAnyThread()->gc.noGCOrAllocationCheck;
-        ++*counter;
-#endif
+        noAlloc.disallowAlloc(zone->runtimeFromMainThread());
 
         init(zone, kind);
     }
 
     ~ZoneCellIter() {
-#ifdef DEBUG
-        JS_ASSERT(*counter > 0);
-        --*counter;
-#endif
         if (lists)
             lists->clearFreeListInArena(kind);
     }
 };
 
 class GCZonesIter
 {
   private:
@@ -476,17 +495,17 @@ CheckAllocatorState(ThreadSafeContext *c
     JSContext *ncx = cx->asJSContext();
     JSRuntime *rt = ncx->runtime();
 #if defined(JS_GC_ZEAL) || defined(DEBUG)
     JS_ASSERT_IF(rt->isAtomsCompartment(ncx->compartment()),
                  kind == FINALIZE_STRING ||
                  kind == FINALIZE_FAT_INLINE_STRING ||
                  kind == FINALIZE_JITCODE);
     JS_ASSERT(!rt->isHeapBusy());
-    JS_ASSERT(!rt->gc.noGCOrAllocationCheck);
+    JS_ASSERT(rt->gc.isAllocAllowed());
 #endif
 
     // For testing out of memory conditions
     if (!PossiblyFail()) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
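
The new AutoAssertNoAlloc guard replaces the hand-rolled noGCOrAllocationCheck counter manipulation that ZoneCellIter previously did inline. A minimal usage sketch (illustration only, assuming code inside js::gc; InspectHeapNoAlloc is a hypothetical function):

    void
    InspectHeapNoAlloc(JSRuntime *rt)
    {
        AutoAssertNoAlloc noAlloc(rt);  // debug builds: bumps noGCOrAllocationCheck
        // Any GC-thing allocation in this scope now trips the
        // JS_ASSERT(rt->gc.isAllocAllowed()) check in CheckAllocatorState.
    }   // destructor re-allows allocation
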
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -809,17 +809,18 @@ ToDisassemblySource(JSContext *cx, Handl
             return false;
         nbytes = JS_sprintf_append(nullptr, "%s", nbytes);
         if (!nbytes)
             return false;
         bytes->initBytes(nbytes);
         return true;
     }
 
-    if (cx->runtime()->isHeapBusy() || cx->runtime()->gc.noGCOrAllocationCheck) {
+    JSRuntime *rt = cx->runtime();
+    if (rt->isHeapBusy() || !rt->gc.isAllocAllowed()) {
         char *source = JS_sprintf_append(nullptr, "<value>");
         if (!source)
             return false;
         bytes->initBytes(source);
         return true;
     }
 
     if (v.isObject()) {
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -474,17 +474,17 @@ ForkJoinActivation::ForkJoinActivation(J
 
     if (JS::IsIncrementalGCInProgress(cx->runtime())) {
         JS::PrepareForIncrementalGC(cx->runtime());
         JS::FinishIncrementalGC(cx->runtime(), JS::gcreason::API);
     }
 
     MinorGC(cx->runtime(), JS::gcreason::API);
 
-    cx->runtime()->gc.helperThread.waitBackgroundSweepEnd();
+    cx->runtime()->gc.waitBackgroundSweepEnd();
 
     JS_ASSERT(!cx->runtime()->needsBarrier());
     JS_ASSERT(!cx->zone()->needsBarrier());
 }
 
 ForkJoinActivation::~ForkJoinActivation()
 {
     cx_->perThreadData->jitTop = prevJitTop_;
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -268,20 +268,16 @@ JSRuntime::init(uint32_t maxbytes)
 {
 #ifdef JS_THREADSAFE
     ownerThread_ = PR_GetCurrentThread();
 
     interruptLock = PR_NewLock();
     if (!interruptLock)
         return false;
 
-    gc.lock = PR_NewLock();
-    if (!gc.lock)
-        return false;
-
     exclusiveAccessLock = PR_NewLock();
     if (!exclusiveAccessLock)
         return false;
 #endif
 
     if (!mainThread.init())
         return false;
 
@@ -435,21 +431,16 @@ JSRuntime::~JSRuntime()
 
 #if !EXPOSE_INTL_API
     FinishRuntimeNumberState(this);
 #endif
 
     gc.finish();
     atomsCompartment_ = nullptr;
 
-#ifdef JS_THREADSAFE
-    if (gc.lock)
-        PR_DestroyLock(gc.lock);
-#endif
-
     js_free(defaultLocale);
 #ifdef JS_YARR
     js_delete(bumpAlloc_);
 #endif
     js_delete(mathCache_);
 #ifdef JS_ION
     js_delete(jitRuntime_);
 #endif
@@ -755,17 +746,17 @@ JSRuntime::onOutOfMemory(void *p, size_t
     if (isHeapBusy())
         return nullptr;
 
     /*
      * Retry when we are done with the background sweeping and have stopped
      * all the allocations and released the empty GC chunks.
      */
     JS::ShrinkGCBuffers(this);
-    gc.helperThread.waitBackgroundSweepOrAllocEnd();
+    gc.waitBackgroundSweepOrAllocEnd();
     if (!p)
         p = js_malloc(nbytes);
     else if (p == reinterpret_cast<void *>(1))
         p = js_calloc(nbytes);
     else
         p = js_realloc(p, nbytes);
     if (p)
         return p;
@@ -850,17 +841,17 @@ JSRuntime::assertCanLock(RuntimeLock whi
     switch (which) {
       case ExclusiveAccessLock:
         JS_ASSERT(exclusiveAccessOwner != PR_GetCurrentThread());
       case WorkerThreadStateLock:
         JS_ASSERT(!WorkerThreadState().isLocked());
       case InterruptLock:
         JS_ASSERT(!currentThreadOwnsInterruptLock());
       case GCLock:
-        JS_ASSERT(gc.lockOwner != PR_GetCurrentThread());
+        gc.assertCanLock();
         break;
       default:
         MOZ_CRASH();
     }
 #endif // JS_THREADSAFE
 }
 
 void
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -975,32 +975,22 @@ struct JSRuntime : public JS::shadow::Ru
     }
 #else
     int gcZeal() { return 0; }
     bool upcomingZealousGC() { return false; }
     bool needZealousGC() { return false; }
 #endif
 
     void lockGC() {
-#ifdef JS_THREADSAFE
         assertCanLock(js::GCLock);
-        PR_Lock(gc.lock);
-        JS_ASSERT(!gc.lockOwner);
-#ifdef DEBUG
-        gc.lockOwner = PR_GetCurrentThread();
-#endif
-#endif
+        gc.lockGC();
     }
 
     void unlockGC() {
-#ifdef JS_THREADSAFE
-        JS_ASSERT(gc.lockOwner == PR_GetCurrentThread());
-        gc.lockOwner = nullptr;
-        PR_Unlock(gc.lock);
-#endif
+        gc.unlockGC();
     }
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     js::jit::SimulatorRuntime *simulatorRuntime_;
 #endif
 
   public:
     void setNeedsBarrier(bool needs) {
@@ -1489,17 +1479,17 @@ VersionIsKnown(JSVersion version)
 {
     return VersionNumber(version) != JSVERSION_UNKNOWN;
 }
 
 inline void
 FreeOp::free_(void *p)
 {
     if (shouldFreeLater()) {
-        runtime()->gc.helperThread.freeLater(p);
+        runtime()->gc.freeLater(p);
         return;
     }
     js_free(p);
 }
 
 class AutoLockGC
 {
   public: