Backed out 3 changesets (bug 1021114, bug 988486) for GC crashes on a CLOSED TREE.
author: Ryan VanderMeulen <ryanvm@gmail.com>
date: Sat, 07 Jun 2014 00:03:17 -0400
changeset 207703 6932ea846a288caa5b0d664822eac542cdc57b8c
parent 207702 33ff7ab025d1bee80370ac138cb91c1353ea38a9
child 207704 1d64d17b89675cc13c55e6d4358c2068072bd03a
push id: 494
push user: raliiev@mozilla.com
push date: Mon, 25 Aug 2014 18:42:16 +0000
treeherder: mozilla-release@a3cc3e46b571
bugs: 1021114, 988486
milestone: 32.0a1
backs out: f56234ba7ec78819ad274cf738583ee3e150bdc6
14a4a906225384565d6c23c796fa57d185af228c
03eccac81e158aabf1e1fba832e13fb647d361b6
Backed out 3 changesets (bug 1021114, bug 988486) for GC crashes on a CLOSED TREE.

Backed out changeset f56234ba7ec7 (bug 1021114)
Backed out changeset 14a4a9062253 (bug 988486)
Backed out changeset 03eccac81e15 (bug 988486)
dom/xbl/nsXBLMaybeCompiled.h
js/public/GCAPI.h
js/public/Id.h
js/public/RootingAPI.h
js/public/Value.h
js/src/builtin/TestingFunctions.cpp
js/src/gc/GCRuntime.h
js/src/gc/Nursery.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/jit/CompileWrappers.cpp
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsinfer.h
js/src/jsinferinlines.h
js/src/jsobj.cpp
js/src/jsopcode.cpp
js/src/jsscript.h
js/src/vm/PropDesc.h
js/src/vm/Runtime-inl.h
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
--- a/dom/xbl/nsXBLMaybeCompiled.h
+++ b/dom/xbl/nsXBLMaybeCompiled.h
@@ -11,19 +11,16 @@
 /*
  * A union containing either a pointer representing uncompiled source or a
  * JSObject* representing the compiled result.  The class is templated on the
  * source object type.
  *
  * The purpose of abstracting this as a separate class is to allow it to be
  * wrapped in a JS::Heap<T> to correctly handle post-barriering of the JSObject
  * pointer, when present.
- *
- * No implementation of rootKind() is provided, which prevents
- * Root<nsXBLMaybeCompiled<UncompiledT>> from being used.
  */
 template <class UncompiledT>
 class nsXBLMaybeCompiled
 {
 public:
   nsXBLMaybeCompiled() : mUncompiled(BIT_UNCOMPILED) {}
 
   nsXBLMaybeCompiled(UncompiledT* uncompiled)
@@ -86,16 +83,21 @@ namespace js {
 
 template <class UncompiledT>
 struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
 {
   typedef struct GCMethods<JSObject *> Base;
 
   static nsXBLMaybeCompiled<UncompiledT> initial() { return nsXBLMaybeCompiled<UncompiledT>(); }
 
+  /*
+   * No implementation of kind() is provided to prevent
+   * Root<nsXBLMaybeCompiled<UncompiledT>> from being used.
+   */
+
   static bool poisoned(nsXBLMaybeCompiled<UncompiledT> function)
   {
     return function.IsCompiled() && Base::poisoned(function.GetJSFunction());
   }
 
   static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
   {
     return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
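
The comment at the top of this file describes a pointer-sized union that distinguishes uncompiled source from a compiled JSObject* via a low tag bit. As a rough, self-contained sketch of that tagging idea (all names below are placeholders, not the real nsXBLMaybeCompiled API):

    // Illustrative only: low-bit tagging to store either of two pointer types
    // in one word, as the nsXBLMaybeCompiled comment describes.
    #include <cassert>
    #include <cstdint>

    struct Source {};    // stands in for the uncompiled source type
    struct Compiled {};  // stands in for the compiled JSObject*

    class MaybeCompiledSketch {
        static const uintptr_t BIT_UNCOMPILED = 0x1;
        uintptr_t mBits;
      public:
        explicit MaybeCompiledSketch(Source *s)
          : mBits(reinterpret_cast<uintptr_t>(s) | BIT_UNCOMPILED) {}
        explicit MaybeCompiledSketch(Compiled *c)
          : mBits(reinterpret_cast<uintptr_t>(c)) {}

        bool IsCompiled() const { return !(mBits & BIT_UNCOMPILED); }

        Source *GetUncompiled() const {
            assert(!IsCompiled());
            return reinterpret_cast<Source *>(mBits & ~BIT_UNCOMPILED);
        }
        Compiled *GetCompiled() const {
            assert(IsCompiled());
            return reinterpret_cast<Compiled *>(mBits);
        }
    };

The real class additionally has to cooperate with JS::Heap<T> post-barriers, which is what the GCMethods specialization in the hunk above provides.
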
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -370,17 +370,17 @@ ShrinkGCBuffers(JSRuntime *rt);
 
 /*
  * Assert if a GC occurs while this class is live. This class does not disable
  * the static rooting hazard analysis.
  */
 class JS_PUBLIC_API(AutoAssertOnGC)
 {
 #ifdef DEBUG
-    js::gc::GCRuntime *gc;
+    JSRuntime *runtime;
     size_t gcNumber;
 
   public:
     AutoAssertOnGC();
     explicit AutoAssertOnGC(JSRuntime *rt);
     ~AutoAssertOnGC();
 
     static void VerifyIsSafeToGC(JSRuntime *rt);
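
For reference, a minimal sketch of the RAII pattern AutoAssertOnGC reverts to in this backout (see the jsgc.cpp hunks further down): bump a per-runtime counter on construction, drop it on destruction, and treat any GC attempted while the counter is non-zero as a fatal error. The struct and field names below are illustrative, not the real JSRuntime layout.

    #include <cstdio>
    #include <cstdlib>

    struct RuntimeSketch {
        int inUnsafeRegion = 0;   // non-zero => a GC here is a bug
        unsigned gcNumber = 0;    // incremented by every collection
    };

    class AutoAssertOnGCSketch {
        RuntimeSketch &rt;
        unsigned startNumber;
      public:
        explicit AutoAssertOnGCSketch(RuntimeSketch &r)
          : rt(r), startNumber(r.gcNumber) {
            ++rt.inUnsafeRegion;
        }
        ~AutoAssertOnGCSketch() {
            --rt.inUnsafeRegion;
            // Backstop: if a GC ran inside the scope, the GC number changed.
            if (rt.gcNumber != startNumber) {
                fprintf(stderr, "GC ran inside an AutoAssertOnGC scope\n");
                abort();
            }
        }
        static void VerifyIsSafeToGC(const RuntimeSketch &r) {
            if (r.inUnsafeRegion > 0)
                abort();  // possible GC in a GC-unsafe region
        }
    };
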
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -166,16 +166,17 @@ IsPoisonedId(jsid iden)
     if (JSID_IS_OBJECT(iden))
         return JS::IsPoisonedPtr(JSID_TO_OBJECT(iden));
     return false;
 }
 
 template <> struct GCMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
+    static ThingRootKind kind() { return THING_ROOT_ID; }
     static bool poisoned(jsid id) { return IsPoisonedId(id); }
     static bool needsPostBarrier(jsid id) { return false; }
 #ifdef JSGC_GENERATIONAL
     static void postBarrier(jsid *idp) {}
     static void relocate(jsid *idp) {}
 #endif
 };
 
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -649,28 +649,30 @@ struct RootKind<T *>
 {
     static ThingRootKind rootKind() { return T::rootKind(); }
 };
 
 template <typename T>
 struct GCMethods<T *>
 {
     static T *initial() { return nullptr; }
+    static ThingRootKind kind() { return RootKind<T *>::rootKind(); }
     static bool poisoned(T *v) { return JS::IsPoisonedPtr(v); }
     static bool needsPostBarrier(T *v) { return false; }
 #ifdef JSGC_GENERATIONAL
     static void postBarrier(T **vp) {}
     static void relocate(T **vp) {}
 #endif
 };
 
 template <>
 struct GCMethods<JSObject *>
 {
     static JSObject *initial() { return nullptr; }
+    static ThingRootKind kind() { return RootKind<JSObject *>::rootKind(); }
     static bool poisoned(JSObject *v) { return JS::IsPoisonedPtr(v); }
     static bool needsPostBarrier(JSObject *v) {
         return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell *>(v));
     }
 #ifdef JSGC_GENERATIONAL
     static void postBarrier(JSObject **vp) {
         JS::HeapCellPostBarrier(reinterpret_cast<js::gc::Cell **>(vp));
     }
@@ -700,17 +702,17 @@ namespace JS {
  */
 template <typename T>
 class MOZ_STACK_CLASS Rooted : public js::RootedBase<T>
 {
     /* Note: CX is a subclass of either ContextFriendFields or PerThreadDataFriendFields. */
     template <typename CX>
     void init(CX *cx) {
 #ifdef JSGC_TRACK_EXACT_ROOTS
-        js::ThingRootKind kind = js::RootKind<T>::rootKind();
+        js::ThingRootKind kind = js::GCMethods<T>::kind();
         this->stack = &cx->thingGCRooters[kind];
         this->prev = *stack;
         *stack = reinterpret_cast<Rooted<void*>*>(this);
 
         MOZ_ASSERT(!js::GCMethods<T>::poisoned(ptr));
 #endif
     }
 
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1545,22 +1545,24 @@ JS_PUBLIC_API(void) HeapValueRelocate(Va
 }
 #endif
 
 namespace js {
 
 template <> struct GCMethods<const JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
+    static ThingRootKind kind() { return THING_ROOT_VALUE; }
     static bool poisoned(const JS::Value &v) { return JS::IsPoisonedValue(v); }
 };
 
 template <> struct GCMethods<JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
+    static ThingRootKind kind() { return THING_ROOT_VALUE; }
     static bool poisoned(const JS::Value &v) { return JS::IsPoisonedValue(v); }
     static bool needsPostBarrier(const JS::Value &v) {
         return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
     }
 #ifdef JSGC_GENERATIONAL
     static void postBarrier(JS::Value *v) { JS::HeapValuePostBarrier(v); }
     static void relocate(JS::Value *v) { JS::HeapValueRelocate(v); }
 #endif
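
The RootingAPI.h and Value.h hunks above restore a GCMethods<T>::kind() hook that Rooted<T>::init() consults to pick a per-kind root list, instead of going through RootKind<T>::rootKind(). A stripped-down sketch of that trait-dispatch pattern, using standalone placeholder names rather than the SpiderMonkey definitions:

    enum ThingRootKindSketch { SKETCH_ROOT_OBJECT, SKETCH_ROOT_VALUE, SKETCH_ROOT_LIMIT };

    // Primary template left undefined: rooting a type with no kind() is a
    // compile error, analogous to the deliberately missing kind() for
    // nsXBLMaybeCompiled earlier in this patch.
    template <typename T> struct GCMethodsSketch;

    // Pointer types share one root list in this toy version.
    template <typename T> struct GCMethodsSketch<T *> {
        static ThingRootKindSketch kind() { return SKETCH_ROOT_OBJECT; }
    };

    struct RootListsSketch {
        void *stacks[SKETCH_ROOT_LIMIT] = {};
    };

    template <typename T>
    class RootedSketch {
        void **stack;
        void *prev;
        T ptr;
      public:
        RootedSketch(RootListsSketch &lists, T initial) : ptr(initial) {
            // kind() selects which per-kind list this root is pushed onto,
            // mirroring Rooted<T>::init() in the hunk above.
            stack = &lists.stacks[GCMethodsSketch<T>::kind()];
            prev = *stack;
            *stack = this;
        }
        ~RootedSketch() { *stack = prev; }
    };
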
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -515,17 +515,17 @@ SelectForGC(JSContext *cx, unsigned argc
      * start to detect missing pre-barriers. It is invalid for nursery things
      * to be in the set, so evict the nursery before adding items.
      */
     JSRuntime *rt = cx->runtime();
     MinorGC(rt, JS::gcreason::EVICT_NURSERY);
 
     for (unsigned i = 0; i < args.length(); i++) {
         if (args[i].isObject()) {
-            if (!rt->gc.selectForMarking(&args[i].toObject()))
+            if (!rt->gc.selectedForMarking.append(&args[i].toObject()))
                 return false;
         }
     }
 
     args.rval().setUndefined();
     return true;
 }
 
@@ -594,17 +594,17 @@ DeterministicGC(JSContext *cx, unsigned 
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    cx->runtime()->gc.setDeterministic(ToBoolean(args[0]));
+    gc::SetDeterministicGC(cx, ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 #endif /* JS_GC_ZEAL */
 
 static bool
 GCSlice(JSContext *cx, unsigned argc, Value *vp)
 {
@@ -636,33 +636,33 @@ ValidateGC(JSContext *cx, unsigned argc,
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    cx->runtime()->gc.setValidate(ToBoolean(args[0]));
+    gc::SetValidateGC(cx, ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 FullCompartmentChecks(JSContext *cx, unsigned argc, jsval *vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     if (args.length() != 1) {
         RootedObject callee(cx, &args.callee());
         ReportUsageError(cx, callee, "Wrong number of arguments");
         return false;
     }
 
-    cx->runtime()->gc.setFullCompartmentChecks(ToBoolean(args[0]));
+    gc::SetFullCompartmentChecks(cx, ToBoolean(args[0]));
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 NondeterministicGetWeakMapKeys(JSContext *cx, unsigned argc, jsval *vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -21,16 +21,33 @@
 
 /* Perform validation of incremental marking in debug builds but not on B2G. */
 #if defined(DEBUG) && !defined(MOZ_B2G)
 #define JS_GC_MARKING_VALIDATION
 #endif
 
 namespace js {
 
+struct ScriptAndCounts
+{
+    /* This structure is stored and marked from the JSRuntime. */
+    JSScript *script;
+    ScriptCounts scriptCounts;
+
+    PCCounts &getPCCounts(jsbytecode *pc) const {
+        return scriptCounts.pcCountsVector[script->pcToOffset(pc)];
+    }
+
+    jit::IonScriptCounts *getIonCounts() const {
+        return scriptCounts.ionCounts;
+    }
+};
+
+typedef Vector<ScriptAndCounts, 0, SystemAllocPolicy> ScriptAndCountsVector;
+
 namespace gc {
 
 typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
 
 class MarkingValidator;
 class AutoPrepareForTracing;
 
 struct ConservativeGCData
@@ -91,20 +108,17 @@ class CallbackVector : public Vector<Cal
 
 class GCRuntime
 {
   public:
     explicit GCRuntime(JSRuntime *rt);
     bool init(uint32_t maxbytes);
     void finish();
 
-    inline int zeal();
-    inline bool upcomingZealousGC();
-    inline bool needZealousGC();
-
+    void setGCZeal(uint8_t zeal, uint32_t frequency);
     template <typename T> bool addRoot(T *rp, const char *name, JSGCRootType rootType);
     void removeRoot(void *rp);
     void setMarkStackLimit(size_t limit);
 
     bool isHeapBusy() { return heapState != js::Idle; }
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
     bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
@@ -114,31 +128,24 @@ class GCRuntime
     void maybeGC(Zone *zone);
     void minorGC(JS::gcreason::Reason reason);
     void minorGC(JSContext *cx, JS::gcreason::Reason reason);
     void gcIfNeeded(JSContext *cx);
     void collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis);
     void runDebugGC();
-    inline void poke();
 
     void markRuntime(JSTracer *trc, bool useSavedRoots = false);
 
 #ifdef JS_GC_ZEAL
-    const void *addressOfZealMode() { return &zealMode; }
-    void setZeal(uint8_t zeal, uint32_t frequency);
-    void setNextScheduled(uint32_t count);
     void verifyPreBarriers();
     void verifyPostBarriers();
     void maybeVerifyPreBarriers(bool always);
     void maybeVerifyPostBarriers(bool always);
-    bool selectForMarking(JSObject *object);
-    void clearSelectedForMarking();
-    void setDeterministic(bool enable);
 #endif
 
   public:
     // Internal public interface
     void recordNativeStackTop();
 #ifdef JS_THREADSAFE
     void notifyRequestEnd() { conservativeGC.updateForRequestEnd(); }
 #endif
@@ -189,65 +196,40 @@ class GCRuntime
 
 #ifdef DEBUG
     bool isAllocAllowed() { return noGCOrAllocationCheck == 0; }
     void disallowAlloc() { ++noGCOrAllocationCheck; }
     void allowAlloc() {
         JS_ASSERT(!isAllocAllowed());
         --noGCOrAllocationCheck;
     }
-
-    bool isInsideUnsafeRegion() { return inUnsafeRegion != 0; }
-    void enterUnsafeRegion() { ++inUnsafeRegion; }
-    void leaveUnsafeRegion() {
-        JS_ASSERT(inUnsafeRegion > 0);
-        --inUnsafeRegion;
-    }
 #endif
 
     void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
 
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
-    void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
-    bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
-    void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
-
-    void setMaxMallocBytes(size_t value);
-    void resetMallocBytes();
-    bool isTooMuchMalloc() const { return mallocBytes <= 0; }
-    void updateMallocCounter(JS::Zone *zone, size_t nbytes);
-    void onTooMuchMalloc();
-
-    void setGCCallback(JSGCCallback callback, void *data);
-    bool addFinalizeCallback(JSFinalizeCallback callback, void *data);
-    void removeFinalizeCallback(JSFinalizeCallback func);
-    JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
-
-    void setValidate(bool enable);
-    void setFullCompartmentChecks(bool enable);
-
 #ifdef JS_GC_ZEAL
     void startVerifyPreBarriers();
     bool endVerifyPreBarriers();
     void startVerifyPostBarriers();
     bool endVerifyPostBarriers();
     void finishVerifier();
 #endif
 
   private:
     // For ArenaLists::allocateFromArenaInline()
     friend class ArenaLists;
     Chunk *pickChunk(Zone *zone, AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation);
 
     inline bool wantBackgroundAllocation() const;
 
-    bool initZeal();
+    bool initGCZeal();
     void requestInterrupt(JS::gcreason::Reason reason);
     bool gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
     void budgetIncrementalGC(int64_t *budget);
     void resetIncrementalGC(const char *reason);
     void incrementalCollectSlice(int64_t budget, JS::gcreason::Reason reason,
                                  JSGCInvocationKind gckind);
     void pushZealSelectedObjects();
@@ -470,26 +452,25 @@ class GCRuntime
      * zone with no incoming cross-compartment pointers. Typically if
      * this happens it signals that an incremental GC is marking too much
      * stuff. At various times we check this counter and, if it has changed, we
      * run an immediate, non-incremental GC to clean up the dead
      * zones. This should happen very rarely.
      */
     unsigned              objectsMarkedInDeadZones;
 
-    bool                  poked;
+    bool                  poke;
 
     volatile js::HeapState heapState;
 
 #ifdef JSGC_GENERATIONAL
     js::Nursery           nursery;
     js::gc::StoreBuffer   storeBuffer;
 #endif
 
-  private:
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are   nextScheduled and gcDebugCompartmentGC. At every allocation,
      *   nextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on   debugCompartmentGC.
      *
      * At this point, if   zeal_ is one of the types that trigger periodic
      * collection, then   nextScheduled is reset to the value of
@@ -517,97 +498,82 @@ class GCRuntime
     int                   incrementalLimit;
 
     js::Vector<JSObject *, 0, js::SystemAllocPolicy>   selectedForMarking;
 #endif
 
     bool                  validate;
     bool                  fullCompartmentChecks;
 
-    Callback<JSGCCallback>  gcCallback;
+    JSGCCallback          gcCallback;
+    void                  *gcCallbackData;
+
+    JS::GCSliceCallback   sliceCallback;
     CallbackVector<JSFinalizeCallback> finalizeCallbacks;
 
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from   maxMallocBytes down to zero.
      */
     mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire>   mallocBytes;
 
     /*
-     * Whether a GC has been triggered as a result of mallocBytes falling
+     * Whether a GC has been triggered as a result of   mallocBytes falling
      * below zero.
      */
     mozilla::Atomic<bool, mozilla::ReleaseAcquire>   mallocGCTriggered;
 
     /*
      * The trace operations to trace embedding-specific GC roots. One is for
      * tracing through black roots and the other is for tracing through gray
      * roots. The black/gray distinction is only relevant to the cycle
      * collector.
      */
     CallbackVector<JSTraceDataOp> blackRootTracers;
     Callback<JSTraceDataOp> grayRootTracer;
 
+    /*
+     * The GC can only safely decommit memory when the page size of the
+     * running process matches the compiled arena size.
+     */
+    size_t                systemPageSize;
+
+    /* The OS allocation granularity may not match the page size. */
+    size_t                systemAllocGranularity;
+
+    /* Strong references on scripts held for PCCount profiling API. */
+    js::ScriptAndCountsVector *scriptAndCountsVector;
+
 #ifdef DEBUG
     /*
      * Some regions of code are hard for the static rooting hazard analysis to
      * understand. In those cases, we trade the static analysis for a dynamic
      * analysis. When this is non-zero, we should assert if we trigger, or
      * might trigger, a GC.
      */
     int inUnsafeRegion;
 #endif
 
+  private:
     /* Always preserve JIT code during GCs, for testing. */
     bool                  alwaysPreserveCode;
 
 #ifdef DEBUG
     size_t                noGCOrAllocationCheck;
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperState. */
     PRLock                *lock;
     mozilla::DebugOnly<PRThread *>   lockOwner;
 
     GCHelperState helperState;
 
     ConservativeGCData conservativeGC;
 
+    //friend class js::gc::Chunk; // todo: remove
     friend class js::GCHelperState;
     friend class js::gc::MarkingValidator;
 };
 
-#ifdef JS_GC_ZEAL
-inline int
-GCRuntime::zeal() {
-    return zealMode;
-}
-
-inline bool
-GCRuntime::upcomingZealousGC() {
-    return nextScheduled == 1;
-}
-
-inline bool
-GCRuntime::needZealousGC() {
-    if (nextScheduled > 0 && --nextScheduled == 0) {
-        if (zealMode == ZealAllocValue ||
-            zealMode == ZealGenerationalGCValue ||
-            (zealMode >= ZealIncrementalRootsThenFinish &&
-             zealMode <= ZealIncrementalMultipleSlices))
-        {
-            nextScheduled = zealFrequency;
-        }
-        return true;
-    }
-    return false;
-}
-#else
-inline int GCRuntime::zeal() { return 0; }
-inline bool GCRuntime::upcomingZealousGC() { return false; }
-inline bool GCRuntime::needZealousGC() { return false; }
-#endif
-
-
 } /* namespace gc */
 } /* namespace js */
 
 #endif
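
The inline helpers deleted at the end of this header (zeal(), upcomingZealousGC(), needZealousGC()) implement a simple allocation-countdown scheduler, as the comment about nextScheduled describes. A self-contained sketch of that logic; names are illustrative and the zeal-mode range checks of the real code are collapsed into a single boolean parameter:

    #include <cstdint>

    struct ZealSchedulerSketch {
        int zealMode = 0;            // which zeal behaviour is active
        uint32_t zealFrequency = 0;  // reset value for periodic modes
        uint32_t nextScheduled = 0;  // allocations left until the next zeal GC

        // Mirrors needZealousGC(): called on each allocation; returns true
        // when a zeal-triggered GC should run now.
        bool needZealousGC(bool periodicMode) {
            if (nextScheduled > 0 && --nextScheduled == 0) {
                if (periodicMode)
                    nextScheduled = zealFrequency;  // re-arm the countdown
                return true;
            }
            return false;
        }

        bool upcomingZealousGC() const { return nextScheduled == 1; }
    };
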
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -102,17 +102,17 @@ js::Nursery::enable()
 {
     JS_ASSERT(isEmpty());
     if (isEnabled())
         return;
     numActiveChunks_ = 1;
     setCurrentChunk(0);
     currentStart_ = position();
 #ifdef JS_GC_ZEAL
-    if (runtime()->gcZeal() == ZealGenerationalGCValue)
+    if (runtime()->gc.zealMode == ZealGenerationalGCValue)
         enterZealMode();
 #endif
 }
 
 void
 js::Nursery::disable()
 {
     JS_ASSERT(isEmpty());
@@ -124,17 +124,17 @@ js::Nursery::disable()
 }
 
 bool
 js::Nursery::isEmpty() const
 {
     JS_ASSERT(runtime_);
     if (!isEnabled())
         return true;
-    JS_ASSERT_IF(runtime_->gcZeal() != ZealGenerationalGCValue, currentStart_ == start());
+    JS_ASSERT_IF(runtime_->gc.zealMode != ZealGenerationalGCValue, currentStart_ == start());
     return position() == currentStart_;
 }
 
 #ifdef JS_GC_ZEAL
 void
 js::Nursery::enterZealMode() {
     if (isEnabled())
         numActiveChunks_ = NumNurseryChunks;
@@ -925,17 +925,17 @@ void
 js::Nursery::sweep()
 {
 #ifdef JS_GC_ZEAL
     /* Poison the nursery contents so touching a freed object will crash. */
     JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, NurserySize);
     for (int i = 0; i < NumNurseryChunks; ++i)
         initChunk(i);
 
-    if (runtime()->gcZeal() == ZealGenerationalGCValue) {
+    if (runtime()->gc.zealMode == ZealGenerationalGCValue) {
         MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks);
 
         /* Only reset the alloc point when we are close to the end. */
         if (currentChunk_ + 1 == NumNurseryChunks)
             setCurrentChunk(0);
     } else
 #endif
     {
@@ -950,26 +950,26 @@ js::Nursery::sweep()
     /* Set current start position for isEmpty checks. */
     currentStart_ = position();
 }
 
 void
 js::Nursery::growAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
-    MOZ_ASSERT_IF(runtime()->gcZeal() == ZealGenerationalGCValue,
+    MOZ_ASSERT_IF(runtime()->gc.zealMode == ZealGenerationalGCValue,
                   numActiveChunks_ == NumNurseryChunks);
 #endif
     numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks);
 }
 
 void
 js::Nursery::shrinkAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
-    if (runtime()->gcZeal() == ZealGenerationalGCValue)
+    if (runtime()->gc.zealMode == ZealGenerationalGCValue)
         return;
 #endif
     numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
     updateDecommittedRegion();
 }
 
 #endif /* JSGC_GENERATIONAL */
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -725,18 +725,18 @@ js::gc::GCRuntime::markRuntime(JSTracer 
                 MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name);
             else
                 MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value");
         }
     }
 
     MarkPersistentRootedChains(trc);
 
-    if (rt->scriptAndCountsVector) {
-        ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
+    if (scriptAndCountsVector) {
+        ScriptAndCountsVector &vec = *scriptAndCountsVector;
         for (size_t i = 0; i < vec.length(); i++)
             MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
     }
 
     if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
         if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
             MarkPermanentAtoms(trc);
             MarkAtoms(trc);
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -441,18 +441,17 @@ Statistics::Statistics(JSRuntime *rt)
     fp(nullptr),
     fullFormat(false),
     gcDepth(0),
     collectedCount(0),
     zoneCount(0),
     compartmentCount(0),
     nonincrementalReason(nullptr),
     preBytes(0),
-    phaseNestingDepth(0),
-    sliceCallback(nullptr)
+    phaseNestingDepth(0)
 {
     PodArrayZero(phaseTotals);
     PodArrayZero(counts);
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
         fp = nullptr;
         return;
@@ -485,23 +484,16 @@ Statistics::~Statistics()
             }
         }
 
         if (fp != stdout && fp != stderr)
             fclose(fp);
     }
 }
 
-JS::GCSliceCallback
-Statistics::setSliceCallback(JS::GCSliceCallback newCallback) {
-    JS::GCSliceCallback oldCallback = sliceCallback;
-    sliceCallback = newCallback;
-    return oldCallback;
-}
-
 void
 Statistics::printStats()
 {
     if (fullFormat) {
         StatisticsSerializer ss(StatisticsSerializer::AsText);
         formatData(ss, 0);
         char *msg = ss.finishCString();
         if (msg) {
@@ -584,19 +576,19 @@ Statistics::beginSlice(int collectedCoun
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
 
     // Slice callbacks should only fire for the outermost level
     if (++gcDepth == 1) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (sliceCallback)
-            (*sliceCallback)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
-                             JS::GCDescription(!wasFullGC));
+        if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
+            (*cb)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
+                  JS::GCDescription(!wasFullGC));
     }
 }
 
 void
 Statistics::endSlice()
 {
     slices.back().end = PRMJ_Now();
     slices.back().endFaults = SystemPageAllocator::GetPageFaultCount();
@@ -608,19 +600,19 @@ Statistics::endSlice()
 
     bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (sliceCallback)
-            (*sliceCallback)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
-                             JS::GCDescription(!wasFullGC));
+        if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
+            (*cb)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
+                  JS::GCDescription(!wasFullGC));
     }
 
     /* Do this after the slice callback since it uses these values. */
     if (last)
         PodArrayZero(counts);
 }
 
 void
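
beginSlice/endSlice above fire the slice callback only at the outermost nesting level, using the gcDepth counter. A small sketch of that guard pattern; the callback type and names are placeholders:

    #include <functional>

    struct SliceNotifierSketch {
        int depth = 0;
        std::function<void(bool isBegin)> sliceCallback;

        void beginSlice() {
            if (++depth == 1 && sliceCallback)
                sliceCallback(true);    // outermost begin only
        }
        void endSlice() {
            if (--depth == 0 && sliceCallback)
                sliceCallback(false);   // outermost end only
        }
    };
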
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -93,18 +93,16 @@ struct Statistics {
     }
 
     int64_t beginSCC();
     void endSCC(unsigned scc, int64_t start);
 
     jschar *formatMessage();
     jschar *formatJSON(uint64_t timestamp);
 
-    JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
-
   private:
     JSRuntime *runtime;
 
     int64_t startupTime;
 
     FILE *fp;
     bool fullFormat;
 
@@ -157,18 +155,16 @@ struct Statistics {
     static const size_t MAX_NESTING = 8;
     Phase phaseNesting[MAX_NESTING];
 #endif
     mozilla::DebugOnly<size_t> phaseNestingDepth;
 
     /* Sweep times for SCCs of compartments. */
     Vector<int64_t, 0, SystemAllocPolicy> sccTimes;
 
-    JS::GCSliceCallback sliceCallback;
-
     void beginGC();
     void endGC();
 
     void gcDuration(int64_t *total, int64_t *maxPause);
     void sccDurations(int64_t *total, int64_t *maxPause);
     void printStats();
     bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
 
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -63,17 +63,17 @@ CompileRuntime::addressOfLastCachedNativ
 {
     return &runtime()->nativeIterCache.last;
 }
 
 #ifdef JS_GC_ZEAL
 const void *
 CompileRuntime::addressOfGCZeal()
 {
-    return runtime()->gc.addressOfZealMode();
+    return &runtime()->gc.zealMode;
 }
 #endif
 
 const void *
 CompileRuntime::addressOfInterrupt()
 {
     return &runtime()->interrupt;
 }
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -1610,23 +1610,30 @@ JS::RemoveScriptRootRT(JSRuntime *rt, JS
 {
     RemoveRoot(rt, (void *)rp);
     *rp = nullptr;
 }
 
 JS_PUBLIC_API(bool)
 JS_AddExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    return rt->gc.addBlackRootsTracer(traceOp, data);
+    AssertHeapIsIdle(rt);
+    return !!rt->gc.blackRootTracers.append(Callback<JSTraceDataOp>(traceOp, data));
 }
 
 JS_PUBLIC_API(void)
 JS_RemoveExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    return rt->gc.removeBlackRootsTracer(traceOp, data);
+    for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) {
+        Callback<JSTraceDataOp> *e = &rt->gc.blackRootTracers[i];
+        if (e->op == traceOp && e->data == data) {
+            rt->gc.blackRootTracers.erase(e);
+            break;
+        }
+    }
 }
 
 #ifdef DEBUG
 
 typedef struct JSHeapDumpNode JSHeapDumpNode;
 
 struct JSHeapDumpNode {
     void            *thing;
@@ -1887,31 +1894,38 @@ JS_MaybeGC(JSContext *cx)
 {
     MaybeGC(cx);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb, void *data)
 {
     AssertHeapIsIdle(rt);
-    rt->gc.setGCCallback(cb, data);
+    rt->gc.gcCallback = cb;
+    rt->gc.gcCallbackData = data;
 }
 
 JS_PUBLIC_API(bool)
 JS_AddFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb, void *data)
 {
     AssertHeapIsIdle(rt);
-    return rt->gc.addFinalizeCallback(cb, data);
+    return rt->gc.finalizeCallbacks.append(Callback<JSFinalizeCallback>(cb, data));
 }
 
 JS_PUBLIC_API(void)
 JS_RemoveFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb)
 {
-    AssertHeapIsIdle(rt);
-    rt->gc.removeFinalizeCallback(cb);
+    for (Callback<JSFinalizeCallback> *p = rt->gc.finalizeCallbacks.begin();
+         p < rt->gc.finalizeCallbacks.end(); p++)
+    {
+        if (p->op == cb) {
+            rt->gc.finalizeCallbacks.erase(p);
+            break;
+        }
+    }
 }
 
 JS_PUBLIC_API(bool)
 JS_IsAboutToBeFinalized(JS::Heap<JSObject *> *objp)
 {
     return IsObjectAboutToBeFinalized(objp->unsafeGet());
 }
 
@@ -1926,17 +1940,17 @@ JS_SetGCParameter(JSRuntime *rt, JSGCPar
 {
     switch (key) {
       case JSGC_MAX_BYTES: {
         JS_ASSERT(value >= rt->gc.bytes);
         rt->gc.maxBytes = value;
         break;
       }
       case JSGC_MAX_MALLOC_BYTES:
-        rt->gc.setMaxMallocBytes(value);
+        rt->setGCMaxMallocBytes(value);
         break;
       case JSGC_SLICE_TIME_BUDGET:
         rt->gc.sliceBudget = SliceBudget::TimeBudget(value);
         break;
       case JSGC_MARK_STACK_LIMIT:
         js::SetMarkStackLimit(rt, value);
         break;
       case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
@@ -6201,23 +6215,23 @@ JS_AbortIfWrongThread(JSRuntime *rt)
     if (!js::TlsPerThreadData.get()->associatedWith(rt))
         MOZ_CRASH();
 }
 
 #ifdef JS_GC_ZEAL
 JS_PUBLIC_API(void)
 JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency)
 {
-    cx->runtime()->gc.setZeal(zeal, frequency);
+    SetGCZeal(cx->runtime(), zeal, frequency);
 }
 
 JS_PUBLIC_API(void)
 JS_ScheduleGC(JSContext *cx, uint32_t count)
 {
-    cx->runtime()->gc.setNextScheduled(count);
+    cx->runtime()->gc.nextScheduled = count;
 }
 #endif
 
 JS_PUBLIC_API(void)
 JS_SetParallelParsingEnabled(JSRuntime *rt, bool enabled)
 {
 #ifdef JS_ION
     rt->setParallelParsingEnabled(enabled);
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -3004,16 +3004,17 @@ class MutablePropertyDescriptorOperation
 
 } /* namespace JS */
 
 namespace js {
 
 template <>
 struct GCMethods<JSPropertyDescriptor> {
     static JSPropertyDescriptor initial() { return JSPropertyDescriptor(); }
+    static ThingRootKind kind() { return THING_ROOT_PROPERTY_DESCRIPTOR; }
     static bool poisoned(const JSPropertyDescriptor &desc) {
         return (desc.obj && JS::IsPoisonedPtr(desc.obj)) ||
                (desc.attrs & JSPROP_GETTER && desc.getter && JS::IsPoisonedPtr(desc.getter)) ||
                (desc.attrs & JSPROP_SETTER && desc.setter && JS::IsPoisonedPtr(desc.setter)) ||
                (desc.value.isGCThing() && JS::IsPoisonedPtr(desc.value.toGCThing()));
     }
 };
 
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -58,17 +58,18 @@ JS_FRIEND_API(SourceHook *)
 js::ForgetSourceHook(JSRuntime *rt)
 {
     return rt->sourceHook.forget();
 }
 
 JS_FRIEND_API(void)
 JS_SetGrayGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
 {
-    rt->gc.setGrayRootsTracer(traceOp, data);
+    rt->gc.grayRootTracer.op = traceOp;
+    rt->gc.grayRootTracer.data = data;
 }
 
 JS_FRIEND_API(JSString *)
 JS_GetAnonymousString(JSRuntime *rt)
 {
     JS_ASSERT(rt->hasContexts());
     return rt->commonNames->anonymous;
 }
@@ -858,17 +859,19 @@ JS_FRIEND_API(bool)
 js::IsContextRunningJS(JSContext *cx)
 {
     return cx->currentlyRunning();
 }
 
 JS_FRIEND_API(JS::GCSliceCallback)
 JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
 {
-    return rt->gc.setSliceCallback(callback);
+    JS::GCSliceCallback old = rt->gc.sliceCallback;
+    rt->gc.sliceCallback = callback;
+    return old;
 }
 
 JS_FRIEND_API(bool)
 JS::WasIncrementalGC(JSRuntime *rt)
 {
     return rt->gc.isIncremental;
 }
 
@@ -1018,17 +1021,17 @@ JS_FRIEND_API(void)
 JS::IncrementalValueBarrier(const Value &v)
 {
     js::HeapValue::writeBarrierPre(v);
 }
 
 JS_FRIEND_API(void)
 JS::PokeGC(JSRuntime *rt)
 {
-    rt->gc.poke();
+    rt->gc.poke = true;
 }
 
 JS_FRIEND_API(JSCompartment *)
 js::GetAnyCompartmentInZone(JS::Zone *zone)
 {
     CompartmentsInZoneIter comp(zone);
     JS_ASSERT(!comp.done());
     return comp.get();
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1090,50 +1090,59 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     markingValidator(nullptr),
 #endif
     interFrameGC(0),
     sliceBudget(SliceBudget::Unlimited),
     incrementalEnabled(true),
     generationalDisabled(0),
     manipulatingDeadZones(false),
     objectsMarkedInDeadZones(0),
-    poked(false),
+    poke(false),
     heapState(Idle),
 #ifdef JSGC_GENERATIONAL
     nursery(rt),
     storeBuffer(rt, nursery),
 #endif
 #ifdef JS_GC_ZEAL
     zealMode(0),
     zealFrequency(0),
     nextScheduled(0),
     deterministicOnly(false),
     incrementalLimit(0),
 #endif
     validate(true),
     fullCompartmentChecks(false),
+    gcCallback(nullptr),
+    sliceCallback(nullptr),
     mallocBytes(0),
     mallocGCTriggered(false),
+    scriptAndCountsVector(nullptr),
 #ifdef DEBUG
     inUnsafeRegion(0),
 #endif
     alwaysPreserveCode(false),
 #ifdef DEBUG
     noGCOrAllocationCheck(0),
 #endif
     lock(nullptr),
     lockOwner(nullptr),
     helperState(rt)
 {
 }
 
 #ifdef JS_GC_ZEAL
 
+extern void
+js::SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency)
+{
+    rt->gc.setGCZeal(zeal, frequency);
+}
+
 void
-GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
+GCRuntime::setGCZeal(uint8_t zeal, uint32_t frequency)
 {
     if (verifyPreData)
         VerifyBarriers(rt, PreBarrierVerifier);
     if (verifyPostData)
         VerifyBarriers(rt, PostBarrierVerifier);
 
 #ifdef JSGC_GENERATIONAL
     if (zealMode == ZealGenerationalGCValue) {
@@ -1146,24 +1155,18 @@ GCRuntime::setZeal(uint8_t zeal, uint32_
 #endif
 
     bool schedule = zeal >= js::gc::ZealAllocValue;
     zealMode = zeal;
     zealFrequency = frequency;
     nextScheduled = schedule ? frequency : 0;
 }
 
-void
-GCRuntime::setNextScheduled(uint32_t count)
-{
-    nextScheduled = count;
-}
-
 bool
-GCRuntime::initZeal()
+GCRuntime::initGCZeal()
 {
     const char *env = getenv("JS_GC_ZEAL");
     if (!env)
         return true;
 
     int zeal = -1;
     int frequency = JS_DEFAULT_ZEAL_FREQ;
     if (strcmp(env, "help") != 0) {
@@ -1189,17 +1192,17 @@ GCRuntime::initZeal()
                 "  9: Incremental GC in two slices: 1) mark all 2) new marking and finish\n"
                 " 10: Incremental GC in multiple slices\n"
                 " 11: Verify post write barriers between instructions\n"
                 " 12: Verify post write barriers between paints\n"
                 " 13: Purge analysis state every F allocations (default: 100)\n");
         return false;
     }
 
-    setZeal(zeal, frequency);
+    setGCZeal(zeal, frequency);
     return true;
 }
 
 #endif
 
 /* Lifetime for type sets attached to scripts containing observed types. */
 static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
 
@@ -1221,32 +1224,32 @@ GCRuntime::init(uint32_t maxbytes)
     if (!helperState.init())
         return false;
 
     /*
      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
      * for default backward API compatibility.
      */
     maxBytes = maxbytes;
-    setMaxMallocBytes(maxbytes);
+    rt->setGCMaxMallocBytes(maxbytes);
 
 #ifndef JS_MORE_DETERMINISTIC
     jitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
 #endif
 
 #ifdef JSGC_GENERATIONAL
     if (!nursery.init())
         return false;
 
     if (!storeBuffer.enable())
         return false;
 #endif
 
 #ifdef JS_GC_ZEAL
-    if (!initZeal())
+    if (!initGCZeal())
         return false;
 #endif
 
     if (!marker.init(mode))
         return false;
 
     return true;
 }
@@ -1321,73 +1324,16 @@ js::gc::FinishPersistentRootedChains(JSR
     rt->stringPersistentRooteds.clear();
     rt->valuePersistentRooteds.clear();
 }
 
 template <typename T> struct BarrierOwner {};
 template <typename T> struct BarrierOwner<T *> { typedef T result; };
 template <> struct BarrierOwner<Value> { typedef HeapValue result; };
 
-bool
-GCRuntime::addBlackRootsTracer(JSTraceDataOp traceOp, void *data)
-{
-    AssertHeapIsIdle(rt);
-    return !!blackRootTracers.append(Callback<JSTraceDataOp>(traceOp, data));
-}
-
-void
-GCRuntime::removeBlackRootsTracer(JSTraceDataOp traceOp, void *data)
-{
-    // Can be called from finalizers
-    for (size_t i = 0; i < blackRootTracers.length(); i++) {
-        Callback<JSTraceDataOp> *e = &blackRootTracers[i];
-        if (e->op == traceOp && e->data == data) {
-            blackRootTracers.erase(e);
-        }
-    }
-}
-
-void
-GCRuntime::setGrayRootsTracer(JSTraceDataOp traceOp, void *data)
-{
-    AssertHeapIsIdle(rt);
-    grayRootTracer.op = traceOp;
-    grayRootTracer.data = data;
-}
-
-void
-GCRuntime::setGCCallback(JSGCCallback callback, void *data)
-{
-    gcCallback.op = callback;
-    gcCallback.data = data;
-}
-
-bool
-GCRuntime::addFinalizeCallback(JSFinalizeCallback callback, void *data)
-{
-    return finalizeCallbacks.append(Callback<JSFinalizeCallback>(callback, data));
-}
-
-void
-GCRuntime::removeFinalizeCallback(JSFinalizeCallback callback)
-{
-    for (Callback<JSFinalizeCallback> *p = finalizeCallbacks.begin();
-         p < finalizeCallbacks.end(); p++) {
-        if (p->op == callback) {
-            finalizeCallbacks.erase(p);
-            break;
-        }
-    }
-}
-
-JS::GCSliceCallback
-GCRuntime::setSliceCallback(JS::GCSliceCallback callback) {
-    return stats.setSliceCallback(callback);
-}
-
 template <typename T>
 bool
 GCRuntime::addRoot(T *rp, const char *name, JSGCRootType rootType)
 {
     /*
      * Sometimes Firefox will hold weak references to objects and then convert
      * them to strong references by calling AddRoot (e.g., via PreserveWrapper,
      * or ModifyBusyCount in workers). We need a read barrier to cover these
@@ -1398,17 +1344,17 @@ GCRuntime::addRoot(T *rp, const char *na
 
     return rt->gc.rootsHash.put((void *)rp, RootInfo(name, rootType));
 }
 
 void
 GCRuntime::removeRoot(void *rp)
 {
     rootsHash.remove(rp);
-    poke();
+    poke = true;
 }
 
 template <typename T>
 static bool
 AddRoot(JSRuntime *rt, T *rp, const char *name, JSGCRootType rootType)
 {
     return rt->gc.addRoot(rp, name, rootType);
 }
@@ -1472,52 +1418,19 @@ js::RemoveRawValueRoot(JSContext *cx, Va
 }
 
 void
 js::RemoveRoot(JSRuntime *rt, void *rp)
 {
     rt->gc.removeRoot(rp);
 }
 
-void
-GCRuntime::setMaxMallocBytes(size_t value)
-{
-    /*
-     * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
-     * mean that value.
-     */
-    maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
-    resetMallocBytes();
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
-        zone->setGCMaxMallocBytes(value);
-}
-
-void
-GCRuntime::resetMallocBytes()
-{
-    mallocBytes = ptrdiff_t(maxMallocBytes);
-    mallocGCTriggered = false;
-}
-
-void
-GCRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes)
-{
-    mallocBytes -= ptrdiff_t(nbytes);
-    if (MOZ_UNLIKELY(isTooMuchMalloc()))
-        onTooMuchMalloc();
-    else if (zone)
-        zone->updateMallocCounter(nbytes);
-}
-
-void
-GCRuntime::onTooMuchMalloc()
-{
-    if (!mallocGCTriggered)
-        mallocGCTriggered = triggerGC(JS::gcreason::TOO_MUCH_MALLOC);
-}
+typedef RootedValueMap::Range RootRange;
+typedef RootedValueMap::Entry RootEntry;
+typedef RootedValueMap::Enum RootEnum;
 
 static size_t
 ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
 {
     size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gc.allocationThreshold);
     double trigger = double(base) * zone->gcHeapGrowthFactor;
     return size_t(Min(double(maxBytes), trigger));
 }
@@ -4436,26 +4349,26 @@ AutoGCSession::~AutoGCSession()
 #ifndef JS_MORE_DETERMINISTIC
     gc->nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
 #endif
 
     gc->chunkAllocationSinceLastGC = false;
 
 #ifdef JS_GC_ZEAL
     /* Keeping these around after a GC is dangerous. */
-    gc->clearSelectedForMarking();
+    gc->selectedForMarking.clearAndFree();
 #endif
 
     /* Clear gcMallocBytes for all compartments */
     for (ZonesIter zone(gc->rt, WithAtoms); !zone.done(); zone.next()) {
         zone->resetGCMallocBytes();
         zone->unscheduleGC();
     }
 
-    gc->resetMallocBytes();
+    gc->rt->resetGCMallocBytes();
 }
 
 AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector)
   : runtime(rt),
     selector(selector)
 {
     for (ZonesIter zone(rt, selector); !zone.done(); zone.next())
         zone->allocator.arenas.copyFreeListsToArenas();
@@ -4778,17 +4691,17 @@ GCRuntime::budgetIncrementalGC(int64_t *
 
     if (mode != JSGC_MODE_INCREMENTAL) {
         resetIncrementalGC("GC mode change");
         *budget = SliceBudget::Unlimited;
         stats.nonincremental("GC mode");
         return;
     }
 
-    if (isTooMuchMalloc()) {
+    if (rt->isTooMuchMalloc()) {
         *budget = SliceBudget::Unlimited;
         stats.nonincremental("malloc bytes trigger");
     }
 
     bool reset = false;
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->gcBytes >= zone->gcTriggerBytes) {
             *budget = SliceBudget::Unlimited;
@@ -4977,31 +4890,31 @@ GCRuntime::collect(bool incremental, int
         gcstats::AutoGCSlice agc(stats, collectedCount, zoneCount, compartmentCount, reason);
 
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context). Invoke the callback regardless.
          */
         if (incrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_GC_BEGIN);
-            if (gcCallback.op)
-                gcCallback.op(rt, JSGC_BEGIN, gcCallback.data);
+            if (gcCallback)
+                gcCallback(rt, JSGC_BEGIN, gcCallbackData);
         }
 
-        poked = false;
+        poke = false;
         bool wasReset = gcCycle(incremental, budget, gckind, reason);
 
         if (incrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_GC_END);
-            if (gcCallback.op)
-                gcCallback.op(rt, JSGC_END, gcCallback.data);
+            if (gcCallback)
+                gcCallback(rt, JSGC_END, gcCallbackData);
         }
 
         /* Need to re-schedule all zones for GC. */
-        if (poked && shouldCleanUpEverything)
+        if (poke && shouldCleanUpEverything)
             JS::PrepareForFullGC(rt);
 
         /*
          * This code makes an extra effort to collect compartments that we
          * thought were dead at the start of the GC. See the large comment in
          * beginMarkPhase.
          */
         bool repeatForDeadZone = false;
@@ -5017,17 +4930,17 @@ GCRuntime::collect(bool incremental, int
         }
 
         /*
          * If we reset an existing GC, we need to start a new one. Also, we
          * repeat GCs that happen during shutdown (the gcShouldCleanUpEverything
          * case) until we can be sure that no additional garbage is created
          * (which typically happens if roots are dropped during finalizers).
          */
-        repeat = (poked && shouldCleanUpEverything) || wasReset || repeatForDeadZone;
+        repeat = (poke && shouldCleanUpEverything) || wasReset || repeatForDeadZone;
     } while (repeat);
 
     if (incrementalState == NO_INCREMENTAL) {
 #ifdef JS_THREADSAFE
         EnqueuePendingParseTasksAfterGC(rt);
 #endif
     }
 }
@@ -5377,50 +5290,37 @@ GCRuntime::runDebugGC()
     } else {
         collect(false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC);
     }
 
 #endif
 }
 
 void
-GCRuntime::setValidate(bool enabled)
-{
-    JS_ASSERT(!isHeapMajorCollecting());
-    validate = enabled;
+gc::SetDeterministicGC(JSContext *cx, bool enabled)
+{
+#ifdef JS_GC_ZEAL
+    JSRuntime *rt = cx->runtime();
+    rt->gc.deterministicOnly = enabled;
+#endif
 }
 
 void
-GCRuntime::setFullCompartmentChecks(bool enabled)
-{
-    JS_ASSERT(!isHeapMajorCollecting());
-    fullCompartmentChecks = enabled;
-}
-
-#ifdef JS_GC_ZEAL
-bool
-GCRuntime::selectForMarking(JSObject *object)
-{
-    JS_ASSERT(!isHeapMajorCollecting());
-    return selectedForMarking.append(object);
+gc::SetValidateGC(JSContext *cx, bool enabled)
+{
+    JSRuntime *rt = cx->runtime();
+    rt->gc.validate = enabled;
 }
 
 void
-GCRuntime::clearSelectedForMarking()
-{
-    selectedForMarking.clearAndFree();
-}
-
-void
-GCRuntime::setDeterministic(bool enabled)
-{
-    JS_ASSERT(!isHeapMajorCollecting());
-    deterministicOnly = enabled;
-}
-#endif
+gc::SetFullCompartmentChecks(JSContext *cx, bool enabled)
+{
+    JSRuntime *rt = cx->runtime();
+    rt->gc.fullCompartmentChecks = enabled;
+}
 
 #ifdef DEBUG
 
 /* Should only be called manually under gdb */
 void PreventGCDuringInteractiveDebug()
 {
     TlsPerThreadData.get()->suppressGC++;
 }
@@ -5469,16 +5369,115 @@ js::ReleaseAllJITCode(FreeOp *fop)
             jit::FinishDiscardBaselineScript(fop, script);
         }
 
         zone->jitZone()->optimizedStubSpace()->free();
     }
 #endif
 }
 
+/*
+ * There are three possible PCCount profiling states:
+ *
+ * 1. None: Neither scripts nor the runtime have count information.
+ * 2. Profile: Active scripts have count information, the runtime does not.
+ * 3. Query: Scripts do not have count information, the runtime does.
+ *
+ * When starting to profile scripts, counting begins immediately, with all JIT
+ * code discarded and recompiled with counts as necessary. Active interpreter
+ * frames will not begin profiling until they begin executing another script
+ * (via a call or return).
+ *
+ * The below API functions manage transitions to new states, according
+ * to the table below.
+ *
+ *                                  Old State
+ *                          -------------------------
+ * Function                 None      Profile   Query
+ * --------
+ * StartPCCountProfiling    Profile   Profile   Profile
+ * StopPCCountProfiling     None      Query     Query
+ * PurgePCCounts            None      None      None
+ */
+
+static void
+ReleaseScriptCounts(FreeOp *fop)
+{
+    JSRuntime *rt = fop->runtime();
+    JS_ASSERT(rt->gc.scriptAndCountsVector);
+
+    ScriptAndCountsVector &vec = *rt->gc.scriptAndCountsVector;
+
+    for (size_t i = 0; i < vec.length(); i++)
+        vec[i].scriptCounts.destroy(fop);
+
+    fop->delete_(rt->gc.scriptAndCountsVector);
+    rt->gc.scriptAndCountsVector = nullptr;
+}
+
+JS_FRIEND_API(void)
+js::StartPCCountProfiling(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime();
+
+    if (rt->profilingScripts)
+        return;
+
+    if (rt->gc.scriptAndCountsVector)
+        ReleaseScriptCounts(rt->defaultFreeOp());
+
+    ReleaseAllJITCode(rt->defaultFreeOp());
+
+    rt->profilingScripts = true;
+}
+
+JS_FRIEND_API(void)
+js::StopPCCountProfiling(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime();
+
+    if (!rt->profilingScripts)
+        return;
+    JS_ASSERT(!rt->gc.scriptAndCountsVector);
+
+    ReleaseAllJITCode(rt->defaultFreeOp());
+
+    ScriptAndCountsVector *vec = cx->new_<ScriptAndCountsVector>(SystemAllocPolicy());
+    if (!vec)
+        return;
+
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
+        for (ZoneCellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
+            JSScript *script = i.get<JSScript>();
+            if (script->hasScriptCounts() && script->types) {
+                ScriptAndCounts sac;
+                sac.script = script;
+                sac.scriptCounts.set(script->releaseScriptCounts());
+                if (!vec->append(sac))
+                    sac.scriptCounts.destroy(rt->defaultFreeOp());
+            }
+        }
+    }
+
+    rt->profilingScripts = false;
+    rt->gc.scriptAndCountsVector = vec;
+}
+
+JS_FRIEND_API(void)
+js::PurgePCCounts(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime();
+
+    if (!rt->gc.scriptAndCountsVector)
+        return;
+    JS_ASSERT(!rt->profilingScripts);
+
+    ReleaseScriptCounts(rt->defaultFreeOp());
+}
+
 void
 js::PurgeJITCaches(Zone *zone)
 {
 #ifdef JS_ION
     for (ZoneCellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
 
         /* Discard Ion caches. */
@@ -5597,72 +5596,74 @@ JS::AssertGCThingMustBeTenured(JSObject 
 {
     JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
               obj->isTenured());
 }
 
 JS_FRIEND_API(void)
 js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
 {
+#ifdef DEBUG
     JS_ASSERT(cell);
     if (IsInsideNursery(cell))
         JS_ASSERT(kind == JSTRACE_OBJECT);
     else
         JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
+#endif
 }
 
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
     return rt->gc.number;
 }
 #endif
 
 #ifdef DEBUG
 JS::AutoAssertOnGC::AutoAssertOnGC()
-  : gc(nullptr), gcNumber(0)
+  : runtime(nullptr), gcNumber(0)
 {
     js::PerThreadData *data = js::TlsPerThreadData.get();
     if (data) {
         /*
          * GC's from off-thread will always assert, so off-thread is implicitly
          * AutoAssertOnGC. We still need to allow AutoAssertOnGC to be used in
          * code that works from both threads, however. We also use this to
          * annotate the off thread run loops.
          */
-        JSRuntime *runtime = data->runtimeIfOnOwnerThread();
+        runtime = data->runtimeIfOnOwnerThread();
         if (runtime) {
-            gc = &runtime->gc;
-            gcNumber = gc->number;
-            gc->enterUnsafeRegion();
+            gcNumber = runtime->gc.number;
+            ++runtime->gc.inUnsafeRegion;
         }
     }
 }
 
 JS::AutoAssertOnGC::AutoAssertOnGC(JSRuntime *rt)
-  : gc(&rt->gc), gcNumber(rt->gc.number)
-{
-    gc->enterUnsafeRegion();
+  : runtime(rt), gcNumber(rt->gc.number)
+{
+    ++rt->gc.inUnsafeRegion;
 }
 
 JS::AutoAssertOnGC::~AutoAssertOnGC()
 {
-    if (gc) {
-        gc->leaveUnsafeRegion();
+    if (runtime) {
+        --runtime->gc.inUnsafeRegion;
+        MOZ_ASSERT(runtime->gc.inUnsafeRegion >= 0);
 
         /*
          * The following backstop assertion should never fire: if we bumped the
          * gcNumber, we should have asserted because inUnsafeRegion was true.
          */
-        MOZ_ASSERT(gcNumber == gc->number, "GC ran inside an AutoAssertOnGC scope.");
+        MOZ_ASSERT(gcNumber == runtime->gc.number, "GC ran inside an AutoAssertOnGC scope.");
     }
 }
 
 /* static */ void
 JS::AutoAssertOnGC::VerifyIsSafeToGC(JSRuntime *rt)
 {
-    if (rt->gc.isInsideUnsafeRegion())
+    if (rt->gc.inUnsafeRegion > 0)
         MOZ_CRASH("[AutoAssertOnGC] possible GC in GC-unsafe region");
 }
 #endif
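
Several hunks in this file move the malloc-pressure accounting back onto JSRuntime (setMaxMallocBytes, resetMallocBytes, updateMallocCounter, onTooMuchMalloc). The underlying mechanism is a countdown counter: it starts at the configured maximum and a GC is requested once it falls to or below zero. A self-contained sketch with placeholder names:

    #include <cstddef>

    struct MallocTriggerSketch {
        ptrdiff_t mallocBytes = 0;   // counts down from the configured maximum
        bool mallocGCTriggered = false;

        void setMaxMallocBytes(size_t value) {
            // Clamp values that do not fit in ptrdiff_t, as the removed
            // GCRuntime::setMaxMallocBytes did.
            mallocBytes = (ptrdiff_t(value) >= 0) ? ptrdiff_t(value)
                                                  : ptrdiff_t(size_t(-1) >> 1);
            mallocGCTriggered = false;
        }

        bool isTooMuchMalloc() const { return mallocBytes <= 0; }

        // Returns true when a TOO_MUCH_MALLOC GC should be requested.
        bool updateMallocCounter(size_t nbytes) {
            mallocBytes -= ptrdiff_t(nbytes);
            if (isTooMuchMalloc() && !mallocGCTriggered) {
                mallocGCTriggered = true;
                return true;
            }
            return false;
        }
    };
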
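The PCCount profiling comment added to this file describes a three-state machine (None, Profile, Query). A compact sketch of those transitions exactly as the table in the comment lays them out; the enum and function names are illustrative:

    enum class PCCountState { None, Profile, Query };

    // Transitions per the table in the comment:
    //   StartPCCountProfiling: any state -> Profile
    //   StopPCCountProfiling:  None -> None, otherwise -> Query
    //   PurgePCCounts:         any state -> None
    struct PCCountStateSketch {
        PCCountState state = PCCountState::None;

        void start() { state = PCCountState::Profile; }
        void stop()  {
            if (state != PCCountState::None)
                state = PCCountState::Query;
        }
        void purge() { state = PCCountState::None; }
    };
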
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -895,16 +895,21 @@ extern void
 PrepareForDebugGC(JSRuntime *rt);
 
 extern void
 MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);
 
 extern void
 MinorGC(JSContext *cx, JS::gcreason::Reason reason);
 
+#ifdef JS_GC_ZEAL
+extern void
+SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
+#endif
+
 /* Functions for managing cross compartment gray pointers. */
 
 extern void
 DelayCrossCompartmentGrayMarking(JSObject *src);
 
 extern void
 NotifyGCNukeWrapper(JSObject *o);
 
@@ -1151,16 +1156,25 @@ namespace gc {
 
 extern void
 GCIfNeeded(JSContext *cx);
 
 /* Tries to run a GC no matter what (used for GC zeal). */
 void
 RunDebugGC(JSContext *cx);
 
+void
+SetDeterministicGC(JSContext *cx, bool enabled);
+
+void
+SetValidateGC(JSContext *cx, bool enabled);
+
+void
+SetFullCompartmentChecks(JSContext *cx, bool enabled);
+
 /* Wait for the background thread to finish sweeping if it is running. */
 void
 FinishBackgroundFinalize(JSRuntime *rt);
 
 /*
  * Merge all contents of source into target. This can only be used if source is
  * the only compartment in its zone.
  */
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -71,25 +71,25 @@ GetGCThingTraceKind(const void *thing)
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
     if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
 #endif
     return MapAllocToTraceKind(cell->tenuredGetAllocKind());
 }
 
-inline void
-GCRuntime::poke()
+static inline void
+GCPoke(JSRuntime *rt)
 {
-    poked = true;
+    rt->gc.poke = true;
 
 #ifdef JS_GC_ZEAL
     /* Schedule a GC to happen "soon" after a GC poke. */
-    if (zealMode == ZealPokeValue)
-        nextScheduled = 1;
+    if (rt->gcZeal() == js::gc::ZealPokeValue)
+        rt->gc.nextScheduled = 1;
 #endif
 }
 
 class ArenaIter
 {
     ArenaHeader *aheader;
     ArenaHeader *remainingHeader;
 
@@ -484,17 +484,17 @@ CheckAllocatorState(ThreadSafeContext *c
     // For testing out of memory conditions
     if (!PossiblyFail()) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
     if (allowGC) {
 #ifdef JS_GC_ZEAL
-        if (rt->gc.needZealousGC())
+        if (rt->needZealousGC())
             js::gc::RunDebugGC(ncx);
 #endif
 
         if (rt->interrupt) {
             // Invoking the interrupt callback can fail and we can't usefully
             // handle that here. Just check in case we need to collect instead.
             js::gc::GCIfNeeded(ncx);
         }
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -66,22 +66,24 @@ template <>
 struct RootKind<TaggedProto>
 {
     static ThingRootKind rootKind() { return THING_ROOT_OBJECT; }
 };
 
 template <> struct GCMethods<const TaggedProto>
 {
     static TaggedProto initial() { return TaggedProto(); }
+    static ThingRootKind kind() { return THING_ROOT_OBJECT; }
     static bool poisoned(const TaggedProto &v) { return IsPoisonedPtr(v.raw()); }
 };
 
 template <> struct GCMethods<TaggedProto>
 {
     static TaggedProto initial() { return TaggedProto(); }
+    static ThingRootKind kind() { return THING_ROOT_OBJECT; }
     static bool poisoned(const TaggedProto &v) { return IsPoisonedPtr(v.raw()); }
 };
 
 template<class Outer>
 class TaggedProtoOperations
 {
     const TaggedProto *value() const {
         return static_cast<const Outer*>(this)->extract();
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -1266,26 +1266,28 @@ JSScript::ensureHasTypes(JSContext *cx)
 }
 
 namespace js {
 
 template <>
 struct GCMethods<const types::Type>
 {
     static types::Type initial() { return types::Type::UnknownType(); }
+    static ThingRootKind kind() { return THING_ROOT_TYPE; }
     static bool poisoned(const types::Type &v) {
         return (v.isTypeObject() && IsPoisonedPtr(v.typeObject()))
             || (v.isSingleObject() && IsPoisonedPtr(v.singleObject()));
     }
 };
 
 template <>
 struct GCMethods<types::Type>
 {
     static types::Type initial() { return types::Type::UnknownType(); }
+    static ThingRootKind kind() { return THING_ROOT_TYPE; }
     static bool poisoned(const types::Type &v) {
         return (v.isTypeObject() && IsPoisonedPtr(v.typeObject()))
             || (v.isSingleObject() && IsPoisonedPtr(v.singleObject()));
     }
 };
 
 } // namespace js
 
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -5269,17 +5269,17 @@ baseops::DeleteGeneric(JSContext *cx, Ha
     if (!shape || proto != obj) {
         /*
          * If no property, or the property comes from a prototype, call the
          * class's delProperty hook, passing succeeded as the result parameter.
          */
         return CallJSDeletePropertyOp(cx, obj->getClass()->delProperty, obj, id, succeeded);
     }
 
-    cx->runtime()->gc.poke();
+    GCPoke(cx->runtime());
 
     if (IsImplicitDenseOrTypedArrayElement(shape)) {
         if (obj->is<TypedArrayObject>()) {
             // Don't delete elements from typed arrays.
             *succeeded = false;
             return true;
         }
 
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -1931,124 +1931,25 @@ js::IsValidBytecodeOffset(JSContext *cx,
     for (BytecodeRange r(cx, script); !r.empty(); r.popFront()) {
         size_t here = r.frontOffset();
         if (here >= offset)
             return here == offset;
     }
     return false;
 }
 
-/*
- * There are three possible PCCount profiling states:
- *
- * 1. None: Neither scripts nor the runtime have count information.
- * 2. Profile: Active scripts have count information, the runtime does not.
- * 3. Query: Scripts do not have count information, the runtime does.
- *
- * When starting to profile scripts, counting begins immediately, with all JIT
- * code discarded and recompiled with counts as necessary. Active interpreter
- * frames will not begin profiling until they begin executing another script
- * (via a call or return).
- *
- * The below API functions manage transitions to new states, according
- * to the table below.
- *
- *                                  Old State
- *                          -------------------------
- * Function                 None      Profile   Query
- * --------
- * StartPCCountProfiling    Profile   Profile   Profile
- * StopPCCountProfiling     None      Query     Query
- * PurgePCCounts            None      None      None
- */
-
-static void
-ReleaseScriptCounts(FreeOp *fop)
-{
-    JSRuntime *rt = fop->runtime();
-    JS_ASSERT(rt->scriptAndCountsVector);
-
-    ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
-
-    for (size_t i = 0; i < vec.length(); i++)
-        vec[i].scriptCounts.destroy(fop);
-
-    fop->delete_(rt->scriptAndCountsVector);
-    rt->scriptAndCountsVector = nullptr;
-}
-
-JS_FRIEND_API(void)
-js::StartPCCountProfiling(JSContext *cx)
-{
-    JSRuntime *rt = cx->runtime();
-
-    if (rt->profilingScripts)
-        return;
-
-    if (rt->scriptAndCountsVector)
-        ReleaseScriptCounts(rt->defaultFreeOp());
-
-    ReleaseAllJITCode(rt->defaultFreeOp());
-
-    rt->profilingScripts = true;
-}
-
-JS_FRIEND_API(void)
-js::StopPCCountProfiling(JSContext *cx)
-{
-    JSRuntime *rt = cx->runtime();
-
-    if (!rt->profilingScripts)
-        return;
-    JS_ASSERT(!rt->scriptAndCountsVector);
-
-    ReleaseAllJITCode(rt->defaultFreeOp());
-
-    ScriptAndCountsVector *vec = cx->new_<ScriptAndCountsVector>(SystemAllocPolicy());
-    if (!vec)
-        return;
-
-    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
-        for (ZoneCellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            if (script->hasScriptCounts() && script->types) {
-                ScriptAndCounts sac;
-                sac.script = script;
-                sac.scriptCounts.set(script->releaseScriptCounts());
-                if (!vec->append(sac))
-                    sac.scriptCounts.destroy(rt->defaultFreeOp());
-            }
-        }
-    }
-
-    rt->profilingScripts = false;
-    rt->scriptAndCountsVector = vec;
-}
-
-JS_FRIEND_API(void)
-js::PurgePCCounts(JSContext *cx)
-{
-    JSRuntime *rt = cx->runtime();
-
-    if (!rt->scriptAndCountsVector)
-        return;
-    JS_ASSERT(!rt->profilingScripts);
-
-    ReleaseScriptCounts(rt->defaultFreeOp());
-}
-
 JS_FRIEND_API(size_t)
 js::GetPCCountScriptCount(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime();
 
-    if (!rt->scriptAndCountsVector)
+    if (!rt->gc.scriptAndCountsVector)
         return 0;
 
-    return rt->scriptAndCountsVector->length();
+    return rt->gc.scriptAndCountsVector->length();
 }
 
 enum MaybeComma {NO_COMMA, COMMA};
 
 static void
 AppendJSONProperty(StringBuffer &buf, const char *name, MaybeComma comma = COMMA)
 {
     if (comma)
@@ -2073,22 +1974,22 @@ AppendArrayJSONProperties(JSContext *cx,
     }
 }
 
 JS_FRIEND_API(JSString *)
 js::GetPCCountScriptSummary(JSContext *cx, size_t index)
 {
     JSRuntime *rt = cx->runtime();
 
-    if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) {
+    if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) {
         JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL);
         return nullptr;
     }
 
-    const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index];
+    const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index];
     RootedScript script(cx, sac.script);
 
     /*
      * OOM on buffer appends here will not be caught immediately. Since
      * StringBuffer uses a ContextAllocPolicy, OOM will trigger an exception
      * on the context, which we'll catch before returning.
      */
     StringBuffer buf(cx);
@@ -2333,22 +2234,22 @@ GetPCCountJSON(JSContext *cx, const Scri
     return !cx->isExceptionPending();
 }
 
 JS_FRIEND_API(JSString *)
 js::GetPCCountScriptContents(JSContext *cx, size_t index)
 {
     JSRuntime *rt = cx->runtime();
 
-    if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) {
+    if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) {
         JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL);
         return nullptr;
     }
 
-    const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index];
+    const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index];
     JSScript *script = sac.script;
 
     StringBuffer buf(cx);
 
     if (!script->functionNonDelazifying() && !script->compileAndGo())
         return buf.finishString();
 
     {
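
The comment removed from jsopcode.cpp in the hunk above spells out the three PCCount profiling states and the transition table for the Start/Stop/Purge entry points. Purely as a standalone illustration of that table (these functions model the documented transitions, not the real friend API):

    // Standalone model of the documented PCCount transitions; illustrative only.
    enum class PCCountState { None, Profile, Query };

    // StartPCCountProfiling: every state moves to Profile.
    PCCountState startProfiling(PCCountState) {
        return PCCountState::Profile;
    }

    // StopPCCountProfiling: None stays None, Profile and Query move to Query.
    PCCountState stopProfiling(PCCountState s) {
        return s == PCCountState::None ? PCCountState::None : PCCountState::Query;
    }

    // PurgePCCounts: every state moves back to None.
    PCCountState purgeCounts(PCCountState) {
        return PCCountState::None;
    }
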
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -268,16 +268,17 @@ class Bindings
 
     static js::ThingRootKind rootKind() { return js::THING_ROOT_BINDINGS; }
     void trace(JSTracer *trc);
 };
 
 template <>
 struct GCMethods<Bindings> {
     static Bindings initial();
+    static ThingRootKind kind() { return THING_ROOT_BINDINGS; }
     static bool poisoned(const Bindings &bindings) {
         return IsPoisonedPtr(static_cast<Shape *>(bindings.callObjShape()));
     }
 };
 
 class ScriptCounts
 {
     friend class ::JSScript;
@@ -1922,31 +1923,16 @@ extern void
 UnmarkScriptData(JSRuntime *rt);
 
 extern void
 SweepScriptData(JSRuntime *rt);
 
 extern void
 FreeScriptData(JSRuntime *rt);
 
-struct ScriptAndCounts
-{
-    /* This structure is stored and marked from the JSRuntime. */
-    JSScript *script;
-    ScriptCounts scriptCounts;
-
-    PCCounts &getPCCounts(jsbytecode *pc) const {
-        return scriptCounts.pcCountsVector[script->pcToOffset(pc)];
-    }
-
-    jit::IonScriptCounts *getIonCounts() const {
-        return scriptCounts.ionCounts;
-    }
-};
-
 struct GSNCache;
 
 jssrcnote *
 GetSrcNote(GSNCache &cache, JSScript *script, jsbytecode *pc);
 
 } /* namespace js */
 
 extern jssrcnote *
--- a/js/src/vm/PropDesc.h
+++ b/js/src/vm/PropDesc.h
@@ -331,16 +331,17 @@ class MutablePropDescOperations : public
 
 } /* namespace JS */
 
 namespace js {
 
 template <>
 struct GCMethods<PropDesc> {
     static PropDesc initial() { return PropDesc(); }
+    static ThingRootKind kind() { return THING_ROOT_PROP_DESC; }
     static bool poisoned(const PropDesc &desc) {
         return JS::IsPoisonedPtr(desc.descObj_) ||
                (desc.value_.isGCThing() &&
                 JS::IsPoisonedPtr(desc.value_.toGCThing())) ||
                (desc.get_.isGCThing() &&
                 JS::IsPoisonedPtr(desc.get_.toGCThing())) ||
                (desc.set_.isGCThing() &&
                 JS::IsPoisonedPtr(desc.set_.toGCThing()));
--- a/js/src/vm/Runtime-inl.h
+++ b/js/src/vm/Runtime-inl.h
@@ -51,17 +51,17 @@ NewObjectCache::newObjectFromHit(JSConte
 
     // Do an end run around JSObject::type() to avoid doing AutoUnprotectCell
     // on the templateObj, which is not a GC thing and can't use runtimeFromAnyThread.
     types::TypeObject *type = templateObj->type_;
 
     if (type->shouldPreTenure())
         heap = gc::TenuredHeap;
 
-    if (cx->runtime()->gc.upcomingZealousGC())
+    if (cx->runtime()->upcomingZealousGC())
         return nullptr;
 
     // Trigger an identical allocation to the one that notified us of OOM
     // so that we trigger the right kind of GC automatically.
     if (allowGC) {
         mozilla::DebugOnly<JSObject *> obj =
             js::gc::AllocateObjectForCacheHit<allowGC>(cx, entry->kind, heap);
         JS_ASSERT(!obj);
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -176,17 +176,16 @@ JSRuntime::JSRuntime(JSRuntime *parentRu
 #ifdef DEBUG
     activeContext(nullptr),
 #endif
     gc(thisFromCtor()),
     gcInitialized(false),
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     simulatorRuntime_(nullptr),
 #endif
-    scriptAndCountsVector(nullptr),
     NaNValue(DoubleNaNValue()),
     negativeInfinityValue(DoubleValue(NegativeInfinity<double>())),
     positiveInfinityValue(DoubleValue(PositiveInfinity<double>())),
     emptyString(nullptr),
     debugMode(false),
     spsProfiler(thisFromCtor()),
     profilingScripts(false),
     hadOutOfMemory(false),
@@ -691,33 +690,53 @@ JSRuntime::triggerActivityCallback(bool 
      * property and ensures that it remains true in the future.
      */
     AutoSuppressGC suppress(this);
 
     activityCallback(activityCallbackArg, active);
 }
 
 void
+JSRuntime::setGCMaxMallocBytes(size_t value)
+{
+    /*
+     * For compatibility, treat any value that exceeds PTRDIFF_T_MAX as
+     * PTRDIFF_T_MAX.
+     */
+    gc.maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
+    resetGCMallocBytes();
+    for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next())
+        zone->setGCMaxMallocBytes(value);
+}
+
+void
 JSRuntime::updateMallocCounter(size_t nbytes)
 {
     updateMallocCounter(nullptr, nbytes);
 }
 
 void
 JSRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes)
 {
-    gc.updateMallocCounter(zone, nbytes);
+    /* We tolerate any thread races when updating gcMallocBytes. */
+    gc.mallocBytes -= ptrdiff_t(nbytes);
+    if (MOZ_UNLIKELY(gc.mallocBytes <= 0))
+        onTooMuchMalloc();
+    else if (zone)
+        zone->updateMallocCounter(nbytes);
 }
 
 JS_FRIEND_API(void)
 JSRuntime::onTooMuchMalloc()
 {
     if (!CurrentThreadCanAccessRuntime(this))
         return;
-    gc.onTooMuchMalloc();
+
+    if (!gc.mallocGCTriggered)
+        gc.mallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC);
 }
 
 JS_FRIEND_API(void *)
 JSRuntime::onOutOfMemory(void *p, size_t nbytes)
 {
     return onOutOfMemory(p, nbytes, nullptr);
 }
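
The hunks above restore malloc accounting that works as a signed countdown: setGCMaxMallocBytes() clamps the budget so it fits in a ptrdiff_t, updateMallocCounter() subtracts each allocation from gc.mallocBytes, and the first time the counter reaches zero or below onTooMuchMalloc() requests a TOO_MUCH_MALLOC GC exactly once. A simplified, self-contained model of that accounting (the class and the printout are illustrative; the real code lives on JSRuntime and triggers a GC instead):

    #include <cstddef>
    #include <cstdio>

    // Simplified model of the restored malloc accounting; illustrative only.
    class MallocCounter
    {
        std::size_t maxBytes;
        std::ptrdiff_t bytesLeft;
        bool triggered;

      public:
        explicit MallocCounter(std::size_t max) { setMax(max); }

        void setMax(std::size_t value) {
            // Clamp values that do not fit in ptrdiff_t, as setGCMaxMallocBytes does.
            maxBytes = (std::ptrdiff_t(value) >= 0) ? value : std::size_t(-1) >> 1;
            reset();
        }

        void reset() {
            bytesLeft = std::ptrdiff_t(maxBytes);
            triggered = false;
        }

        void update(std::size_t nbytes) {
            // Same "count down and check" shape as updateMallocCounter.
            bytesLeft -= std::ptrdiff_t(nbytes);
            if (bytesLeft <= 0 && !triggered) {
                triggered = true;  // request the GC only once per reset
                std::puts("too much malloc: would trigger a TOO_MUCH_MALLOC GC");
            }
        }
    };

    int main()
    {
        MallocCounter counter(1 << 20);  // 1 MiB budget
        counter.update(1 << 19);
        counter.update(1 << 19);         // crosses the threshold here
        return 0;
    }
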
 
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -129,18 +129,16 @@ struct ScopeCoordinateNameCache {
 
     Shape *shape;
     Map map;
 
     ScopeCoordinateNameCache() : shape(nullptr) {}
     void purge();
 };
 
-typedef Vector<ScriptAndCounts, 0, SystemAllocPolicy> ScriptAndCountsVector;
-
 struct EvalCacheEntry
 {
     JSScript *script;
     JSScript *callerScript;
     jsbytecode *pc;
 };
 
 struct EvalCacheLookup
@@ -950,17 +948,41 @@ struct JSRuntime : public JS::shadow::Ru
         gc.marker.setGCMode(mode);
     }
 
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
 
-    int gcZeal() { return gc.zeal(); }
+#ifdef JS_GC_ZEAL
+    int gcZeal() { return gc.zealMode; }
+
+    bool upcomingZealousGC() {
+        return gc.nextScheduled == 1;
+    }
+
+    bool needZealousGC() {
+        if (gc.nextScheduled > 0 && --gc.nextScheduled == 0) {
+            if (gcZeal() == js::gc::ZealAllocValue ||
+                gcZeal() == js::gc::ZealGenerationalGCValue ||
+                (gcZeal() >= js::gc::ZealIncrementalRootsThenFinish &&
+                 gcZeal() <= js::gc::ZealIncrementalMultipleSlices))
+            {
+                gc.nextScheduled = gc.zealFrequency;
+            }
+            return true;
+        }
+        return false;
+    }
+#else
+    int gcZeal() { return 0; }
+    bool upcomingZealousGC() { return false; }
+    bool needZealousGC() { return false; }
+#endif
 
     void lockGC() {
         assertCanLock(js::GCLock);
         gc.lockGC();
     }
 
     void unlockGC() {
         gc.unlockGC();
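
needZealousGC() above is a per-allocation countdown: every call decrements gc.nextScheduled, hitting zero means a zealous (debug) GC is due now, and the allocation-triggered, generational and incremental zeal modes re-arm the counter with gc.zealFrequency so the GCs keep coming. A standalone model of just that countdown (the struct is a stand-in, not the real GCRuntime fields):

    // Standalone model of the zeal countdown in needZealousGC(); illustrative only.
    struct ZealSchedule
    {
        int nextScheduled = 0;   // 0 means no zealous GC is scheduled
        int frequency = 100;     // re-arm interval for repeating zeal modes
        bool repeating = false;  // true for alloc/generational/incremental modes

        bool needZealousGC() {
            if (nextScheduled > 0 && --nextScheduled == 0) {
                if (repeating)
                    nextScheduled = frequency;  // schedule the next one immediately
                return true;                    // caller should run a debug GC now
            }
            return false;
        }
    };
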
@@ -975,19 +997,16 @@ struct JSRuntime : public JS::shadow::Ru
         needsBarrier_ = needs;
     }
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     js::jit::SimulatorRuntime *simulatorRuntime() const;
     void setSimulatorRuntime(js::jit::SimulatorRuntime *srt);
 #endif
 
-    /* Strong references on scripts held for PCCount profiling API. */
-    js::ScriptAndCountsVector *scriptAndCountsVector;
-
     /* Well-known numbers held for use by this runtime's contexts. */
     const js::Value     NaNValue;
     const js::Value     negativeInfinityValue;
     const js::Value     positiveInfinityValue;
 
     js::PropertyName    *emptyString;
 
     /* List of active contexts sharing this runtime. */
@@ -1252,29 +1271,40 @@ struct JSRuntime : public JS::shadow::Ru
 
     JSRuntime(JSRuntime *parentRuntime);
     ~JSRuntime();
 
     bool init(uint32_t maxbytes);
 
     JSRuntime *thisFromCtor() { return this; }
 
+    void setGCMaxMallocBytes(size_t value);
+
+    void resetGCMallocBytes() {
+        gc.mallocBytes = ptrdiff_t(gc.maxMallocBytes);
+        gc.mallocGCTriggered = false;
+    }
+
     /*
      * Call this after allocating memory held by GC things, to update memory
      * pressure counters or report an OOM error if necessary. If oomError and
      * cx are not null, the function also reports the OOM error.
      *
      * The function must be called outside the GC lock. In case of an OOM error,
      * the caller must ensure that no deadlock is possible during OOM reporting.
      */
     void updateMallocCounter(size_t nbytes);
     void updateMallocCounter(JS::Zone *zone, size_t nbytes);
 
     void reportAllocationOverflow() { js_ReportAllocationOverflow(nullptr); }
 
+    bool isTooMuchMalloc() const {
+        return gc.mallocBytes <= 0;
+    }
+
     /*
      * The function must be called outside the GC lock.
      */
     JS_FRIEND_API(void) onTooMuchMalloc();
 
     /*
      * This should be called after system malloc/realloc returns nullptr to try
      * to recover some memory or to report an error. Failures in malloc and