Bug 1159540 - Organize and comment the marking paths; r=sfink
author: Terrence Cole <terrence@mozilla.com>
Wed, 29 Apr 2015 10:23:24 -0700
changeset 273304 2ac0d383d39a83bbd27cce543753aea8c357beca
parent 273303 2736f805602675f719a38304ec689472e0e497c2
child 273305 796c77a8118b0ab5b4a5361780fea1ff3c09366a
push id: 863
push user: raliiev@mozilla.com
push date: Mon, 03 Aug 2015 13:22:43 +0000
treeherder: mozilla-release@f6321b14228d
reviewers: sfink
bugs: 1159540
milestone: 40.0a1
Bug 1159540 - Organize and comment the marking paths; r=sfink
js/public/TracingAPI.h
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/RootMarking.cpp
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Tracer.cpp
js/src/gc/Tracer.h
js/src/jit/JitFrames.cpp
js/src/jit/arm/Assembler-arm.cpp
js/src/jit/mips/Assembler-mips.cpp
js/src/jit/x86-shared/Assembler-x86-shared.cpp
js/src/jsobj.cpp
js/src/vm/Shape.h
--- a/js/public/TracingAPI.h
+++ b/js/public/TracingAPI.h
@@ -97,27 +97,29 @@ class JS_PUBLIC_API(JSTracer)
     // Return the weak map tracing behavior set on this tracer.
     WeakMapTraceKind eagerlyTraceWeakMaps() const { return eagerlyTraceWeakMaps_; }
 
     // An intermediate state on the road from C to C++ style dispatch.
     enum TracerKindTag {
         MarkingTracer,
         CallbackTracer
     };
-    bool isMarkingTracer() const { return tag == MarkingTracer; }
-    bool isCallbackTracer() const { return tag == CallbackTracer; }
+    bool isMarkingTracer() const { return tag_ == MarkingTracer; }
+    bool isCallbackTracer() const { return tag_ == CallbackTracer; }
     inline JS::CallbackTracer* asCallbackTracer();
 
   protected:
     JSTracer(JSRuntime* rt, TracerKindTag tag,
-             WeakMapTraceKind weakTraceKind = TraceWeakMapValues);
+             WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
+      : runtime_(rt), tag_(tag), eagerlyTraceWeakMaps_(weakTraceKind)
+    {}
 
   private:
     JSRuntime*          runtime_;
-    TracerKindTag       tag;
+    TracerKindTag       tag_;
     WeakMapTraceKind    eagerlyTraceWeakMaps_;
 };
 
 namespace JS {
 
 class AutoTracingName;
 class AutoTracingIndex;
 class AutoTracingCallback;
@@ -129,17 +131,19 @@ class JS_PUBLIC_API(CallbackTracer) : pu
     CallbackTracer(JSRuntime* rt, JSTraceCallback traceCallback,
                    WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
       : JSTracer(rt, JSTracer::CallbackTracer, weakTraceKind), callback(traceCallback),
         contextName_(nullptr), contextIndex_(InvalidIndex), contextFunctor_(nullptr),
         contextRealLocation_(nullptr)
     {}
 
     // Update the trace callback.
-    void setTraceCallback(JSTraceCallback traceCallback);
+    void setTraceCallback(JSTraceCallback traceCallback) {
+        callback = traceCallback;
+    }
 
     // Test if the given callback is the same as our callback.
     bool hasCallback(JSTraceCallback maybeCallback) const {
         return maybeCallback == callback;
     }
 
     // Call the callback.
     void invoke(void** thing, JSGCTraceKind kind) {
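The TracerKindTag above replaces C-style callback dispatch with a cheap tag test: hot paths branch on the stored tag, and isCallbackTracer() licenses a static_cast to the concrete type. For illustration only, a minimal sketch of this tag-then-cast idiom (the names here are hypothetical stand-ins, not the SpiderMonkey API):

    // Sketch: tag-based dispatch without virtual calls (hypothetical names).
    struct BaseTracer {
        enum Kind { Marking, Callback };
        explicit BaseTracer(Kind kind) : kind_(kind) {}
        bool isMarking() const { return kind_ == Marking; }
      private:
        Kind kind_;
    };

    struct MarkTracerSketch : BaseTracer {
        MarkTracerSketch() : BaseTracer(Marking) {}
        void mark(void* thing) { (void)thing; /* push onto a mark stack */ }
    };

    inline void dispatch(BaseTracer* trc, void* thing) {
        if (trc->isMarking())
            static_cast<MarkTracerSketch*>(trc)->mark(thing);  // tag makes the cast safe
        // else: invoke the callback flavor
    }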
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -6,19 +6,19 @@
 
 #ifndef gc_Barrier_h
 #define gc_Barrier_h
 
 #include "NamespaceImports.h"
 
 #include "gc/Heap.h"
 #include "gc/StoreBuffer.h"
-#include "js/HashTable.h"
 #include "js/Id.h"
 #include "js/RootingAPI.h"
+#include "js/Value.h"
 
 /*
  * A write barrier is a mechanism used by incremental or generation GCs to
  * ensure that every value that needs to be marked is marked. In general, the
  * write barrier should be invoked whenever a write can cause the set of things
  * traced through by the GC to change. This includes:
  *   - writes to object properties
  *   - writes to array slots
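The list of barrier-triggering writes continues in the header. As a hedged illustration of the discipline this comment describes, here is a minimal sketch of a barriered field store; the helper names are hypothetical stand-ins, not the actual Barrier.h machinery:

    // Sketch: pre/post write barriers around a field store (hypothetical API).
    struct CellSketch {};

    static void PreWriteBarrier(CellSketch* prev) {
        (void)prev;  // incremental GC: mark the overwritten value so its edge is not lost
    }
    static void PostWriteBarrier(CellSketch** loc, CellSketch* next) {
        (void)loc; (void)next;  // generational GC: remember possible tenured-to-nursery edges
    }

    static inline void BarrieredSet(CellSketch** loc, CellSketch* next) {
        PreWriteBarrier(*loc);        // fire before the write
        *loc = next;
        PostWriteBarrier(loc, next);  // fire after the write
    }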
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -48,27 +48,27 @@ RuntimeFromMainThreadIsHeapMajorCollecti
 // a helper thread.
 extern bool
 CurrentThreadIsIonCompiling();
 #endif
 
 extern bool
 UnmarkGrayCellRecursively(gc::Cell* cell, JSGCTraceKind kind);
 
+extern void
+TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const char* name);
+
 namespace gc {
 
 struct Arena;
 class ArenaList;
 class SortedArenaList;
 struct ArenaHeader;
 struct Chunk;
 
-extern void
-TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name);
-
 /*
  * This flag allows an allocation site to request a specific heap based upon the
  * estimated lifetime or lifetime requirements of objects allocated from that
  * site.
  */
 enum InitialHeap {
     DefaultHeap,
     TenuredHeap
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -100,16 +100,19 @@ using mozilla::MakeRange;
 //     ------  Direct calls                                                                     //
 //     . . .   Static dispatch                                                                  //
 //     ======  Dispatch through a manual stack.                                                 //
 //                                                                                              //
 void * const js::NullPtr::constNullValue = nullptr;
 
 JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
 
+
+/*** Tracing Invariants **************************************************************************/
+
 #if defined(DEBUG)
 template<typename T>
 static inline bool
 IsThingPoisoned(T* thing)
 {
     const uint8_t poisonBytes[] = {
         JS_FRESH_NURSERY_PATTERN,
         JS_SWEPT_NURSERY_PATTERN,
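IsThingPoisoned continues by checking the cell's bytes against these fill patterns. A self-contained sketch of that style of debug check follows; the byte values are placeholders, not the real JS_*_PATTERN constants:

    #include <cstdint>

    static bool LooksPoisonedSketch(const void* thing) {
        static const uint8_t poisonBytes[] = { 0x2B, 0x4B };  // placeholder fill patterns
        const uint8_t first = *static_cast<const uint8_t*>(thing);
        for (uint8_t b : poisonBytes) {
            if (first == b)
                return true;  // memory still carries a free/poison fill
        }
        return false;
    }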
@@ -294,21 +297,37 @@ ShouldMarkCrossCompartment(JSTracer* trc
 }
 
 static bool
 ShouldMarkCrossCompartment(JSTracer* trc, JSObject* src, Value val)
 {
     return val.isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell*)val.toGCThing());
 }
 
+template <typename T>
+static bool
+ZoneIsAtomsZoneForString(JSRuntime* rt, T* thing)
+{
+    JSGCTraceKind kind = GetGCThingTraceKind(thing);
+    if (kind == JSTRACE_STRING || kind == JSTRACE_SYMBOL)
+        return rt->isAtomsZone(thing->zone());
+    return false;
+}
+
+#define JS_COMPARTMENT_ASSERT(rt, thing) \
+    MOZ_ASSERT((thing)->zone()->isGCMarking() || ZoneIsAtomsZoneForString((rt), (thing)))
+
 #define JS_ROOT_MARKING_ASSERT(trc) \
     MOZ_ASSERT_IF(trc->isMarkingTracer(), \
                   trc->runtime()->gc.state() == NO_INCREMENTAL || \
                   trc->runtime()->gc.state() == MARK_ROOTS);
 
+
+/*** Tracing Interface ***************************************************************************/
+
 // A C++ version of JSGCTraceKind
 enum class TraceKind {
 #define NAMES(name, _, __) name,
 FOR_EACH_GC_LAYOUT(NAMES)
 #undef NAMES
 };
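The TraceKind enum is generated by expanding a NAMES macro over the FOR_EACH_GC_LAYOUT X-macro, so the enum cannot drift out of sync with the layout list. A self-contained sketch of the pattern, with a one-argument stand-in list in place of FOR_EACH_GC_LAYOUT:

    // Sketch: X-macro enum generation (stand-in list, not the real GC layouts).
    #define FOR_EACH_KIND_SKETCH(D) \
        D(Object)                   \
        D(String)                   \
        D(Symbol)

    enum class KindSketch {
    #define MAKE_NAME(name) name,
    FOR_EACH_KIND_SKETCH(MAKE_NAME)
    #undef MAKE_NAME
    };

    static const char* KindName(KindSketch k) {
        switch (k) {
    #define MAKE_CASE(name) case KindSketch::name: return #name;
    FOR_EACH_KIND_SKETCH(MAKE_CASE)
    #undef MAKE_CASE
        }
        return "unknown";
    }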
 
 #define FOR_EACH_GC_POINTER_TYPE(D) \
@@ -490,16 +509,77 @@ js::TraceProcessGlobalRoot(JSTracer* trc
     if (trc->isMarkingTracer())
         thing->markIfUnmarked(gc::BLACK);
     else
         DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
 }
 template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*, const char*);
 template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*, const char*);
 
+void
+js::TraceObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots)
+{
+    JS::AutoTracingIndex index(trc, start);
+    for (uint32_t i = start; i < (start + nslots); ++i) {
+        HeapSlot& slot = obj->getSlotRef(i);
+        if (InternalGCMethods<Value>::isMarkable(slot))
+            DispatchToTracer(trc, slot.unsafeGet(), "object slot");
+        ++index;
+    }
+}
+
+void
+gc::MarkValueForBarrier(JSTracer* trc, Value* valuep, const char* name)
+{
+    MOZ_ASSERT(!trc->runtime()->isHeapCollecting());
+    TraceManuallyBarrieredEdge(trc, valuep, name);
+}
+
+void
+gc::MarkIdForBarrier(JSTracer* trc, jsid* idp, const char* name)
+{
+    TraceManuallyBarrieredEdge(trc, idp, name);
+}
+
+// A typed functor adaptor for TraceRoot.
+struct TraceRootFunctor {
+    template <typename T>
+    void operator()(JSTracer* trc, Cell** thingp, const char* name) {
+        TraceRoot(trc, reinterpret_cast<T**>(thingp), name);
+    }
+};
+
+void
+js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name)
+{
+    MOZ_ASSERT(thingp);
+    if (!*thingp)
+        return;
+    TraceRootFunctor f;
+    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
+}
+
+// A typed functor adaptor for TraceManuallyBarrieredEdge.
+struct TraceManuallyBarrieredEdgeFunctor {
+    template <typename T>
+    void operator()(JSTracer* trc, Cell** thingp, const char* name) {
+        TraceManuallyBarrieredEdge(trc, reinterpret_cast<T**>(thingp), name);
+    }
+};
+
+void
+js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name)
+{
+    MOZ_ASSERT(thingp);
+    if (!*thingp)
+        return;
+    TraceManuallyBarrieredEdgeFunctor f;
+    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
+}
+
 // This method is responsible for dynamic dispatch to the real tracer
 // implementation. Consider replacing this choke point with virtual dispatch:
 // a sufficiently smart C++ compiler may be able to devirtualize some paths.
 template <typename T>
 void
 DispatchToTracer(JSTracer* trc, T* thingp, const char* name)
 {
 #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
@@ -509,16 +589,19 @@ DispatchToTracer(JSTracer* trc, T* thing
             mozilla::IsSame<T, jsid>::value,
             "Only the base cell layout types are allowed into marking/tracing internals");
 #undef IS_SAME_TYPE_OR
     if (trc->isMarkingTracer())
         return DoMarking(static_cast<GCMarker*>(trc), *thingp);
     DoCallback(trc->asCallbackTracer(), thingp, name);
 }
 
+
+/*** GC Marking Interface *************************************************************************/
+
 template <typename T>
 static inline bool
 MustSkipMarking(T thing)
 {
     // Don't mark things outside a zone if we are in a per-zone GC.
     return !thing->zone()->isGCMarking();
 }
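The generic-pointer paths above recover a static type from a runtime trace kind: CallTyped switches on the kind and applies a templated functor, after which DispatchToTracer fans out to the marking or callback tracer. A sketch of that kind-to-type dispatch, using hypothetical stand-in types:

    #include <utility>

    struct ObjectSketch {};
    struct StringSketch {};
    enum class KindTag { Object, String };

    // Apply f.operator()<T>() with T chosen by the runtime kind tag.
    template <typename F, typename... Args>
    void CallTypedSketch(F f, KindTag kind, Args&&... args) {
        switch (kind) {
          case KindTag::Object:
            return f.template operator()<ObjectSketch>(std::forward<Args>(args)...);
          case KindTag::String:
            return f.template operator()<StringSketch>(std::forward<Args>(args)...);
        }
    }

    struct TraceFunctorSketch {
        template <typename T>
        void operator()(void* thing) {
            T* typed = static_cast<T*>(thing);  // statically typed from here on
            (void)typed;
        }
    };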
 
@@ -697,16 +780,23 @@ js::GCMarker::mark(T* thing)
     CheckTracedThing(this, thing);
     JS_COMPARTMENT_ASSERT(runtime(), thing);
     MOZ_ASSERT(!IsInsideNursery(gc::TenuredCell::fromPointer(thing)));
     return gc::ParticipatesInCC<T>::value
            ? gc::TenuredCell::fromPointer(thing)->markIfUnmarked(markColor())
            : gc::TenuredCell::fromPointer(thing)->markIfUnmarked(gc::BLACK);
 }
 
+
+/*** Inline, Eager GC Marking *********************************************************************/
+
+// Each of the eager, inline marking paths is directly preceded by the
+// out-of-line, generic tracing code for comparison. Both paths must end up
+// traversing equivalent subgraphs.
+
 void
 LazyScript::traceChildren(JSTracer* trc)
 {
     if (function_)
         TraceEdge(trc, &function_, "function");
 
     if (sourceObject_)
         TraceEdge(trc, &sourceObject_, "sourceObject");
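GCMarker::mark above bottoms out in markIfUnmarked, a test-and-set on the mark bitmap: it returns true only the first time a cell is marked, which is what keeps the traversal from revisiting nodes. A sketch of that primitive over a plain bitmap (the layout here is hypothetical):

    #include <cstddef>
    #include <cstdint>

    struct MarkBitmapSketch {
        uint64_t words[64] = {};

        // Returns true exactly once per cell; callers scan children only
        // when the bit was newly set.
        bool markIfUnmarked(size_t cellIndex) {
            uint64_t& word = words[cellIndex / 64];
            const uint64_t bit = uint64_t(1) << (cellIndex % 64);
            if (word & bit)
                return false;
            word |= bit;
            return true;
        }
    };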
@@ -748,17 +838,17 @@ js::GCMarker::eagerlyMarkChildren(LazySc
     for (auto i : MakeRange(thing->numFreeVariables()))
         traverse(thing, static_cast<JSString*>(freeVariables[i].atom()));
 
     HeapPtrFunction* innerFunctions = thing->innerFunctions();
     for (auto i : MakeRange(thing->numInnerFunctions()))
         traverse(thing, static_cast<JSObject*>(innerFunctions[i]));
 }
 
-inline void
+void
 Shape::traceChildren(JSTracer* trc)
 {
     TraceEdge(trc, &base_, "base");
     TraceEdge(trc, &propidRef(), "propid");
     if (parent)
         TraceEdge(trc, &parent, "parent");
 
     if (hasGetterObject())
@@ -956,591 +1046,59 @@ js::GCMarker::lazilyMarkChildren(ObjectG
 
     if (TypeDescr* descr = group->maybeTypeDescr())
         traverse(group, static_cast<JSObject*>(descr));
 
     if (JSFunction* fun = group->maybeInterpretedFunction())
         traverse(group, static_cast<JSObject*>(fun));
 }
 
-
-template <typename T>
-static inline void
-CheckIsMarkedThing(T* thingp)
-{
-#define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
-    static_assert(
-            FOR_EACH_GC_LAYOUT(IS_SAME_TYPE_OR)
-            false, "Only the base cell layout types are allowed into marking/tracing internals");
-#undef IS_SAME_TYPE_OR
-
-#ifdef DEBUG
-    MOZ_ASSERT(thingp);
-    MOZ_ASSERT(*thingp);
-    JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
-    MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
-                  CurrentThreadCanAccessRuntime(rt) ||
-                  (rt->isHeapCollecting() && rt->gc.state() == SWEEP));
-#endif
-}
-
-template <typename T>
-static bool
-IsMarkedInternal(T* thingp)
-{
-    CheckIsMarkedThing(thingp);
-    JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
-
-    if (IsInsideNursery(*thingp)) {
-        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-        return rt->gc.nursery.getForwardedPointer(thingp);
-    }
+
+/*** Mark-stack Marking ***************************************************************************/
 
-    Zone* zone = (*thingp)->asTenured().zoneFromAnyThread();
-    if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
-        return true;
-    if (zone->isGCCompacting() && IsForwarded(*thingp))
-        *thingp = Forwarded(*thingp);
-    return (*thingp)->asTenured().isMarked();
-}
-
-template <typename S>
-struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
-    template <typename T> S operator()(T* t, bool* rv) {
-        *rv = IsMarkedInternal(&t);
-        return js::gc::RewrapValueOrId<S, T*>::wrap(t);
-    }
-};
-
-template <>
 bool
-IsMarkedInternal<Value>(Value* valuep)
+GCMarker::drainMarkStack(SliceBudget& budget)
 {
-    bool rv = true;
-    *valuep = DispatchValueTyped(IsMarkedFunctor<Value>(), *valuep, &rv);
-    return rv;
-}
+#ifdef DEBUG
+    struct AutoCheckCompartment {
+        bool& flag;
+        explicit AutoCheckCompartment(bool& compartmentCheckFlag) : flag(compartmentCheckFlag) {
+            MOZ_ASSERT(!flag);
+            flag = true;
+        }
+        ~AutoCheckCompartment() { flag = false; }
+    } acc(strictCompartmentChecking);
+#endif
 
-template <>
-bool
-IsMarkedInternal<jsid>(jsid* idp)
-{
-    bool rv = true;
-    *idp = DispatchIdTyped(IsMarkedFunctor<jsid>(), *idp, &rv);
-    return rv;
-}
-
-template <typename T>
-static bool
-IsAboutToBeFinalizedInternal(T* thingp)
-{
-    CheckIsMarkedThing(thingp);
-    T thing = *thingp;
-    JSRuntime* rt = thing->runtimeFromAnyThread();
-
-    /* Permanent atoms are never finalized by non-owning runtimes. */
-    if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && !TlsPerThreadData.get()->associatedWith(rt))
+    if (budget.isOverBudget())
         return false;
 
-    Nursery& nursery = rt->gc.nursery;
-    MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
-    if (rt->isHeapMinorCollecting()) {
-        if (IsInsideNursery(thing))
-            return !nursery.getForwardedPointer(thingp);
-        return false;
-    }
-
-    Zone* zone = thing->asTenured().zoneFromAnyThread();
-    if (zone->isGCSweeping()) {
-        if (thing->asTenured().arenaHeader()->allocatedDuringIncremental)
-            return false;
-        return !thing->asTenured().isMarked();
-    }
-    else if (zone->isGCCompacting() && IsForwarded(thing)) {
-        *thingp = Forwarded(thing);
-        return false;
-    }
-
-    return false;
-}
-
-template <typename S>
-struct IsAboutToBeFinalizedFunctor : public IdentityDefaultAdaptor<S> {
-    template <typename T> S operator()(T* t, bool* rv) {
-        *rv = IsAboutToBeFinalizedInternal(&t);
-        return js::gc::RewrapValueOrId<S, T*>::wrap(t);
-    }
-};
-
-template <>
-bool
-IsAboutToBeFinalizedInternal<Value>(Value* valuep)
-{
-    bool rv = false;
-    *valuep = DispatchValueTyped(IsAboutToBeFinalizedFunctor<Value>(), *valuep, &rv);
-    return rv;
-}
-
-template <>
-bool
-IsAboutToBeFinalizedInternal<jsid>(jsid* idp)
-{
-    bool rv = false;
-    *idp = DispatchIdTyped(IsAboutToBeFinalizedFunctor<jsid>(), *idp, &rv);
-    return rv;
-}
-
-namespace js {
-namespace gc {
-
-template <typename T>
-bool
-IsMarkedUnbarriered(T* thingp)
-{
-    return IsMarkedInternal(ConvertToBase(thingp));
-}
-
-template <typename T>
-bool
-IsMarked(BarrieredBase<T>* thingp)
-{
-    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
-}
-
-template <typename T>
-bool
-IsMarked(ReadBarriered<T>* thingp)
-{
-    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
-}
-
-template <typename T>
-bool
-IsAboutToBeFinalizedUnbarriered(T* thingp)
-{
-    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
-}
-
-template <typename T>
-bool
-IsAboutToBeFinalized(BarrieredBase<T>* thingp)
-{
-    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
-}
-
-template <typename T>
-bool
-IsAboutToBeFinalized(ReadBarriered<T>* thingp)
-{
-    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
-}
-
-// Instantiate a copy of the Tracing templates for each derived type.
-#define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
-    template bool IsMarkedUnbarriered<type>(type*); \
-    template bool IsMarked<type>(BarrieredBase<type>*); \
-    template bool IsMarked<type>(ReadBarriered<type>*); \
-    template bool IsAboutToBeFinalizedUnbarriered<type>(type*); \
-    template bool IsAboutToBeFinalized<type>(BarrieredBase<type>*); \
-    template bool IsAboutToBeFinalized<type>(ReadBarriered<type>*);
-FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
-#undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
-
-template <typename T>
-T*
-UpdateIfRelocated(JSRuntime* rt, T** thingp)
-{
-    MOZ_ASSERT(thingp);
-    if (!*thingp)
-        return nullptr;
-
-    if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
-        rt->gc.nursery.getForwardedPointer(thingp);
-        return *thingp;
-    }
-
-    Zone* zone = (*thingp)->zone();
-    if (zone->isGCCompacting() && IsForwarded(*thingp))
-        *thingp = Forwarded(*thingp);
-
-    return *thingp;
-}
-
-} /* namespace gc */
-} /* namespace js */
-
-/*** Externally Typed Marking ***/
-
-// A typed functor adaptor for TraceRoot.
-struct TraceRootFunctor {
-    template <typename T>
-    void operator()(JSTracer* trc, Cell** thingp, const char* name) {
-        TraceRoot(trc, reinterpret_cast<T**>(thingp), name);
-    }
-};
-
-// A typed functor adaptor for TraceManuallyBarrieredEdge.
-struct TraceManuallyBarrieredEdgeFunctor {
-    template <typename T>
-    void operator()(JSTracer* trc, Cell** thingp, const char* name) {
-        TraceManuallyBarrieredEdge(trc, reinterpret_cast<T**>(thingp), name);
-    }
-};
-
-void
-js::gc::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name)
-{
-    MOZ_ASSERT(thingp);
-    if (!*thingp)
-        return;
-    TraceRootFunctor f;
-    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
-}
-
-void
-js::gc::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name)
-{
-    MOZ_ASSERT(thingp);
-    if (!*thingp)
-        return;
-    TraceManuallyBarrieredEdgeFunctor f;
-    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
-}
-
-/*** Type Marking ***/
-
-void
-TypeSet::MarkTypeRoot(JSTracer* trc, TypeSet::Type* v, const char* name)
-{
-    JS_ROOT_MARKING_ASSERT(trc);
-    MarkTypeUnbarriered(trc, v, name);
-}
-
-void
-TypeSet::MarkTypeUnbarriered(JSTracer* trc, TypeSet::Type* v, const char* name)
-{
-    if (v->isSingletonUnchecked()) {
-        JSObject* obj = v->singleton();
-        DispatchToTracer(trc, &obj, name);
-        *v = TypeSet::ObjectType(obj);
-    } else if (v->isGroupUnchecked()) {
-        ObjectGroup* group = v->group();
-        DispatchToTracer(trc, &group, name);
-        *v = TypeSet::ObjectType(group);
-    }
-}
-
-/*** Slot Marking ***/
-
-void
-gc::MarkObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots)
-{
-    JS::AutoTracingIndex index(trc, start);
-    for (uint32_t i = start; i < (start + nslots); ++i) {
-        HeapSlot& slot = obj->getSlotRef(i);
-        if (InternalGCMethods<Value>::isMarkable(slot))
-            DispatchToTracer(trc, slot.unsafeGet(), "object slot");
-        ++index;
-    }
-}
-
-/*** Special Marking ***/
-
-void
-gc::MarkValueForBarrier(JSTracer* trc, Value* valuep, const char* name)
-{
-    MOZ_ASSERT(!trc->runtime()->isHeapCollecting());
-    TraceManuallyBarrieredEdge(trc, valuep, name);
-}
-
-void
-gc::MarkIdForBarrier(JSTracer* trc, jsid* idp, const char* name)
-{
-    TraceManuallyBarrieredEdge(trc, idp, name);
-}
-
-/*** Push Mark Stack ***/
-
-/*
- * This function is used by the cycle collector to trace through a
- * shape. The cycle collector does not care about shapes or base
- * shapes, so those are not marked. Instead, any shapes or base shapes
- * that are encountered have their children marked. Stack space is
- * bounded.
- */
-void
-gc::MarkCycleCollectorChildren(JSTracer* trc, Shape* shape)
-{
-    /*
-     * We need to mark the global, but it's OK to only do this once instead of
-     * doing it for every Shape in our lineage, since it's always the same
-     * global.
-     */
-    JSObject* global = shape->compartment()->unsafeUnbarrieredMaybeGlobal();
-    MOZ_ASSERT(global);
-    TraceManuallyBarrieredEdge(trc, &global, "global");
-
-    do {
-        MOZ_ASSERT(global == shape->compartment()->unsafeUnbarrieredMaybeGlobal());
-
-        MOZ_ASSERT(shape->base());
-        shape->base()->assertConsistency();
-
-        TraceEdge(trc, &shape->propidRef(), "propid");
-
-        if (shape->hasGetterObject()) {
-            JSObject* tmp = shape->getterObject();
-            TraceManuallyBarrieredEdge(trc, &tmp, "getter");
-            MOZ_ASSERT(tmp == shape->getterObject());
+    for (;;) {
+        while (!stack.isEmpty()) {
+            processMarkStackTop(budget);
+            if (budget.isOverBudget()) {
+                saveValueRanges();
+                return false;
+            }
         }
 
-        if (shape->hasSetterObject()) {
-            JSObject* tmp = shape->setterObject();
-            TraceManuallyBarrieredEdge(trc, &tmp, "setter");
-            MOZ_ASSERT(tmp == shape->setterObject());
-        }
-
-        shape = shape->previous();
-    } while (shape);
-}
-
-void
-TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trc,
-                                               void** thingp, JSGCTraceKind kind);
-
-// Object groups can point to other object groups via an UnboxedLayout or the
-// the original unboxed group link. There can potentially be deep or cyclic
-// chains of such groups to trace through without going through a thing that
-// participates in cycle collection. These need to be handled iteratively to
-// avoid blowing the stack when running the cycle collector's callback tracer.
-struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
-{
-    explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
-        : JS::CallbackTracer(innerTracer->runtime(),
-                             TraceObjectGroupCycleCollectorChildrenCallback,
-                             DoNotTraceWeakMaps),
-          innerTracer(innerTracer)
-    {}
+        if (!hasDelayedChildren())
+            break;
 
-    JS::CallbackTracer* innerTracer;
-    Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
-};
-
-void
-TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trcArg,
-                                               void** thingp, JSGCTraceKind kind)
-{
-    ObjectGroupCycleCollectorTracer* trc = static_cast<ObjectGroupCycleCollectorTracer*>(trcArg);
-    JS::GCCellPtr thing(*thingp, kind);
-
-    if (thing.isObject() || thing.isScript()) {
-        // Invoke the inner cycle collector callback on this child. It will not
-        // recurse back into TraceChildren.
-        trc->innerTracer->invoke(thingp, kind);
-        return;
-    }
-
-    if (thing.isObjectGroup()) {
-        // If this group is required to be in an ObjectGroup chain, trace it
-        // via the provided worklist rather than continuing to recurse.
-        ObjectGroup* group = static_cast<ObjectGroup*>(thing.asCell());
-        if (group->maybeUnboxedLayout()) {
-            for (size_t i = 0; i < trc->seen.length(); i++) {
-                if (trc->seen[i] == group)
-                    return;
-            }
-            if (trc->seen.append(group) && trc->worklist.append(group)) {
-                return;
-            } else {
-                // If append fails, keep tracing normally. The worst that will
-                // happen is we end up overrecursing.
-            }
+        /*
+         * Mark children of things that caused too deep recursion during the
+         * above tracing. Don't do this until we're done with everything
+         * else.
+         */
+        if (!markDelayedChildren(budget)) {
+            saveValueRanges();
+            return false;
         }
     }
 
-    TraceChildren(trc, thing.asCell(), thing.kind());
-}
-
-void
-gc::MarkCycleCollectorChildren(JSTracer* trc, ObjectGroup* group)
-{
-    MOZ_ASSERT(trc->isCallbackTracer());
-
-    // Early return if this group is not required to be in an ObjectGroup chain.
-    if (!group->maybeUnboxedLayout()) {
-        TraceChildren(trc, group, JSTRACE_OBJECT_GROUP);
-        return;
-    }
-
-    ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
-    TraceChildren(&groupTracer, group, JSTRACE_OBJECT_GROUP);
-
-    while (!groupTracer.worklist.empty()) {
-        ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
-        TraceChildren(&groupTracer, innerGroup, JSTRACE_OBJECT_GROUP);
-    }
-}
-
-template<typename T>
-static void
-PushArenaTyped(GCMarker* gcmarker, ArenaHeader* aheader)
-{
-    for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next())
-        gcmarker->traverse(i.get<T>());
-}
-
-void
-gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader)
-{
-    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
-      case JSTRACE_OBJECT:
-        PushArenaTyped<JSObject>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_SCRIPT:
-        PushArenaTyped<JSScript>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_STRING:
-        PushArenaTyped<JSString>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_SYMBOL:
-        PushArenaTyped<JS::Symbol>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_BASE_SHAPE:
-        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_JITCODE:
-        PushArenaTyped<js::jit::JitCode>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_LAZY_SCRIPT:
-        PushArenaTyped<LazyScript>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_SHAPE:
-        PushArenaTyped<js::Shape>(gcmarker, aheader);
-        break;
-
-      case JSTRACE_OBJECT_GROUP:
-        PushArenaTyped<js::ObjectGroup>(gcmarker, aheader);
-        break;
-
-      default:
-        MOZ_CRASH("Invalid trace kind in PushArena.");
-    }
-}
-
-struct SlotArrayLayout
-{
-    union {
-        HeapSlot* end;
-        uintptr_t kind;
-    };
-    union {
-        HeapSlot* start;
-        uintptr_t index;
-    };
-    NativeObject* obj;
-
-    static void staticAsserts() {
-        /* This should have the same layout as three mark stack items. */
-        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
-    }
-};
-
-/*
- * During incremental GC, we return from drainMarkStack without having processed
- * the entire stack. At that point, JS code can run and reallocate slot arrays
- * that are stored on the stack. To prevent this from happening, we replace all
- * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
- * pointers are replaced with slot indexes, and slot array end pointers are
- * replaced with the kind of index (properties vs. elements).
- */
-void
-GCMarker::saveValueRanges()
-{
-    for (uintptr_t* p = stack.tos_; p > stack.stack_; ) {
-        uintptr_t tag = *--p & StackTagMask;
-        if (tag == ValueArrayTag) {
-            *p &= ~StackTagMask;
-            p -= 2;
-            SlotArrayLayout* arr = reinterpret_cast<SlotArrayLayout*>(p);
-            NativeObject* obj = arr->obj;
-            MOZ_ASSERT(obj->isNative());
-
-            HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
-            if (arr->end == vp + obj->getDenseInitializedLength()) {
-                MOZ_ASSERT(arr->start >= vp);
-                arr->index = arr->start - vp;
-                arr->kind = HeapSlot::Element;
-            } else {
-                HeapSlot* vp = obj->fixedSlots();
-                unsigned nfixed = obj->numFixedSlots();
-                if (arr->start == arr->end) {
-                    arr->index = obj->slotSpan();
-                } else if (arr->start >= vp && arr->start < vp + nfixed) {
-                    MOZ_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
-                    arr->index = arr->start - vp;
-                } else {
-                    MOZ_ASSERT(arr->start >= obj->slots_ &&
-                               arr->end == obj->slots_ + obj->slotSpan() - nfixed);
-                    arr->index = (arr->start - obj->slots_) + nfixed;
-                }
-                arr->kind = HeapSlot::Slot;
-            }
-            p[2] |= SavedValueArrayTag;
-        } else if (tag == SavedValueArrayTag) {
-            p -= 2;
-        }
-    }
-}
-
-bool
-GCMarker::restoreValueArray(NativeObject* obj, void** vpp, void** endp)
-{
-    uintptr_t start = stack.pop();
-    HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();
-
-    if (kind == HeapSlot::Element) {
-        if (!obj->is<ArrayObject>())
-            return false;
-
-        uint32_t initlen = obj->getDenseInitializedLength();
-        HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
-        if (start < initlen) {
-            *vpp = vp + start;
-            *endp = vp + initlen;
-        } else {
-            /* The object shrunk, in which case no scanning is needed. */
-            *vpp = *endp = vp;
-        }
-    } else {
-        MOZ_ASSERT(kind == HeapSlot::Slot);
-        HeapSlot* vp = obj->fixedSlots();
-        unsigned nfixed = obj->numFixedSlots();
-        unsigned nslots = obj->slotSpan();
-        if (start < nslots) {
-            if (start < nfixed) {
-                *vpp = vp + start;
-                *endp = vp + Min(nfixed, nslots);
-            } else {
-                *vpp = obj->slots_ + start - nfixed;
-                *endp = obj->slots_ + nslots - nfixed;
-            }
-        } else {
-            /* The object shrunk, in which case no scanning is needed. */
-            *vpp = *endp = vp;
-        }
-    }
-
-    MOZ_ASSERT(*vpp <= *endp);
     return true;
 }
 
 inline void
 GCMarker::processMarkStackTop(SliceBudget& budget)
 {
     /*
      * The function uses explicit goto and implements the scanning of the
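The rewritten drainMarkStack above interleaves processMarkStackTop with a SliceBudget so an incremental slice can stop mid-traversal, convert live slot pointers with saveValueRanges, and resume later. The control flow reduced to a sketch, with a stand-in work list and budget:

    #include <vector>

    struct BudgetSketch {
        long remaining;
        void step(long n = 1) { remaining -= n; }
        bool isOverBudget() const { return remaining <= 0; }
    };

    // Returns false if the slice ran out of budget before the stack drained.
    static bool drainSketch(std::vector<int>& markStack, BudgetSketch& budget) {
        while (!markStack.empty()) {
            markStack.pop_back();  // process one entry: trace its children
            budget.step();
            if (budget.isOverBudget())
                return false;      // yield; a later slice resumes the drain
        }
        return true;               // fully drained
    }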
@@ -1747,98 +1305,771 @@ GCMarker::processMarkStackTop(SliceBudge
             }
         }
         MOZ_ASSERT(nslots <= nobj->numFixedSlots());
         end = vp + nslots;
         goto scan_value_array;
     }
 }
 
-bool
-GCMarker::drainMarkStack(SliceBudget& budget)
+struct SlotArrayLayout
+{
+    union {
+        HeapSlot* end;
+        uintptr_t kind;
+    };
+    union {
+        HeapSlot* start;
+        uintptr_t index;
+    };
+    NativeObject* obj;
+
+    static void staticAsserts() {
+        /* This should have the same layout as three mark stack items. */
+        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
+    }
+};
+
+/*
+ * During incremental GC, we return from drainMarkStack without having processed
+ * the entire stack. At that point, JS code can run and reallocate slot arrays
+ * that are stored on the stack. To prevent this from happening, we replace all
+ * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slot
+ * pointers are replaced with slot indexes, and slot array end pointers are
+ * replaced with the kind of index (properties vs. elements).
+ */
+void
+GCMarker::saveValueRanges()
 {
-#ifdef DEBUG
-    struct AutoCheckCompartment {
-        bool& flag;
-        explicit AutoCheckCompartment(bool& comparmentCheckFlag) : flag(comparmentCheckFlag) {
-            MOZ_ASSERT(!flag);
-            flag = true;
+    for (uintptr_t* p = stack.tos_; p > stack.stack_; ) {
+        uintptr_t tag = *--p & StackTagMask;
+        if (tag == ValueArrayTag) {
+            *p &= ~StackTagMask;
+            p -= 2;
+            SlotArrayLayout* arr = reinterpret_cast<SlotArrayLayout*>(p);
+            NativeObject* obj = arr->obj;
+            MOZ_ASSERT(obj->isNative());
+
+            HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
+            if (arr->end == vp + obj->getDenseInitializedLength()) {
+                MOZ_ASSERT(arr->start >= vp);
+                arr->index = arr->start - vp;
+                arr->kind = HeapSlot::Element;
+            } else {
+                HeapSlot* vp = obj->fixedSlots();
+                unsigned nfixed = obj->numFixedSlots();
+                if (arr->start == arr->end) {
+                    arr->index = obj->slotSpan();
+                } else if (arr->start >= vp && arr->start < vp + nfixed) {
+                    MOZ_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
+                    arr->index = arr->start - vp;
+                } else {
+                    MOZ_ASSERT(arr->start >= obj->slots_ &&
+                               arr->end == obj->slots_ + obj->slotSpan() - nfixed);
+                    arr->index = (arr->start - obj->slots_) + nfixed;
+                }
+                arr->kind = HeapSlot::Slot;
+            }
+            p[2] |= SavedValueArrayTag;
+        } else if (tag == SavedValueArrayTag) {
+            p -= 2;
         }
-        ~AutoCheckCompartment() { flag = false; }
-    } acc(strictCompartmentChecking);
-#endif
+    }
+}
+
+bool
+GCMarker::restoreValueArray(NativeObject* obj, void** vpp, void** endp)
+{
+    uintptr_t start = stack.pop();
+    HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();
+
+    if (kind == HeapSlot::Element) {
+        if (!obj->is<ArrayObject>())
+            return false;
 
-    if (budget.isOverBudget())
+        uint32_t initlen = obj->getDenseInitializedLength();
+        HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
+        if (start < initlen) {
+            *vpp = vp + start;
+            *endp = vp + initlen;
+        } else {
+            /* The object shrunk, in which case no scanning is needed. */
+            *vpp = *endp = vp;
+        }
+    } else {
+        MOZ_ASSERT(kind == HeapSlot::Slot);
+        HeapSlot* vp = obj->fixedSlots();
+        unsigned nfixed = obj->numFixedSlots();
+        unsigned nslots = obj->slotSpan();
+        if (start < nslots) {
+            if (start < nfixed) {
+                *vpp = vp + start;
+                *endp = vp + Min(nfixed, nslots);
+            } else {
+                *vpp = obj->slots_ + start - nfixed;
+                *endp = obj->slots_ + nslots - nfixed;
+            }
+        } else {
+            /* The object shrunk, in which case no scanning is needed. */
+            *vpp = *endp = vp;
+        }
+    }
+
+    MOZ_ASSERT(*vpp <= *endp);
+    return true;
+}
+
+
+/*** Mark Stack ***********************************************************************************/
+
+bool
+MarkStack::init(JSGCMode gcMode)
+{
+    setBaseCapacity(gcMode);
+
+    MOZ_ASSERT(!stack_);
+    uintptr_t* newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
+    if (!newStack)
         return false;
 
-    for (;;) {
-        while (!stack.isEmpty()) {
-            processMarkStackTop(budget);
-            if (budget.isOverBudget()) {
-                saveValueRanges();
-                return false;
+    setStack(newStack, 0, baseCapacity_);
+    return true;
+}
+
+void
+MarkStack::setBaseCapacity(JSGCMode mode)
+{
+    switch (mode) {
+      case JSGC_MODE_GLOBAL:
+      case JSGC_MODE_COMPARTMENT:
+        baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
+        break;
+      case JSGC_MODE_INCREMENTAL:
+        baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
+        break;
+      default:
+        MOZ_CRASH("bad gc mode");
+    }
+
+    if (baseCapacity_ > maxCapacity_)
+        baseCapacity_ = maxCapacity_;
+}
+
+void
+MarkStack::setMaxCapacity(size_t maxCapacity)
+{
+    MOZ_ASSERT(isEmpty());
+    maxCapacity_ = maxCapacity;
+    if (baseCapacity_ > maxCapacity_)
+        baseCapacity_ = maxCapacity_;
+
+    reset();
+}
+
+void
+MarkStack::reset()
+{
+    if (capacity() == baseCapacity_) {
+        // No size change; keep the current stack.
+        setStack(stack_, 0, baseCapacity_);
+        return;
+    }
+
+    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
+    if (!newStack) {
+        // If the realloc fails, just keep using the existing stack; it's
+        // not ideal but better than failing.
+        newStack = stack_;
+        baseCapacity_ = capacity();
+    }
+    setStack(newStack, 0, baseCapacity_);
+}
+
+bool
+MarkStack::enlarge(unsigned count)
+{
+    size_t newCapacity = Min(maxCapacity_, capacity() * 2);
+    if (newCapacity < capacity() + count)
+        return false;
+
+    size_t tosIndex = position();
+
+    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
+    if (!newStack)
+        return false;
+
+    setStack(newStack, tosIndex, newCapacity);
+    return true;
+}
+
+void
+MarkStack::setGCMode(JSGCMode gcMode)
+{
+    // The mark stack won't be resized until the next call to reset(), but
+    // that will happen at the end of the next GC.
+    setBaseCapacity(gcMode);
+}
+
+size_t
+MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
+{
+    return mallocSizeOf(stack_);
+}
+
+
+/*** GCMarker *************************************************************************************/
+
+/*
+ * DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
+ * so we delay visiting entries.
+ */
+GCMarker::GCMarker(JSRuntime* rt)
+  : JSTracer(rt, JSTracer::MarkingTracer, DoNotTraceWeakMaps),
+    stack(size_t(-1)),
+    color(BLACK),
+    unmarkedArenaStackTop(nullptr),
+    markLaterArenas(0),
+    started(false),
+    strictCompartmentChecking(false)
+{
+}
+
+bool
+GCMarker::init(JSGCMode gcMode)
+{
+    return stack.init(gcMode);
+}
+
+void
+GCMarker::start()
+{
+    MOZ_ASSERT(!started);
+    started = true;
+    color = BLACK;
+
+    MOZ_ASSERT(!unmarkedArenaStackTop);
+    MOZ_ASSERT(markLaterArenas == 0);
+}
+
+void
+GCMarker::stop()
+{
+    MOZ_ASSERT(isDrained());
+
+    MOZ_ASSERT(started);
+    started = false;
+
+    MOZ_ASSERT(!unmarkedArenaStackTop);
+    MOZ_ASSERT(markLaterArenas == 0);
+
+    /* Free non-ballast stack memory. */
+    stack.reset();
+}
+
+void
+GCMarker::reset()
+{
+    color = BLACK;
+
+    stack.reset();
+    MOZ_ASSERT(isMarkStackEmpty());
+
+    while (unmarkedArenaStackTop) {
+        ArenaHeader* aheader = unmarkedArenaStackTop;
+        MOZ_ASSERT(aheader->hasDelayedMarking);
+        MOZ_ASSERT(markLaterArenas);
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->unsetDelayedMarking();
+        aheader->markOverflow = 0;
+        aheader->allocatedDuringIncremental = 0;
+        markLaterArenas--;
+    }
+    MOZ_ASSERT(isDrained());
+    MOZ_ASSERT(!markLaterArenas);
+}
+
+void
+GCMarker::markDelayedChildren(ArenaHeader* aheader)
+{
+    if (aheader->markOverflow) {
+        bool always = aheader->allocatedDuringIncremental;
+        aheader->markOverflow = 0;
+
+        for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
+            TenuredCell* t = i.getCell();
+            if (always || t->isMarked()) {
+                t->markIfUnmarked();
+                JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
             }
         }
+    } else {
+        MOZ_ASSERT(aheader->allocatedDuringIncremental);
+        PushArena(this, aheader);
+    }
+    aheader->allocatedDuringIncremental = 0;
+    /*
+     * Note that during an incremental GC we may still be allocating into
+     * aheader. However, prepareForIncrementalGC sets the
+     * allocatedDuringIncremental flag if we continue marking.
+     */
+}
 
-        if (!hasDelayedChildren())
-            break;
+bool
+GCMarker::markDelayedChildren(SliceBudget& budget)
+{
+    GCRuntime& gc = runtime()->gc;
+    gcstats::AutoPhase ap(gc.stats, gc.state() == MARK, gcstats::PHASE_MARK_DELAYED);
 
+    MOZ_ASSERT(unmarkedArenaStackTop);
+    do {
         /*
-         * Mark children of things that caused too deep recursion during the
-         * above tracing. Don't do this until we're done with everything
-         * else.
+         * If marking gets delayed at the same arena again, we must repeat
+         * marking of its things. For that we pop the arena from the stack and
+         * clear its hasDelayedMarking flag before we begin the marking.
          */
-        if (!markDelayedChildren(budget)) {
-            saveValueRanges();
+        ArenaHeader* aheader = unmarkedArenaStackTop;
+        MOZ_ASSERT(aheader->hasDelayedMarking);
+        MOZ_ASSERT(markLaterArenas);
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->unsetDelayedMarking();
+        markLaterArenas--;
+        markDelayedChildren(aheader);
+
+        budget.step(150);
+        if (budget.isOverBudget())
             return false;
-        }
-    }
+    } while (unmarkedArenaStackTop);
+    MOZ_ASSERT(!markLaterArenas);
 
     return true;
 }
 
-struct TraceChildrenFunctor {
-    template <typename T>
-    void operator()(JSTracer* trc, void* thing) {
-        static_cast<T*>(thing)->traceChildren(trc);
+template<typename T>
+static void
+PushArenaTyped(GCMarker* gcmarker, ArenaHeader* aheader)
+{
+    for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next())
+        gcmarker->traverse(i.get<T>());
+}
+
+void
+gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader)
+{
+    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
+      case JSTRACE_OBJECT:
+        PushArenaTyped<JSObject>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SCRIPT:
+        PushArenaTyped<JSScript>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_STRING:
+        PushArenaTyped<JSString>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SYMBOL:
+        PushArenaTyped<JS::Symbol>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_BASE_SHAPE:
+        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_JITCODE:
+        PushArenaTyped<js::jit::JitCode>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_LAZY_SCRIPT:
+        PushArenaTyped<LazyScript>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SHAPE:
+        PushArenaTyped<js::Shape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_OBJECT_GROUP:
+        PushArenaTyped<js::ObjectGroup>(gcmarker, aheader);
+        break;
+
+      default:
+        MOZ_CRASH("Invalid trace kind in PushArena.");
+    }
+}
+
+#ifdef DEBUG
+void
+GCMarker::checkZone(void* p)
+{
+    MOZ_ASSERT(started);
+    DebugOnly<Cell*> cell = static_cast<Cell*>(p);
+    MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
+}
+#endif
+
+size_t
+GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
+{
+    size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
+    for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
+        size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
+    return size;
+}
+
+void
+js::SetMarkStackLimit(JSRuntime* rt, size_t limit)
+{
+    rt->gc.setMarkStackLimit(limit);
+}
+
+/*** IsMarked / IsAboutToBeFinalized **************************************************************/
+
+template <typename T>
+static inline void
+CheckIsMarkedThing(T* thingp)
+{
+#define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
+    static_assert(
+            FOR_EACH_GC_LAYOUT(IS_SAME_TYPE_OR)
+            false, "Only the base cell layout types are allowed into marking/tracing internals");
+#undef IS_SAME_TYPE_OR
+
+#ifdef DEBUG
+    MOZ_ASSERT(thingp);
+    MOZ_ASSERT(*thingp);
+    JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
+    MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
+                  CurrentThreadCanAccessRuntime(rt) ||
+                  (rt->isHeapCollecting() && rt->gc.state() == SWEEP));
+#endif
+}
+
+template <typename T>
+static bool
+IsMarkedInternal(T* thingp)
+{
+    CheckIsMarkedThing(thingp);
+    JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
+
+    if (IsInsideNursery(*thingp)) {
+        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
+        return rt->gc.nursery.getForwardedPointer(thingp);
+    }
+
+    Zone* zone = (*thingp)->asTenured().zoneFromAnyThread();
+    if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
+        return true;
+    if (zone->isGCCompacting() && IsForwarded(*thingp))
+        *thingp = Forwarded(*thingp);
+    return (*thingp)->asTenured().isMarked();
+}
+
+template <typename S>
+struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
+    template <typename T> S operator()(T* t, bool* rv) {
+        *rv = IsMarkedInternal(&t);
+        return js::gc::RewrapValueOrId<S, T*>::wrap(t);
+    }
+};
+
+template <>
+bool
+IsMarkedInternal<Value>(Value* valuep)
+{
+    bool rv = true;
+    *valuep = DispatchValueTyped(IsMarkedFunctor<Value>(), *valuep, &rv);
+    return rv;
+}
+
+template <>
+bool
+IsMarkedInternal<jsid>(jsid* idp)
+{
+    bool rv = true;
+    *idp = DispatchIdTyped(IsMarkedFunctor<jsid>(), *idp, &rv);
+    return rv;
+}
+
+template <typename T>
+static bool
+IsAboutToBeFinalizedInternal(T* thingp)
+{
+    CheckIsMarkedThing(thingp);
+    T thing = *thingp;
+    JSRuntime* rt = thing->runtimeFromAnyThread();
+
+    /* Permanent atoms are never finalized by non-owning runtimes. */
+    if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && !TlsPerThreadData.get()->associatedWith(rt))
+        return false;
+
+    Nursery& nursery = rt->gc.nursery;
+    MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
+    if (rt->isHeapMinorCollecting()) {
+        if (IsInsideNursery(thing))
+            return !nursery.getForwardedPointer(thingp);
+        return false;
+    }
+
+    Zone* zone = thing->asTenured().zoneFromAnyThread();
+    if (zone->isGCSweeping()) {
+        if (thing->asTenured().arenaHeader()->allocatedDuringIncremental)
+            return false;
+        return !thing->asTenured().isMarked();
+    }
+    else if (zone->isGCCompacting() && IsForwarded(thing)) {
+        *thingp = Forwarded(thing);
+        return false;
+    }
+
+    return false;
+}
+
+template <typename S>
+struct IsAboutToBeFinalizedFunctor : public IdentityDefaultAdaptor<S> {
+    template <typename T> S operator()(T* t, bool* rv) {
+        *rv = IsAboutToBeFinalizedInternal(&t);
+        return js::gc::RewrapValueOrId<S, T*>::wrap(t);
     }
 };
 
+template <>
+bool
+IsAboutToBeFinalizedInternal<Value>(Value* valuep)
+{
+    bool rv = false;
+    *valuep = DispatchValueTyped(IsAboutToBeFinalizedFunctor<Value>(), *valuep, &rv);
+    return rv;
+}
+
+template <>
+bool
+IsAboutToBeFinalizedInternal<jsid>(jsid* idp)
+{
+    bool rv = false;
+    *idp = DispatchIdTyped(IsAboutToBeFinalizedFunctor<jsid>(), *idp, &rv);
+    return rv;
+}
+
+namespace js {
+namespace gc {
+
+template <typename T>
+bool
+IsMarkedUnbarriered(T* thingp)
+{
+    return IsMarkedInternal(ConvertToBase(thingp));
+}
+
+template <typename T>
+bool
+IsMarked(BarrieredBase<T>* thingp)
+{
+    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+}
+
+template <typename T>
+bool
+IsMarked(ReadBarriered<T>* thingp)
+{
+    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+}
+
+template <typename T>
+bool
+IsAboutToBeFinalizedUnbarriered(T* thingp)
+{
+    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
+}
+
+template <typename T>
+bool
+IsAboutToBeFinalized(BarrieredBase<T>* thingp)
+{
+    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+}
+
+template <typename T>
+bool
+IsAboutToBeFinalized(ReadBarriered<T>* thingp)
+{
+    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+}
+
+// Instantiate a copy of the Tracing templates for each derived type.
+#define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
+    template bool IsMarkedUnbarriered<type>(type*); \
+    template bool IsMarked<type>(BarrieredBase<type>*); \
+    template bool IsMarked<type>(ReadBarriered<type>*); \
+    template bool IsAboutToBeFinalizedUnbarriered<type>(type*); \
+    template bool IsAboutToBeFinalized<type>(BarrieredBase<type>*); \
+    template bool IsAboutToBeFinalized<type>(ReadBarriered<type>*);
+FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
+#undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
+
+} /* namespace gc */
+} /* namespace js */
+
+
+/*** Type Marking *********************************************************************************/
+
 void
-js::TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind)
+TypeSet::MarkTypeRoot(JSTracer* trc, TypeSet::Type* v, const char* name)
+{
+    JS_ROOT_MARKING_ASSERT(trc);
+    MarkTypeUnbarriered(trc, v, name);
+}
+
+void
+TypeSet::MarkTypeUnbarriered(JSTracer* trc, TypeSet::Type* v, const char* name)
 {
-    MOZ_ASSERT(thing);
-    TraceChildrenFunctor f;
-    CallTyped(f, kind, trc, thing);
+    if (v->isSingletonUnchecked()) {
+        JSObject* obj = v->singleton();
+        DispatchToTracer(trc, &obj, name);
+        *v = TypeSet::ObjectType(obj);
+    } else if (v->isGroupUnchecked()) {
+        ObjectGroup* group = v->group();
+        DispatchToTracer(trc, &group, name);
+        *v = TypeSet::ObjectType(group);
+    }
+}
+
+
+/*** Cycle Collector Helpers **********************************************************************/
+
+// This function is used by the cycle collector to trace through a shape. The
+// cycle collector does not care about shapes or base shapes, so those are not
+// marked. Instead, any shapes or base shapes that are encountered have their
+// children marked. Stack space is bounded.
+//
+// Note: The canonical way for an embedding to implement this functionality
+// would be through a custom CallbackTracer that ignores unrequired children
+// and pushes to a separate mark stack in order to bound the call stack usage.
+// We've implemented it this way purely for performance.
+void
+gc::MarkCycleCollectorChildren(JSTracer* trc, Shape* shape)
+{
+    // We need to mark the global, but it's OK to only do this once instead of
+    // doing it for every Shape in our lineage, since it's always the same
+    // global.
+    JSObject* global = shape->compartment()->unsafeUnbarrieredMaybeGlobal();
+    MOZ_ASSERT(global);
+    TraceManuallyBarrieredEdge(trc, &global, "global");
+
+    do {
+        MOZ_ASSERT(global == shape->compartment()->unsafeUnbarrieredMaybeGlobal());
+
+        MOZ_ASSERT(shape->base());
+        shape->base()->assertConsistency();
+
+        TraceEdge(trc, &shape->propidRef(), "propid");
+
+        if (shape->hasGetterObject()) {
+            JSObject* tmp = shape->getterObject();
+            TraceManuallyBarrieredEdge(trc, &tmp, "getter");
+            MOZ_ASSERT(tmp == shape->getterObject());
+        }
+
+        if (shape->hasSetterObject()) {
+            JSObject* tmp = shape->setterObject();
+            TraceManuallyBarrieredEdge(trc, &tmp, "setter");
+            MOZ_ASSERT(tmp == shape->setterObject());
+        }
+
+        shape = shape->previous();
+    } while (shape);
 }
 
+void
+TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trc,
+                                               void** thingp, JSGCTraceKind kind);
+
+// Object groups can point to other object groups via an UnboxedLayout or
+// the original unboxed group link. There can potentially be deep or cyclic
+// chains of such groups to trace through without going through a thing that
+// participates in cycle collection. These need to be handled iteratively to
+// avoid blowing the stack when running the cycle collector's callback tracer.
+struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
+{
+    explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
+        : JS::CallbackTracer(innerTracer->runtime(),
+                             TraceObjectGroupCycleCollectorChildrenCallback,
+                             DoNotTraceWeakMaps),
+          innerTracer(innerTracer)
+    {}
+
+    JS::CallbackTracer* innerTracer;
+    Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
+};
+
+void
+TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trcArg,
+                                               void** thingp, JSGCTraceKind kind)
+{
+    ObjectGroupCycleCollectorTracer* trc = static_cast<ObjectGroupCycleCollectorTracer*>(trcArg);
+    JS::GCCellPtr thing(*thingp, kind);
+
+    if (thing.isObject() || thing.isScript()) {
+        // Invoke the inner cycle collector callback on this child. It will not
+        // recurse back into TraceChildren.
+        trc->innerTracer->invoke(thingp, kind);
+        return;
+    }
+
+    if (thing.isObjectGroup()) {
+        // If this group is required to be in an ObjectGroup chain, trace it
+        // via the provided worklist rather than continuing to recurse.
+        ObjectGroup* group = static_cast<ObjectGroup*>(thing.asCell());
+        if (group->maybeUnboxedLayout()) {
+            for (size_t i = 0; i < trc->seen.length(); i++) {
+                if (trc->seen[i] == group)
+                    return;
+            }
+            if (trc->seen.append(group) && trc->worklist.append(group)) {
+                return;
+            } else {
+                // If append fails, keep tracing normally. The worst that will
+                // happen is we end up overrecursing.
+            }
+        }
+    }
+
+    TraceChildren(trc, thing.asCell(), thing.kind());
+}
+
+void
+gc::MarkCycleCollectorChildren(JSTracer* trc, ObjectGroup* group)
+{
+    MOZ_ASSERT(trc->isCallbackTracer());
+
+    // Early return if this group is not required to be in an ObjectGroup chain.
+    if (!group->maybeUnboxedLayout()) {
+        TraceChildren(trc, group, JSTRACE_OBJECT_GROUP);
+        return;
+    }
+
+    ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
+    TraceChildren(&groupTracer, group, JSTRACE_OBJECT_GROUP);
+
+    while (!groupTracer.worklist.empty()) {
+        ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
+        TraceChildren(&groupTracer, innerGroup, JSTRACE_OBJECT_GROUP);
+    }
+}
+
+
+/*** Cycle Collector Barrier Implementation *******************************************************/
+
 #ifdef DEBUG
 static void
 AssertNonGrayGCThing(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind)
 {
     DebugOnly<Cell*> thing(static_cast<Cell*>(*thingp));
     MOZ_ASSERT_IF(thing->isTenured(), !thing->asTenured().isMarked(js::gc::GRAY));
 }
-
-template <typename T>
-bool
-gc::ZoneIsGCMarking(T* thing)
-{
-    return thing->zone()->isGCMarking();
-}
-
-template <typename T>
-bool
-js::gc::ZoneIsAtomsZoneForString(JSRuntime* rt, T* thing)
-{
-    JSGCTraceKind kind = GetGCThingTraceKind(thing);
-    if (kind == JSTRACE_STRING || kind == JSTRACE_SYMBOL)
-        return rt->isAtomsZone(thing->zone());
-    return false;
-}
 #endif
 
 static void
 UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind);
 
 struct UnmarkGrayTracer : public JS::CallbackTracer
 {
     /*
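saveValueRanges and restoreValueArray, moved above, exist because JS can run between slices and reallocate the slot arrays whose interior pointers sit on the mark stack; pointers are therefore saved as (kind, index) pairs and re-derived on resume. The idea in miniature, over a hypothetical simplified layout:

    #include <cstddef>
    #include <vector>

    // Save an interior pointer as an index so the buffer may move or shrink
    // while control is in JS code between GC slices.
    static size_t saveCursorSketch(const std::vector<int>& slots, const int* cursor) {
        return size_t(cursor - slots.data());
    }

    static const int* restoreCursorSketch(const std::vector<int>& slots, size_t index) {
        if (index > slots.size())
            index = slots.size();  // the object shrank; nothing left to scan
        return slots.data() + index;
    }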
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -2,113 +2,334 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Marking_h
 #define gc_Marking_h
 
+#include "mozilla/DebugOnly.h"
+
 #include "jsfriendapi.h"
 
-#include "gc/Barrier.h"
-
-namespace js {
-
-/*** Tracing ***/
-
-// Trace through an edge in the live object graph on behalf of tracing. The
-// effect of tracing the edge depends on the JSTracer being used.
-template <typename T>
-void
-TraceEdge(JSTracer* trc, BarrieredBase<T>* thingp, const char* name);
-
-// Trace through a "root" edge. These edges are the initial edges in the object
-// graph traversal. Root edges are asserted to only be traversed in the initial
-// phase of a GC.
-template <typename T>
-void
-TraceRoot(JSTracer* trc, T* thingp, const char* name);
-
-// Like TraceEdge, but for edges that do not use one of the automatic barrier
-// classes and, thus, must be treated specially for moving GC. This method is
-// separate from TraceEdge to make accidental use of such edges more obvious.
-template <typename T>
-void
-TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name);
-
-// Trace all edges contained in the given array.
-template <typename T>
-void
-TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec, const char* name);
+#include "gc/Heap.h"
+#include "gc/Tracer.h"
+#include "js/GCAPI.h"
+#include "js/SliceBudget.h"
+#include "js/TracingAPI.h"
 
-// Trace all root edges in the given array.
-template <typename T>
-void
-TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name);
-
-// Trace an edge that crosses compartment boundaries. If the compartment of the
-// destination thing is not being GC'd, then the edge will not be traced.
-template <typename T>
-void
-TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, BarrieredBase<T>* dst,
-                          const char* name);
-
-// As above but with manual barriers.
-template <typename T>
-void
-TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
-                                           const char* name);
-
-// Permanent atoms and well-known symbols are shared between runtimes and must
-// use a separate marking path so that we can filter them out of normal heap
-// tracing.
-template <typename T>
-void
-TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name);
-
+class JSLinearString;
+class JSRope;
+namespace js {
+class BaseShape;
+class GCMarker;
+class LazyScript;
+class NativeObject;
+class ObjectGroup;
 namespace gc {
-
-/* Return true if the pointer is nullptr, or if it is a tagged pointer to
- * nullptr.
- */
-MOZ_ALWAYS_INLINE bool
-IsNullTaggedPointer(void* p)
-{
-    return uintptr_t(p) < 32;
+struct ArenaHeader;
+}
+namespace jit {
+class JitCode;
 }
 
-/*** Externally Typed Marking ***/
+static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
+static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
+
+/*
+ * When the native stack is low, the GC does not call JS_TraceChildren to mark
+ * the reachable "children" of the thing. Rather the thing is put aside and
+ * JS_TraceChildren is called later with more space on the C stack.
+ *
+ * To implement such delayed marking of the children with minimal overhead for
+ * the normal case of sufficient native stack, the code adds a field per arena.
+ * The field markingDelay->link chains all arenas with delayed things into a
+ * stack whose top is pointed to by GCMarker::unmarkedArenaStackTop.
+ * GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
+ * markDelayedChildren pops the arenas from the stack until it empties.
+ */
+class MarkStack
+{
+    friend class GCMarker;
+
+    uintptr_t* stack_;
+    uintptr_t* tos_;
+    uintptr_t* end_;
+
+    // The capacity we start with and reset() to.
+    size_t baseCapacity_;
+    size_t maxCapacity_;
+
+  public:
+    explicit MarkStack(size_t maxCapacity)
+      : stack_(nullptr),
+        tos_(nullptr),
+        end_(nullptr),
+        baseCapacity_(0),
+        maxCapacity_(maxCapacity)
+    {}
+
+    ~MarkStack() {
+        js_free(stack_);
+    }
+
+    size_t capacity() { return end_ - stack_; }
+
+    ptrdiff_t position() const { return tos_ - stack_; }
+
+    void setStack(uintptr_t* stack, size_t tosIndex, size_t capacity) {
+        stack_ = stack;
+        tos_ = stack + tosIndex;
+        end_ = stack + capacity;
+    }
+
+    bool init(JSGCMode gcMode);
+
+    void setBaseCapacity(JSGCMode mode);
+    size_t maxCapacity() const { return maxCapacity_; }
+    void setMaxCapacity(size_t maxCapacity);
+
+    bool push(uintptr_t item) {
+        if (tos_ == end_) {
+            if (!enlarge(1))
+                return false;
+        }
+        MOZ_ASSERT(tos_ < end_);
+        *tos_++ = item;
+        return true;
+    }
+
+    bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
+        uintptr_t* nextTos = tos_ + 3;
+        if (nextTos > end_) {
+            if (!enlarge(3))
+                return false;
+            nextTos = tos_ + 3;
+        }
+        MOZ_ASSERT(nextTos <= end_);
+        tos_[0] = item1;
+        tos_[1] = item2;
+        tos_[2] = item3;
+        tos_ = nextTos;
+        return true;
+    }
+
+    bool isEmpty() const {
+        return tos_ == stack_;
+    }
+
+    uintptr_t pop() {
+        MOZ_ASSERT(!isEmpty());
+        return *--tos_;
+    }
+
+    void reset();
+
+    /* Grow the stack, ensuring there is space for at least count elements. */
+    bool enlarge(unsigned count);
+
+    void setGCMode(JSGCMode gcMode);
+
+    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
+};
+
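A usage note on the class above: push() can fail once the stack has grown to
maxCapacity_, so every caller needs a fallback; GCMarker's fallback is
delayMarkingChildren. A toy standalone model of that growth contract, not the
real class:

    // Toy model: enlarge() doubles capacity up to a hard maximum and push()
    // reports failure so the caller can fall back to delayed marking.
    #include <algorithm>
    #include <cstdint>
    #include <cstdlib>

    struct ToyMarkStack {
        uintptr_t* stack = nullptr;
        size_t tos = 0;
        size_t capacity = 0;
        size_t maxCapacity;

        explicit ToyMarkStack(size_t max) : maxCapacity(max) {}
        ~ToyMarkStack() { free(stack); }

        bool enlarge() {
            size_t newCapacity = std::min(maxCapacity,
                                          capacity ? capacity * 2 : size_t(64));
            if (newCapacity < capacity + 1)
                return false;  // cannot grow any further
            void* p = realloc(stack, newCapacity * sizeof(uintptr_t));
            if (!p)
                return false;
            stack = static_cast<uintptr_t*>(p);
            capacity = newCapacity;
            return true;
        }

        bool push(uintptr_t item) {
            if (tos == capacity && !enlarge())
                return false;  // caller must delay marking instead
            stack[tos++] = item;
            return true;
        }
    };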
+class GCMarker : public JSTracer
+{
+  public:
+    explicit GCMarker(JSRuntime* rt);
+    bool init(JSGCMode gcMode);
+
+    void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
+    size_t maxCapacity() const { return stack.maxCapacity(); }
+
+    void start();
+    void stop();
+    void reset();
+
+    // Mark the given GC thing and traverse its children at some point.
+    template <typename T> void traverse(T thing);
+
+    // Calls traverse on target after making additional assertions.
+    template <typename S, typename T> void traverse(S source, T target);
+
+    // Function templates cannot be partially specialized; overload for jsid.
+    template <typename S> void traverse(S source, jsid target);
 
-void
-TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name);
+    /*
+     * Care must be taken when changing the mark color from gray to black. The
+     * cycle collector depends on the invariant that there are no black to gray
+     * edges in the GC heap. This invariant lets the CC avoid tracing through
+     * black objects. If the invariant is violated, the cycle collector may
+     * free objects that are still reachable.
+     */
+    void setMarkColorGray() {
+        MOZ_ASSERT(isDrained());
+        MOZ_ASSERT(color == gc::BLACK);
+        color = gc::GRAY;
+    }
+    void setMarkColorBlack() {
+        MOZ_ASSERT(isDrained());
+        MOZ_ASSERT(color == gc::GRAY);
+        color = gc::BLACK;
+    }
+    uint32_t markColor() const { return color; }
+
+    void delayMarkingArena(gc::ArenaHeader* aheader);
+    void delayMarkingChildren(const void* thing);
+    void markDelayedChildren(gc::ArenaHeader* aheader);
+    bool markDelayedChildren(SliceBudget& budget);
+    bool hasDelayedChildren() const {
+        return !!unmarkedArenaStackTop;
+    }
+
+    bool isDrained() {
+        return isMarkStackEmpty() && !unmarkedArenaStackTop;
+    }
+
+    bool drainMarkStack(SliceBudget& budget);
+
+    void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
+
+    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
+
+#ifdef DEBUG
+    bool shouldCheckCompartments() { return strictCompartmentChecking; }
+#endif
+
+    /* This is public exclusively for ScanRope. */
+    MarkStack stack;
+
+  private:
+#ifdef DEBUG
+    void checkZone(void* p);
+#else
+    void checkZone(void* p) {}
+#endif
+
+    /*
+     * We use a common mark stack to mark GC things of different types and use
+     * the explicit tags to distinguish them when it cannot be deduced from
+     * the context of push or pop operation.
+     */
+    enum StackTag {
+        ValueArrayTag,
+        ObjectTag,
+        GroupTag,
+        SavedValueArrayTag,
+        JitCodeTag,
+        LastTag = JitCodeTag
+    };
+
+    static const uintptr_t StackTagMask = 7;
+    static_assert(StackTagMask >= uintptr_t(LastTag), "The tag mask must subsume the tags.");
+    static_assert(StackTagMask <= gc::CellMask, "The tag mask must be embeddable in a Cell*.");
+
+    // Push an object onto the stack for later tracing and assert that it has
+    // already been marked.
+    void repush(JSObject* obj) {
+        MOZ_ASSERT(gc::TenuredCell::fromPointer(obj)->isMarked(markColor()));
+        pushTaggedPtr(ObjectTag, obj);
+    }
+
+    template <typename T> void markAndTraceChildren(T* thing);
+    template <typename T> void markAndPush(StackTag tag, T* thing);
+    template <typename T> void markAndScan(T* thing);
+    void eagerlyMarkChildren(JSLinearString* str);
+    void eagerlyMarkChildren(JSRope* rope);
+    void eagerlyMarkChildren(JSString* str);
+    void eagerlyMarkChildren(LazyScript *thing);
+    void eagerlyMarkChildren(Shape* shape);
+    void lazilyMarkChildren(ObjectGroup* group);
+
+    // We may not have concrete types yet, so this has to be out of the header.
+    template <typename T>
+    void dispatchToTraceChildren(T* thing);
+
+    // Mark the given GC thing, but do not trace its children. Return true
+    // if the thing became marked.
+    template <typename T>
+    bool mark(T* thing);
+
+    void pushTaggedPtr(StackTag tag, void* ptr) {
+        checkZone(ptr);
+        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
+        MOZ_ASSERT(!(addr & StackTagMask));
+        if (!stack.push(addr | uintptr_t(tag)))
+            delayMarkingChildren(ptr);
+    }
+
+    void pushValueArray(JSObject* obj, void* start, void* end) {
+        checkZone(obj);
+
+        MOZ_ASSERT(start <= end);
+        uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
+        uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
+        uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
+
+        /*
+         * Push in the reverse order so obj will be on top. If we cannot push
+         * the array, we trigger delay marking for the whole object.
+         */
+        if (!stack.push(endAddr, startAddr, tagged))
+            delayMarkingChildren(obj);
+    }
+
+    bool isMarkStackEmpty() {
+        return stack.isEmpty();
+    }
+
+    bool restoreValueArray(NativeObject* obj, void** vpp, void** endp);
+    void saveValueRanges();
+    inline void processMarkStackTop(SliceBudget& budget);
+
+    /* The color is only applied to objects and functions. */
+    uint32_t color;
+
+    /* Pointer to the top of the stack of arenas we are delaying marking on. */
+    js::gc::ArenaHeader* unmarkedArenaStackTop;
+
+    /* Count of arenas that are currently in the stack. */
+    mozilla::DebugOnly<size_t> markLaterArenas;
+
+    /* Assert that start and stop are called with correct ordering. */
+    mozilla::DebugOnly<bool> started;
+
+    /*
+     * If this is true, all marked objects must belong to a compartment being
+     * GCed. This is used to look for compartment bugs.
+     */
+    mozilla::DebugOnly<bool> strictCompartmentChecking;
+};
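The StackTag scheme above works because GC cells are at least 8-byte aligned,
leaving the low three bits of every Cell* free. A standalone illustration of
the pack/unpack arithmetic used by pushTaggedPtr and the mark-stack consumer
(not SpiderMonkey code):

    #include <cassert>
    #include <cstdint>

    const uintptr_t kTagMask = 7;  // mirrors StackTagMask

    // Fold a small tag into the low bits of an aligned pointer, as
    // pushTaggedPtr does before pushing onto the mark stack.
    inline uintptr_t packTagged(void* ptr, unsigned tag) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
        assert(!(addr & kTagMask));  // the pointer must be cell-aligned
        assert(tag <= kTagMask);     // the tag must fit under the mask
        return addr | uintptr_t(tag);
    }

    // Recover both halves when popping, as processMarkStackTop must.
    inline void unpackTagged(uintptr_t word, void** ptr, unsigned* tag) {
        *tag = unsigned(word & kTagMask);
        *ptr = reinterpret_cast<void*>(word & ~kTagMask);
    }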
 
 void
-TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name);
+SetMarkStackLimit(JSRuntime* rt, size_t limit);
 
-/*** Slot Marking ***/
+bool
+IsBufferingGrayRoots(JSTracer* trc);
 
-void
-MarkObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots);
+} /* namespace js */
+namespace js {
+namespace gc {
 
 /*** Special Cases ***/
 
 /*
  * Trace through a shape or group iteratively during cycle collection to avoid
  * deep or infinite recursion.
  */
 void
 MarkCycleCollectorChildren(JSTracer* trc, Shape* shape);
 void
 MarkCycleCollectorChildren(JSTracer* trc, ObjectGroup* group);
 
 void
 PushArena(GCMarker* gcmarker, ArenaHeader* aheader);
 
-/*** Generic ***/
+/*** Liveness ***/
 
 template <typename T>
 bool
 IsMarkedUnbarriered(T* thingp);
 
 template <typename T>
 bool
 IsMarked(BarrieredBase<T>* thingp);
@@ -138,20 +359,26 @@ ToMarkable(const Value& v)
 }
 
 inline Cell*
 ToMarkable(Cell* cell)
 {
     return cell;
 }
 
-/*
- * HashKeyRef represents a reference to a HashMap key. This should normally
- * be used through the HashTableWriteBarrierPost function.
- */
+// Return true if the pointer is nullptr, or if it is a tagged pointer to
+// nullptr.
+MOZ_ALWAYS_INLINE bool
+IsNullTaggedPointer(void* p)
+{
+    return uintptr_t(p) < 32;
+}
+
+// HashKeyRef represents a reference to a HashMap key. This should normally
+// be used through the HashTableWriteBarrierPost function.
 template <typename Map, typename Key>
 class HashKeyRef : public BufferableRef
 {
     Map* map;
     Key key;
 
   public:
     HashKeyRef(Map* m, const Key& k) : map(m), key(k) {}
@@ -162,33 +389,32 @@ class HashKeyRef : public BufferableRef
         if (!p)
             return;
         JS::AutoOriginalTraceLocation reloc(trc, (void**)&*p);
         TraceManuallyBarrieredEdge(trc, &key, "HashKeyRef");
         map->rekeyIfMoved(prior, key);
     }
 };
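A hedged sketch of the intended call path: when a key that may move is written
into a map, a post-barrier records a HashKeyRef so a later minor GC can rekey
the entry. The putGeneric entry point and the helper's shape are assumptions
for illustration, not a confirmed API:

    // Hypothetical post-barrier for a map keyed on a movable GC thing. The
    // storeBuffer parameter and the putGeneric call are assumed shapes, shown
    // only to illustrate how HashKeyRef::mark gets a chance to run later.
    template <typename Map, typename Key>
    void keyWriteBarrierPost(js::gc::StoreBuffer* storeBuffer,
                             Map* map, const Key& key) {
        storeBuffer->putGeneric(js::gc::HashKeyRef<Map, Key>(map, key));
    }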
 
+// Wrap a GC thing pointer into a new Value or jsid. The type system enforces
+// that the thing pointer is a wrappable type.
 template <typename S, typename T>
 struct RewrapValueOrId {};
 #define DECLARE_REWRAP(S, T, method, prefix) \
     template <> struct RewrapValueOrId<S, T> { \
         static S wrap(T thing) { return method ( prefix thing ); } \
     }
 DECLARE_REWRAP(JS::Value, JSObject*, JS::ObjectOrNullValue, );
 DECLARE_REWRAP(JS::Value, JSString*, JS::StringValue, );
 DECLARE_REWRAP(JS::Value, JS::Symbol*, JS::SymbolValue, );
 DECLARE_REWRAP(jsid, JSString*, NON_INTEGER_ATOM_TO_JSID, (JSAtom*));
 DECLARE_REWRAP(jsid, JS::Symbol*, SYMBOL_TO_JSID, );
 
 } /* namespace gc */
 
-void
-TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind);
-
 bool
 UnmarkGrayShapeRecursively(Shape* shape);
 
 template<typename T>
 void
 CheckTracedThing(JSTracer* trc, T thing);
 
 } /* namespace js */
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -547,16 +547,38 @@ js::gc::GCRuntime::markRuntime(JSTracer*
         /* During GC, we don't mark gray roots at this stage. */
         if (JSTraceDataOp op = grayRootTracer.op) {
             if (traceOrMark == TraceRuntime)
                 (*op)(trc, grayRootTracer.data);
         }
     }
 }
 
+// Append traced things to a buffer on the zone for use later in the GC.
+// See the comment in GCRuntime.h above grayBufferState for details.
+class BufferGrayRootsTracer : public JS::CallbackTracer
+{
+    // Set to false if we OOM while buffering gray roots.
+    bool bufferingGrayRootsFailed;
+
+    void appendGrayRoot(gc::TenuredCell* thing, JSGCTraceKind kind);
+
+  public:
+    explicit BufferGrayRootsTracer(JSRuntime* rt)
+      : JS::CallbackTracer(rt, grayTraceCallback), bufferingGrayRootsFailed(false)
+    {}
+
+    static void grayTraceCallback(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind) {
+        auto tracer = static_cast<BufferGrayRootsTracer*>(trc);
+        tracer->appendGrayRoot(gc::TenuredCell::fromPointer(*thingp), kind);
+    }
+
+    bool failed() const { return bufferingGrayRootsFailed; }
+};
+
 void
 js::gc::GCRuntime::bufferGrayRoots()
 {
     // Precondition: the state has been reset to "unused" after the last GC
     //               and the zone's buffers have been cleared.
     MOZ_ASSERT(grayBufferState == GrayBufferState::Unused);
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         MOZ_ASSERT(zone->gcGrayRoots.empty());
@@ -616,8 +638,18 @@ GCRuntime::markBufferedGrayRoots(JS::Zon
 void
 GCRuntime::resetBufferedGrayRoots() const
 {
     MOZ_ASSERT(grayBufferState != GrayBufferState::Okay,
                "Do not clear the gray buffers unless we are Failed or becoming Unused");
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         zone->gcGrayRoots.clearAndFree();
 }
+
+// Return true if this trace is happening on behalf of gray buffering during
+// the marking phase of incremental GC.
+bool
+js::IsBufferingGrayRoots(JSTracer* trc)
+{
+    return trc->isCallbackTracer() &&
+           trc->asCallbackTracer()->hasCallback(BufferGrayRootsTracer::grayTraceCallback);
+}
+
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -37,17 +37,17 @@ StoreBuffer::SlotsEdge::mark(JSTracer* t
         int32_t clampedStart = Min(start_, initLen);
         int32_t clampedEnd = Min(start_ + count_, initLen);
         TraceRange(trc, clampedEnd - clampedStart,
                    static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart), "element");
     } else {
         int32_t start = Min(uint32_t(start_), obj->slotSpan());
         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
         MOZ_ASSERT(end >= start);
-        MarkObjectSlots(trc, obj, start, end - start);
+        TraceObjectSlots(trc, obj, start, end - start);
     }
 }
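The Min clamping above guards against ranges that were recorded before the
object shrank. A standalone model of that arithmetic, with an invented helper
name:

    #include <algorithm>
    #include <cstdint>

    // Clamp a recorded [start, start + count) slot range against the current
    // length so a stale store-buffer entry never traces past the end.
    inline void clampRange(uint32_t start, uint32_t count, uint32_t currentLen,
                           uint32_t* clampedStart, uint32_t* clampedCount) {
        uint32_t s = std::min(start, currentLen);
        uint32_t e = std::min(start + count, currentLen);
        *clampedStart = s;
        *clampedCount = e - s;
    }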
 
 void
 StoreBuffer::WholeCellEdges::mark(JSTracer* trc) const
 {
     MOZ_ASSERT(edge->isTenured());
     JSGCTraceKind kind = GetGCThingTraceKind(edge);
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -10,17 +10,16 @@
 #include "mozilla/Attributes.h"
 #include "mozilla/DebugOnly.h"
 #include "mozilla/ReentrancyGuard.h"
 
 #include "jsalloc.h"
 
 #include "ds/LifoAlloc.h"
 #include "gc/Nursery.h"
-#include "gc/Tracer.h"
 #include "js/MemoryMetrics.h"
 
 namespace js {
 namespace gc {
 
 /*
  * BufferableRef represents an abstract reference for use in the generational
  * GC's remembered set. Entries in the store buffer that cannot be represented
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -16,30 +16,34 @@
 #include "jsscript.h"
 #include "jsutil.h"
 #include "NamespaceImports.h"
 
 #include "gc/GCInternals.h"
 #include "gc/Marking.h"
 #include "gc/Zone.h"
 
+#include "vm/Shape.h"
 #include "vm/Symbol.h"
 
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 using mozilla::DebugOnly;
 
 namespace js {
 template<typename T>
 void
 CheckTracedThing(JSTracer* trc, T thing);
 } // namespace js
 
+
+/*** Callback Tracer Dispatch ********************************************************************/
+
 template <typename T>
 T
 DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name)
 {
     CheckTracedThing(trc, *thingp);
     JSGCTraceKind kind = MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind;
     JS::AutoTracingName ctx(trc, name);
     trc->invoke((void**)thingp, kind);
@@ -68,16 +72,34 @@ DoCallback<Value>(JS::CallbackTracer* tr
 template <>
 jsid
 DoCallback<jsid>(JS::CallbackTracer* trc, jsid* idp, const char* name)
 {
     *idp = DispatchIdTyped(DoCallbackFunctor<jsid>(), *idp, trc, name);
     return *idp;
 }
 
+void
+JS::CallbackTracer::getTracingEdgeName(char* buffer, size_t bufferSize)
+{
+    MOZ_ASSERT(bufferSize > 0);
+    if (contextFunctor_) {
+        (*contextFunctor_)(this, buffer, bufferSize);
+        return;
+    }
+    if (contextIndex_ != InvalidIndex) {
+        JS_snprintf(buffer, bufferSize, "%s[%lu]", contextName_, contextIndex_);
+        return;
+    }
+    JS_snprintf(buffer, bufferSize, "%s", contextName_);
+}
+
+
+/*** Public Tracing API **************************************************************************/
+
 JS_PUBLIC_API(void)
 JS_CallUnbarrieredValueTracer(JSTracer* trc, Value* valuep, const char* name)
 {
     TraceManuallyBarrieredEdge(trc, valuep, name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallUnbarrieredIdTracer(JSTracer* trc, jsid* idp, const char* name)
@@ -153,16 +175,31 @@ JS_CallTenuredObjectTracer(JSTracer* trc
 }
 
 JS_PUBLIC_API(void)
 JS_TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind)
 {
     js::TraceChildren(trc, thing, kind);
 }
 
+struct TraceChildrenFunctor {
+    template <typename T>
+    void operator()(JSTracer* trc, void* thing) {
+        static_cast<T*>(thing)->traceChildren(trc);
+    }
+};
+
+void
+js::TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind)
+{
+    MOZ_ASSERT(thing);
+    TraceChildrenFunctor f;
+    CallTyped(f, kind, trc, thing);
+}
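CallTyped turns the runtime JSGCTraceKind back into a static type so the
functor's templated operator() can reach the concrete traceChildren. A
simplified standalone model of that dispatch, with two invented kinds rather
than the real CallTyped:

    // Simplified model of kind-to-type dispatch: a switch converts the
    // dynamic kind into a static type parameter for the functor.
    enum class Kind { Object, String };
    struct Obj { void traceChildren() {} };
    struct Str { void traceChildren() {} };

    template <typename F>
    void callTyped(F f, Kind kind, void* thing) {
        switch (kind) {
          case Kind::Object: f.template operator()<Obj>(thing); break;
          case Kind::String: f.template operator()<Str>(thing); break;
        }
    }

    // Counterpart of TraceChildrenFunctor: downcast to the static type and
    // call its traceChildren.
    struct TraceChildrenModel {
        template <typename T>
        void operator()(void* thing) { static_cast<T*>(thing)->traceChildren(); }
    };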
+
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer* trc)
 {
     AssertHeapIsIdle(trc->runtime());
     TraceRuntime(trc);
 }
 
 JS_PUBLIC_API(void)
@@ -214,16 +251,19 @@ JS_TraceIncomingCCWs(JSTracer* trc, cons
                     MOZ_ASSERT(script == key.wrapped);
                     break;
                 }
             }
         }
     }
 }
 
+
+/*** Traced Edge Printer *************************************************************************/
+
 static size_t
 CountDecimalDigits(size_t num)
 {
     size_t numDigits = 0;
     do {
         num /= 10;
         numDigits++;
     } while (num > 0);
@@ -363,282 +403,8 @@ JS_GetTraceThingInfo(char* buf, size_t b
           }
 
           default:
             break;
         }
     }
     buf[bufsize - 1] = '\0';
 }
-
-JSTracer::JSTracer(JSRuntime* rt, TracerKindTag kindTag,
-                   WeakMapTraceKind weakTraceKind /* = TraceWeakMapValues */)
-  : runtime_(rt)
-  , tag(kindTag)
-  , eagerlyTraceWeakMaps_(weakTraceKind)
-{
-}
-
-void
-JS::CallbackTracer::getTracingEdgeName(char* buffer, size_t bufferSize)
-{
-    MOZ_ASSERT(bufferSize > 0);
-    if (contextFunctor_) {
-        (*contextFunctor_)(this, buffer, bufferSize);
-        return;
-    }
-    if (contextIndex_ != InvalidIndex) {
-        JS_snprintf(buffer, bufferSize, "%s[%lu]", contextName_, contextIndex_);
-        return;
-    }
-    JS_snprintf(buffer, bufferSize, "%s", contextName_);
-}
-
-void
-JS::CallbackTracer::setTraceCallback(JSTraceCallback traceCallback)
-{
-    callback = traceCallback;
-}
-
-bool
-MarkStack::init(JSGCMode gcMode)
-{
-    setBaseCapacity(gcMode);
-
-    MOZ_ASSERT(!stack_);
-    uintptr_t* newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
-    if (!newStack)
-        return false;
-
-    setStack(newStack, 0, baseCapacity_);
-    return true;
-}
-
-void
-MarkStack::setBaseCapacity(JSGCMode mode)
-{
-    switch (mode) {
-      case JSGC_MODE_GLOBAL:
-      case JSGC_MODE_COMPARTMENT:
-        baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
-        break;
-      case JSGC_MODE_INCREMENTAL:
-        baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
-        break;
-      default:
-        MOZ_CRASH("bad gc mode");
-    }
-
-    if (baseCapacity_ > maxCapacity_)
-        baseCapacity_ = maxCapacity_;
-}
-
-void
-MarkStack::setMaxCapacity(size_t maxCapacity)
-{
-    MOZ_ASSERT(isEmpty());
-    maxCapacity_ = maxCapacity;
-    if (baseCapacity_ > maxCapacity_)
-        baseCapacity_ = maxCapacity_;
-
-    reset();
-}
-
-void
-MarkStack::reset()
-{
-    if (capacity() == baseCapacity_) {
-        // No size change; keep the current stack.
-        setStack(stack_, 0, baseCapacity_);
-        return;
-    }
-
-    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
-    if (!newStack) {
-        // If the realloc fails, just keep using the existing stack; it's
-        // not ideal but better than failing.
-        newStack = stack_;
-        baseCapacity_ = capacity();
-    }
-    setStack(newStack, 0, baseCapacity_);
-}
-
-bool
-MarkStack::enlarge(unsigned count)
-{
-    size_t newCapacity = Min(maxCapacity_, capacity() * 2);
-    if (newCapacity < capacity() + count)
-        return false;
-
-    size_t tosIndex = position();
-
-    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
-    if (!newStack)
-        return false;
-
-    setStack(newStack, tosIndex, newCapacity);
-    return true;
-}
-
-void
-MarkStack::setGCMode(JSGCMode gcMode)
-{
-    // The mark stack won't be resized until the next call to reset(), but
-    // that will happen at the end of the next GC.
-    setBaseCapacity(gcMode);
-}
-
-size_t
-MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
-{
-    return mallocSizeOf(stack_);
-}
-
-/*
- * DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
- * so we delay visting entries.
- */
-GCMarker::GCMarker(JSRuntime* rt)
-  : JSTracer(rt, JSTracer::MarkingTracer, DoNotTraceWeakMaps),
-    stack(size_t(-1)),
-    color(BLACK),
-    unmarkedArenaStackTop(nullptr),
-    markLaterArenas(0),
-    started(false),
-    strictCompartmentChecking(false)
-{
-}
-
-bool
-GCMarker::init(JSGCMode gcMode)
-{
-    return stack.init(gcMode);
-}
-
-void
-GCMarker::start()
-{
-    MOZ_ASSERT(!started);
-    started = true;
-    color = BLACK;
-
-    MOZ_ASSERT(!unmarkedArenaStackTop);
-    MOZ_ASSERT(markLaterArenas == 0);
-
-}
-
-void
-GCMarker::stop()
-{
-    MOZ_ASSERT(isDrained());
-
-    MOZ_ASSERT(started);
-    started = false;
-
-    MOZ_ASSERT(!unmarkedArenaStackTop);
-    MOZ_ASSERT(markLaterArenas == 0);
-
-    /* Free non-ballast stack memory. */
-    stack.reset();
-}
-
-void
-GCMarker::reset()
-{
-    color = BLACK;
-
-    stack.reset();
-    MOZ_ASSERT(isMarkStackEmpty());
-
-    while (unmarkedArenaStackTop) {
-        ArenaHeader* aheader = unmarkedArenaStackTop;
-        MOZ_ASSERT(aheader->hasDelayedMarking);
-        MOZ_ASSERT(markLaterArenas);
-        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
-        aheader->unsetDelayedMarking();
-        aheader->markOverflow = 0;
-        aheader->allocatedDuringIncremental = 0;
-        markLaterArenas--;
-    }
-    MOZ_ASSERT(isDrained());
-    MOZ_ASSERT(!markLaterArenas);
-}
-
-void
-GCMarker::markDelayedChildren(ArenaHeader* aheader)
-{
-    if (aheader->markOverflow) {
-        bool always = aheader->allocatedDuringIncremental;
-        aheader->markOverflow = 0;
-
-        for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
-            TenuredCell* t = i.getCell();
-            if (always || t->isMarked()) {
-                t->markIfUnmarked();
-                JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
-            }
-        }
-    } else {
-        MOZ_ASSERT(aheader->allocatedDuringIncremental);
-        PushArena(this, aheader);
-    }
-    aheader->allocatedDuringIncremental = 0;
-    /*
-     * Note that during an incremental GC we may still be allocating into
-     * aheader. However, prepareForIncrementalGC sets the
-     * allocatedDuringIncremental flag if we continue marking.
-     */
-}
-
-bool
-GCMarker::markDelayedChildren(SliceBudget& budget)
-{
-    GCRuntime& gc = runtime()->gc;
-    gcstats::AutoPhase ap(gc.stats, gc.state() == MARK, gcstats::PHASE_MARK_DELAYED);
-
-    MOZ_ASSERT(unmarkedArenaStackTop);
-    do {
-        /*
-         * If marking gets delayed at the same arena again, we must repeat
-         * marking of its things. For that we pop arena from the stack and
-         * clear its hasDelayedMarking flag before we begin the marking.
-         */
-        ArenaHeader* aheader = unmarkedArenaStackTop;
-        MOZ_ASSERT(aheader->hasDelayedMarking);
-        MOZ_ASSERT(markLaterArenas);
-        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
-        aheader->unsetDelayedMarking();
-        markLaterArenas--;
-        markDelayedChildren(aheader);
-
-        budget.step(150);
-        if (budget.isOverBudget())
-            return false;
-    } while (unmarkedArenaStackTop);
-    MOZ_ASSERT(!markLaterArenas);
-
-    return true;
-}
-
-#ifdef DEBUG
-void
-GCMarker::checkZone(void* p)
-{
-    MOZ_ASSERT(started);
-    DebugOnly<Cell*> cell = static_cast<Cell*>(p);
-    MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
-}
-#endif
-
-size_t
-GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
-{
-    size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
-    for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
-        size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
-    return size;
-}
-
-void
-js::SetMarkStackLimit(JSRuntime* rt, size_t limit)
-{
-    rt->gc.setMarkStackLimit(limit);
-}
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -2,352 +2,122 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef js_Tracer_h
 #define js_Tracer_h
 
-#include "mozilla/DebugOnly.h"
-
-#include "gc/Heap.h"
-#include "js/GCAPI.h"
-#include "js/SliceBudget.h"
-#include "js/TracingAPI.h"
-
-class JSLinearString;
-class JSRope;
-namespace js {
-class BaseShape;
-class GCMarker;
-class LazyScript;
-class NativeObject;
-class ObjectGroup;
-namespace gc {
-struct ArenaHeader;
-}
-namespace jit {
-class JitCode;
-}
-
-static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
-static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
+#include "jsfriendapi.h"
 
-/*
- * When the native stack is low, the GC does not call JS_TraceChildren to mark
- * the reachable "children" of the thing. Rather the thing is put aside and
- * JS_TraceChildren is called later with more space on the C stack.
- *
- * To implement such delayed marking of the children with minimal overhead for
- * the normal case of sufficient native stack, the code adds a field per arena.
- * The field markingDelay->link links all arenas with delayed things into a
- * stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
- * GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
- * markDelayedChildren pops the arenas from the stack until it empties.
- */
-class MarkStack
-{
-    friend class GCMarker;
-
-    uintptr_t* stack_;
-    uintptr_t* tos_;
-    uintptr_t* end_;
-
-    // The capacity we start with and reset() to.
-    size_t baseCapacity_;
-    size_t maxCapacity_;
+#include "gc/Barrier.h"
 
-  public:
-    explicit MarkStack(size_t maxCapacity)
-      : stack_(nullptr),
-        tos_(nullptr),
-        end_(nullptr),
-        baseCapacity_(0),
-        maxCapacity_(maxCapacity)
-    {}
-
-    ~MarkStack() {
-        js_free(stack_);
-    }
-
-    size_t capacity() { return end_ - stack_; }
-
-    ptrdiff_t position() const { return tos_ - stack_; }
-
-    void setStack(uintptr_t* stack, size_t tosIndex, size_t capacity) {
-        stack_ = stack;
-        tos_ = stack + tosIndex;
-        end_ = stack + capacity;
-    }
-
-    bool init(JSGCMode gcMode);
-
-    void setBaseCapacity(JSGCMode mode);
-    size_t maxCapacity() const { return maxCapacity_; }
-    void setMaxCapacity(size_t maxCapacity);
+namespace js {
 
-    bool push(uintptr_t item) {
-        if (tos_ == end_) {
-            if (!enlarge(1))
-                return false;
-        }
-        MOZ_ASSERT(tos_ < end_);
-        *tos_++ = item;
-        return true;
-    }
-
-    bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
-        uintptr_t* nextTos = tos_ + 3;
-        if (nextTos > end_) {
-            if (!enlarge(3))
-                return false;
-            nextTos = tos_ + 3;
-        }
-        MOZ_ASSERT(nextTos <= end_);
-        tos_[0] = item1;
-        tos_[1] = item2;
-        tos_[2] = item3;
-        tos_ = nextTos;
-        return true;
-    }
-
-    bool isEmpty() const {
-        return tos_ == stack_;
-    }
-
-    uintptr_t pop() {
-        MOZ_ASSERT(!isEmpty());
-        return *--tos_;
-    }
-
-    void reset();
-
-    /* Grow the stack, ensuring there is space for at least count elements. */
-    bool enlarge(unsigned count);
-
-    void setGCMode(JSGCMode gcMode);
+// Internal Tracing API
+//
+// Tracing is an abstract visitation of each edge in a JS heap graph.[1] The
+// most common (and performance sensitive) use of this infrastructure is for GC
+// "marking" as part of the mark-and-sweep collector; however, this
+// infrastructure is much more general than that and is used for many other
+// purposes as well.
+//
+// One commonly misunderstood subtlety of the tracing architecture is the role
+// of graph vertices versus graph edges. Graph vertices are the heap
+// allocations -- GC things -- that are returned by Allocate. Graph edges are
+// pointers -- including tagged pointers like Value and jsid -- that link the
+// allocations into a complex heap. The tracing API deals *only* with edges.
+// Any action taken on the target of a graph edge is independent of the tracing
+// itself.
+//
+// Another common misunderstanding relates to the role of the JSTracer. The
+// JSTracer instance determines what tracing does when visiting an edge; it
+// does not itself participate in the tracing process, other than to be passed
+// through as opaque data. It works like a closure in that respect.
+//
+// Tracing implementations internal to SpiderMonkey should use these interfaces
+// instead of the public interfaces in js/TracingAPI.h. Unlike the public
+// tracing methods, these work on internal types and avoid an external call.
+//
+// Note that the implementations for these methods are, surprisingly, in
+// js/src/gc/Marking.cpp. This is so that the compiler can inline as much as
+// possible in the common marking pathways. Conceptually, however, they remain
+// as part of the generic "tracing" architecture, rather than the more specific
+// marking implementation of tracing.
+//
+// 1 - In SpiderMonkey, we call this concept tracing rather than visiting
+//     because "visiting" is already used by the compiler. Also, it's been
+//     called "tracing" forever and changing it would be extremely difficult at
+//     this point.
 
-    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
-};
-
-#ifdef DEBUG
-namespace gc {
-
+// Trace through an edge in the live object graph on behalf of tracing. The
+// effect of tracing the edge depends on the JSTracer being used.
 template <typename T>
-extern bool
-ZoneIsGCMarking(T* thing);
-
-template <typename T>
-extern bool
-ZoneIsAtomsZoneForString(JSRuntime* rt, T* thing);
-
-} /* namespace gc */
-#endif
-
-#define JS_COMPARTMENT_ASSERT(rt, thing) \
-    MOZ_ASSERT(gc::ZoneIsGCMarking((thing)) || gc::ZoneIsAtomsZoneForString((rt), (thing)))
+void
+TraceEdge(JSTracer* trc, BarrieredBase<T>* thingp, const char* name);
 
-class GCMarker : public JSTracer
-{
-  public:
-    explicit GCMarker(JSRuntime* rt);
-    bool init(JSGCMode gcMode);
-
-    void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
-    size_t maxCapacity() const { return stack.maxCapacity(); }
-
-    void start();
-    void stop();
-    void reset();
-
-    // Mark the given GC thing and traverse its children at some point.
-    template <typename T> void traverse(T thing);
-
-    // Calls traverse on target after making additional assertions.
-    template <typename S, typename T> void traverse(S source, T target);
-
-    // C++ requires explicit declarations of partial template instantiations.
-    template <typename S> void traverse(S source, jsid target);
+// Trace through a "root" edge. These edges are the initial edges in the object
+// graph traversal. Root edges are asserted to only be traversed in the initial
+// phase of a GC.
+template <typename T>
+void
+TraceRoot(JSTracer* trc, T* thingp, const char* name);
 
-    /*
-     * Care must be taken changing the mark color from gray to black. The cycle
-     * collector depends on the invariant that there are no black to gray edges
-     * in the GC heap. This invariant lets the CC not trace through black
-     * objects. If this invariant is violated, the cycle collector may free
-     * objects that are still reachable.
-     */
-    void setMarkColorGray() {
-        MOZ_ASSERT(isDrained());
-        MOZ_ASSERT(color == gc::BLACK);
-        color = gc::GRAY;
-    }
-    void setMarkColorBlack() {
-        MOZ_ASSERT(isDrained());
-        MOZ_ASSERT(color == gc::GRAY);
-        color = gc::BLACK;
-    }
-    uint32_t markColor() const { return color; }
+// Like TraceEdge, but for edges that do not use one of the automatic barrier
+// classes and, thus, must be treated specially for moving GC. This method is
+// separate from TraceEdge to make accidental use of such edges more obvious.
+template <typename T>
+void
+TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name);
 
-    void delayMarkingArena(gc::ArenaHeader* aheader);
-    void delayMarkingChildren(const void* thing);
-    void markDelayedChildren(gc::ArenaHeader* aheader);
-    bool markDelayedChildren(SliceBudget& budget);
-    bool hasDelayedChildren() const {
-        return !!unmarkedArenaStackTop;
-    }
-
-    bool isDrained() {
-        return isMarkStackEmpty() && !unmarkedArenaStackTop;
-    }
-
-    bool drainMarkStack(SliceBudget& budget);
-
-    void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
-
-    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
-
-#ifdef DEBUG
-    bool shouldCheckCompartments() { return strictCompartmentChecking; }
-#endif
-
-    /* This is public exclusively for ScanRope. */
-    MarkStack stack;
+// Trace all edges contained in the given array.
+template <typename T>
+void
+TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec, const char* name);
 
-  private:
-#ifdef DEBUG
-    void checkZone(void* p);
-#else
-    void checkZone(void* p) {}
-#endif
-
-    /*
-     * We use a common mark stack to mark GC things of different types and use
-     * the explicit tags to distinguish them when it cannot be deduced from
-     * the context of push or pop operation.
-     */
-    enum StackTag {
-        ValueArrayTag,
-        ObjectTag,
-        GroupTag,
-        SavedValueArrayTag,
-        JitCodeTag,
-        LastTag = JitCodeTag
-    };
+// Trace all root edges in the given array.
+template <typename T>
+void
+TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name);
 
-    static const uintptr_t StackTagMask = 7;
-    static_assert(StackTagMask >= uintptr_t(LastTag), "The tag mask must subsume the tags.");
-    static_assert(StackTagMask <= gc::CellMask, "The tag mask must be embeddable in a Cell*.");
-
-    // Push an object onto the stack for later tracing and assert that it has
-    // already been marked.
-    void repush(JSObject* obj) {
-        MOZ_ASSERT(gc::TenuredCell::fromPointer(obj)->isMarked(markColor()));
-        pushTaggedPtr(ObjectTag, obj);
-    }
-
-    template <typename T> void markAndTraceChildren(T* thing);
-    template <typename T> void markAndPush(StackTag tag, T* thing);
-    template <typename T> void markAndScan(T* thing);
-    void eagerlyMarkChildren(JSLinearString* str);
-    void eagerlyMarkChildren(JSRope* rope);
-    void eagerlyMarkChildren(JSString* str);
-    void eagerlyMarkChildren(LazyScript *thing);
-    void eagerlyMarkChildren(Shape* shape);
-    void lazilyMarkChildren(ObjectGroup* group);
-
-    // We may not have concrete types yet, so this has to be out of the header.
-    template <typename T>
-    void dispatchToTraceChildren(T* thing);
+// Trace an edge that crosses compartment boundaries. If the compartment of the
+// destination thing is not being GC'd, then the edge will not be traced.
+template <typename T>
+void
+TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, BarrieredBase<T>* dst,
+                          const char* name);
 
-    // Mark the given GC thing, but do not trace its children. Return true
-    // if the thing became marked.
-    template <typename T>
-    bool mark(T* thing);
-
-    void pushTaggedPtr(StackTag tag, void* ptr) {
-        checkZone(ptr);
-        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
-        MOZ_ASSERT(!(addr & StackTagMask));
-        if (!stack.push(addr | uintptr_t(tag)))
-            delayMarkingChildren(ptr);
-    }
-
-    void pushValueArray(JSObject* obj, void* start, void* end) {
-        checkZone(obj);
-
-        MOZ_ASSERT(start <= end);
-        uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
-        uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
-        uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
+// As above but with manual barriers.
+template <typename T>
+void
+TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
+                                           const char* name);
 
-        /*
-         * Push in the reverse order so obj will be on top. If we cannot push
-         * the array, we trigger delay marking for the whole object.
-         */
-        if (!stack.push(endAddr, startAddr, tagged))
-            delayMarkingChildren(obj);
-    }
-
-    bool isMarkStackEmpty() {
-        return stack.isEmpty();
-    }
-
-    bool restoreValueArray(NativeObject* obj, void** vpp, void** endp);
-    void saveValueRanges();
-    inline void processMarkStackTop(SliceBudget& budget);
-
-    /* The color is only applied to objects and functions. */
-    uint32_t color;
-
-    /* Pointer to the top of the stack of arenas we are delaying marking on. */
-    js::gc::ArenaHeader* unmarkedArenaStackTop;
-
-    /* Count of arenas that are currently in the stack. */
-    mozilla::DebugOnly<size_t> markLaterArenas;
+// Permanent atoms and well-known symbols are shared between runtimes and must
+// use a separate marking path so that we can filter them out of normal heap
+// tracing.
+template <typename T>
+void
+TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name);
 
-    /* Assert that start and stop are called with correct ordering. */
-    mozilla::DebugOnly<bool> started;
+// Trace a root edge that uses the base GC thing type, instead of a more
+// specific type.
+void
+TraceGenericPointerRoot(JSTracer* trc, gc::Cell** thingp, const char* name);
 
-    /*
-     * If this is true, all marked objects must belong to a compartment being
-     * GCed. This is used to look for compartment bugs.
-     */
-    mozilla::DebugOnly<bool> strictCompartmentChecking;
-};
-
-// Append traced things to a buffer on the zone for use later in the GC.
-// See the comment in GCRuntime.h above grayBufferState for details.
-class BufferGrayRootsTracer : public JS::CallbackTracer
-{
-    // Set to false if we OOM while buffering gray roots.
-    bool bufferingGrayRootsFailed;
-
-    void appendGrayRoot(gc::TenuredCell* thing, JSGCTraceKind kind);
+// Trace a non-root edge that uses the base GC thing type, instead of a more
+// specific type.
+void
+TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const char* name);
 
-  public:
-    explicit BufferGrayRootsTracer(JSRuntime* rt)
-      : JS::CallbackTracer(rt, grayTraceCallback), bufferingGrayRootsFailed(false)
-    {}
-
-    static void grayTraceCallback(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind) {
-        auto tracer = static_cast<BufferGrayRootsTracer*>(trc);
-        tracer->appendGrayRoot(gc::TenuredCell::fromPointer(*thingp), kind);
-    }
-
-    bool failed() const { return bufferingGrayRootsFailed; }
-};
+// Object slots are not stored as a contiguous vector, so tracing them as a
+// plain range would report the wrong indices when an index is requested.
+void
+TraceObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots);
 
+// Deprecated. Please use one of the strongly typed variants above.
 void
-SetMarkStackLimit(JSRuntime* rt, size_t limit);
+TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind);
 
-// Return true if this trace is happening on behalf of gray buffering during
-// the marking phase of incremental GC.
-inline bool
-IsBufferingGrayRoots(JSTracer* trc)
-{
-    return trc->isCallbackTracer() &&
-           trc->asCallbackTracer()->hasCallback(BufferGrayRootsTracer::grayTraceCallback);
-}
-
-} /* namespace js */
+} // namespace js
 
 #endif /* js_Tracer_h */
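For orientation, a hedged sketch of how the main entry points differ at a call
site. The struct and its fields are invented for illustration; only the Trace*
calls follow the declarations above:

    // Invented GC thing with one automatically barriered edge and one
    // manually barriered edge, to show which Trace* variant fits which.
    struct HypotheticalThing {
        js::HeapPtrObject barrieredEdge;  // wrapped in a barrier class
        JSObject* rawEdge;                // barriers handled by hand elsewhere

        void trace(JSTracer* trc) {
            js::TraceEdge(trc, &barrieredEdge, "barriered-edge");
            js::TraceManuallyBarrieredEdge(trc, &rawEdge, "raw-edge");
        }
    };

    // Root edges, by contrast, are traced only in the initial phase of a GC:
    //   js::TraceRoot(trc, &someRootPointer, "my-root");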
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1089,17 +1089,17 @@ MarkIonJSFrame(JSTracer* trc, const JitF
     }
 
     uintptr_t* spill = frame.spillBase();
     LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
     LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
     for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
         --spill;
         if (gcRegs.has(*iter))
-            gc::TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill), "ion-gc-spill");
+            TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill), "ion-gc-spill");
         else if (valueRegs.has(*iter))
             TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
     }
 
 #ifdef JS_NUNBOX32
     LAllocation type, payload;
     while (safepoint.getNunboxSlot(&type, &payload)) {
         jsval_layout layout;
@@ -1417,17 +1417,17 @@ MarkJitExitFrame(JSTracer* trc, const Ji
             break;
           case VMFunction::RootFunction:
             TraceRoot(trc, reinterpret_cast<JSFunction**>(argBase), "ion-vm-args");
             break;
           case VMFunction::RootValue:
             TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
             break;
           case VMFunction::RootCell:
-            gc::TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase), "ion-vm-args");
+            TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase), "ion-vm-args");
             break;
         }
 
         switch (f->argProperties(explicitArg)) {
           case VMFunction::WordByValue:
           case VMFunction::WordByRef:
             argBase += sizeof(void*);
             break;
@@ -1451,17 +1451,17 @@ MarkJitExitFrame(JSTracer* trc, const Ji
             break;
           case VMFunction::RootFunction:
             TraceRoot(trc, footer->outParam<JSFunction*>(), "ion-vm-out");
             break;
           case VMFunction::RootValue:
             TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
             break;
           case VMFunction::RootCell:
-            gc::TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(), "ion-vm-out");
+            TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(), "ion-vm-out");
             break;
         }
     }
 
     MarkJitExitFrameCopiedArguments(trc, f, footer);
 }
 
 static void
--- a/js/src/jit/arm/Assembler-arm.cpp
+++ b/js/src/jit/arm/Assembler-arm.cpp
@@ -807,18 +807,18 @@ TraceOneDataRelocation(JSTracer* trc, It
 
     // The low bit shouldn't be set. If it is, we probably got a dummy
     // pointer inserted by CodeGenerator::visitNurseryObject, but we
     // shouldn't be able to trigger GC before those are patched to their
     // real values.
     MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
 
     // No barrier needed since these are constants.
-    gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
-                                                 "ion-masm-ptr");
+    TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
+                                             "ion-masm-ptr");
 
     if (ptr != prior) {
         MacroAssemblerARM::ma_mov_patch(Imm32(int32_t(ptr)), dest, Assembler::Always, rs, ins);
 
         // L_LDR won't cause any instructions to be updated.
         if (rs != Assembler::L_LDR) {
             AutoFlushICache::flush(uintptr_t(ins), 4);
             AutoFlushICache::flush(uintptr_t(ins->next()), 4);
--- a/js/src/jit/mips/Assembler-mips.cpp
+++ b/js/src/jit/mips/Assembler-mips.cpp
@@ -290,17 +290,17 @@ TraceOneDataRelocation(JSTracer* trc, In
 
     // The low bit shouldn't be set. If it is, we probably got a dummy
     // pointer inserted by CodeGenerator::visitNurseryObject, but we
     // shouldn't be able to trigger GC before those are patched to their
     // real values.
     MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
 
     // No barrier needed since these are constants.
-    gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
+    TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
                                                  "ion-masm-ptr");
     if (ptr != prior) {
         Assembler::UpdateLuiOriValue(inst, inst->next(), uint32_t(ptr));
         AutoFlushICache::flush(uintptr_t(inst), 8);
     }
 }
 
 static void
--- a/js/src/jit/x86-shared/Assembler-x86-shared.cpp
+++ b/js/src/jit/x86-shared/Assembler-x86-shared.cpp
@@ -69,18 +69,18 @@ TraceDataRelocations(JSTracer* trc, uint
 
         // The low bit shouldn't be set. If it is, we probably got a dummy
         // pointer inserted by CodeGenerator::visitNurseryObject, but we
         // shouldn't be able to trigger GC before those are patched to their
         // real values.
         MOZ_ASSERT(!(*reinterpret_cast<uintptr_t*>(ptr) & 0x1));
 
         // No barrier needed since these are constants.
-        gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
-                                                     "ion-masm-ptr");
+        TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
+                                                 "ion-masm-ptr");
     }
 }
 
 
 void
 AssemblerX86Shared::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader)
 {
     ::TraceDataRelocations(trc, code->raw(), reader);
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -4082,17 +4082,17 @@ JSObject::traceChildren(JSTracer* trc)
     if (clasp->isNative()) {
         NativeObject* nobj = &as<NativeObject>();
 
         TraceEdge(trc, &nobj->shape_, "shape");
 
         {
             GetObjectSlotNameFunctor func(nobj);
             JS::AutoTracingDetails ctx(trc, func);
-            MarkObjectSlots(trc, nobj, 0, nobj->slotSpan());
+            TraceObjectSlots(trc, nobj, 0, nobj->slotSpan());
         }
 
         do {
             if (nobj->denseElementsAreCopyOnWrite()) {
                 HeapPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
                 if (owner != nobj) {
                     TraceEdge(trc, &owner, "objectElementsOwner");
                     break;
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -928,17 +928,17 @@ class Shape : public gc::TenuredCell
 #endif
 
     void sweep();
     void finalize(FreeOp* fop);
     void removeChild(Shape* child);
 
     static inline ThingRootKind rootKind() { return THING_ROOT_SHAPE; }
 
-    inline void traceChildren(JSTracer* trc);
+    void traceChildren(JSTracer* trc);
 
     inline Shape* search(ExclusiveContext* cx, jsid id);
     inline Shape* searchLinear(jsid id);
 
     void fixupAfterMovingGC();
     void fixupGetterSetterForBarrier(JSTracer* trc);
 
     /* For JIT usage */