Bug 1341355 - Add abstractions to improve type checking of mark stack contents and add iterator r=sfink
author: Jon Coppeard <jcoppeard@mozilla.com>
Tue, 21 Feb 2017 17:53:24 +0000
changeset 373175 80f3a16286b0d4ef6896853d02087c44c93e0857
parent 373174 a6aebb9814445268db688d5dbcb5cac511f23686
child 373176 b8ecc482d087b47df5cc9a95549f2b5f642c3b85
push id: 10863
push user: jlorenzo@mozilla.com
push date: Mon, 06 Mar 2017 23:02:23 +0000
treeherder: mozilla-aurora@0931190cd725 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: sfink
bugs: 1341355
milestone: 54.0a1
Bug 1341355 - Add abstractions to improve type checking of mark stack contents and add iterator r=sfink
js/src/gc/Marking.cpp
js/src/gc/Marking.h
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1133,17 +1133,17 @@ js::GCMarker::eagerlyMarkChildren(JSRope
     // This function tries to scan the whole rope tree using the marking stack
     // as temporary storage. If that becomes full, the unscanned ropes are
     // added to the delayed marking list. When the function returns, the
     // marking stack is at the same depth as it was on entry. This way we avoid
     // using tags when pushing ropes to the stack as ropes never leak to other
     // users of the stack. This also assumes that a rope can only point to
     // other ropes or linear strings, it cannot refer to GC things of other
     // types.
-    ptrdiff_t savedPos = stack.position();
+    size_t savedPos = stack.position();
     JS_DIAGNOSTICS_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
 #ifdef JS_DEBUG
     static const size_t DEEP_ROPE_THRESHOLD = 100000;
     static const size_t ROPE_CYCLE_HISTORY = 100;
     DebugOnly<size_t> ropeDepth = 0;
     JSRope* history[ROPE_CYCLE_HISTORY];
 #endif
     while (true) {
@@ -1196,19 +1196,17 @@ js::GCMarker::eagerlyMarkChildren(JSRope
                     delayMarkingChildren(next);
                 next = &left->asRope();
             }
         }
         if (next) {
             rope = next;
         } else if (savedPos != stack.position()) {
             MOZ_ASSERT(savedPos < stack.position());
-            uintptr_t ptr = stack.pop();
-            MOZ_ASSERT((ptr & MarkStack::TagMask) == MarkStack::TempRopeTag);
-            rope = reinterpret_cast<JSRope*>(ptr &~ MarkStack::TempRopeTag);
+            rope = stack.popPtr().asTempRope();
         } else {
             break;
         }
     }
     MOZ_ASSERT(savedPos == stack.position());
 }
 
 static inline void
@@ -1641,61 +1639,51 @@ GCMarker::processMarkStackTop(SliceBudge
      * The function uses explicit goto and implements the scanning of the
      * object directly. It allows to eliminate the tail recursion and
      * significantly improve the marking performance, see bug 641025.
      */
     HeapSlot* vp;
     HeapSlot* end;
     JSObject* obj;
 
-    // Decode
-    uintptr_t addr = stack.pop();
-    uintptr_t tag = addr & MarkStack::TagMask;
-    addr &= ~MarkStack::TagMask;
-
-    // Dispatch
-    switch (tag) {
+    switch (stack.peekTag()) {
       case MarkStack::ValueArrayTag: {
-        JS_STATIC_ASSERT(MarkStack::ValueArrayTag == 0);
-        MOZ_ASSERT(!(addr & CellMask));
-        obj = reinterpret_cast<JSObject*>(addr);
-        uintptr_t addr2 = stack.pop();
-        uintptr_t addr3 = stack.pop();
-        MOZ_ASSERT(addr2 <= addr3);
-        MOZ_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
-        vp = reinterpret_cast<HeapSlot*>(addr2);
-        end = reinterpret_cast<HeapSlot*>(addr3);
+        auto array = stack.popValueArray();
+        obj = array.ptr.asValueArrayObject();
+        vp = array.start;
+        end = array.end;
         goto scan_value_array;
       }
 
       case MarkStack::ObjectTag: {
-        obj = reinterpret_cast<JSObject*>(addr);
+        obj = stack.popPtr().as<JSObject>();
         AssertZoneIsMarking(obj);
         goto scan_obj;
       }
 
       case MarkStack::GroupTag: {
-        return lazilyMarkChildren(reinterpret_cast<ObjectGroup*>(addr));
+        auto group = stack.popPtr().as<ObjectGroup>();
+        return lazilyMarkChildren(group);
       }
 
       case MarkStack::JitCodeTag: {
-        return reinterpret_cast<jit::JitCode*>(addr)->traceChildren(this);
+        auto code = stack.popPtr().as<jit::JitCode>();
+        return code->traceChildren(this);
       }
 
       case MarkStack::ScriptTag: {
-        return reinterpret_cast<JSScript*>(addr)->traceChildren(this);
+        auto script = stack.popPtr().as<JSScript>();
+        return script->traceChildren(this);
       }
 
       case MarkStack::SavedValueArrayTag: {
-        MOZ_ASSERT(!(addr & CellMask));
-        JSObject* obj = reinterpret_cast<JSObject*>(addr);
-        HeapSlot* vp;
-        HeapSlot* end;
-        if (restoreValueArray(obj, (void**)&vp, (void**)&end))
-            pushValueArray(&obj->as<NativeObject>(), vp, end);
+        auto savedArray = stack.popSavedValueArray();
+        JSObject* obj = savedArray.ptr.asSavedValueArrayObject();
+        if (restoreValueArray(savedArray, &vp, &end))
+            pushValueArray(obj, vp, end);
         else
             repush(obj);
         return;
       }
 
       default: MOZ_CRASH("Invalid tag in mark stack");
     }
     return;
@@ -1787,106 +1775,93 @@ GCMarker::processMarkStackTop(SliceBudge
             }
         }
         MOZ_ASSERT(nslots <= nobj->numFixedSlots());
         end = vp + nslots;
         goto scan_value_array;
     }
 }
 
-struct SlotArrayLayout
-{
-    union {
-        HeapSlot* end;
-        uintptr_t kind;
-    };
-    union {
-        HeapSlot* start;
-        uintptr_t index;
-    };
-    NativeObject* obj;
-
-    static void staticAsserts() {
-        /* This should have the same layout as three mark stack items. */
-        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
-    }
-};
-
 /*
  * During incremental GC, we return from drainMarkStack without having processed
  * the entire stack. At that point, JS code can run and reallocate slot arrays
  * that are stored on the stack. To prevent this from happening, we replace all
  * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
  * pointers are replaced with slot indexes, and slot array end pointers are
  * replaced with the kind of index (properties vs. elements).
  */
 void
 GCMarker::saveValueRanges()
 {
-    for (uintptr_t* p = stack.top(); p > stack.base(); ) {
-        uintptr_t tag = *--p & MarkStack::TagMask;
+    MarkStackIter iter(stack);
+    while (!iter.done()) {
+        auto tag = iter.peekTag();
         if (tag == MarkStack::ValueArrayTag) {
-            *p &= ~MarkStack::TagMask;
-            p -= 2;
-            SlotArrayLayout* arr = reinterpret_cast<SlotArrayLayout*>(p);
-            NativeObject* obj = arr->obj;
+            auto array = iter.peekValueArray();
+
+            NativeObject* obj = &array.ptr.asValueArrayObject()->as<NativeObject>();
             MOZ_ASSERT(obj->isNative());
 
+            uintptr_t index;
+            HeapSlot::Kind kind;
             HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
-            if (arr->end == vp + obj->getDenseInitializedLength()) {
-                MOZ_ASSERT(arr->start >= vp);
-                arr->index = arr->start - vp;
-                arr->kind = HeapSlot::Element;
+            if (array.end == vp + obj->getDenseInitializedLength()) {
+                MOZ_ASSERT(array.start >= vp);
+                index = array.start - vp;
+                kind = HeapSlot::Element;
             } else {
                 HeapSlot* vp = obj->fixedSlots();
                 unsigned nfixed = obj->numFixedSlots();
-                if (arr->start == arr->end) {
-                    arr->index = obj->slotSpan();
-                } else if (arr->start >= vp && arr->start < vp + nfixed) {
-                    MOZ_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
-                    arr->index = arr->start - vp;
+                if (array.start == array.end) {
+                    index = obj->slotSpan();
+                } else if (array.start >= vp && array.start < vp + nfixed) {
+                    MOZ_ASSERT(array.end == vp + Min(nfixed, obj->slotSpan()));
+                    index = array.start - vp;
                 } else {
-                    MOZ_ASSERT(arr->start >= obj->slots_ &&
-                               arr->end == obj->slots_ + obj->slotSpan() - nfixed);
-                    arr->index = (arr->start - obj->slots_) + nfixed;
+                    MOZ_ASSERT(array.start >= obj->slots_ &&
+                               array.end == obj->slots_ + obj->slotSpan() - nfixed);
+                    index = (array.start - obj->slots_) + nfixed;
                 }
-                arr->kind = HeapSlot::Slot;
+                kind = HeapSlot::Slot;
             }
-            p[2] |= MarkStack::SavedValueArrayTag;
+            iter.saveValueArray(obj, index, kind);
+            iter.nextArray();
         } else if (tag == MarkStack::SavedValueArrayTag) {
-            p -= 2;
+            iter.nextArray();
+        } else {
+            iter.nextPtr();
         }
     }
 }
 
 bool
-GCMarker::restoreValueArray(JSObject* objArg, void** vpp, void** endp)
+GCMarker::restoreValueArray(const MarkStack::SavedValueArray& array,
+                            HeapSlot** vpp, HeapSlot** endp)
 {
-    uintptr_t start = stack.pop();
-    HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();
-
+    JSObject* objArg = array.ptr.asSavedValueArrayObject();
     if (!objArg->isNative())
         return false;
     NativeObject* obj = &objArg->as<NativeObject>();
 
-    if (kind == HeapSlot::Element) {
+    uintptr_t start = array.index;
+    if (array.kind == HeapSlot::Element) {
         if (!obj->is<ArrayObject>())
             return false;
 
         uint32_t initlen = obj->getDenseInitializedLength();
         HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
         if (start < initlen) {
             *vpp = vp + start;
             *endp = vp + initlen;
         } else {
             /* The object shrunk, in which case no scanning is needed. */
             *vpp = *endp = vp;
         }
     } else {
-        MOZ_ASSERT(kind == HeapSlot::Slot);
+        MOZ_ASSERT(array.kind == HeapSlot::Slot);
         HeapSlot* vp = obj->fixedSlots();
         unsigned nfixed = obj->numFixedSlots();
         unsigned nslots = obj->slotSpan();
         if (start < nslots) {
             if (start < nfixed) {
                 *vpp = vp + start;
                 *endp = vp + Min(nfixed, nslots);
             } else {
@@ -1901,50 +1876,162 @@ GCMarker::restoreValueArray(JSObject* ob
 
     MOZ_ASSERT(*vpp <= *endp);
     return true;
 }
 
 
 /*** Mark Stack ***********************************************************************************/
 
+static_assert(sizeof(MarkStack::TaggedPtr) == sizeof(uintptr_t),
+              "A TaggedPtr should be the same size as a pointer");
+static_assert(sizeof(MarkStack::ValueArray) == sizeof(MarkStack::SavedValueArray),
+              "ValueArray and SavedValueArray should be the same size");
+static_assert((sizeof(MarkStack::ValueArray) % sizeof(uintptr_t)) == 0,
+              "ValueArray and SavedValueArray should be multiples of the pointer size");
+
+static const size_t ValueArrayWords = sizeof(MarkStack::ValueArray) / sizeof(uintptr_t);
+
 template <typename T>
 struct MapTypeToMarkStackTag {};
-
 template <>
 struct MapTypeToMarkStackTag<JSObject*> { static const auto value = MarkStack::ObjectTag; };
 template <>
 struct MapTypeToMarkStackTag<ObjectGroup*> { static const auto value = MarkStack::GroupTag; };
 template <>
 struct MapTypeToMarkStackTag<jit::JitCode*> { static const auto value = MarkStack::JitCodeTag; };
 template <>
 struct MapTypeToMarkStackTag<JSScript*> { static const auto value = MarkStack::ScriptTag; };
 
+static inline bool
+TagIsArrayTag(MarkStack::Tag tag)
+{
+    return tag == MarkStack::ValueArrayTag || tag == MarkStack::SavedValueArrayTag;
+}
+
+static inline void
+CheckValueArray(const MarkStack::ValueArray& array)
+{
+    MOZ_ASSERT(array.ptr.tag() == MarkStack::ValueArrayTag);
+    MOZ_ASSERT(uintptr_t(array.start) <= uintptr_t(array.end));
+    MOZ_ASSERT((uintptr_t(array.end) - uintptr_t(array.start)) % sizeof(Value) == 0);
+}
+
+static inline void
+CheckSavedValueArray(const MarkStack::SavedValueArray& array)
+{
+    MOZ_ASSERT(array.ptr.tag() == MarkStack::SavedValueArrayTag);
+    MOZ_ASSERT(array.kind == HeapSlot::Slot || array.kind == HeapSlot::Element);
+}
+
+inline
+MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
+  : bits(tag | uintptr_t(ptr))
+{
+    MOZ_ASSERT(tag <= LastTag);
+    MOZ_ASSERT((uintptr_t(ptr) & CellMask) == 0);
+}
+
+inline MarkStack::Tag
+MarkStack::TaggedPtr::tag() const
+{
+    auto tag = Tag(bits & TagMask);
+    MOZ_ASSERT(tag <= LastTag);
+    return tag;
+}
+
+inline Cell*
+MarkStack::TaggedPtr::ptr() const
+{
+    return reinterpret_cast<Cell*>(bits & ~TagMask);
+}
+
+template <typename T>
+inline T*
+MarkStack::TaggedPtr::as() const
+{
+    MOZ_ASSERT(tag() == MapTypeToMarkStackTag<T*>::value);
+    MOZ_ASSERT(ptr()->asTenured().getTraceKind() == MapTypeToTraceKind<T>::kind);
+    return static_cast<T*>(ptr());
+}
+
+inline JSObject*
+MarkStack::TaggedPtr::asValueArrayObject() const
+{
+    MOZ_ASSERT(tag() == ValueArrayTag);
+    MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::Object);
+    return static_cast<JSObject*>(ptr());
+}
+
+inline JSObject*
+MarkStack::TaggedPtr::asSavedValueArrayObject() const
+{
+    MOZ_ASSERT(tag() == SavedValueArrayTag);
+    MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::Object);
+    return static_cast<JSObject*>(ptr());
+}
+
+inline JSRope*
+MarkStack::TaggedPtr::asTempRope() const
+{
+    MOZ_ASSERT(tag() == TempRopeTag);
+    MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::String);
+    return static_cast<JSRope*>(ptr());
+}
+
+inline
+MarkStack::ValueArray::ValueArray(JSObject* obj, HeapSlot* startArg, HeapSlot* endArg)
+  : end(endArg), start(startArg), ptr(ValueArrayTag, obj)
+{}
+
+inline
+MarkStack::SavedValueArray::SavedValueArray(JSObject* obj, size_t indexArg, HeapSlot::Kind kindArg)
+  : kind(kindArg), index(indexArg), ptr(SavedValueArrayTag, obj)
+{}
+
 MarkStack::MarkStack(size_t maxCapacity)
-  : stack_(nullptr),
-    tos_(nullptr),
-    end_(nullptr),
-    baseCapacity_(0),
-    maxCapacity_(maxCapacity)
+  : stack_(nullptr)
+  , tos_(nullptr)
+  , end_(nullptr)
+  , baseCapacity_(0)
+  , maxCapacity_(maxCapacity)
+#ifdef DEBUG
+  , iteratorCount_(0)
+#endif
 {}
 
+MarkStack::~MarkStack()
+{
+    MOZ_ASSERT(iteratorCount_ == 0);
+    js_free(stack_);
+}
+
 bool
 MarkStack::init(JSGCMode gcMode)
 {
     setBaseCapacity(gcMode);
 
     MOZ_ASSERT(!stack_);
-    uintptr_t* newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
+    auto newStack = js_pod_malloc<TaggedPtr>(baseCapacity_);
     if (!newStack)
         return false;
 
     setStack(newStack, 0, baseCapacity_);
     return true;
 }
 
+inline void
+MarkStack::setStack(TaggedPtr* stack, size_t tosIndex, size_t capacity)
+{
+    MOZ_ASSERT(iteratorCount_ == 0);
+    stack_ = stack;
+    tos_ = stack + tosIndex;
+    end_ = stack + capacity;
+}
+
 void
 MarkStack::setBaseCapacity(JSGCMode mode)
 {
     switch (mode) {
       case JSGC_MODE_GLOBAL:
       case JSGC_MODE_ZONE:
         baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
         break;
@@ -1966,92 +2053,162 @@ MarkStack::setMaxCapacity(size_t maxCapa
     MOZ_ASSERT(isEmpty());
     maxCapacity_ = maxCapacity;
     if (baseCapacity_ > maxCapacity_)
         baseCapacity_ = maxCapacity_;
 
     reset();
 }
 
+inline bool
+MarkStack::pushTaggedPtr(Tag tag, Cell* ptr)
+{
+    if (!ensureSpace(1))
+        return false;
+
+    MOZ_ASSERT(tos_ < end_);
+    *tos_++ = TaggedPtr(tag, ptr);
+    return true;
+}
+
 template <typename T>
 inline bool
 MarkStack::push(T* ptr)
 {
-    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
-    MOZ_ASSERT(!(addr & TagMask));
-    auto tag = MapTypeToMarkStackTag<T*>::value;
-    return pushTaggedPtr(addr | uintptr_t(tag));
+    return pushTaggedPtr(MapTypeToMarkStackTag<T*>::value, ptr);
 }
 
 inline bool
 MarkStack::pushTempRope(JSRope* rope)
 {
-    return pushTaggedPtr(uintptr_t(rope) | TempRopeTag);
+    return pushTaggedPtr(TempRopeTag, rope);
+}
+
+inline bool
+MarkStack::push(JSObject* obj, HeapSlot* start, HeapSlot* end)
+{
+    return push(ValueArray(obj, start, end));
 }
 
 inline bool
-MarkStack::pushTaggedPtr(uintptr_t item)
+MarkStack::push(const ValueArray& array)
 {
-    if (tos_ == end_) {
-        if (!enlarge(1))
-            return false;
-    }
-    MOZ_ASSERT(tos_ < end_);
-    *tos_++ = item;
+    CheckValueArray(array);
+
+    if (!ensureSpace(ValueArrayWords))
+        return false;
+
+    *reinterpret_cast<ValueArray*>(tos_.ref()) = array;
+    tos_ += ValueArrayWords;
+    MOZ_ASSERT(tos_ <= end_);
+    MOZ_ASSERT(peekTag() == ValueArrayTag);
     return true;
 }
 
 inline bool
-MarkStack::push(uintptr_t item1, uintptr_t item2, uintptr_t item3)
+MarkStack::push(const SavedValueArray& array)
+{
+    CheckSavedValueArray(array);
+
+    if (!ensureSpace(ValueArrayWords))
+        return false;
+
+    *reinterpret_cast<SavedValueArray*>(tos_.ref()) = array;
+    tos_ += ValueArrayWords;
+    MOZ_ASSERT(tos_ <= end_);
+    MOZ_ASSERT(peekTag() == SavedValueArrayTag);
+    return true;
+}
+
+inline const MarkStack::TaggedPtr&
+MarkStack::peekPtr() const
+{
+    MOZ_ASSERT(!isEmpty());
+    return tos_[-1];
+}
+
+inline MarkStack::Tag
+MarkStack::peekTag() const
 {
-    uintptr_t* nextTos = tos_ + 3;
-    if (nextTos > end_) {
-        if (!enlarge(3))
-            return false;
-        nextTos = tos_ + 3;
-    }
-    MOZ_ASSERT(nextTos <= end_);
-    tos_[0] = item1;
-    tos_[1] = item2;
-    tos_[2] = item3;
-    tos_ = nextTos;
-    return true;
+    return peekPtr().tag();
+}
+
+inline MarkStack::TaggedPtr
+MarkStack::popPtr()
+{
+    MOZ_ASSERT(!isEmpty());
+    MOZ_ASSERT(!TagIsArrayTag(peekTag()));
+    tos_--;
+    return *tos_;
+}
+
+inline MarkStack::ValueArray
+MarkStack::popValueArray()
+{
+    MOZ_ASSERT(peekTag() == ValueArrayTag);
+    MOZ_ASSERT(position() >= ValueArrayWords);
+
+    tos_ -= ValueArrayWords;
+    const auto& array = *reinterpret_cast<ValueArray*>(tos_.ref());
+    CheckValueArray(array);
+    return array;
+}
+
+inline MarkStack::SavedValueArray
+MarkStack::popSavedValueArray()
+{
+    MOZ_ASSERT(peekTag() == SavedValueArrayTag);
+    MOZ_ASSERT(position() >= ValueArrayWords);
+
+    tos_ -= ValueArrayWords;
+    const auto& array = *reinterpret_cast<SavedValueArray*>(tos_.ref());
+    CheckSavedValueArray(array);
+    return array;
 }
 
 void
 MarkStack::reset()
 {
     if (capacity() == baseCapacity_) {
         // No size change; keep the current stack.
         setStack(stack_, 0, baseCapacity_);
         return;
     }
 
     MOZ_ASSERT(baseCapacity_ != 0);
-    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
+    auto newStack = js_pod_realloc<TaggedPtr>(stack_, capacity(), baseCapacity_);
     if (!newStack) {
         // If the realloc fails, just keep using the existing stack; it's
         // not ideal but better than failing.
         newStack = stack_;
         baseCapacity_ = capacity();
     }
     setStack(newStack, 0, baseCapacity_);
 }
 
+inline bool
+MarkStack::ensureSpace(size_t count)
+{
+    if ((tos_ + count) <= end_)
+        return true;
+
+    return enlarge(count);
+}
+
 bool
-MarkStack::enlarge(unsigned count)
+MarkStack::enlarge(size_t count)
 {
     size_t newCapacity = Min(maxCapacity_.ref(), capacity() * 2);
     if (newCapacity < capacity() + count)
         return false;
 
     size_t tosIndex = position();
 
     MOZ_ASSERT(newCapacity != 0);
-    uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
+    auto newStack = js_pod_realloc<TaggedPtr>(stack_, capacity(), newCapacity);
     if (!newStack)
         return false;
 
     setStack(newStack, tosIndex, newCapacity);
     return true;
 }
 
 void
@@ -2063,16 +2220,94 @@ MarkStack::setGCMode(JSGCMode gcMode)
 }
 
 size_t
 MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     return mallocSizeOf(stack_);
 }
 
+MarkStackIter::MarkStackIter(const MarkStack& stack)
+  : stack_(stack),
+    pos_(stack.tos_)
+{
+    stack.iteratorCount_++;
+}
+
+MarkStackIter::~MarkStackIter()
+{
+    MOZ_ASSERT(stack_.iteratorCount_);
+    stack_.iteratorCount_--;
+}
+
+inline size_t
+MarkStackIter::position() const
+{
+    return pos_ - stack_.stack_;
+}
+
+inline bool
+MarkStackIter::done() const
+{
+    return position() == 0;
+}
+
+inline const MarkStack::TaggedPtr&
+MarkStackIter::peekPtr() const
+{
+    MOZ_ASSERT(!done());
+    return pos_[-1];
+}
+
+inline MarkStack::Tag
+MarkStackIter::peekTag() const
+{
+    return peekPtr().tag();
+}
+
+inline MarkStack::ValueArray
+MarkStackIter::peekValueArray() const
+{
+    MOZ_ASSERT(peekTag() == MarkStack::ValueArrayTag);
+    MOZ_ASSERT(position() >= ValueArrayWords);
+
+    const auto& array = *reinterpret_cast<MarkStack::ValueArray*>(pos_ - ValueArrayWords);
+    CheckValueArray(array);
+    return array;
+}
+
+inline void
+MarkStackIter::nextPtr()
+{
+    MOZ_ASSERT(!done());
+    MOZ_ASSERT(!TagIsArrayTag(peekTag()));
+    pos_--;
+}
+
+inline void
+MarkStackIter::nextArray()
+{
+    MOZ_ASSERT(TagIsArrayTag(peekTag()));
+    MOZ_ASSERT(position() >= ValueArrayWords);
+    pos_ -= ValueArrayWords;
+}
+
+void
+MarkStackIter::saveValueArray(NativeObject* obj, uintptr_t index, HeapSlot::Kind kind)
+{
+    MOZ_ASSERT(peekTag() == MarkStack::ValueArrayTag);
+    MOZ_ASSERT(peekPtr().asValueArrayObject() == obj);
+    MOZ_ASSERT(position() >= ValueArrayWords);
+
+    auto& array = *reinterpret_cast<MarkStack::SavedValueArray*>(pos_ - ValueArrayWords);
+    array = MarkStack::SavedValueArray(obj, index, kind);
+    CheckSavedValueArray(array);
+    MOZ_ASSERT(peekTag() == MarkStack::SavedValueArrayTag);
+}
+
 
 /*** GCMarker *************************************************************************************/
 
 /*
  * ExpandWeakMaps: the GC is recomputing the liveness of WeakMap entries by
  * expanding each live WeakMap into its constituent key->value edges, a table
  * of which will be consulted in a later phase whenever marking a potential
  * key.
@@ -2165,27 +2400,17 @@ GCMarker::pushTaggedPtr(T* ptr)
     if (!stack.push(ptr))
         delayMarkingChildren(ptr);
 }
 
 void
 GCMarker::pushValueArray(JSObject* obj, HeapSlot* start, HeapSlot* end)
 {
     checkZone(obj);
-
-    MOZ_ASSERT(start <= end);
-    uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | MarkStack::ValueArrayTag;
-    uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
-    uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
-
-    /*
-     * Push in the reverse order so obj will be on top. If we cannot push
-     * the array, we trigger delay marking for the whole object.
-     */
-    if (!stack.push(endAddr, startAddr, tagged))
+    if (!stack.push(obj, start, end))
         delayMarkingChildren(obj);
 }
 
 void
 GCMarker::repush(JSObject* obj)
 {
     MOZ_ASSERT(gc::TenuredCell::fromPointer(obj)->isMarked(markColor()));
     pushTaggedPtr(obj);
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -38,38 +38,32 @@ namespace jit {
 class JitCode;
 } // namespace jit
 
 static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
 static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
 
 namespace gc {
 
+class MarkStackIter;
+
 /*
  * When the native stack is low, the GC does not call js::TraceChildren to mark
  * the reachable "children" of the thing. Rather the thing is put aside and
  * js::TraceChildren is called later with more space on the C stack.
  *
  * To implement such delayed marking of the children with minimal overhead for
  * the normal case of sufficient native stack, the code adds a field per arena.
  * The field markingDelay->link links all arenas with delayed things into a
  * stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
  * GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
  * markDelayedChildren pops the arenas from the stack until it empties.
  */
 class MarkStack
 {
-    ActiveThreadData<uintptr_t*> stack_;
-    ActiveThreadData<uintptr_t*> tos_;
-    ActiveThreadData<uintptr_t*> end_;
-
-    // The capacity we start with and reset() to.
-    ActiveThreadData<size_t> baseCapacity_;
-    ActiveThreadData<size_t> maxCapacity_;
-
   public:
     /*
      * We use a common mark stack to mark GC things of different types and use
      * the explicit tags to distinguish them when it cannot be deduced from
      * the context of push or pop operation.
      */
     enum Tag {
         ValueArrayTag,
@@ -82,69 +76,138 @@ class MarkStack
 
         LastTag = TempRopeTag
     };
 
     static const uintptr_t TagMask = 7;
     static_assert(TagMask >= uintptr_t(LastTag), "The tag mask must subsume the tags.");
     static_assert(TagMask <= gc::CellMask, "The tag mask must be embeddable in a Cell*.");
 
-    explicit MarkStack(size_t maxCapacity);
+    class TaggedPtr
+    {
+        uintptr_t bits;
+
+        Cell* ptr() const;
+
+      public:
+        TaggedPtr(Tag tag, Cell* ptr);
+        Tag tag() const;
+        template <typename T> T* as() const;
+        JSObject* asValueArrayObject() const;
+        JSObject* asSavedValueArrayObject() const;
+        JSRope* asTempRope() const;
+    };
 
-    ~MarkStack() {
-        js_free(stack_);
-    }
+    struct ValueArray
+    {
+        ValueArray(JSObject* obj, HeapSlot* start, HeapSlot* end);
+
+        HeapSlot* end;
+        HeapSlot* start;
+        TaggedPtr ptr;
+    };
+
+    struct SavedValueArray
+    {
+        SavedValueArray(JSObject* obj, size_t index, HeapSlot::Kind kind);
+
+        uintptr_t kind;
+        uintptr_t index;
+        TaggedPtr ptr;
+    };
+
+    explicit MarkStack(size_t maxCapacity);
+    ~MarkStack();
 
     size_t capacity() { return end_ - stack_; }
 
-    ptrdiff_t position() const { return tos_ - stack_; }
+    size_t position() const {
+        auto result = tos_ - stack_;
+        MOZ_ASSERT(result >= 0);
+        return size_t(result);
+    }
 
-    void setStack(uintptr_t* stack, size_t tosIndex, size_t capacity) {
-        stack_ = stack;
-        tos_ = stack + tosIndex;
-        end_ = stack + capacity;
-    }
+    void setStack(TaggedPtr* stack, size_t tosIndex, size_t capacity);
 
     MOZ_MUST_USE bool init(JSGCMode gcMode);
 
     void setBaseCapacity(JSGCMode mode);
     size_t maxCapacity() const { return maxCapacity_; }
     void setMaxCapacity(size_t maxCapacity);
 
-    uintptr_t* base() { return stack_; }
-    uintptr_t* top() { return tos_; }
-
     template <typename T>
     MOZ_MUST_USE bool push(T* ptr);
-    MOZ_MUST_USE bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3);
+    MOZ_MUST_USE bool push(JSObject* obj, HeapSlot* start, HeapSlot* end);
+    MOZ_MUST_USE bool push(const ValueArray& array);
+    MOZ_MUST_USE bool push(const SavedValueArray& array);
 
     // GCMarker::eagerlyMarkChildren uses unused marking stack as temporary
     // storage to hold rope pointers.
     MOZ_MUST_USE bool pushTempRope(JSRope* ptr);
 
     bool isEmpty() const {
         return tos_ == stack_;
     }
 
-    uintptr_t pop() {
-        MOZ_ASSERT(!isEmpty());
-        return *--tos_;
-    }
+    Tag peekTag() const;
+    TaggedPtr popPtr();
+    ValueArray popValueArray();
+    SavedValueArray popSavedValueArray();
 
     void reset();
 
-    /* Grow the stack, ensuring there is space for at least count elements. */
-    MOZ_MUST_USE bool enlarge(unsigned count);
-
     void setGCMode(JSGCMode gcMode);
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
   private:
-    MOZ_MUST_USE bool pushTaggedPtr(uintptr_t item);
+    MOZ_MUST_USE bool ensureSpace(size_t count);
+
+    /* Grow the stack, ensuring there is space for at least count elements. */
+    MOZ_MUST_USE bool enlarge(size_t count);
+
+    const TaggedPtr& peekPtr() const;
+    MOZ_MUST_USE bool pushTaggedPtr(Tag tag, Cell* ptr);
+
+    ActiveThreadData<TaggedPtr*> stack_;
+    ActiveThreadData<TaggedPtr*> tos_;
+    ActiveThreadData<TaggedPtr*> end_;
+
+    // The capacity we start with and reset() to.
+    ActiveThreadData<size_t> baseCapacity_;
+    ActiveThreadData<size_t> maxCapacity_;
+
+#ifdef DEBUG
+    mutable size_t iteratorCount_;
+#endif
+
+    friend class MarkStackIter;
+};
+
+class MarkStackIter
+{
+    const MarkStack& stack_;
+    MarkStack::TaggedPtr* pos_;
+
+  public:
+    explicit MarkStackIter(const MarkStack& stack);
+    ~MarkStackIter();
+
+    bool done() const;
+    MarkStack::Tag peekTag() const;
+    MarkStack::ValueArray peekValueArray() const;
+    void nextPtr();
+    void nextArray();
+
+    // Mutate the current ValueArray to a SavedValueArray.
+    void saveValueArray(NativeObject* obj, uintptr_t index, HeapSlot::Kind kind);
+
+  private:
+    size_t position() const;
+    const MarkStack::TaggedPtr& peekPtr() const;
 };
 
 struct WeakKeyTableHashPolicy {
     typedef JS::GCCellPtr Lookup;
     static HashNumber hash(const Lookup& v, const mozilla::HashCodeScrambler&) {
         return mozilla::HashGeneric(v.asCell());
     }
     static bool match(const JS::GCCellPtr& k, const Lookup& l) { return k == l; }
@@ -284,17 +347,18 @@ class GCMarker : public JSTracer
     inline void pushTaggedPtr(T* ptr);
 
     inline void pushValueArray(JSObject* obj, HeapSlot* start, HeapSlot* end);
 
     bool isMarkStackEmpty() {
         return stack.isEmpty();
     }
 
-    MOZ_MUST_USE bool restoreValueArray(JSObject* obj, void** vpp, void** endp);
+    MOZ_MUST_USE bool restoreValueArray(const gc::MarkStack::SavedValueArray& array,
+                                        HeapSlot** vpp, HeapSlot** endp);
     void saveValueRanges();
     inline void processMarkStackTop(SliceBudget& budget);
 
     /* The mark stack. Pointers in this stack are "gray" in the GC sense. */
     gc::MarkStack stack;
 
     /* The color is only applied to objects and functions. */
     ActiveThreadData<uint32_t> color;