Bug 1162301 - Move tenuring implementation to TenuringTracer; r=sfink
author Terrence Cole <terrence@mozilla.com>
date Thu, 07 May 2015 10:17:43 -0700
changeset 274406 cbfddea9ef08e966db91eca85a5ddf71adf2b07e
parent 274405 8e4ff6268286d3f15198d8d1874e18a98dffe07d
child 274407 2993c8d2fbb1c01b178065fc24859d9e12979e45
push id 863
push user raliiev@mozilla.com
push date Mon, 03 Aug 2015 13:22:43 +0000
treeherder mozilla-release@f6321b14228d
reviewers sfink
bugs 1162301
milestone 40.0a1
js/src/builtin/MapObject.cpp
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/jit/Ion.cpp
js/src/jscompartment.cpp
js/src/jsgc.h
js/src/vm/NativeObject.h
js/src/vm/ObjectGroup.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -1127,17 +1127,17 @@ template <typename TableType>
 class OrderedHashTableRef : public gc::BufferableRef
 {
     TableType* table;
     Value key;
 
   public:
     explicit OrderedHashTableRef(TableType* t, const Value& k) : table(t), key(k) {}
 
-    void mark(JSTracer* trc) {
+    void trace(JSTracer* trc) override {
         MOZ_ASSERT(UnbarrieredHashPolicy::hash(key) ==
                    HashableValue::Hasher::hash(*reinterpret_cast<HashableValue*>(&key)));
         Value prior = key;
         TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
         table->rekeyOneEntry(prior, key);
     }
 };
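
Note: this hunk is representative of the mechanical half of the patch: every
gc::BufferableRef subclass renames its mark(JSTracer*) override to
trace(JSTracer*) and marks it 'override'. As a hedged sketch of the same
pattern (ExampleMap and its rekey helper are hypothetical, not SpiderMonkey
API), a post-barrier ref for a table keyed on an object pointer looks like:

    class ExampleKeyRef : public gc::BufferableRef
    {
        ExampleMap* map;   // hypothetical map keyed on JSObject*
        JSObject* key;

      public:
        ExampleKeyRef(ExampleMap* m, JSObject* k) : map(m), key(k) {}

        void trace(JSTracer* trc) override {
            JSObject* prior = key;
            // Updates |key| in place if the minor GC moved the object.
            TraceManuallyBarrieredEdge(trc, &key, "example map key");
            if (key != prior)
                map->rekey(prior, key);   // hypothetical rekey helper
        }
    };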
 
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1762,28 +1762,28 @@ template <>
 void
 DoTenuring<jsid>(TenuringTracer& mover, jsid* idp)
 {
     MOZ_ASSERT_IF(JSID_IS_GCTHING(*idp), !IsInsideNursery(JSID_TO_GCTHING(*idp).asCell()));
 }
 
 template <typename T>
 void
-StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer* owner, TenuringTracer& mover)
+js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(StoreBuffer* owner, TenuringTracer& mover)
 {
     mozilla::ReentrancyGuard g(*owner);
     MOZ_ASSERT(owner->isEnabled());
     MOZ_ASSERT(stores_.initialized());
     sinkStores(owner);
     for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
-        r.front().mark(mover);
+        r.front().trace(mover);
 }
 
 void
-StoreBuffer::SlotsEdge::mark(TenuringTracer& mover) const
+js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const
 {
     NativeObject* obj = object();
 
     // Beware JSObject::swap exchanging a native object for a non-native one.
     if (!obj->isNative())
         return;
 
     if (IsInsideNursery(obj))
@@ -1799,17 +1799,17 @@ StoreBuffer::SlotsEdge::mark(TenuringTra
         int32_t start = Min(uint32_t(start_), obj->slotSpan());
         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
         MOZ_ASSERT(end >= start);
         TraceObjectSlots(&mover, obj, start, end - start);
     }
 }
 
 void
-StoreBuffer::WholeCellEdges::mark(TenuringTracer& mover) const
+js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
 {
     MOZ_ASSERT(edge->isTenured());
     JSGCTraceKind kind = GetGCThingTraceKind(edge);
     if (kind <= JSTRACE_OBJECT) {
         JSObject* object = static_cast<JSObject*>(edge);
 
         // FIXME: bug 1161664 -- call the inline path below, now that it is accessible.
         object->traceChildren(&mover);
@@ -1827,73 +1827,65 @@ StoreBuffer::WholeCellEdges::mark(Tenuri
 
         return;
     }
     MOZ_ASSERT(kind == JSTRACE_JITCODE);
     static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
 }
 
 void
-StoreBuffer::CellPtrEdge::mark(TenuringTracer& mover) const
+js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
 {
     if (!*edge)
         return;
 
     MOZ_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT);
     DoTenuring(mover, reinterpret_cast<JSObject**>(edge));
 }
 
 void
-StoreBuffer::ValueEdge::mark(TenuringTracer& mover) const
+js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const
 {
     if (deref())
         DoTenuring(mover, edge);
 }
 
 /* Insert the given relocation entry into the list of things to visit. */
 void
-TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
+js::TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
     *tail = entry;
     tail = &entry->next_;
     *tail = nullptr;
 }
 
 JSObject*
-TenuringTracer::moveToTenured(JSObject* obj) {
-    return (JSObject*)nursery_.moveToTenured(*this, obj);
-}
-
-void*
-js::Nursery::moveToTenured(TenuringTracer& mover, JSObject* src)
+js::TenuringTracer::moveToTenured(JSObject* src)
 {
-    AllocKind dstKind = src->allocKindForTenure(*this);
+    MOZ_ASSERT(IsInsideNursery(src));
+
+    AllocKind dstKind = src->allocKindForTenure(nursery());
     Zone* zone = src->zone();
-    JSObject* dst = reinterpret_cast<JSObject*>(allocateFromTenured(zone, dstKind));
-    if (!dst)
-        CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
+    TenuredCell* t = zone->arenas.allocateFromFreeList(dstKind, Arena::thingSize(dstKind));
+    if (!t) {
+        zone->arenas.checkEmptyFreeList(dstKind);
+        AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
+        t = zone->arenas.allocateFromArena(zone, dstKind, maybeStartBackgroundAllocation);
+        if (!t)
+            CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
+    }
+    JSObject* dst = reinterpret_cast<JSObject*>(t);
 
-    mover.tenuredSize += moveObjectToTenured(mover, dst, src, dstKind);
+    tenuredSize += moveObjectToTenured(dst, src, dstKind);
 
     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
     overlay->forwardTo(dst);
-    mover.insertIntoFixupList(overlay);
+    insertIntoFixupList(overlay);
 
     TracePromoteToTenured(src, dst);
-    return static_cast<void*>(dst);
-}
-
-MOZ_ALWAYS_INLINE TenuredCell*
-js::Nursery::allocateFromTenured(Zone* zone, AllocKind thingKind)
-{
-    TenuredCell* t = zone->arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
-    if (t)
-        return t;
-    zone->arenas.checkEmptyFreeList(thingKind);
-    AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
-    return zone->arenas.allocateFromArena(zone, thingKind, maybeStartBackgroundAllocation);
+    return dst;
 }
 
 // Structure for counting how many times objects in a particular group have
 // been tenured during a minor collection.
 struct TenureCount
 {
     ObjectGroup* group;
     int count;
@@ -1913,136 +1905,131 @@ struct Nursery::TenureCountCache
     }
 };
 
 void
 js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
 {
     for (RelocationOverlay* p = mover.head; p; p = p->next()) {
         JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
-        traceObject(mover, obj);
+        mover.traceObject(obj);
 
         TenureCount& entry = tenureCounts.findEntry(obj->group());
         if (entry.group == obj->group()) {
             entry.count++;
         } else if (!entry.group) {
             entry.group = obj->group();
             entry.count = 1;
         }
     }
 }
 
+// Visit all object children of the object and trace them.
 MOZ_ALWAYS_INLINE void
-js::Nursery::traceObject(TenuringTracer& mover, JSObject* obj)
+js::TenuringTracer::traceObject(JSObject* obj)
 {
     const Class* clasp = obj->getClass();
     if (clasp->trace) {
         if (clasp->trace == InlineTypedObject::obj_trace) {
             TypeDescr* descr = &obj->as<InlineTypedObject>().typeDescr();
-            if (descr->hasTraceList()) {
-                markTraceList(mover, descr->traceList(),
-                              obj->as<InlineTypedObject>().inlineTypedMem());
-            }
+            if (descr->hasTraceList())
+                markTraceList(descr->traceList(), obj->as<InlineTypedObject>().inlineTypedMem());
             return;
         }
         if (clasp == &UnboxedPlainObject::class_) {
             JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
             if (*pexpando)
-                markObject(mover, pexpando);
+                markObject(pexpando);
             const UnboxedLayout& layout = obj->as<UnboxedPlainObject>().layoutDontCheckGeneration();
-            if (layout.traceList()) {
-                markTraceList(mover, layout.traceList(),
-                              obj->as<UnboxedPlainObject>().data());
-            }
+            if (layout.traceList())
+                markTraceList(layout.traceList(), obj->as<UnboxedPlainObject>().data());
             return;
         }
-        clasp->trace(&mover, obj);
+        clasp->trace(this, obj);
     }
 
     MOZ_ASSERT(obj->isNative() == clasp->isNative());
     if (!clasp->isNative())
         return;
     NativeObject* nobj = &obj->as<NativeObject>();
 
     // Note: the contents of copy-on-write elements pointers are filled in
     // during parsing and cannot contain nursery pointers.
     if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite())
-        markSlots(mover, nobj->getDenseElements(), nobj->getDenseInitializedLength());
+        markSlots(nobj->getDenseElements(), nobj->getDenseInitializedLength());
 
     HeapSlot* fixedStart;
     HeapSlot* fixedEnd;
     HeapSlot* dynStart;
     HeapSlot* dynEnd;
     nobj->getSlotRange(0, nobj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
-    markSlots(mover, fixedStart, fixedEnd);
-    markSlots(mover, dynStart, dynEnd);
+    markSlots(fixedStart, fixedEnd);
+    markSlots(dynStart, dynEnd);
 }
 
 MOZ_ALWAYS_INLINE void
-js::Nursery::markSlots(TenuringTracer& mover, HeapSlot* vp, uint32_t nslots)
+js::TenuringTracer::markSlots(HeapSlot* vp, uint32_t nslots)
 {
-    markSlots(mover, vp, vp + nslots);
+    markSlots(vp, vp + nslots);
 }
 
 MOZ_ALWAYS_INLINE void
-js::Nursery::markSlots(TenuringTracer& mover, HeapSlot* vp, HeapSlot* end)
+js::TenuringTracer::markSlots(HeapSlot* vp, HeapSlot* end)
 {
     for (; vp != end; ++vp)
-        markSlot(mover, vp);
+        markSlot(vp);
 }
 
 MOZ_ALWAYS_INLINE void
-js::Nursery::markSlot(TenuringTracer& mover, HeapSlot* slotp)
+js::TenuringTracer::markSlot(HeapSlot* slotp)
 {
     if (!slotp->isObject())
         return;
 
     JSObject* obj = &slotp->toObject();
-    if (markObject(mover, &obj))
+    if (markObject(&obj))
         slotp->unsafeGet()->setObject(*obj);
 }
 
 MOZ_ALWAYS_INLINE void
-js::Nursery::markTraceList(TenuringTracer& mover, const int32_t* traceList, uint8_t* memory)
+js::TenuringTracer::markTraceList(const int32_t* traceList, uint8_t* memory)
 {
     while (*traceList != -1) {
         // Strings are not in the nursery and do not need tracing.
         traceList++;
     }
     traceList++;
     while (*traceList != -1) {
         JSObject** pobj = reinterpret_cast<JSObject **>(memory + *traceList);
-        markObject(mover, pobj);
+        markObject(pobj);
         traceList++;
     }
     traceList++;
     while (*traceList != -1) {
         HeapSlot* pslot = reinterpret_cast<HeapSlot *>(memory + *traceList);
-        markSlot(mover, pslot);
+        markSlot(pslot);
         traceList++;
     }
 }
 
 MOZ_ALWAYS_INLINE bool
-js::Nursery::markObject(TenuringTracer& mover, JSObject** pobj)
+js::TenuringTracer::markObject(JSObject** pobj)
 {
     if (!IsInsideNursery(*pobj))
         return false;
 
-    if (getForwardedPointer(pobj))
+    if (nursery().getForwardedPointer(pobj))
         return true;
 
-    *pobj = static_cast<JSObject*>(moveToTenured(mover, *pobj));
+    *pobj = moveToTenured(*pobj);
     return true;
 }
 
-
 MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveObjectToTenured(TenuringTracer& mover,
-                                 JSObject* dst, JSObject* src, AllocKind dstKind)
+js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind dstKind)
 {
     size_t srcSize = Arena::thingSize(dstKind);
     size_t tenuredSize = srcSize;
 
     /*
      * Arrays do not necessarily have the same AllocKind between src and dst.
      * We deal with this by copying elements manually, possibly re-inlining
      * them if there is adequate room inline in dst.
@@ -2065,84 +2052,84 @@ js::Nursery::moveObjectToTenured(Tenurin
         // happen for dictionaries, which are native objects.
         if (&nsrc->shape_ == ndst->shape_->listp) {
             MOZ_ASSERT(nsrc->shape_->inDictionary());
             ndst->shape_->listp = &ndst->shape_;
         }
     }
 
     if (src->is<InlineTypedObject>()) {
-        InlineTypedObject::objectMovedDuringMinorGC(&mover, dst, src);
+        InlineTypedObject::objectMovedDuringMinorGC(this, dst, src);
     } else if (src->is<UnboxedArrayObject>()) {
-        tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(&mover, dst, src, dstKind);
+        tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
     } else {
         // Objects with JSCLASS_SKIP_NURSERY_FINALIZE need to be handled above
         // to ensure any additional nursery buffers they hold are moved.
         MOZ_ASSERT(!(src->getClass()->flags & JSCLASS_SKIP_NURSERY_FINALIZE));
     }
 
     return tenuredSize;
 }
 
 MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
+js::TenuringTracer::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
 {
     /* Fixed slots have already been copied over. */
     if (!src->hasDynamicSlots())
         return 0;
 
-    if (!isInside(src->slots_)) {
-        removeMallocedBuffer(src->slots_);
+    if (!nursery().isInside(src->slots_)) {
+        nursery().removeMallocedBuffer(src->slots_);
         return 0;
     }
 
     Zone* zone = src->zone();
     size_t count = src->numDynamicSlots();
     dst->slots_ = zone->pod_malloc<HeapSlot>(count);
     if (!dst->slots_)
         CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
     PodCopy(dst->slots_, src->slots_, count);
-    setSlotsForwardingPointer(src->slots_, dst->slots_, count);
+    nursery().setSlotsForwardingPointer(src->slots_, dst->slots_, count);
     return count * sizeof(HeapSlot);
 }
 
 MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
+js::TenuringTracer::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
 {
     if (src->hasEmptyElements() || src->denseElementsAreCopyOnWrite())
         return 0;
 
     Zone* zone = src->zone();
     ObjectElements* srcHeader = src->getElementsHeader();
     ObjectElements* dstHeader;
 
     /* TODO Bug 874151: Prefer to put element data inline if we have space. */
-    if (!isInside(srcHeader)) {
+    if (!nursery().isInside(srcHeader)) {
         MOZ_ASSERT(src->elements_ == dst->elements_);
-        removeMallocedBuffer(srcHeader);
+        nursery().removeMallocedBuffer(srcHeader);
         return 0;
     }
 
     size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
 
     /* Unlike other objects, Arrays can have fixed elements. */
     if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
         dst->as<ArrayObject>().setFixedElements();
         dstHeader = dst->as<ArrayObject>().getElementsHeader();
         js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
-        setElementsForwardingPointer(srcHeader, dstHeader, nslots);
+        nursery().setElementsForwardingPointer(srcHeader, dstHeader, nslots);
         return nslots * sizeof(HeapSlot);
     }
 
     MOZ_ASSERT(nslots >= 2);
     dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
     if (!dstHeader)
         CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
     js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
-    setElementsForwardingPointer(srcHeader, dstHeader, nslots);
+    nursery().setElementsForwardingPointer(srcHeader, dstHeader, nslots);
     dst->elements_ = dstHeader->elements();
     return nslots * sizeof(HeapSlot);
 }
 
 
 /*** IsMarked / IsAboutToBeFinalized **************************************************************/
 
 template <typename T>
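
Note: moveToTenured and collectToFixedPoint above form a classic copying-
collector work list: each promoted object leaves a forwarding pointer behind
in its dead nursery cell (the RelocationOverlay) and is appended to a linked
list, which is then scanned until it stops growing, since tracing one
promoted object can promote more. A minimal self-contained toy model of that
fixed point (plain C++, not SpiderMonkey types):

    #include <cstdio>
    #include <vector>

    struct Obj {
        std::vector<Obj*> children;
        bool inNursery = true;
        Obj* forward = nullptr;        // forwarding pointer, set on promotion
    };

    static std::vector<Obj*> worklist; // plays the role of the overlay list

    static Obj* moveToTenured(Obj* src) {
        Obj* dst = new Obj(*src);      // copy the payload out of the nursery
        dst->inNursery = false;
        src->forward = dst;            // leave a forwarding pointer behind
        worklist.push_back(dst);       // the copy still needs to be scanned
        return dst;
    }

    static void traceObject(Obj* obj) {
        for (Obj*& edge : obj->children) {
            if (!edge->inNursery)
                continue;              // already tenured
            edge = edge->forward ? edge->forward : moveToTenured(edge);
        }
    }

    static void collectToFixedPoint() {
        // Index-based: traceObject may append to the worklist as it runs.
        for (size_t i = 0; i < worklist.size(); i++)
            traceObject(worklist[i]);
    }

    int main() {
        Obj a, b;
        a.children.push_back(&b);
        moveToTenured(&a);             // promote a root
        collectToFixedPoint();         // scanning a's copy promotes b too
        std::printf("promoted %zu objects\n", worklist.size());
        return 0;                      // toy model; the copies are leaked
    }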
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -374,17 +374,17 @@ template <typename Map, typename Key>
 class HashKeyRef : public BufferableRef
 {
     Map* map;
     Key key;
 
   public:
     HashKeyRef(Map* m, const Key& k) : map(m), key(k) {}
 
-    void mark(JSTracer* trc) {
+    void trace(JSTracer* trc) override {
         Key prior = key;
         typename Map::Ptr p = map->lookup(key);
         if (!p)
             return;
         TraceManuallyBarrieredEdge(trc, &key, "HashKeyRef");
         map->rekeyIfMoved(prior, key);
     }
 };
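
Note: HashKeyRef and the other BufferableRef subclasses touched here only
cover the trace side; entries reach the generic buffer via
StoreBuffer::putGeneric. A hedged sketch of the registration side (the
helper is illustrative and assumes Key is a pointer to a GC cell; putGeneric
is the real entry point):

    template <typename Map, typename Key>
    static void
    PostWriteBarrierMapKey(JSRuntime* rt, Map* map, const Key& key)
    {
        // Only keys still in the nursery can move during a minor GC.
        if (gc::IsInsideNursery(key))
            rt->gc.storeBuffer.putGeneric(gc::HashKeyRef<Map, Key>(map, key));
    }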
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -433,43 +433,43 @@ js::Nursery::collect(JSRuntime* rt, JS::
     AutoStopVerifyingBarriers av(rt, false);
     AutoDisableProxyCheck disableStrictProxyChecking(rt);
     DebugOnly<AutoEnterOOMUnsafeRegion> oomUnsafeRegion;
 
     // Move objects pointed to by roots from the nursery to the major heap.
     TenuringTracer mover(rt, this);
 
     // Mark the store buffer. This must happen first.
-    TIME_START(markValues);
-    sb.markValues(mover);
-    TIME_END(markValues);
+    TIME_START(traceValues);
+    sb.traceValues(mover);
+    TIME_END(traceValues);
 
-    TIME_START(markCells);
-    sb.markCells(mover);
-    TIME_END(markCells);
+    TIME_START(traceCells);
+    sb.traceCells(mover);
+    TIME_END(traceCells);
 
-    TIME_START(markSlots);
-    sb.markSlots(mover);
-    TIME_END(markSlots);
+    TIME_START(traceSlots);
+    sb.traceSlots(mover);
+    TIME_END(traceSlots);
 
-    TIME_START(markWholeCells);
-    sb.markWholeCells(mover);
-    TIME_END(markWholeCells);
+    TIME_START(traceWholeCells);
+    sb.traceWholeCells(mover);
+    TIME_END(traceWholeCells);
 
-    TIME_START(markRelocatableValues);
-    sb.markRelocatableValues(mover);
-    TIME_END(markRelocatableValues);
+    TIME_START(traceRelocatableValues);
+    sb.traceRelocatableValues(mover);
+    TIME_END(traceRelocatableValues);
 
-    TIME_START(markRelocatableCells);
-    sb.markRelocatableCells(mover);
-    TIME_END(markRelocatableCells);
+    TIME_START(traceRelocatableCells);
+    sb.traceRelocatableCells(mover);
+    TIME_END(traceRelocatableCells);
 
-    TIME_START(markGenericEntries);
-    sb.markGenericEntries(&mover);
-    TIME_END(markGenericEntries);
+    TIME_START(traceGenericEntries);
+    sb.traceGenericEntries(&mover);
+    TIME_END(traceGenericEntries);
 
     TIME_START(markRuntime);
     rt->gc.markRuntime(&mover);
     TIME_END(markRuntime);
 
     TIME_START(markDebugger);
     {
         gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_MARK_ROOTS);
@@ -569,23 +569,23 @@ js::Nursery::collect(JSRuntime* rt, JS::
 
 #define FMT " %6" PRIu64
         fprintf(stderr,
                 "MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
                 js::gcstats::ExplainReason(reason),
                 promotionRate * 100,
                 numActiveChunks_,
                 totalTime,
-                TIME_TOTAL(markValues),
-                TIME_TOTAL(markCells),
-                TIME_TOTAL(markSlots),
-                TIME_TOTAL(markWholeCells),
-                TIME_TOTAL(markRelocatableValues),
-                TIME_TOTAL(markRelocatableCells),
-                TIME_TOTAL(markGenericEntries),
+                TIME_TOTAL(traceValues),
+                TIME_TOTAL(traceCells),
+                TIME_TOTAL(traceSlots),
+                TIME_TOTAL(traceWholeCells),
+                TIME_TOTAL(traceRelocatableValues),
+                TIME_TOTAL(traceRelocatableCells),
+                TIME_TOTAL(traceGenericEntries),
                 TIME_TOTAL(checkHashTables),
                 TIME_TOTAL(markRuntime),
                 TIME_TOTAL(markDebugger),
                 TIME_TOTAL(clearNewObjectCache),
                 TIME_TOTAL(collectToFP),
                 TIME_TOTAL(sweepArrayBufferViewList),
                 TIME_TOTAL(updateJitActivations),
                 TIME_TOTAL(freeMallocedBuffers),
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -62,19 +62,32 @@ class TenuringTracer : public JSTracer
     // Save and restore all of the runtime state we use during MinorGC.
     bool savedRuntimeNeedBarrier;
 
     TenuringTracer(JSRuntime* rt, Nursery* nursery);
     ~TenuringTracer();
 
   public:
     const Nursery& nursery() const { return nursery_; }
-    JSObject* moveToTenured(JSObject* thing);
+    Nursery& nursery() { return nursery_; }
+    JSObject* moveToTenured(JSObject* src);
 
     void insertIntoFixupList(gc::RelocationOverlay* entry);
+
+  private:
+    size_t moveObjectToTenured(JSObject* dst, JSObject* src, gc::AllocKind dstKind);
+    size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
+    size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
+
+    MOZ_ALWAYS_INLINE void traceObject(JSObject* src);
+    MOZ_ALWAYS_INLINE void markSlots(HeapSlot* vp, uint32_t nslots);
+    MOZ_ALWAYS_INLINE void markSlots(HeapSlot* vp, HeapSlot* end);
+    MOZ_ALWAYS_INLINE void markSlot(HeapSlot* slotp);
+    MOZ_ALWAYS_INLINE void markTraceList(const int32_t* traceList, uint8_t* memory);
+    MOZ_ALWAYS_INLINE bool markObject(JSObject** pobj);
 };
 
 class Nursery
 {
   public:
     static const size_t Alignment = gc::ChunkSize;
     static const size_t ChunkShift = gc::ChunkShift;
 
@@ -317,28 +330,16 @@ class Nursery
     /* Common internal allocator function. */
     void* allocate(size_t size);
 
     /*
      * Move the object at |src| in the Nursery to an already-allocated cell
      * |dst| in Tenured.
      */
     void collectToFixedPoint(TenuringTracer& trc, TenureCountCache& tenureCounts);
-    MOZ_ALWAYS_INLINE void traceObject(TenuringTracer& trc, JSObject* src);
-    MOZ_ALWAYS_INLINE void markSlots(TenuringTracer& trc, HeapSlot* vp, uint32_t nslots);
-    MOZ_ALWAYS_INLINE void markSlots(TenuringTracer& trc, HeapSlot* vp, HeapSlot* end);
-    MOZ_ALWAYS_INLINE void markSlot(TenuringTracer& trc, HeapSlot* slotp);
-    MOZ_ALWAYS_INLINE void markTraceList(TenuringTracer& trc,
-                                         const int32_t* traceList, uint8_t* memory);
-    MOZ_ALWAYS_INLINE bool markObject(TenuringTracer& trc, JSObject** pobj);
-    void* moveToTenured(TenuringTracer& trc, JSObject* src);
-    size_t moveObjectToTenured(TenuringTracer& trc, JSObject* dst, JSObject* src,
-                               gc::AllocKind dstKind);
-    size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
-    size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
 
     /* Handle relocation of slots/elements pointers stored in Ion frames. */
     void setForwardingPointer(void* oldData, void* newData, bool direct);
 
     void setSlotsForwardingPointer(HeapSlot* oldSlots, HeapSlot* newSlots, uint32_t nslots);
     void setElementsForwardingPointer(ObjectElements* oldHeader, ObjectElements* newHeader,
                                       uint32_t nelems);
 
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -13,28 +13,28 @@
 #include "vm/Runtime.h"
 
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 void
-StoreBuffer::GenericBuffer::mark(StoreBuffer* owner, JSTracer* trc)
+StoreBuffer::GenericBuffer::trace(StoreBuffer* owner, JSTracer* trc)
 {
     mozilla::ReentrancyGuard g(*owner);
     MOZ_ASSERT(owner->isEnabled());
     if (!storage_)
         return;
 
     for (LifoAlloc::Enum e(*storage_); !e.empty();) {
         unsigned size = *e.get<unsigned>();
         e.popFront<unsigned>();
         BufferableRef* edge = e.get<BufferableRef>(size);
-        edge->mark(trc);
+        edge->trace(trc);
         e.popFront(size);
     }
 }
 
 bool
 StoreBuffer::enable()
 {
     if (enabled_)
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -24,17 +24,17 @@ namespace gc {
  * BufferableRef represents an abstract reference for use in the generational
  * GC's remembered set. Entries in the store buffer that cannot be represented
  * with the simple pointer-to-a-pointer scheme must derive from this class and
  * use the generic store buffer interface.
  */
 class BufferableRef
 {
   public:
-    virtual void mark(JSTracer* trc) = 0;
+    virtual void trace(JSTracer* trc) = 0;
     bool maybeInRememberedSet(const Nursery&) const { return true; }
 };
 
 typedef HashSet<void*, PointerHasher<void*, 3>, SystemAllocPolicy> EdgeSet;
 
 /* The size of a single block of store buffer storage space. */
 static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */
 
@@ -116,18 +116,18 @@ class StoreBuffer
         }
 
         /* Remove an item from the store buffer. */
         void unput(StoreBuffer* owner, const T& v) {
             sinkStores(owner);
             stores_.remove(v);
         }
 
-        /* Mark the source of all edges in the store buffer. */
-        void mark(StoreBuffer* owner, TenuringTracer& mover);
+        /* Trace the source of all edges in the store buffer. */
+        void trace(StoreBuffer* owner, TenuringTracer& mover);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
             return stores_.sizeOfExcludingThis(mallocSizeOf);
         }
 
       private:
         MonoTypeBuffer& operator=(const MonoTypeBuffer& other) = delete;
     };
@@ -152,18 +152,18 @@ class StoreBuffer
 
             storage_->used() ? storage_->releaseAll() : storage_->freeAll();
         }
 
         bool isAboutToOverflow() const {
             return !storage_->isEmpty() && storage_->availableInCurrentChunk() < LowAvailableThreshold;
         }
 
-        /* Mark all generic edges. */
-        void mark(StoreBuffer* owner, JSTracer* trc);
+        /* Trace all generic edges. */
+        void trace(StoreBuffer* owner, JSTracer* trc);
 
         template <typename T>
         void put(StoreBuffer* owner, const T& t) {
             MOZ_ASSERT(storage_);
 
             /* Ensure T is derived from BufferableRef. */
             (void)static_cast<const BufferableRef*>(&t);
 
@@ -206,17 +206,17 @@ class StoreBuffer
         bool operator==(const CellPtrEdge& other) const { return edge == other.edge; }
         bool operator!=(const CellPtrEdge& other) const { return edge != other.edge; }
 
         bool maybeInRememberedSet(const Nursery& nursery) const {
             MOZ_ASSERT(IsInsideNursery(*edge));
             return !nursery.isInside(edge);
         }
 
-        void mark(TenuringTracer& mover) const;
+        void trace(TenuringTracer& mover) const;
 
         CellPtrEdge tagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) | 1)); }
         CellPtrEdge untagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
 
         typedef PointerEdgeHasher<CellPtrEdge> Hasher;
     };
 
@@ -231,17 +231,17 @@ class StoreBuffer
 
         Cell* deref() const { return edge->isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr; }
 
         bool maybeInRememberedSet(const Nursery& nursery) const {
             MOZ_ASSERT(IsInsideNursery(deref()));
             return !nursery.isInside(edge);
         }
 
-        void mark(TenuringTracer& mover) const;
+        void trace(TenuringTracer& mover) const;
 
         ValueEdge tagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) | 1)); }
         ValueEdge untagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
 
         typedef PointerEdgeHasher<ValueEdge> Hasher;
     };
 
@@ -277,17 +277,17 @@ class StoreBuffer
         bool operator!=(const SlotsEdge& other) const {
             return !(*this == other);
         }
 
         bool maybeInRememberedSet(const Nursery& n) const {
             return !IsInsideNursery(reinterpret_cast<Cell*>(object()));
         }
 
-        void mark(TenuringTracer& mover) const;
+        void trace(TenuringTracer& mover) const;
 
         typedef struct {
             typedef SlotsEdge Lookup;
             static HashNumber hash(const Lookup& l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
             static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; }
         } Hasher;
     };
 
@@ -303,34 +303,34 @@ class StoreBuffer
         bool operator==(const WholeCellEdges& other) const { return edge == other.edge; }
         bool operator!=(const WholeCellEdges& other) const { return edge != other.edge; }
 
         bool maybeInRememberedSet(const Nursery&) const { return true; }
 
         static bool supportsDeduplication() { return true; }
         void* deduplicationKey() const { return (void*)edge; }
 
-        void mark(TenuringTracer& mover) const;
+        void trace(TenuringTracer& mover) const;
 
         typedef PointerEdgeHasher<WholeCellEdges> Hasher;
     };
 
     template <typename Key>
     struct CallbackRef : public BufferableRef
     {
-        typedef void (*MarkCallback)(JSTracer* trc, Key* key, void* data);
+        typedef void (*TraceCallback)(JSTracer* trc, Key* key, void* data);
 
-        CallbackRef(MarkCallback cb, Key* k, void* d) : callback(cb), key(k), data(d) {}
+        CallbackRef(TraceCallback cb, Key* k, void* d) : callback(cb), key(k), data(d) {}
 
-        virtual void mark(JSTracer* trc) {
+        virtual void trace(JSTracer* trc) {
             callback(trc, key, data);
         }
 
       private:
-        MarkCallback callback;
+        TraceCallback callback;
         Key* key;
         void* data;
     };
 
     bool isOkayToUseBuffer() const {
         /*
          * Disabled store buffers may not have a valid state; e.g. when stored
          * inline in the ChunkTrailer.
@@ -438,24 +438,24 @@ class StoreBuffer
     void putGeneric(const T& t) { putFromAnyThread(bufferGeneric, t);}
 
     /* Insert or update a callback entry. */
     template <typename Key>
     void putCallback(void (*callback)(JSTracer* trc, Key* key, void* data), Key* key, void* data) {
         putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
     }
 
-    /* Methods to mark the source of all edges in the store buffer. */
-    void markValues(TenuringTracer& mover)            { bufferVal.mark(this, mover); }
-    void markCells(TenuringTracer& mover)             { bufferCell.mark(this, mover); }
-    void markSlots(TenuringTracer& mover)             { bufferSlot.mark(this, mover); }
-    void markWholeCells(TenuringTracer& mover)        { bufferWholeCell.mark(this, mover); }
-    void markRelocatableValues(TenuringTracer& mover) { bufferRelocVal.mark(this, mover); }
-    void markRelocatableCells(TenuringTracer& mover)  { bufferRelocCell.mark(this, mover); }
-    void markGenericEntries(JSTracer *trc)            { bufferGeneric.mark(this, trc); }
+    /* Methods to trace the source of all edges in the store buffer. */
+    void traceValues(TenuringTracer& mover)            { bufferVal.trace(this, mover); }
+    void traceCells(TenuringTracer& mover)             { bufferCell.trace(this, mover); }
+    void traceSlots(TenuringTracer& mover)             { bufferSlot.trace(this, mover); }
+    void traceWholeCells(TenuringTracer& mover)        { bufferWholeCell.trace(this, mover); }
+    void traceRelocatableValues(TenuringTracer& mover) { bufferRelocVal.trace(this, mover); }
+    void traceRelocatableCells(TenuringTracer& mover)  { bufferRelocCell.trace(this, mover); }
+    void traceGenericEntries(JSTracer *trc)            { bufferGeneric.trace(this, trc); }
 
     /* For use by our owned buffers and for testing. */
     void setAboutToOverflow();
 
     /* For jit access to the raw buffer. */
     void oolSinkStoresForWholeCellBuffer() { bufferWholeCell.sinkStores(this); }
     void* addressOfWholeCellBufferPointer() const { return (void*)&bufferWholeCell.insert_; }
     void* addressOfWholeCellBufferEnd() const {
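
Note: CallbackRef adapts a plain function pointer to the BufferableRef
interface, so callers that cannot conveniently define a subclass can still
use the generic buffer through putCallback. A hedged usage sketch (MyThing
and its trace function are hypothetical; putCallback is as declared above):

    struct MyThing { JSObject* obj; };

    static void
    TraceMyThing(JSTracer* trc, MyThing* thing, void* data)
    {
        // Invoked during the minor GC; repoints thing->obj if it moved.
        TraceManuallyBarrieredEdge(trc, &thing->obj, "MyThing object");
    }

    static void
    NoteMyThingEdge(gc::StoreBuffer& sb, MyThing* thing)
    {
        sb.putCallback(TraceMyThing, thing, /* data = */ nullptr);
    }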
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1717,21 +1717,21 @@ void
 MIRGenerator::traceNurseryObjects(JSTracer* trc)
 {
     TraceRootRange(trc, nurseryObjects_.length(), nurseryObjects_.begin(), "ion-nursery-objects");
 }
 
 class MarkOffThreadNurseryObjects : public gc::BufferableRef
 {
   public:
-    void mark(JSTracer* trc);
+    void trace(JSTracer* trc) override;
 };
 
 void
-MarkOffThreadNurseryObjects::mark(JSTracer* trc)
+MarkOffThreadNurseryObjects::trace(JSTracer* trc)
 {
     JSRuntime* rt = trc->runtime();
 
     if (trc->runtime()->isHeapMinorCollecting()) {
         // Only reset hasIonNurseryObjects if we're doing an actual minor GC.
         MOZ_ASSERT(rt->jitRuntime()->hasIonNurseryObjects());
         rt->jitRuntime()->setHasIonNurseryObjects(false);
     }
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -208,17 +208,17 @@ class WrapperMapRef : public BufferableR
 {
     WrapperMap* map;
     CrossCompartmentKey key;
 
   public:
     WrapperMapRef(WrapperMap* map, const CrossCompartmentKey& key)
       : map(map), key(key) {}
 
-    void mark(JSTracer* trc) {
+    void trace(JSTracer* trc) override {
         CrossCompartmentKey prior = key;
         if (key.debugger)
             TraceManuallyBarrieredEdge(trc, &key.debugger, "CCW debugger");
         if (key.kind == CrossCompartmentKey::ObjectWrapper ||
             key.kind == CrossCompartmentKey::DebuggerObject ||
             key.kind == CrossCompartmentKey::DebuggerEnvironment ||
             key.kind == CrossCompartmentKey::DebuggerSource)
         {
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -909,16 +909,17 @@ class ArenaLists
     enum ArenaAllocMode { HasFreeThings = true, IsEmpty = false };
     template <ArenaAllocMode hasFreeThings>
     TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind);
 
     inline void normalizeBackgroundFinalizeState(AllocKind thingKind);
 
     friend class GCRuntime;
     friend class js::Nursery;
+    friend class js::TenuringTracer;
 };
 
 /* The number of GC cycles an empty chunk can survive before being released. */
 const size_t MAX_EMPTY_CHUNK_AGE = 4;
 
 } /* namespace gc */
 
 extern bool
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -21,18 +21,18 @@
 #include "gc/Marking.h"
 #include "js/Value.h"
 #include "vm/Shape.h"
 #include "vm/String.h"
 #include "vm/TypeInference.h"
 
 namespace js {
 
-class Nursery;
 class Shape;
+class TenuringTracer;
 
 /*
  * To really poison a set of values, using 'magic' or 'undefined' isn't good
  * enough since often these will just be ignored by buggy code (see bug 629974)
  * in debug builds and crash in release builds. Instead, we use a safe-for-crash
  * pointer.
  */
 static MOZ_ALWAYS_INLINE void
@@ -175,19 +175,19 @@ class ObjectElements
         // before being copied: when setting the CONVERT_DOUBLE_ELEMENTS flag
         // the shared elements may change (from ints to doubles) without
         // making a copy first.
         COPY_ON_WRITE               = 0x4
     };
 
   private:
     friend class ::JSObject;
+    friend class ArrayObject;
     friend class NativeObject;
-    friend class ArrayObject;
-    friend class Nursery;
+    friend class TenuringTracer;
 
     friend bool js::SetIntegrityLevel(JSContext* cx, HandleObject obj, IntegrityLevel level);
 
     friend bool
     ArraySetLength(JSContext* cx, Handle<ArrayObject*> obj, HandleId id,
                    unsigned attrs, HandleValue value, ObjectOpResult& result);
 
     /* See Flags enum above. */
@@ -452,17 +452,17 @@ class NativeObject : public JSObject
      * Update the slot span directly for a dictionary object, and allocate
      * slots to cover the new span if necessary.
      */
     bool setSlotSpan(ExclusiveContext* cx, uint32_t span);
 
     bool toDictionaryMode(ExclusiveContext* cx);
 
   private:
-    friend class Nursery;
+    friend class TenuringTracer;
 
     /*
      * Get internal pointers to the range of values starting at start and
      * running for length.
      */
     void getSlotRangeUnchecked(uint32_t start, uint32_t length,
                                HeapSlot** fixedStart, HeapSlot** fixedEnd,
                                HeapSlot** slotsStart, HeapSlot** slotsEnd)
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -409,17 +409,17 @@ class ObjectGroupCompartment::NewTableRe
     JSObject* proto;
     JSObject* associated;
 
   public:
     NewTableRef(NewTable* table, const Class* clasp, JSObject* proto, JSObject* associated)
         : table(table), clasp(clasp), proto(proto), associated(associated)
     {}
 
-    void mark(JSTracer* trc) {
+    void trace(JSTracer* trc) override {
         JSObject* prior = proto;
         TraceManuallyBarrieredEdge(trc, &proto, "newObjectGroups set prototype");
         if (prior == proto)
             return;
 
         NewTable::Ptr p = table->lookup(NewTable::Lookup(clasp, TaggedProto(prior),
                                                          TaggedProto(proto),
                                                          associated));
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1398,17 +1398,17 @@ class InitialShapeSetRef : public Buffer
                        uint32_t objectFlags)
         : set(set),
           clasp(clasp),
           proto(proto),
           nfixed(nfixed),
           objectFlags(objectFlags)
     {}
 
-    void mark(JSTracer* trc) {
+    void trace(JSTracer* trc) override {
         TaggedProto priorProto = proto;
         if (proto.isObject()) {
             TraceManuallyBarrieredEdge(trc, reinterpret_cast<JSObject**>(&proto),
                                        "initialShapes set proto");
         }
         if (proto == priorProto)
             return;
 
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -105,18 +105,18 @@
  * a single BaseShape.
  */
 
 #define JSSLOT_FREE(clasp)  JSCLASS_RESERVED_SLOTS(clasp)
 
 namespace js {
 
 class Bindings;
-class Nursery;
 class StaticBlockObject;
+class TenuringTracer;
 
 typedef JSGetterOp GetterOp;
 typedef JSSetterOp SetterOp;
 typedef JSPropertyDescriptor PropertyDescriptor;
 
 /* Limit on the number of slotful properties in an object. */
 static const uint32_t SHAPE_INVALID_SLOT = JS_BIT(24) - 1;
 static const uint32_t SHAPE_MAXIMUM_SLOT = JS_BIT(24) - 2;
@@ -526,22 +526,22 @@ typedef HashSet<ReadBarrieredUnownedBase
                 SystemAllocPolicy> BaseShapeSet;
 
 
 class Shape : public gc::TenuredCell
 {
     friend class ::JSObject;
     friend class ::JSFunction;
     friend class Bindings;
-    friend class Nursery;
     friend class NativeObject;
     friend class PropertyTree;
     friend class StaticBlockObject;
+    friend class TenuringTracer;
+    friend struct StackBaseShape;
     friend struct StackShape;
-    friend struct StackBaseShape;
 
   protected:
     HeapPtrBaseShape    base_;
     PreBarrieredId      propid_;
 
     enum SlotInfo : uint32_t
     {
         /* Number of fixed slots in objects with this shape. */
@@ -1208,17 +1208,17 @@ Shape::Shape(const StackShape& other, ui
 // objects. It updates the pointers and the shape's entry in the parent's
 // KidsHash table.
 class ShapeGetterSetterRef : public gc::BufferableRef
 {
     AccessorShape* shape_;
 
   public:
     explicit ShapeGetterSetterRef(AccessorShape* shape) : shape_(shape) {}
-    void mark(JSTracer* trc) { shape_->fixupGetterSetterForBarrier(trc); }
+    void trace(JSTracer* trc) override { shape_->fixupGetterSetterForBarrier(trc); }
 };
 
 static inline void
 GetterSetterWriteBarrierPost(AccessorShape* shape)
 {
     MOZ_ASSERT(shape);
     if (shape->hasGetterObject()) {
         gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(shape->getterObject())->storeBuffer();