Bug 1359342 - Pre-mark new allocations black during incremental GC r=sfink
author Jon Coppeard <jcoppeard@mozilla.com>
Tue, 27 Feb 2018 12:14:46 +0000
changeset 458037 07cab6799ff4886551c16489bf17ca2578893738
parent 458036 e8f96cecee43287b6b5e205d6e9b4d885b67d1ef
child 458038 c3d6247ece759ea353bf49815b3688406e6e37bf
push id 8799
push user mtabara@mozilla.com
push date Thu, 01 Mar 2018 16:46:23 +0000
treeherder mozilla-beta@15334014dc67
reviewers sfink
bugs 1359342
milestone 60.0a1
Bug 1359342 - Pre-mark new allocations black during incremental GC r=sfink
js/src/gc/Allocator.cpp
js/src/gc/ArenaList-inl.h
js/src/gc/ArenaList.h
js/src/gc/Barrier.cpp
js/src/gc/Cell.h
js/src/gc/GC.cpp
js/src/gc/GCRuntime.h
js/src/gc/Heap-inl.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/PrivateIterators-inl.h
js/src/gc/Verifier.cpp
js/src/jit/CodeGenerator.cpp
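
The heart of the patch, condensed into a sketch for orientation (the real definitions are in the js/src/gc/Allocator.cpp hunks below; the surrounding GCRuntime plumbing is elided): instead of tagging whole arenas with the allocatedDuringIncremental flag, every free cell in an arena handed to the allocator during marking or sweeping is pre-marked black, so cells allocated from it are born black.

    // Sketch only; see the full hunk in js/src/gc/Allocator.cpp below.
    void GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena)
    {
        if (zone->needsIncrementalBarrier() || zone->isGCSweeping()) {
            for (ArenaFreeCellIter iter(arena); !iter.done(); iter.next())
                iter.getCell()->markBlack();
        }
    }
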
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -12,16 +12,17 @@
 #include "jit/JitCompartment.h"
 #include "threading/CpuCount.h"
 #include "vm/JSContext.h"
 #include "vm/Runtime.h"
 #include "vm/String.h"
 
 #include "gc/ArenaList-inl.h"
 #include "gc/Heap-inl.h"
+#include "gc/PrivateIterators-inl.h"
 #include "vm/JSObject-inl.h"
 
 using namespace js;
 using namespace gc;
 
 template <typename T, AllowGC allowGC /* = CanGC */>
 JSObject*
 js::Allocate(JSContext* cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap,
@@ -327,22 +328,25 @@ GCRuntime::gcIfNeededAtAllocation(JSCont
     return true;
 }
 
 template <typename T>
 /* static */ void
 GCRuntime::checkIncrementalZoneState(JSContext* cx, T* t)
 {
 #ifdef DEBUG
-    if (cx->helperThread())
+    if (cx->helperThread() || !t)
         return;
 
-    Zone* zone = cx->zone();
-    MOZ_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
-                  t->asTenured().arena()->allocatedDuringIncremental);
+    TenuredCell* cell = &t->asTenured();
+    Zone* zone = cell->zone();
+    if (zone->isGCMarking() || zone->isGCSweeping())
+        MOZ_ASSERT(cell->isMarkedBlack());
+    else
+        MOZ_ASSERT(!cell->isMarkedAny());
 #endif
 }
 
 
 // ///////////  Arena -> Thing Allocator  //////////////////////////////////////
 
 void
 GCRuntime::startBackgroundAllocTaskIfIdle()
@@ -447,34 +451,39 @@ ArenaLists::allocateFromArena(JS::Zone* 
     return allocateFromArenaInner(zone, arena, thingKind);
 }
 
 inline TenuredCell*
 ArenaLists::allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind)
 {
     size_t thingSize = Arena::thingSize(kind);
 
-    freeLists(kind) = arena->getFirstFreeSpan();
+    setFreeList(kind, arena->getFirstFreeSpan());
 
     if (MOZ_UNLIKELY(zone->wasGCStarted()))
         zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, arena);
-    TenuredCell* thing = freeLists(kind)->allocate(thingSize);
+    TenuredCell* thing = freeList(kind)->allocate(thingSize);
+
     MOZ_ASSERT(thing); // This allocation is infallible.
     return thing;
 }
 
 void
 GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena)
 {
-    if (zone->needsIncrementalBarrier()) {
-        arena->allocatedDuringIncremental = true;
-        marker.delayMarkingArena(arena);
-    } else if (zone->isGCSweeping()) {
-        arena->setNextAllocDuringSweep(arenasAllocatedDuringSweep);
-        arenasAllocatedDuringSweep = arena;
+    // Ensure that anything allocated during the mark or sweep phases of an
+    // incremental GC will be marked black by pre-marking all free cells in the
+    // arena we are about to allocate from.
+
+    if (zone->needsIncrementalBarrier() || zone->isGCSweeping()) {
+        for (ArenaFreeCellIter iter(arena); !iter.done(); iter.next()) {
+            TenuredCell* cell = iter.getCell();
+            MOZ_ASSERT(!cell->isMarkedAny());
+            cell->markBlack();
+        }
     }
 }
 
 
 // ///////////  Chunk -> Arena Allocator  //////////////////////////////////////
 
 bool
 GCRuntime::wantBackgroundAllocation(const AutoLockGC& lock) const
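
How the pre-marking stays cheap on the allocation fast path, summarized from the hunks above together with the beginMarkPhase() change in GC.cpp (comments only, not patch code):

    // 1. beginMarkPhase() calls zone->arenas.clearFreeLists(), so the first
    //    allocation of each kind after the GC starts must refill from an
    //    arena rather than bump-allocate from a stale free span.
    // 2. The refill path, allocateFromArenaInner(), calls
    //    arenaAllocatedDuringGC(), which marks every free cell of that arena
    //    black up front.
    // 3. Subsequent bump allocations from the same free span hand out cells
    //    that are already black, so no per-allocation marking work is needed.
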
--- a/js/src/gc/ArenaList-inl.h
+++ b/js/src/gc/ArenaList-inl.h
@@ -232,16 +232,38 @@ js::gc::SortedArenaList::toArenaList()
     // Point the tail of the final non-empty segment at null. Note that if
     // the list is empty, this will just set segments[0].head to null.
     segments[tailIndex].linkTo(nullptr);
     // Create an ArenaList with head and cursor set to the head and tail of
     // the first segment (if that segment is empty, only the head is used).
     return ArenaList(segments[0]);
 }
 
+void
+js::gc::ArenaLists::setFreeList(AllocKind i, FreeSpan* span)
+{
+#ifdef DEBUG
+    auto old = freeList(i);
+    if (!old->isEmpty())
+        old->getArena()->checkNoMarkedFreeCells();
+#endif
+    freeLists()[i] = span;
+}
+
+void
+js::gc::ArenaLists::clearFreeList(AllocKind i)
+{
+#ifdef DEBUG
+    auto old = freeList(i);
+    if (!old->isEmpty())
+        old->getArena()->checkNoMarkedFreeCells();
+#endif
+    freeLists()[i] = &placeholder;
+}
+
 js::gc::Arena*
 js::gc::ArenaLists::getFirstArena(AllocKind thingKind) const
 {
     return arenaLists(thingKind).head();
 }
 
 js::gc::Arena*
 js::gc::ArenaLists::getFirstArenaToSweep(AllocKind thingKind) const
@@ -298,33 +320,33 @@ js::gc::ArenaLists::doneBackgroundFinali
 
 bool
 js::gc::ArenaLists::needBackgroundFinalizeWait(AllocKind kind) const
 {
     return backgroundFinalizeState(kind) != BFS_DONE;
 }
 
 void
-js::gc::ArenaLists::purge()
+js::gc::ArenaLists::clearFreeLists()
 {
     for (auto i : AllAllocKinds())
-        freeLists(i) = &placeholder;
+        clearFreeList(i);
 }
 
 bool
 js::gc::ArenaLists::arenaIsInUse(Arena* arena, AllocKind kind) const
 {
     MOZ_ASSERT(arena);
-    return arena == freeLists(kind)->getArenaUnchecked();
+    return arena == freeList(kind)->getArenaUnchecked();
 }
 
 MOZ_ALWAYS_INLINE js::gc::TenuredCell*
 js::gc::ArenaLists::allocateFromFreeList(AllocKind thingKind, size_t thingSize)
 {
-    return freeLists(thingKind)->allocate(thingSize);
+    return freeList(thingKind)->allocate(thingSize);
 }
 
 void
 js::gc::ArenaLists::checkEmptyFreeLists()
 {
 #ifdef DEBUG
     for (auto i : AllAllocKinds())
         checkEmptyFreeList(i);
@@ -342,12 +364,12 @@ js::gc::ArenaLists::checkEmptyArenaLists
     }
 #endif
     return empty;
 }
 
 void
 js::gc::ArenaLists::checkEmptyFreeList(AllocKind kind)
 {
-    MOZ_ASSERT(freeLists(kind)->isEmpty());
+    MOZ_ASSERT(freeList(kind)->isEmpty());
 }
 
 #endif // gc_ArenaList_inl_h
--- a/js/src/gc/ArenaList.h
+++ b/js/src/gc/ArenaList.h
@@ -218,18 +218,23 @@ class ArenaLists
      * free things. Initially all the spans are initialized as empty. After we
      * find a new arena with available things we move its first free span into
      * the list and set the arena as fully allocated. This way we do not
      * need to update the arena after the initial allocation. When starting
      * the GC we move the head of the list of spans back to the arena, but
      * only for arenas that were not fully allocated.
      */
     ZoneGroupData<AllAllocKindArray<FreeSpan*>> freeLists_;
-    FreeSpan*& freeLists(AllocKind i) { return freeLists_.ref()[i]; }
-    FreeSpan* freeLists(AllocKind i) const { return freeLists_.ref()[i]; }
+    AllAllocKindArray<FreeSpan*>& freeLists() { return freeLists_.ref(); }
+    const AllAllocKindArray<FreeSpan*>& freeLists() const { return freeLists_.ref(); }
+
+    FreeSpan* freeList(AllocKind i) const { return freeLists()[i]; }
+
+    inline void setFreeList(AllocKind i, FreeSpan* span);
+    inline void clearFreeList(AllocKind i);
 
     // Because the JITs can allocate from the free lists, they cannot be null.
     // We use a placeholder FreeSpan that is empty (and without an associated
     // Arena) so the JITs can fall back gracefully.
     static FreeSpan placeholder;
 
     ZoneGroupOrGCTaskData<AllAllocKindArray<ArenaList>> arenaLists_;
     ArenaList& arenaLists(AllocKind i) { return arenaLists_.ref()[i]; }
@@ -281,19 +286,19 @@ class ArenaLists
     inline bool arenaListsAreEmpty() const;
 
     inline void unmarkAll();
 
     inline bool doneBackgroundFinalize(AllocKind kind) const;
     inline bool needBackgroundFinalizeWait(AllocKind kind) const;
 
     /* Clear the free lists so we won't try to allocate from swept arenas. */
-    inline void purge();
+    inline void clearFreeLists();
 
-    inline void prepareForIncrementalGC();
+    inline void unmarkPreMarkedFreeCells();
 
     /* Check if this arena is in use. */
     inline bool arenaIsInUse(Arena* arena, AllocKind kind) const;
 
     MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize);
 
     /* Moves all arenas from |fromArenaLists| into |this|. */
     void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists, bool targetZoneIsCollecting);
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -27,18 +27,17 @@ RuntimeFromActiveCooperatingThreadIsHeap
     return JS::CurrentThreadIsHeapMajorCollecting();
 }
 
 #ifdef DEBUG
 
 bool
 IsMarkedBlack(JSObject* obj)
 {
-    return obj->isMarkedBlack() ||
-           (obj->isTenured() && obj->asTenured().arena()->allocatedDuringIncremental);
+    return obj->isMarkedBlack();
 }
 
 bool
 HeapSlot::preconditionForSet(NativeObject* owner, Kind kind, uint32_t slot) const
 {
     if (kind == Slot)
         return &owner->getSlotRef(slot) == this;
 
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -117,16 +117,17 @@ class TenuredCell : public Cell
     MOZ_ALWAYS_INLINE bool isMarkedAny() const;
     MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
     MOZ_ALWAYS_INLINE bool isMarkedGray() const;
 
     // The return value indicates if the cell went from unmarked to marked.
     MOZ_ALWAYS_INLINE bool markIfUnmarked(MarkColor color = MarkColor::Black) const;
     MOZ_ALWAYS_INLINE void markBlack() const;
     MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
+    MOZ_ALWAYS_INLINE void unmark();
 
     // Access to the arena.
     inline Arena* arena() const;
     inline AllocKind getAllocKind() const;
     inline JS::TraceKind getTraceKind() const;
     inline JS::Zone* zone() const;
     inline JS::Zone* zoneFromAnyThread() const;
     inline bool isInsideZone(JS::Zone* zone) const;
@@ -304,16 +305,22 @@ TenuredCell::markBlack() const
 void
 TenuredCell::copyMarkBitsFrom(const TenuredCell* src)
 {
     ChunkBitmap& bitmap = chunk()->bitmap;
     bitmap.copyMarkBit(this, src, ColorBit::BlackBit);
     bitmap.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
 }
 
+void
+TenuredCell::unmark()
+{
+    chunk()->bitmap.unmark(this);
+}
+
 inline Arena*
 TenuredCell::arena() const
 {
     MOZ_ASSERT(isTenured());
     uintptr_t addr = address();
     addr &= ~ArenaMask;
     return reinterpret_cast<Arena*>(addr);
 }
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -513,16 +513,35 @@ ArenaCellIterImpl::get<JSObject>() const
 
 void
 Arena::unmarkAll()
 {
     uintptr_t* word = chunk()->bitmap.arenaBits(this);
     memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
 }
 
+void
+Arena::unmarkPreMarkedFreeCells()
+{
+    for (ArenaFreeCellIter iter(this); !iter.done(); iter.next()) {
+        TenuredCell* cell = iter.getCell();
+        MOZ_ASSERT(cell->isMarkedBlack());
+        cell->unmark();
+    }
+}
+
+#ifdef DEBUG
+void
+Arena::checkNoMarkedFreeCells()
+{
+    for (ArenaFreeCellIter iter(this); !iter.done(); iter.next())
+        MOZ_ASSERT(!iter.getCell()->isMarkedAny());
+}
+#endif
+
 /* static */ void
 Arena::staticAsserts()
 {
     static_assert(size_t(AllocKind::LIMIT) <= 255,
                   "We must be able to fit the allockind into uint8_t.");
     static_assert(mozilla::ArrayLength(ThingSizes) == size_t(AllocKind::LIMIT),
                   "We haven't defined all thing sizes.");
     static_assert(mozilla::ArrayLength(FirstThingOffsets) == size_t(AllocKind::LIMIT),
@@ -540,17 +559,16 @@ Arena::finalize(FreeOp* fop, AllocKind t
     MOZ_ASSERT(thingSize >= MinCellSize);
     MOZ_ASSERT(thingSize <= 255);
 
     MOZ_ASSERT(allocated());
     MOZ_ASSERT(thingKind == getAllocKind());
     MOZ_ASSERT(thingSize == getThingSize());
     MOZ_ASSERT(!hasDelayedMarking);
     MOZ_ASSERT(!markOverflow);
-    MOZ_ASSERT(!allocatedDuringIncremental);
 
     uint_fast16_t firstThing = firstThingOffset(thingKind);
     uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
     uint_fast16_t lastThing = ArenaSize - thingSize;
 
     FreeSpan newListHead;
     FreeSpan* newListTail = &newListHead;
     size_t nmarked = 0;
@@ -948,17 +966,16 @@ GCRuntime::GCRuntime(JSRuntime* rt) :
     safeToYield(true),
     sweepOnBackgroundThread(false),
     blocksToFreeAfterSweeping((size_t) JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     sweepGroupIndex(0),
     sweepGroups(nullptr),
     currentSweepGroup(nullptr),
     sweepZone(nullptr),
     abortSweepAfterCurrentGroup(false),
-    arenasAllocatedDuringSweep(nullptr),
     startedCompacting(false),
     relocatedArenasToRelease(nullptr),
 #ifdef JS_GC_ZEAL
     markingValidator(nullptr),
 #endif
     defaultTimeBudget_(TuningDefaults::DefaultTimeBudget),
     incrementalAllowed(true),
     compactingEnabled(TuningDefaults::CompactingEnabled),
@@ -2018,21 +2035,23 @@ void
 GCMarker::delayMarkingChildren(const void* thing)
 {
     const TenuredCell* cell = TenuredCell::fromPointer(thing);
     cell->arena()->markOverflow = 1;
     delayMarkingArena(cell->arena());
 }
 
 inline void
-ArenaLists::prepareForIncrementalGC()
-{
-    purge();
-    for (auto i : AllAllocKinds())
-        arenaLists(i).moveCursorToEnd();
+ArenaLists::unmarkPreMarkedFreeCells()
+{
+    for (auto i : AllAllocKinds()) {
+        FreeSpan* freeSpan = freeList(i);
+        if (!freeSpan->isEmpty())
+            freeSpan->getArena()->unmarkPreMarkedFreeCells();
+    }
 }
 
 /* Compacting GC */
 
 bool
 GCRuntime::shouldCompact()
 {
     // Compact on shrinking GC if enabled, but skip compacting in incremental
@@ -2266,17 +2285,16 @@ RelocateCell(Zone* zone, TenuredCell* sr
 }
 
 static void
 RelocateArena(Arena* arena, SliceBudget& sliceBudget)
 {
     MOZ_ASSERT(arena->allocated());
     MOZ_ASSERT(!arena->hasDelayedMarking);
     MOZ_ASSERT(!arena->markOverflow);
-    MOZ_ASSERT(!arena->allocatedDuringIncremental);
     MOZ_ASSERT(arena->bufferedCells()->isEmpty());
 
     Zone* zone = arena->zone;
 
     AllocKind thingKind = arena->getAllocKind();
     size_t thingSize = arena->getThingSize();
 
     for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
@@ -2354,17 +2372,17 @@ ArenaLists::relocateArenas(Zone* zone, A
 {
     // This is only called from the active thread while we are doing a GC, so
     // there is no need to lock.
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
     MOZ_ASSERT(runtime_->gc.isHeapCompacting());
     MOZ_ASSERT(!runtime_->gc.isBackgroundSweeping());
 
     // Clear all the free lists.
-    purge();
+    clearFreeLists();
 
     if (ShouldRelocateAllArenas(reason)) {
         zone->prepareForCompacting();
         for (auto kind : AllocKindsToRelocate) {
             ArenaList& al = arenaLists(kind);
             Arena* allArenas = al.head();
             al.clear();
             relocatedListOut = al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
@@ -2984,22 +3002,21 @@ ArenaLists::ArenaLists(JSRuntime* rt, Zo
     incrementalSweptArenaKind(group, AllocKind::LIMIT),
     incrementalSweptArenas(group),
     gcShapeArenasToUpdate(group, nullptr),
     gcAccessorShapeArenasToUpdate(group, nullptr),
     gcScriptArenasToUpdate(group, nullptr),
     gcObjectGroupArenasToUpdate(group, nullptr),
     savedEmptyArenas(group, nullptr)
 {
-    for (auto i : AllAllocKinds())
-        freeLists(i) = &placeholder;
-    for (auto i : AllAllocKinds())
+    for (auto i : AllAllocKinds()) {
+        freeLists()[i] = &placeholder;
         backgroundFinalizeState(i) = BFS_DONE;
-    for (auto i : AllAllocKinds())
         arenaListsToSweep(i) = nullptr;
+    }
 }
 
 void
 ReleaseArenaList(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
 {
     Arena* next;
     for (; arena; arena = next) {
         next = arena->next;
@@ -4261,22 +4278,23 @@ GCRuntime::beginMarkPhase(JS::gcreason::
     if (!prepareZonesForCollection(reason, &isFull.ref(), session.lock()))
         return false;
 
     /* If we're not collecting the atoms zone we can release the lock now. */
     if (!atomsZone->isCollecting())
         session.maybeLock.reset();
 
     /*
-     * Ensure that after the start of a collection we don't allocate into any
-     * existing arenas, as this can cause unreachable things to be marked.
+     * In an incremental GC, clear the arena free lists to ensure that
+     * subsequent allocations refill them and end up marking new cells black.
+     * See arenaAllocatedDuringGC().
      */
     if (isIncremental) {
         for (GCZonesIter zone(rt); !zone.done(); zone.next())
-            zone->arenas.prepareForIncrementalGC();
+            zone->arenas.clearFreeLists();
     }
 
     marker.start();
     GCMarker* gcmarker = &marker;
 
     {
         gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::PREPARE);
         AutoLockHelperThreadState helperLock;
@@ -4738,18 +4756,16 @@ js::gc::MarkingValidator::validate()
         for (size_t i = 0; i < ArenasPerChunk; i++) {
             if (chunk->decommittedArenas.get(i))
                 continue;
             Arena* arena = &chunk->arenas[i];
             if (!arena->allocated())
                 continue;
             if (!arena->zone->isGCSweeping())
                 continue;
-            if (arena->allocatedDuringIncremental)
-                continue;
 
             AllocKind kind = arena->getAllocKind();
             uintptr_t thing = arena->thingsStart();
             uintptr_t end = arena->thingsEnd();
             while (thing < end) {
                 auto cell = reinterpret_cast<TenuredCell*>(thing);
 
                 /*
@@ -5583,17 +5599,19 @@ GCRuntime::beginSweepingSweepGroup(FreeO
     AutoSCC scc(stats(), sweepGroupIndex);
 
     bool sweepingAtoms = false;
     for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
         /* Set the GC state to sweeping. */
         zone->changeGCState(Zone::Mark, Zone::Sweep);
 
         /* Purge the ArenaLists before sweeping. */
-        zone->arenas.purge();
+        if (isIncremental)
+            zone->arenas.unmarkPreMarkedFreeCells();
+        zone->arenas.clearFreeLists();
 
         if (zone->isAtomsZone())
             sweepingAtoms = true;
 
 #ifdef DEBUG
         zone->gcLastSweepGroupIndex = sweepGroupIndex;
 #endif
     }
@@ -5705,16 +5723,18 @@ GCRuntime::endSweepingSweepGroup(FreeOp*
 
     /* Update the GC state for zones we have swept. */
     for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
         AutoLockGC lock(rt);
         zone->changeGCState(Zone::Sweep, Zone::Finished);
         zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind, tunables,
                                       schedulingState, lock);
         zone->updateAllGCMallocCountersOnGCEnd(lock);
+        if (isIncremental)
+            zone->arenas.unmarkPreMarkedFreeCells();
     }
 
     /*
      * Start background thread to sweep zones if required, sweeping the atoms
      * zone last if present.
      */
     bool sweepAtomsZone = false;
     ZoneList zones;
@@ -5727,22 +5747,16 @@ GCRuntime::endSweepingSweepGroup(FreeOp*
     if (sweepAtomsZone)
         zones.append(atomsZone);
 
     if (sweepOnBackgroundThread)
         queueZonesForBackgroundSweep(zones);
     else
         sweepBackgroundThings(zones, blocksToFreeAfterSweeping.ref());
 
-    /* Reset the list of arenas marked as being allocated during sweep phase. */
-    while (Arena* arena = arenasAllocatedDuringSweep) {
-        arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
-        arena->unsetAllocDuringSweep();
-    }
-
     return Finished;
 }
 
 void
 GCRuntime::beginSweepPhase(JS::gcreason::Reason reason, AutoTraceSession& session)
 {
     /*
      * Sweep phase.
@@ -6754,16 +6768,17 @@ GCRuntime::resetIncrementalGC(gc::AbortR
         clearBufferedGrayRoots();
 
         for (GCCompartmentsIter c(rt); !c.done(); c.next())
             ResetGrayList(c);
 
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
             zone->setNeedsIncrementalBarrier(false);
             zone->changeGCState(Zone::Mark, Zone::NoGC);
+            zone->arenas.unmarkPreMarkedFreeCells();
         }
 
         blocksToFreeAfterSweeping.ref().freeAll();
 
         incrementalState = State::NotActive;
 
         MOZ_ASSERT(!marker.shouldCheckCompartments());
 
@@ -6881,33 +6896,42 @@ AutoGCSlice::AutoGCSlice(JSRuntime* rt)
     }
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
     for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         MOZ_ASSERT(!zone->needsIncrementalBarrier());
-        if (zone->isGCMarking()) {
+        if (zone->isGCMarking())
             zone->setNeedsIncrementalBarrier(true);
-            zone->arenas.purge();
-        }
     }
 }
 
 void
 GCRuntime::pushZealSelectedObjects()
 {
 #ifdef JS_GC_ZEAL
     /* Push selected objects onto the mark stack and clear the list. */
     for (JSObject** obj = selectedForMarking.ref().begin(); obj != selectedForMarking.ref().end(); obj++)
         TraceManuallyBarrieredEdge(&marker, obj, "selected obj");
 #endif
 }
 
+void
+GCRuntime::changeToNonIncrementalGC()
+{
+    MOZ_ASSERT(isIncremental);
+
+    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (zone->isGCMarking() || zone->isGCSweeping())
+            zone->arenas.unmarkPreMarkedFreeCells();
+    }
+}
+
 static bool
 IsShutdownGC(JS::gcreason::Reason reason)
 {
     return reason == JS::gcreason::SHUTDOWN_CC || reason == JS::gcreason::DESTROY_RUNTIME;
 }
 
 static bool
 ShouldCleanUpEverything(JS::gcreason::Reason reason, JSGCInvocationKind gckind)
@@ -6942,16 +6966,19 @@ GCRuntime::incrementalCollectSlice(Slice
      * cancelled by resetIncrementalGC().
      */
     useZeal = reason == JS::gcreason::DEBUG_GC && !budget.isUnlimited();
 #else
     bool useZeal = false;
 #endif
 
     MOZ_ASSERT_IF(isIncrementalGCInProgress(), isIncremental);
+    if (isIncrementalGCInProgress() && budget.isUnlimited())
+        changeToNonIncrementalGC();
+
     isIncremental = !budget.isUnlimited();
 
     if (useZeal && (hasZealMode(ZealMode::IncrementalRootsThenFinish) ||
                     hasZealMode(ZealMode::IncrementalMarkAllThenFinish) ||
                     hasZealMode(ZealMode::IncrementalSweepThenFinish)))
     {
         /*
          * Yields between slices occurs at predetermined points in these modes;
@@ -8008,19 +8035,21 @@ GCRuntime::mergeCompartments(JSCompartme
     for (auto thingKind : AllAllocKinds()) {
         for (ArenaIter aiter(source->zone(), thingKind); !aiter.done(); aiter.next()) {
             Arena* arena = aiter.get();
             arena->zone = target->zone();
             if (MOZ_UNLIKELY(targetZoneIsCollecting)) {
                 // If we are currently collecting the target zone then we must
                 // treat all merged things as if they were allocated during the
                 // collection.
-                arena->unmarkAll();
-                if (!arena->isEmpty())
-                    arenaAllocatedDuringGC(target->zone(), arena);
+                for (ArenaCellIterUnbarriered iter(arena); !iter.done(); iter.next()) {
+                    TenuredCell* cell = iter.getCell();
+                    MOZ_ASSERT(!cell->isMarkedAny());
+                    cell->markBlack();
+                }
             }
         }
     }
 
     // The source should be the only compartment in its zone.
     for (CompartmentsInZoneIter c(source->zone()); !c.done(); c.next())
         MOZ_ASSERT(c.get() == source);
 
@@ -8210,17 +8239,17 @@ js::ReleaseAllJITCode(FreeOp* fop)
 }
 
 void
 ArenaLists::adoptArenas(JSRuntime* rt, ArenaLists* fromArenaLists, bool targetZoneIsCollecting)
 {
     // GC may be active so take the lock here so we can mutate the arena lists.
     AutoLockGC lock(rt);
 
-    fromArenaLists->purge();
+    fromArenaLists->clearFreeLists();
 
     for (auto thingKind : AllAllocKinds()) {
         MOZ_ASSERT(fromArenaLists->backgroundFinalizeState(thingKind) == BFS_DONE);
         ArenaList* fromList = &fromArenaLists->arenaLists(thingKind);
         ArenaList* toList = &arenaLists(thingKind);
         fromList->check();
         toList->check();
         Arena* next;
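
For orientation, the points at which the pre-marked bits are cleared again, collected from the GC.cpp hunks above (a summary, not patch code):

    // ArenaLists::unmarkPreMarkedFreeCells() runs at every exit from the
    // marking and sweeping states:
    //   - beginSweepingSweepGroup(): when a zone moves from Mark to Sweep,
    //     before its free lists are cleared;
    //   - endSweepingSweepGroup(): when a zone finishes sweeping;
    //   - resetIncrementalGC(): when an incremental GC is aborted;
    //   - changeToNonIncrementalGC(): when an unlimited-budget slice finishes
    //     an in-progress incremental GC non-incrementally.
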
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -549,16 +549,17 @@ class GCRuntime
                                            JS::gcreason::Reason reason);
     bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
     void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
                                  AutoTraceSession& session);
 
     friend class AutoCallGCCallbacks;
     void maybeCallGCCallback(JSGCStatus status);
 
+    void changeToNonIncrementalGC();
     void pushZealSelectedObjects();
     void purgeRuntime();
     MOZ_MUST_USE bool beginMarkPhase(JS::gcreason::Reason reason, AutoTraceSession& session);
     bool prepareZonesForCollection(JS::gcreason::Reason reason, bool* isFullOut,
                                    AutoLockForExclusiveAccess& lock);
     bool shouldPreserveJITCode(JSCompartment* comp, int64_t currentTime,
                                JS::gcreason::Reason reason, bool canAllocateMoreCode);
     void traceRuntimeForMajorGC(JSTracer* trc, AutoTraceSession& session);
@@ -834,21 +835,16 @@ class GCRuntime
     ActiveThreadData<mozilla::Maybe<AtomSet::Enum>> maybeAtomsToSweep;
     ActiveThreadOrGCTaskData<JS::detail::WeakCacheBase*> sweepCache;
     ActiveThreadData<bool> abortSweepAfterCurrentGroup;
 
     friend class SweepGroupsIter;
     friend class WeakCacheSweepIterator;
 
     /*
-     * List head of arenas allocated during the sweep phase.
-     */
-    ActiveThreadData<Arena*> arenasAllocatedDuringSweep;
-
-    /*
      * Incremental compacting state.
      */
     ActiveThreadData<bool> startedCompacting;
     ActiveThreadData<ZoneList> zonesToMaybeCompact;
     ActiveThreadData<Arena*> relocatedArenasToRelease;
 
 #ifdef JS_GC_ZEAL
     ActiveThreadData<MarkingValidator*> markingValidator;
--- a/js/src/gc/Heap-inl.h
+++ b/js/src/gc/Heap-inl.h
@@ -14,17 +14,16 @@
 
 inline void
 js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
 {
     MOZ_ASSERT(firstFreeSpan.isEmpty());
     MOZ_ASSERT(!zone);
     MOZ_ASSERT(!allocated());
     MOZ_ASSERT(!hasDelayedMarking);
-    MOZ_ASSERT(!allocatedDuringIncremental);
     MOZ_ASSERT(!markOverflow);
     MOZ_ASSERT(!auxNextLink);
 
     zone = zoneArg;
     allocKind = size_t(kind);
     setAsFullyUnused();
     if (zone->isAtomsZone())
         zone->runtimeFromAnyThread()->gc.atomMarking.registerArena(this);
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -96,16 +96,17 @@ const size_t ArenaBitmapWords = DivideAn
  *   Furthermore, the memory pointed to by |last| holds a FreeSpan structure
  *   that points to the next span (which may be empty); this works because
  *   sizeof(FreeSpan) is less than the smallest thingSize.
  */
 class FreeSpan
 {
     friend class Arena;
     friend class ArenaCellIterImpl;
+    friend class ArenaFreeCellIter;
 
     uint16_t first;
     uint16_t last;
 
   public:
     // This inits just |first| and |last|; if the span is non-empty it doesn't
     // do anything with the next span stored at |last|.
     void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
@@ -240,34 +241,23 @@ class Arena
      * When collecting we sometimes need to keep an auxiliary list of arenas,
      * for which we use the following fields. This happens for several reasons:
      *
      * When recursive marking uses too much stack, the marking is delayed and
      * the corresponding arenas are put into a stack. To distinguish the bottom
      * of the stack from the arenas not present in the stack we use the
      * markOverflow flag to tag arenas on the stack.
      *
-     * Delayed marking is also used for arenas that we allocate into during an
-     * incremental GC. In this case, we intend to mark all the objects in the
-     * arena, and it's faster to do this marking in bulk.
-     *
-     * When sweeping we keep track of which arenas have been allocated since
-     * the end of the mark phase. This allows us to tell whether a pointer to
-     * an unmarked object is yet to be finalized or has already been
-     * reallocated. We set the allocatedDuringIncremental flag for this and
-     * clear it at the end of the sweep phase.
-     *
      * To minimize the size of the header fields we record the next linkage as
      * address() >> ArenaShift and pack it with the allocKind and the flags.
      */
     size_t hasDelayedMarking : 1;
-    size_t allocatedDuringIncremental : 1;
     size_t markOverflow : 1;
-    size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
-    static_assert(ArenaShift >= 8 + 1 + 1 + 1,
+    size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1;
+    static_assert(ArenaShift >= 8 + 1 + 1,
                   "Arena::auxNextLink packing assumes that ArenaShift has "
                   "enough bits to cover allocKind and hasDelayedMarking.");
 
   private:
     union {
         /*
          * For arenas in zones other than the atoms zone, if non-null, points
          * to an ArenaCellSet that represents the set of cells in this arena
@@ -307,17 +297,16 @@ class Arena
 
     // Initialize an arena to its unallocated state. For arenas that were
     // previously allocated for some zone, use release() instead.
     void setAsNotAllocated() {
         firstFreeSpan.initAsEmpty();
         zone = nullptr;
         allocKind = size_t(AllocKind::LIMIT);
         hasDelayedMarking = 0;
-        allocatedDuringIncremental = 0;
         markOverflow = 0;
         auxNextLink = 0;
         bufferedCells_ = nullptr;
     }
 
     // Return an allocated arena to its unallocated state.
     inline void release();
 
@@ -414,44 +403,30 @@ class Arena
     }
 
     void unsetDelayedMarking() {
         MOZ_ASSERT(hasDelayedMarking);
         hasDelayedMarking = 0;
         auxNextLink = 0;
     }
 
-    Arena* getNextAllocDuringSweep() const {
-        MOZ_ASSERT(allocatedDuringIncremental);
-        return reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
-    }
-
-    void setNextAllocDuringSweep(Arena* arena) {
-        MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
-        MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
-        allocatedDuringIncremental = 1;
-        if (arena)
-            auxNextLink = arena->address() >> ArenaShift;
-    }
-
-    void unsetAllocDuringSweep() {
-        MOZ_ASSERT(allocatedDuringIncremental);
-        allocatedDuringIncremental = 0;
-        auxNextLink = 0;
-    }
-
     inline ArenaCellSet*& bufferedCells();
     inline size_t& atomBitmapStart();
 
     template <typename T>
     size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
 
     static void staticAsserts();
 
     void unmarkAll();
+    void unmarkPreMarkedFreeCells();
+
+#ifdef DEBUG
+    void checkNoMarkedFreeCells();
+#endif
 };
 
 static_assert(ArenaZoneOffset == offsetof(Arena, zone),
               "The hardcoded API zone offset must match the actual offset.");
 
 static_assert(sizeof(Arena) == ArenaSize,
               "ArenaSize must match the actual size of the Arena structure.");
 
@@ -681,16 +656,24 @@ struct ChunkBitmap
         uintptr_t* dstWord, dstMask;
         getMarkWordAndMask(dst, colorBit, &dstWord, &dstMask);
 
         *dstWord &= ~dstMask;
         if (*srcWord & srcMask)
             *dstWord |= dstMask;
     }
 
+    MOZ_ALWAYS_INLINE void unmark(const TenuredCell* cell) {
+        uintptr_t* word, mask;
+        getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
+        *word &= ~mask;
+        getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
+        *word &= ~mask;
+    }
+
     void clear() {
         memset((void*)bitmap, 0, sizeof(bitmap));
     }
 
     uintptr_t* arenaBits(Arena* arena) {
         static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
                       "We assume that the part of the bitmap corresponding to the arena "
                       "has the exact number of words so we do not need to deal with a word "
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1000,20 +1000,23 @@ JS_FOR_EACH_TRACEKIND(EXPAND_PARTICIPATE
 
 } // namespace
 
 template <typename T>
 bool
 js::GCMarker::mark(T* thing)
 {
     AssertShouldMarkInZone(thing);
-    MOZ_ASSERT(!IsInsideNursery(TenuredCell::fromPointer(thing)));
-    return ParticipatesInCC<T>::value
-           ? TenuredCell::fromPointer(thing)->markIfUnmarked(markColor())
-           : TenuredCell::fromPointer(thing)->markIfUnmarked(MarkColor::Black);
+    TenuredCell* cell = TenuredCell::fromPointer(thing);
+    MOZ_ASSERT(!IsInsideNursery(cell));
+
+    if (!ParticipatesInCC<T>::value)
+        return cell->markIfUnmarked(MarkColor::Black);
+
+    return cell->markIfUnmarked(markColor());
 }
 
 
 /*** Inline, Eager GC Marking *********************************************************************/
 
 // Each of the eager, inline marking paths is directly preceded by the
 // out-of-line, generic tracing code for comparison. Both paths must end up
 // traversing equivalent subgraphs.
@@ -2472,17 +2475,17 @@ GCMarker::reset()
 
     while (unmarkedArenaStackTop) {
         Arena* arena = unmarkedArenaStackTop;
         MOZ_ASSERT(arena->hasDelayedMarking);
         MOZ_ASSERT(markLaterArenas);
         unmarkedArenaStackTop = arena->getNextDelayedMarking();
         arena->unsetDelayedMarking();
         arena->markOverflow = 0;
-        arena->allocatedDuringIncremental = 0;
+
 #ifdef DEBUG
         markLaterArenas--;
 #endif
     }
     MOZ_ASSERT(isDrained());
     MOZ_ASSERT(!markLaterArenas);
 }
 
@@ -2551,37 +2554,26 @@ GCMarker::leaveWeakMarkingMode()
         if (!zone->gcWeakKeys().clear())
             oomUnsafe.crash("clearing weak keys in GCMarker::leaveWeakMarkingMode()");
     }
 }
 
 void
 GCMarker::markDelayedChildren(Arena* arena)
 {
-    if (arena->markOverflow) {
-        bool always = arena->allocatedDuringIncremental;
-        arena->markOverflow = 0;
-
-        for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
-            TenuredCell* t = i.getCell();
-            if (always || t->isMarkedAny()) {
-                t->markIfUnmarked();
-                js::TraceChildren(this, t, MapAllocToTraceKind(arena->getAllocKind()));
-            }
+    MOZ_ASSERT(arena->markOverflow);
+    arena->markOverflow = 0;
+
+    for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
+        TenuredCell* t = i.getCell();
+        if (t->isMarkedAny()) {
+            t->markIfUnmarked();
+            js::TraceChildren(this, t, MapAllocToTraceKind(arena->getAllocKind()));
         }
-    } else {
-        MOZ_ASSERT(arena->allocatedDuringIncremental);
-        PushArena(this, arena);
     }
-    arena->allocatedDuringIncremental = 0;
-    /*
-     * Note that during an incremental GC we may still be allocating into
-     * the arena. However, prepareForIncrementalGC sets the
-     * allocatedDuringIncremental flag if we continue marking.
-     */
 }
 
 bool
 GCMarker::markDelayedChildren(SliceBudget& budget)
 {
     GCRuntime& gc = runtime()->gc;
     gcstats::AutoPhase ap(gc.stats(), gc.state() == State::Mark, gcstats::PhaseKind::MARK_DELAYED);
 
@@ -3306,17 +3298,17 @@ IsMarkedInternalCommon(T* thingp)
     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
         return true;
 
     if (zone->isGCCompacting() && IsForwarded(*thingp)) {
         *thingp = Forwarded(*thingp);
         return true;
     }
 
-    return thing.isMarkedAny() || thing.arena()->allocatedDuringIncremental;
+    return thing.isMarkedAny();
 }
 
 template <typename T>
 static bool
 IsMarkedInternal(JSRuntime* rt, T** thingp)
 {
     if (IsOwnedByOtherRuntime(rt, *thingp))
         return true;
@@ -3356,18 +3348,16 @@ IsMarkedInternal(JSRuntime* rt, T* thing
     return rv;
 }
 
 bool
 js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured)
 {
     MOZ_ASSERT(!IsInsideNursery(&tenured));
     MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
-    if (tenured.arena()->allocatedDuringIncremental)
-        return false;
     return !tenured.isMarkedAny();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalizedInternal(T** thingp)
 {
     CheckIsMarkedThing(thingp);
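
Why GCMarker::markDelayedChildren() loses its second mode (summarizing the Marking.cpp hunks above): delayed marking previously covered both mark-stack overflow and arenas allocated during an incremental GC; with new cells pre-marked black at allocation time, only the overflow case remains.

    // Before: a delayed arena was either markOverflow or
    // allocatedDuringIncremental, and the latter forced every cell in the
    // arena to be marked and traced. After: only markOverflow arenas are
    // delayed, and the allocatedDuringIncremental escape hatches in
    // IsMarkedInternalCommon() and IsAboutToBeFinalizedDuringSweep() go away
    // because pre-marked cells simply report isMarkedAny().
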
--- a/js/src/gc/PrivateIterators-inl.h
+++ b/js/src/gc/PrivateIterators-inl.h
@@ -114,12 +114,57 @@ class SweepGroupZonesIter {
     }
 
     operator JS::Zone*() const { return get(); }
     JS::Zone* operator->() const { return get(); }
 };
 
 typedef CompartmentsIterT<SweepGroupZonesIter> SweepGroupCompartmentsIter;
 
+// Iterate the free cells in an arena. See also ArenaCellIterImpl which iterates
+// the allocated cells.
+class ArenaFreeCellIter
+{
+    Arena* arena;
+    size_t thingSize;
+    FreeSpan span;
+    uint_fast16_t thing;
+
+  public:
+    explicit ArenaFreeCellIter(Arena* arena)
+      : arena(arena),
+        thingSize(arena->getThingSize()),
+        span(*arena->getFirstFreeSpan()),
+        thing(span.first)
+    {
+        MOZ_ASSERT(arena);
+        MOZ_ASSERT(thing < ArenaSize);
+    }
+
+    bool done() const {
+        MOZ_ASSERT(thing < ArenaSize);
+        return !thing;
+    }
+
+    TenuredCell* getCell() const {
+        MOZ_ASSERT(!done());
+        return reinterpret_cast<TenuredCell*>(uintptr_t(arena) + thing);
+    }
+
+    void next() {
+        MOZ_ASSERT(!done());
+        MOZ_ASSERT(thing >= span.first && thing <= span.last);
+
+        if (thing == span.last) {
+            span = *span.nextSpan(arena);
+            thing = span.first;
+        } else {
+            thing += thingSize;
+        }
+
+        MOZ_ASSERT(thing < ArenaSize);
+    }
+};
+
 } // namespace gc
 } // namespace js
 
 #endif // gc_PrivateIterators_inl_h
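
A hedged usage sketch for the new iterator (AllFreeCellsUnmarked is hypothetical, not part of the patch). The iterator walks the arena's firstFreeSpan, so it is only meaningful while that span is up to date, as it is at the point arenaAllocatedDuringGC() runs:

    // Hypothetical helper mirroring Arena::checkNoMarkedFreeCells().
    static bool AllFreeCellsUnmarked(Arena* arena)
    {
        for (ArenaFreeCellIter iter(arena); !iter.done(); iter.next()) {
            if (iter.getCell()->isMarkedAny())
                return false;
        }
        return true;
    }
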
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -13,16 +13,17 @@
 #endif
 
 #include "gc/GCInternals.h"
 #include "gc/PublicIterators.h"
 #include "gc/Zone.h"
 #include "js/HashTable.h"
 #include "vm/JSContext.h"
 
+#include "gc/ArenaList-inl.h"
 #include "gc/GC-inl.h"
 #include "gc/Marking-inl.h"
 #include "vm/JSContext-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 #ifdef JS_GC_ZEAL
@@ -244,31 +245,31 @@ gc::GCRuntime::startVerifyPreBarriers()
 
     verifyPreData = trc;
     incrementalState = State::Mark;
     marker.start();
 
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         MOZ_ASSERT(!zone->usedByHelperThread());
         zone->setNeedsIncrementalBarrier(true);
-        zone->arenas.purge();
+        zone->arenas.clearFreeLists();
     }
 
     return;
 
 oom:
     incrementalState = State::NotActive;
     js_delete(trc);
     verifyPreData = nullptr;
 }
 
 static bool
 IsMarkedOrAllocated(TenuredCell* cell)
 {
-    return cell->isMarkedAny() || cell->arena()->allocatedDuringIncremental;
+    return cell->isMarkedAny();
 }
 
 struct CheckEdgeTracer : public JS::CallbackTracer {
     VerifyNode* node;
     explicit CheckEdgeTracer(JSRuntime* rt) : JS::CallbackTracer(rt), node(nullptr) {}
     void onChild(const JS::GCCellPtr& thing) override;
 };
 
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -10117,19 +10117,16 @@ CodeGenerator::link(JSContext* cx, Compi
     if (!ionScript)
         return false;
     auto guardIonScript = mozilla::MakeScopeExit([&ionScript] {
         // Use js_free instead of IonScript::Destroy: the cache list and
         // backedge list are still uninitialized.
         js_free(ionScript);
     });
 
-    // Also, note that creating the code here during an incremental GC will
-    // trace the code and mark all GC things it refers to. This captures any
-    // read barriers which were skipped while compiling the script off thread.
     Linker linker(masm, nogc);
     AutoFlushICache afc("IonLink");
     JitCode* code = linker.newCode(cx, CodeKind::Ion, !patchableBackedges_.empty());
     if (!code)
         return false;
 
     // Encode native to bytecode map if profiling is enabled.
     if (isProfilerInstrumentationEnabled()) {