Bug 1125101 - Incrementalise compacting GC by zones r=terrence
☠☠ backed out by dcb0abbfa423 ☠☠
author      Jon Coppeard <jcoppeard@mozilla.com>
date        Thu, 26 Feb 2015 12:35:59 +0000
changeset   230967 f7104b435b475d4c4ba6b3ff215694d091df8875
parent      230966 95425fd7c738e353500b33950ddaa7181e6161b3
child       230968 4eab437d2b51f5f1261926511ce5cde3a1f107da
push id     28341
push user   kwierso@gmail.com
push date   Fri, 27 Feb 2015 02:25:40 +0000
treeherder  mozilla-central@93707c1b4edb
reviewers   terrence
bugs        1125101
milestone   39.0a1
Bug 1125101 - Incrementalise compacting GC by zones r=terrence
js/src/gc/GCRuntime.h
js/src/gc/RootMarking.cpp
js/src/gc/Zone.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -903,28 +903,30 @@ class GCRuntime
     void sweepZones(FreeOp *fop, bool lastGC);
     void decommitAllWithoutUnlocking(const AutoLockGC &lock);
     void decommitArenas(AutoLockGC &lock);
     void expireChunksAndArenas(bool shouldShrink, AutoLockGC &lock);
     void queueZonesForBackgroundSweep(ZoneList &zones);
     void sweepBackgroundThings(ZoneList &zones, LifoAlloc &freeBlocks, ThreadType threadType);
     void assertBackgroundSweepingFinished();
     bool shouldCompact();
+    IncrementalProgress beginCompactPhase();
     IncrementalProgress compactPhase(JS::gcreason::Reason reason);
+    void endCompactPhase(JS::gcreason::Reason reason);
     void sweepTypesAfterCompacting(Zone *zone);
     void sweepZoneAfterCompacting(Zone *zone);
-    ArenaHeader *relocateArenas(JS::gcreason::Reason reason);
-    void updateAllCellPointersParallel(MovingTracer *trc);
-    void updateAllCellPointersSerial(MovingTracer *trc);
-    void updatePointersToRelocatedCells();
-    void releaseRelocatedArenas(ArenaHeader *relocatedList);
-    void releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, const AutoLockGC& lock);
+    bool relocateArenas(Zone *zone, JS::gcreason::Reason reason);
+    void updateAllCellPointersParallel(MovingTracer *trc, Zone *zone);
+    void updateAllCellPointersSerial(MovingTracer *trc, Zone *zone);
+    void updatePointersToRelocatedCells(Zone *zone);
+    void releaseRelocatedArenas();
+    void releaseRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
 #ifdef DEBUG
-    void protectRelocatedArenas(ArenaHeader *relocatedList);
-    void unprotectRelocatedArenas(ArenaHeader *relocatedList);
+    void protectRelocatedArenas();
+    void unprotectRelocatedArenas();
 #endif
     void finishCollection(JS::gcreason::Reason reason);
 
     void computeNonIncrementalMarkingForValidation();
     void validateIncrementalMarking();
     void finishMarkingValidation();
 
 #ifdef DEBUG
@@ -1054,16 +1056,17 @@ class GCRuntime
     /* Whether observed type information is being released in the current GC. */
     bool releaseObservedTypes;
 
     /* Whether any black->gray edges were found during marking. */
     bool foundBlackGrayEdges;
 
     /* Singly linked list of zones to be swept in the background. */
     ZoneList backgroundSweepZones;
+
     /*
      * Free LIFO blocks are transferred to this allocator before being freed on
      * the background GC thread.
      */
     js::LifoAlloc freeLifoAlloc;
 
     /* Index of current zone group (for stats). */
     unsigned zoneGroupIndex;
@@ -1085,16 +1088,23 @@ class GCRuntime
     void startTask(GCParallelTask &task, gcstats::Phase phase);
     void joinTask(GCParallelTask &task, gcstats::Phase phase);
 
     /*
      * List head of arenas allocated during the sweep phase.
      */
     js::gc::ArenaHeader *arenasAllocatedDuringSweep;
 
+    /*
+     * Incremental compacting state.
+     */
+    bool startedCompacting;
+    js::gc::ZoneList zonesToMaybeCompact;
+    ArenaHeader* relocatedArenasToRelease;
+
 #ifdef JS_GC_MARKING_VALIDATION
     js::gc::MarkingValidator *markingValidator;
 #endif
 
     /*
      * Indicates that a GC slice has taken place in the middle of an animation
      * frame, rather than at the beginning. In this case, the next slice will be
      * delayed so that we don't get back-to-back slices.
@@ -1219,19 +1229,16 @@ class GCRuntime
      * Some regions of code are hard for the static rooting hazard analysis to
      * understand. In those cases, we trade the static analysis for a dynamic
      * analysis. When this is non-zero, we should assert if we trigger, or
      * might trigger, a GC.
      */
     int inUnsafeRegion;
 
     size_t noGCOrAllocationCheck;
-
-    ArenaHeader* relocatedArenasToRelease;
-
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperState. */
     PRLock *lock;
     mozilla::DebugOnly<PRThread *> lockOwner;
 
     BackgroundAllocTask allocTask;
     GCHelperState helperState;
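
The declarations above split the old monolithic compactPhase() into a
begin/work/end triple so the slice driver can re-enter compaction until every
queued zone is processed. Below is a minimal control-flow sketch with
hypothetical stand-in types; only the shape of the logic mirrors the patch
(the real driver is the COMPACT case in incrementalCollectSlice, in jsgc.cpp
below).

    #include <cstdio>
    #include <deque>

    // Stand-ins for the real GC types; illustrative only.
    enum IncrementalProgress { NotFinished, Finished };
    struct Zone { int id; };

    struct MiniGC {
        std::deque<Zone> zones{{0}, {1}, {2}};  // all zones in the "runtime"
        std::deque<Zone*> zonesToMaybeCompact;  // work queue kept between slices
        bool startedCompacting = false;
        bool backgroundSweeping = false;

        IncrementalProgress beginCompactPhase() {
            if (backgroundSweeping)
                return NotFinished;             // poll again next slice
            // The patch appends every zone for which CanRelocateZone() holds.
            for (Zone& z : zones)
                zonesToMaybeCompact.push_back(&z);
            startedCompacting = true;
            return Finished;
        }

        IncrementalProgress compactPhase() {
            // This patch drains the whole list in one slice; keeping the queue
            // and the Finished/NotFinished result is what makes finer-grained
            // per-zone slicing possible.
            while (!zonesToMaybeCompact.empty()) {
                std::printf("relocate + update zone %d\n",
                            zonesToMaybeCompact.front()->id);
                zonesToMaybeCompact.pop_front();
            }
            return zonesToMaybeCompact.empty() ? Finished : NotFinished;
        }

        void endCompactPhase() { startedCompacting = false; }

        // One GC slice, shaped like the COMPACT case of incrementalCollectSlice.
        void compactSlice() {
            if (!startedCompacting && beginCompactPhase() == NotFinished)
                return;                         // yield: sweeper still running
            if (compactPhase() == NotFinished)
                return;                         // yield: zones left to compact
            endCompactPhase();
        }
    };

    int main() {
        MiniGC gc;
        gc.compactSlice();  // one slice suffices here: the queue is fully drained
    }
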
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -425,17 +425,17 @@ js::gc::GCRuntime::markRuntime(JSTracer 
 
     if (traceOrMark == MarkRuntime) {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_CCWS);
 
         for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
             if (!c->zone()->isCollecting())
                 c->markCrossCompartmentWrappers(trc);
         }
-        Debugger::markAllCrossCompartmentEdges(trc);
+        Debugger::markIncomingCrossCompartmentEdges(trc);
     }
 
     {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTERS);
 
         AutoGCRooter::traceAll(trc);
 
         if (!rt->isBeingDestroyed()) {
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -362,8 +362,15 @@ ZoneList::removeFront()
 
     Zone *front = head;
     head = head->listNext_;
     if (!head)
         tail = nullptr;
 
     front->listNext_ = Zone::NotOnList;
 }
+
+void
+ZoneList::clear()
+{
+    while (!isEmpty())
+        removeFront();
+}
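
ZoneList (declared in jsgc.h below) is an intrusive singly linked list
threaded through Zone::listNext_, with a sentinel value marking zones that are
on no list; the new clear() simply pops until empty so each zone's link field
is reset. A self-contained sketch of the idea, with an illustrative sentinel:

    #include <cassert>

    struct Zone {
        static Zone* const NotOnList;  // sentinel: "not on any list"
        Zone* listNext_;
        Zone() : listNext_(NotOnList) {}
    };
    Zone* const Zone::NotOnList = reinterpret_cast<Zone*>(1);

    struct ZoneList {
        Zone* head = nullptr;
        Zone* tail = nullptr;

        bool isEmpty() const { return !head; }
        Zone* front() const { return head; }

        void append(Zone* zone) {
            assert(zone->listNext_ == Zone::NotOnList);  // must not be listed
            zone->listNext_ = nullptr;
            if (tail)
                tail->listNext_ = zone;
            else
                head = zone;
            tail = zone;
        }

        // Mirrors ZoneList::removeFront() above.
        void removeFront() {
            Zone* front = head;
            head = head->listNext_;
            if (!head)
                tail = nullptr;
            front->listNext_ = Zone::NotOnList;
        }

        // Popping (rather than just nulling head) restores every zone's
        // listNext_ to the sentinel.
        void clear() {
            while (!isEmpty())
                removeFront();
        }
    };

    int main() {
        Zone a, b;
        ZoneList list;
        list.append(&a);
        list.append(&b);
        list.clear();
        assert(list.isEmpty() && a.listNext_ == Zone::NotOnList);
    }
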
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1101,16 +1101,18 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     freeLifoAlloc(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     zoneGroupIndex(0),
     zoneGroups(nullptr),
     currentZoneGroup(nullptr),
     sweepZone(nullptr),
     sweepKindIndex(0),
     abortSweepAfterCurrentGroup(false),
     arenasAllocatedDuringSweep(nullptr),
+    startedCompacting(false),
+    relocatedArenasToRelease(nullptr),
 #ifdef JS_GC_MARKING_VALIDATION
     markingValidator(nullptr),
 #endif
     interFrameGC(false),
     sliceBudget(SliceBudget::Unlimited),
     incrementalAllowed(true),
     generationalDisabled(0),
     compactingEnabled(true),
@@ -1129,17 +1131,16 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     validate(true),
     fullCompartmentChecks(false),
     mallocBytesUntilGC(0),
     mallocGCTriggered(false),
     alwaysPreserveCode(false),
 #ifdef DEBUG
     inUnsafeRegion(0),
     noGCOrAllocationCheck(0),
-    relocatedArenasToRelease(nullptr),
 #endif
     lock(nullptr),
     lockOwner(nullptr),
     allocTask(rt, emptyChunks_),
     helperState(rt)
 {
     setGCMode(JSGC_MODE_GLOBAL);
 }
@@ -2126,16 +2127,26 @@ RelocateArena(ArenaHeader *aheader)
     for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
         if (!RelocateCell(zone, i.getCell(), thingKind, thingSize)) {
             // This can only happen in zeal mode or debug builds as we don't
             // otherwise relocate more cells than we have existing free space
             // for.
             CrashAtUnhandlableOOM("Could not allocate new arena while compacting");
         }
     }
+
+#ifdef DEBUG
+    for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
+        TenuredCell *src = i.getCell();
+        MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
+        TenuredCell *dest = Forwarded(src);
+        MOZ_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
+        MOZ_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
+    }
+#endif
 }
 
 /*
  * Relocate all arenas identified by pickArenasToRelocate: for each arena,
  * relocate each cell within it, then add it to a list of relocated arenas.
  */
 ArenaHeader *
 ArenaList::relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated,
@@ -2223,35 +2234,47 @@ ArenaLists::relocateArenas(ArenaHeader *
     // AutoCopyFreeListToArenasForGC doesn't complain that the free lists are
     // different now.
     purge();
     checkEmptyFreeLists();
 
     return true;
 }
 
-ArenaHeader *
-GCRuntime::relocateArenas(JS::gcreason::Reason reason)
+bool
+GCRuntime::relocateArenas(Zone *zone, JS::gcreason::Reason reason)
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
 
-    ArenaHeader *relocatedList = nullptr;
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        MOZ_ASSERT(zone->isGCFinished());
-        MOZ_ASSERT(!zone->isPreservingCode());
-
-        if (CanRelocateZone(rt, zone)) {
-            jit::StopAllOffThreadCompilations(zone);
-            if (zone->arenas.relocateArenas(relocatedList, reason, stats))
-                zone->setGCState(Zone::Compact);
+    MOZ_ASSERT(!zone->isPreservingCode());
+    MOZ_ASSERT(CanRelocateZone(rt, zone));
+
+    jit::StopAllOffThreadCompilations(zone);
+
+    if (!zone->arenas.relocateArenas(relocatedArenasToRelease, reason, stats))
+        return false;
+
+#ifdef DEBUG
+    // Check that we did as much compaction as we should have. There
+    // should always be less than one arena's worth of free cells.
+    for (size_t i = 0; i < FINALIZE_LIMIT; i++) {
+        size_t thingsPerArena = Arena::thingsPerArena(Arena::thingSize(AllocKind(i)));
+        if (CanRelocateAllocKind(AllocKind(i))) {
+            ArenaList &al = zone->arenas.arenaLists[i];
+            size_t freeCells = 0;
+            for (ArenaHeader *arena = al.arenaAfterCursor(); arena; arena = arena->next)
+                freeCells += arena->countFreeCells();
+            MOZ_ASSERT(freeCells < thingsPerArena);
         }
     }
-
-    return relocatedList;
-}
+#endif
+
+    return true;
+}
+
 
 void
 MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     TenuredCell *thing = TenuredCell::fromPointer(*thingp);
 
     // Currently we only relocate objects.
     if (kind != JSTRACE_OBJECT) {
@@ -2376,25 +2399,25 @@ namespace gc {
 
 struct ArenasToUpdate
 {
     enum KindsToUpdate {
         FOREGROUND = 1,
         BACKGROUND = 2,
         ALL = FOREGROUND | BACKGROUND
     };
-    ArenasToUpdate(JSRuntime *rt, KindsToUpdate kinds);
+    ArenasToUpdate(Zone *zone, KindsToUpdate kinds);
     bool done() { return initialized && arena == nullptr; }
     ArenaHeader* next(AutoLockHelperThreadState& lock);
     ArenaHeader *getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max);
 
   private:
     bool initialized;
     KindsToUpdate kinds;
-    GCZonesIter zone;    // Current zone to process, unless zone.done()
+    Zone *zone;          // Zone to process
     unsigned kind;       // Current alloc kind to process
     ArenaHeader *arena;  // Next arena to process
 
     bool shouldProcessKind(unsigned kind);
 };
 
 bool ArenasToUpdate::shouldProcessKind(unsigned kind)
 {
@@ -2408,19 +2431,20 @@ bool ArenasToUpdate::shouldProcessKind(u
     }
 
     if (js::gc::IsBackgroundFinalized(AllocKind(kind)))
         return (kinds & BACKGROUND) != 0;
     else
         return (kinds & FOREGROUND) != 0;
 }
 
-ArenasToUpdate::ArenasToUpdate(JSRuntime *rt, KindsToUpdate kinds)
-  : initialized(false), kinds(kinds), zone(rt, SkipAtoms)
-{
+ArenasToUpdate::ArenasToUpdate(Zone *zone, KindsToUpdate kinds)
+  : initialized(false), kinds(kinds), zone(zone)
+{
+    MOZ_ASSERT(zone->isGCCompacting());
     MOZ_ASSERT(kinds && !(kinds & ~ALL));
 }
 
 ArenaHeader *
 ArenasToUpdate::next(AutoLockHelperThreadState& lock)
 {
     // Find the next arena to update.
     //
@@ -2429,43 +2453,40 @@ ArenasToUpdate::next(AutoLockHelperThrea
     // normal way, returning the first arena found. In subsequent invocations we
     // jump directly into the body of the for loops just after the previous
     // return. All state is stored in class members and so preserved between
     // invocations.
 
     if (initialized) {
         MOZ_ASSERT(arena);
         MOZ_ASSERT(shouldProcessKind(kind));
-        MOZ_ASSERT(!zone.done());
+        MOZ_ASSERT(zone);
         goto resumePoint;
     }
 
     initialized = true;
-    for (; !zone.done(); zone.next()) {
-        if (zone->isGCCompacting()) {
-            for (kind = 0; kind < FINALIZE_LIMIT; ++kind) {
-                if (shouldProcessKind(kind)) {
-                    for (arena = zone.get()->arenas.getFirstArena(AllocKind(kind));
-                         arena;
-                         arena = arena->next)
-                    {
-                        return arena;
-                      resumePoint:;
-                    }
-                }
+    for (kind = 0; kind < FINALIZE_LIMIT; ++kind) {
+        if (shouldProcessKind(kind)) {
+            for (arena = zone->arenas.getFirstArena(AllocKind(kind));
+                 arena;
+                 arena = arena->next)
+            {
+                return arena;
+              resumePoint:;
             }
         }
     }
+    zone = nullptr;
     return nullptr;
 }
 
 ArenaHeader *
 ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned count)
 {
-    if (zone.done())
+    if (!zone)
         return nullptr;
 
     ArenaHeader *head = nullptr;
     ArenaHeader *tail = nullptr;
 
     for (unsigned i = 0; i < count; ++i) {
         ArenaHeader *arena = next(lock);
         if (!arena)
@@ -2541,29 +2562,29 @@ UpdateCellPointersTask::run()
         }
     }
 }
 
 } // namespace gc
 } // namespace js
 
 void
-GCRuntime::updateAllCellPointersParallel(MovingTracer *trc)
+GCRuntime::updateAllCellPointersParallel(MovingTracer *trc, Zone *zone)
 {
     AutoDisableProxyCheck noProxyCheck(rt); // These checks assert when run in parallel.
 
     const size_t minTasks = 2;
     const size_t maxTasks = 8;
     size_t targetTaskCount = HelperThreadState().cpuCount / 2;
     size_t taskCount = Min(Max(targetTaskCount, minTasks), maxTasks);
     UpdateCellPointersTask bgTasks[maxTasks];
     UpdateCellPointersTask fgTask;
 
-    ArenasToUpdate fgArenas(rt, ArenasToUpdate::FOREGROUND);
-    ArenasToUpdate bgArenas(rt, ArenasToUpdate::BACKGROUND);
+    ArenasToUpdate fgArenas(zone, ArenasToUpdate::FOREGROUND);
+    ArenasToUpdate bgArenas(zone, ArenasToUpdate::BACKGROUND);
 
     unsigned tasksStarted = 0;
     {
         AutoLockHelperThreadState lock;
         unsigned i;
         for (i = 0; i < taskCount && !bgArenas.done(); ++i) {
             bgTasks[i].init(rt, &bgArenas, lock);
             startTask(bgTasks[i], gcstats::PHASE_COMPACT_UPDATE_CELLS);
@@ -2578,148 +2599,149 @@ GCRuntime::updateAllCellPointersParallel
     {
         AutoLockHelperThreadState lock;
         for (unsigned i = 0; i < tasksStarted; ++i)
             joinTask(bgTasks[i], gcstats::PHASE_COMPACT_UPDATE_CELLS);
     }
 }
 
 void
-GCRuntime::updateAllCellPointersSerial(MovingTracer *trc)
+GCRuntime::updateAllCellPointersSerial(MovingTracer *trc, Zone *zone)
 {
     UpdateCellPointersTask task;
     {
         AutoLockHelperThreadState lock;
-        ArenasToUpdate allArenas(rt, ArenasToUpdate::ALL);
+        ArenasToUpdate allArenas(zone, ArenasToUpdate::ALL);
         task.init(rt, &allArenas, lock);
     }
     task.runFromMainThread(rt);
 }
 
 /*
  * Update pointers to relocated cells by doing a full heap traversal and sweep.
  *
  * The latter is necessary to update weak references which are not marked as
  * part of the traversal.
  */
 void
-GCRuntime::updatePointersToRelocatedCells()
-{
+GCRuntime::updatePointersToRelocatedCells(Zone *zone)
+{
+    MOZ_ASSERT(zone->isGCCompacting());
     MOZ_ASSERT(rt->currentThreadHasExclusiveAccess());
 
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_UPDATE);
     MovingTracer trc(rt);
 
     // Fixup compartment global pointers as these get accessed during marking.
-    for (GCCompartmentsIter comp(rt); !comp.done(); comp.next())
+    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
         comp->fixupAfterMovingGC();
 
     // Fixup cross compartment wrappers as we assert the existence of wrappers in the map.
     for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+        // Sweep the wrapper map to update its pointers.
         comp->sweepCrossCompartmentWrappers();
+
+        // Mark the contents of the map to update each wrapper's cross compartment pointer.
         comp->markCrossCompartmentWrappers(&trc);
     }
 
     // Iterate through all cells that can contain JSObject pointers to update
     // them. Since updating each cell is independent we try to parallelize this
     // as much as possible.
     if (CanUseExtraThreads())
-        updateAllCellPointersParallel(&trc);
+        updateAllCellPointersParallel(&trc, zone);
     else
-        updateAllCellPointersSerial(&trc);
+        updateAllCellPointersSerial(&trc, zone);
 
     // Mark roots to update them.
     {
         markRuntime(&trc, MarkRuntime);
 
         gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTS);
         Debugger::markAll(&trc);
         Debugger::markAllCrossCompartmentEdges(&trc);
 
-        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
             WeakMapBase::markAll(c, &trc);
             if (c->watchpointMap)
                 c->watchpointMap->markAll(&trc);
         }
 
         // Mark all gray roots, making sure we call the trace callback to get the
         // current set.
         if (JSTraceDataOp op = grayRootTracer.op)
             (*op)(&trc, grayRootTracer.data);
     }
 
     // Sweep everything to fix up weak pointers
     WatchpointMap::sweepAll(rt);
     Debugger::sweepAll(rt->defaultFreeOp());
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        if (zone->isGCCompacting())
-            rt->gc.sweepZoneAfterCompacting(zone);
-    }
+    rt->gc.sweepZoneAfterCompacting(zone);
 
     // Type inference may put more blocks here to free.
     freeLifoAlloc.freeAll();
 
     // Clear runtime caches that can contain cell pointers.
     // TODO: Should possibly just call purgeRuntime() here.
     rt->newObjectCache.purge();
     rt->nativeIterCache.purge();
 
     // Call callbacks to get the rest of the system to fixup other untraced pointers.
     callWeakPointerCallbacks();
 }
 
 #ifdef DEBUG
 void
-GCRuntime::protectRelocatedArenas(ArenaHeader *relocatedList)
-{
-    for (ArenaHeader* arena = relocatedList, *next; arena; arena = next) {
+GCRuntime::protectRelocatedArenas()
+{
+    for (ArenaHeader* arena = relocatedArenasToRelease, *next; arena; arena = next) {
         next = arena->next;
 #if defined(XP_WIN)
         DWORD oldProtect;
         if (!VirtualProtect(arena, ArenaSize, PAGE_NOACCESS, &oldProtect))
             MOZ_CRASH();
 #else  // assume Unix
         if (mprotect(arena, ArenaSize, PROT_NONE))
             MOZ_CRASH();
 #endif
     }
 }
 
 void
-GCRuntime::unprotectRelocatedArenas(ArenaHeader *relocatedList)
-{
-    for (ArenaHeader* arena = relocatedList; arena; arena = arena->next) {
+GCRuntime::unprotectRelocatedArenas()
+{
+    for (ArenaHeader* arena = relocatedArenasToRelease; arena; arena = arena->next) {
 #if defined(XP_WIN)
         DWORD oldProtect;
         if (!VirtualProtect(arena, ArenaSize, PAGE_READWRITE, &oldProtect))
             MOZ_CRASH();
 #else  // assume Unix
         if (mprotect(arena, ArenaSize, PROT_READ | PROT_WRITE))
             MOZ_CRASH();
 #endif
     }
 }
 #endif
 
 void
-GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
+GCRuntime::releaseRelocatedArenas()
 {
     AutoLockGC lock(rt);
-    releaseRelocatedArenasWithoutUnlocking(relocatedList, lock);
+    releaseRelocatedArenasWithoutUnlocking(lock);
     expireChunksAndArenas(true, lock);
 }
 
 void
-GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, const AutoLockGC &lock)
+GCRuntime::releaseRelocatedArenasWithoutUnlocking(const AutoLockGC &lock)
 {
     // Release the relocated arenas, now containing only forwarding pointers
     unsigned count = 0;
-    while (relocatedList) {
-        ArenaHeader *aheader = relocatedList;
-        relocatedList = relocatedList->next;
+    while (relocatedArenasToRelease) {
+        ArenaHeader *aheader = relocatedArenasToRelease;
+        relocatedArenasToRelease = relocatedArenasToRelease->next;
 
         // Clear the mark bits
         aheader->unmarkAll();
 
         // Mark arena as empty
         AllocKind thingKind = aheader->getAllocKind();
         size_t thingSize = aheader->getThingSize();
         Arena *arena = aheader->getArena();
@@ -2739,19 +2761,18 @@ GCRuntime::releaseRelocatedArenasWithout
 
 void
 GCRuntime::releaseHeldRelocatedArenas()
 {
 #ifdef DEBUG
     // In debug mode we don't release relocated arenas straight away.  Instead
     // we protect them and hold onto them until the next GC sweep phase to catch
     // any pointers to them that didn't get forwarded.
-    unprotectRelocatedArenas(relocatedArenasToRelease);
-    releaseRelocatedArenas(relocatedArenasToRelease);
-    relocatedArenasToRelease = nullptr;
+    unprotectRelocatedArenas();
+    releaseRelocatedArenas();
 #endif
 }
 
 void
 ReleaseArenaList(JSRuntime *rt, ArenaHeader *aheader, const AutoLockGC &lock)
 {
     ArenaHeader *next;
     for (; aheader; aheader = next) {
@@ -3402,17 +3423,16 @@ GCRuntime::sweepBackgroundThings(ZoneLis
 }
 
 void
 GCRuntime::assertBackgroundSweepingFinished()
 {
 #ifdef DEBUG
     MOZ_ASSERT(backgroundSweepZones.isEmpty());
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        MOZ_ASSERT(!zone->isOnList());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
             MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
             MOZ_ASSERT(zone->arenas.doneBackgroundFinalize(AllocKind(i)));
         }
     }
     MOZ_ASSERT(freeLifoAlloc.computedSizeOfExcludingThis() == 0);
 #endif
 }
@@ -5470,108 +5490,107 @@ GCRuntime::endSweepPhase(bool lastGC)
             if (e.front().key().kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value().unbarrieredGet().toObject());
         }
     }
 #endif
 }
 
 GCRuntime::IncrementalProgress
-GCRuntime::compactPhase(JS::gcreason::Reason reason)
+GCRuntime::beginCompactPhase()
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
 
     if (isIncremental) {
         // Poll for end of background sweeping
         AutoLockGC lock(rt);
         if (isBackgroundSweeping())
             return NotFinished;
     } else {
         waitBackgroundSweepEnd();
     }
 
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
+    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (CanRelocateZone(rt, zone))
+            zonesToMaybeCompact.append(zone);
+    }
+
+    MOZ_ASSERT(!relocatedArenasToRelease);
+    startedCompacting = true;
+    return Finished;
+}
+
+GCRuntime::IncrementalProgress
+GCRuntime::compactPhase(JS::gcreason::Reason reason)
+{
     MOZ_ASSERT(rt->gc.nursery.isEmpty());
     assertBackgroundSweepingFinished();
-
-    ArenaHeader *relocatedList = relocateArenas(reason);
-    if (relocatedList)
-        updatePointersToRelocatedCells();
-
-#ifdef DEBUG
-    for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) {
-        for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
-            TenuredCell *src = i.getCell();
-            MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
-            TenuredCell *dest = Forwarded(src);
-            MOZ_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
-            MOZ_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
+    MOZ_ASSERT(startedCompacting);
+
+    gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
+
+    while (!zonesToMaybeCompact.isEmpty()) {
+        Zone *zone = zonesToMaybeCompact.front();
+        MOZ_ASSERT(zone->isGCFinished());
+        if (relocateArenas(zone, reason)) {
+            zone->setGCState(Zone::Compact);
+            updatePointersToRelocatedCells(zone);
+            zone->setGCState(Zone::Finished);
         }
-    }
-#endif
-
-    // Release the relocated arenas, or in debug builds queue them to be
-    // released until the start of the next GC unless this is the last GC or we
-    // are doing a last ditch GC.
-#ifndef DEBUG
-    releaseRelocatedArenas(relocatedList);
-#else
-    if (reason == JS::gcreason::DESTROY_RUNTIME || reason == JS::gcreason::LAST_DITCH) {
-        releaseRelocatedArenas(relocatedList);
-    } else {
-        MOZ_ASSERT(!relocatedArenasToRelease);
-        protectRelocatedArenas(relocatedList);
-        relocatedArenasToRelease = relocatedList;
-    }
-#endif
+        zonesToMaybeCompact.removeFront();
+    }
 
 #ifdef DEBUG
     CheckHashTablesAfterMovingGC(rt);
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        if (zone->isGCCompacting()) {
-            MOZ_ASSERT(!zone->isPreservingCode());
-            zone->arenas.checkEmptyFreeLists();
-
-            // Check that we did as much compaction as we should have. There
-            // should always be less than one arena's worth of free cells.
-            for (size_t i = 0; i < FINALIZE_LIMIT; i++) {
-                size_t thingsPerArena = Arena::thingsPerArena(Arena::thingSize(AllocKind(i)));
-                if (CanRelocateAllocKind(AllocKind(i))) {
-                    ArenaList &al = zone->arenas.arenaLists[i];
-                    size_t freeCells = 0;
-                    for (ArenaHeader *arena = al.arenaAfterCursor(); arena; arena = arena->next)
-                        freeCells += arena->countFreeCells();
-                    MOZ_ASSERT(freeCells < thingsPerArena);
-                }
-            }
-        }
-    }
 #endif
-    return Finished;
+
+    return zonesToMaybeCompact.isEmpty() ? Finished : NotFinished;
+}
+
+void
+GCRuntime::endCompactPhase(JS::gcreason::Reason reason)
+{
+    // Release the relocated arenas, or in debug builds queue them to be
+    // released at the start of the next GC unless this is the last GC or we are
+    // doing a last ditch GC.
+#ifndef DEBUG
+    releaseRelocatedArenas();
+#else
+    if (reason == JS::gcreason::DESTROY_RUNTIME || reason == JS::gcreason::LAST_DITCH)
+        releaseRelocatedArenas();
+    else
+        protectRelocatedArenas();
+#endif
+
+    startedCompacting = false;
 }
 
 void
 GCRuntime::finishCollection(JS::gcreason::Reason reason)
 {
     MOZ_ASSERT(marker.isDrained());
     marker.stop();
 
     uint64_t currentTime = PRMJ_Now();
     schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables);
 
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isCollecting()) {
-            MOZ_ASSERT(zone->isGCFinished() || zone->isGCCompacting());
+            MOZ_ASSERT(zone->isGCFinished());
             zone->setGCState(Zone::NoGC);
             zone->active = false;
         }
 
         MOZ_ASSERT(!zone->isCollecting());
         MOZ_ASSERT(!zone->wasGCStarted());
     }
 
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
+
     if (invocationKind == GC_SHRINK) {
         // Ensure excess chunks are returned to the system and free arenas
         // decommitted.
         shrinkBuffers();
     }
 
     lastGCTime = currentTime;
 
@@ -5716,37 +5735,42 @@ GCRuntime::resetIncrementalGC(const char
 
       case COMPACT: {
         {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
             rt->gc.waitBackgroundSweepOrAllocEnd();
         }
 
         bool wasCompacting = isCompacting;
-        isCompacting = false;
+
+        isCompacting = true;
+        startedCompacting = true;
+        zonesToMaybeCompact.clear();
 
         SliceBudget budget;
         incrementalCollectSlice(budget, JS::gcreason::RESET);
 
         isCompacting = wasCompacting;
         break;
       }
 
       default:
         MOZ_CRASH("Invalid incremental GC state");
     }
 
     stats.reset(reason);
 
 #ifdef DEBUG
+    assertBackgroundSweepingFinished();
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+        MOZ_ASSERT(!zone->isCollecting());
         MOZ_ASSERT(!zone->needsIncrementalBarrier());
-        for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
-            MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
-    }
+        MOZ_ASSERT(!zone->isOnList());
+    }
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
 #endif
 }
 
 namespace {
 
 class AutoGCSlice {
   public:
     explicit AutoGCSlice(JSRuntime *rt);
@@ -5934,24 +5958,32 @@ GCRuntime::incrementalCollectSlice(Slice
 
       case SWEEP:
         if (sweepPhase(budget) == NotFinished)
             break;
 
         endSweepPhase(lastGC);
 
         incrementalState = COMPACT;
+        MOZ_ASSERT(!startedCompacting);
 
         /* Yield before compacting since it is not incremental. */
         if (isCompacting && isIncremental)
             break;
 
       case COMPACT:
-        if (isCompacting && compactPhase(reason) == NotFinished)
-            break;
+        if (isCompacting) {
+            if (!startedCompacting && beginCompactPhase() == NotFinished)
+                break;
+
+            if (compactPhase(reason) == NotFinished)
+                break;
+
+            endCompactPhase(reason);
+        }
 
         finishCollection(reason);
 
         incrementalState = NO_INCREMENTAL;
         break;
 
       default:
         MOZ_ASSERT(false);
@@ -6401,22 +6433,21 @@ GCRuntime::onOutOfMallocMemory()
 
     AutoLockGC lock(rt);
     onOutOfMallocMemory(lock);
 }
 
 void
 GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
 {
+#ifdef DEBUG
     // Release any relocated arenas we may be holding on to, without releasing
     // the GC lock.
-#ifdef DEBUG
-    unprotectRelocatedArenas(relocatedArenasToRelease);
-    releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
-    relocatedArenasToRelease = nullptr;
+    unprotectRelocatedArenas();
+    releaseRelocatedArenasWithoutUnlocking(lock);
 #endif
 
     // Throw away any excess chunks we have lying around.
     freeEmptyChunks(rt, lock);
 
     // Immediately decommit as many arenas as possible in the hopes that this
     // might let the OS scrape together enough pages to satisfy the failing
     // malloc request.
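
One detail worth calling out from the jsgc.cpp changes: ArenasToUpdate::next()
is a hand-rolled coroutine. All iteration state lives in data members, and
each re-entry jumps with goto to a label placed just after the previous
return, resuming the nested loops in place. A standalone illustration of the
same resume-via-goto pattern (hypothetical names, not patch code):

    #include <cstdio>

    // All loop state is stored in members, so a goto back into the loop body
    // resumes exactly where the previous call returned. Jumping into the loops
    // is legal here because no variable declarations are bypassed.
    class PairGenerator {
        bool initialized = false;
        int i = 0;
        int j = 0;

      public:
        // Yields 0, 1, 10, 11, then -1 from every later call.
        int next() {
            if (initialized)
                goto resumePoint;
            initialized = true;
            for (i = 0; i < 2; ++i) {
                for (j = 0; j < 2; ++j) {
                    return i * 10 + j;
                  resumePoint:;
                }
            }
            return -1;
        }
    };

    int main() {
        PairGenerator gen;
        for (int k = 0; k < 5; ++k)
            std::printf("%d\n", gen.next());  // prints 0, 1, 10, 11, -1
    }
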
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1387,16 +1387,17 @@ class ZoneList
     ~ZoneList();
 
     bool isEmpty() const;
     Zone *front() const;
 
     void append(Zone *zone);
     void transferFrom(ZoneList &other);
     void removeFront();
+    void clear();
 
   private:
     explicit ZoneList(Zone *singleZone);
     void check() const;
 
     ZoneList(const ZoneList &other) = delete;
     ZoneList &operator=(const ZoneList &other) = delete;
 };
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2110,26 +2110,34 @@ Debugger::markCrossCompartmentEdges(JSTr
  * We must scan all Debugger objects regardless of whether they *currently* have
  * any debuggees in a compartment being GC'd, because the WeakMap entries
  * persist even when debuggees are removed.
  *
  * This happens during the initial mark phase, not iterative marking, because
  * all the edges being reported here are strong references.
  */
 /* static */ void
-Debugger::markAllCrossCompartmentEdges(JSTracer *trc)
+Debugger::markIncomingCrossCompartmentEdges(JSTracer *trc)
 {
     JSRuntime *rt = trc->runtime();
 
     for (Debugger *dbg = rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext()) {
         if (!dbg->object->zone()->isCollecting())
             dbg->markCrossCompartmentEdges(trc);
     }
 }
 
+/* static */ void
+Debugger::markAllCrossCompartmentEdges(JSTracer *trc)
+{
+    JSRuntime *rt = trc->runtime();
+    for (Debugger *dbg = rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext())
+        dbg->markCrossCompartmentEdges(trc);
+}
+
 /*
  * This method has two tasks:
  *   1. Mark Debugger objects that are unreachable except for debugger hooks that
  *      may yet be called.
  *   2. Mark breakpoint handlers.
  *
  * This happens during the iterative part of the GC mark phase. This method
  * returns true if it has to mark anything; GC calls it repeatedly until it
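
The Debugger changes split edge marking in two: ordinary GC marking keeps the
cheaper incoming-only variant (debuggers in zones that are themselves being
collected get their edges traced through the normal marking path), while
compaction's pointer fix-up switches to markAllCrossCompartmentEdges because
any edge may now point at a moved cell. A reduced sketch of the distinction,
with stand-in types:

    #include <vector>

    // Stand-ins: just enough structure to show which debuggers each pass visits.
    struct Zone { bool collecting = false; };
    struct Debugger {
        Zone* zone;
        void markCrossCompartmentEdges() { /* trace this debugger's edges */ }
    };

    // Normal marking: debuggers in collecting zones are reached through the
    // regular zone marking path, so only the others need an explicit scan.
    void markIncomingCrossCompartmentEdges(std::vector<Debugger*>& debuggers) {
        for (Debugger* dbg : debuggers) {
            if (!dbg->zone->collecting)
                dbg->markCrossCompartmentEdges();
        }
    }

    // Compacting fix-up: every debugger may hold pointers into a moved zone,
    // so none can be skipped.
    void markAllCrossCompartmentEdges(std::vector<Debugger*>& debuggers) {
        for (Debugger* dbg : debuggers)
            dbg->markCrossCompartmentEdges();
    }
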
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -543,16 +543,17 @@ class Debugger : private mozilla::Linked
      *       - it has a debugger hook installed
      *       - it has a breakpoint set on a live script
      *       - it has a watchpoint set on a live object.
      *
      * Debugger::markAllIteratively handles the last case. If it finds any
      * Debugger objects that are definitely live but not yet marked, it marks
      * them and returns true. If not, it returns false.
      */
+    static void markIncomingCrossCompartmentEdges(JSTracer *tracer);
     static void markAllCrossCompartmentEdges(JSTracer *tracer);
     static bool markAllIteratively(GCMarker *trc);
     static void markAll(JSTracer *trc);
     static void sweepAll(FreeOp *fop);
     static void detachAllDebuggersFromGlobal(FreeOp *fop, GlobalObject *global);
     static void findCompartmentEdges(JS::Zone *v, gc::ComponentFinder<JS::Zone> &finder);
 
     /*