Bug 1250634 - Part 1: Refactor FreeSpan management to be less indirect and confusing. r=terrence, r=jandem
author     Emanuel Hoogeveen <emanuel.hoogeveen@gmail.com>
date       Mon, 29 Feb 2016 06:50:00 -0500
changeset  286125  5e76a5e6b927dd54878494e949fd038c2555293c
parent     286124  1783f15418c50cdaacde593f1a321b67abe16e85
child      286126  1194660fd742c3c909285e388a38997cb296ed83
push id    30039
push user  cbook@mozilla.com
push date  Tue, 01 Mar 2016 11:02:11 +0000
reviewers  terrence, jandem
bugs       1250634
milestone  47.0a1
js/src/gc/Allocator.cpp
js/src/gc/GCInternals.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/MacroAssembler.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -267,17 +267,17 @@ GCRuntime::tryNewTenuredThing(ExclusiveC
     checkIncrementalZoneState(cx, t);
     TraceTenuredAlloc(t, kind);
     return t;
 }
 
 /* static */ void*
 GCRuntime::refillFreeListFromAnyThread(ExclusiveContext* cx, AllocKind thingKind, size_t thingSize)
 {
-    MOZ_ASSERT(cx->arenas()->freeLists[thingKind].isEmpty());
+    cx->arenas()->checkEmptyFreeList(thingKind);
 
     if (cx->isJSContext())
         return refillFreeListFromMainThread(cx->asJSContext(), thingKind, thingSize);
 
     return refillFreeListOffMainThread(cx, thingKind);
 }
 
 /* static */ void*
@@ -323,17 +323,17 @@ ArenaLists::allocateFromArena(JS::Zone* 
         maybeLock.emplace(rt);
 
     ArenaList& al = arenaLists[thingKind];
     ArenaHeader* aheader = al.takeNextArena();
     if (aheader) {
         // Empty arenas should be immediately freed.
         MOZ_ASSERT(!aheader->isEmpty());
 
-        return allocateFromArenaInner<HasFreeThings>(zone, aheader, thingKind);
+        return allocateFromArenaInner(zone, aheader, thingKind);
     }
 
     // Parallel threads have their own ArenaLists, but chunks are shared;
     // if we haven't already, take the GC lock now to avoid racing.
     if (maybeLock.isNothing())
         maybeLock.emplace(rt);
 
     Chunk* chunk = rt->gc.pickChunk(maybeLock.ref(), maybeStartBGAlloc);
@@ -343,51 +343,38 @@ ArenaLists::allocateFromArena(JS::Zone* 
     // Although our chunk should definitely have enough space for another arena,
     // there are other valid reasons why Chunk::allocateArena() may fail.
     aheader = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
     if (!aheader)
         return nullptr;
 
     MOZ_ASSERT(!maybeLock->wasUnlocked());
     MOZ_ASSERT(al.isCursorAtEnd());
-    al.insertAtCursor(aheader);
+    al.insertBeforeCursor(aheader);
 
-    return allocateFromArenaInner<IsEmpty>(zone, aheader, thingKind);
+    return allocateFromArenaInner(zone, aheader, thingKind);
 }
 
-template <ArenaLists::ArenaAllocMode hasFreeThings>
-TenuredCell*
+inline TenuredCell*
 ArenaLists::allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind)
 {
     size_t thingSize = Arena::thingSize(kind);
 
-    FreeSpan span;
-    if (hasFreeThings) {
-        MOZ_ASSERT(aheader->hasFreeThings());
-        span = aheader->getFirstFreeSpan();
-        aheader->setAsFullyUsed();
-    } else {
-        MOZ_ASSERT(!aheader->hasFreeThings());
-        Arena* arena = aheader->getArena();
-        span.initFinal(arena->thingsStart(kind), arena->thingsEnd() - thingSize, thingSize);
-    }
-    freeLists[kind].setHead(&span);
+    freeLists[kind] = aheader;
 
     if (MOZ_UNLIKELY(zone->wasGCStarted()))
         zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, aheader);
-    TenuredCell* thing = freeLists[kind].allocate(thingSize);
+    TenuredCell* thing = aheader->allocate(thingSize);
     MOZ_ASSERT(thing); // This allocation is infallible.
     return thing;
 }
 
 void
 GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, ArenaHeader* arena)
 {
     if (zone->needsIncrementalBarrier()) {
         arena->allocatedDuringIncremental = true;
         marker.delayMarkingArena(arena);
     } else if (zone->isGCSweeping()) {
         arena->setNextAllocDuringSweep(arenasAllocatedDuringSweep);
         arenasAllocatedDuringSweep = arena;
     }
 }
-
-
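
With the head span now stored directly in each ArenaHeader, the templated
allocateFromArenaInner<HasFreeThings|IsEmpty> pair collapses into the single
function above, and the mutator's fast path reduces to one indirection. A
minimal sketch of that fast path, using this patch's names and ignoring
access control (not code from the patch):

    // freeLists[kind] is never null: it points either at a real arena or at
    // ArenaLists::placeholder (see jsgc.h below), whose span is empty, so
    // allocate() returns nullptr and we fall back to the refill path.
    TenuredCell* t = arenas.freeLists[kind]->allocate(thingSize);
    if (!t)
        t = static_cast<TenuredCell*>(
                GCRuntime::refillFreeListFromAnyThread(cx, kind, thingSize));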
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -14,26 +14,16 @@
 
 #include "gc/Zone.h"
 #include "vm/HelperThreads.h"
 #include "vm/Runtime.h"
 
 namespace js {
 namespace gc {
 
-class MOZ_RAII AutoCopyFreeListToArenas
-{
-    JSRuntime* runtime;
-    ZoneSelector selector;
-
-  public:
-    AutoCopyFreeListToArenas(JSRuntime* rt, ZoneSelector selector);
-    ~AutoCopyFreeListToArenas();
-};
-
 struct MOZ_RAII AutoFinishGC
 {
     explicit AutoFinishGC(JSRuntime* rt);
 };
 
 /*
 * This class should be used by any code that needs exclusive access to the
  * heap in order to trace through it...
@@ -55,17 +45,16 @@ class MOZ_RAII AutoTraceSession
     JS::HeapState prevState;
     AutoSPSEntry pseudoFrame;
 };
 
 struct MOZ_RAII AutoPrepareForTracing
 {
     AutoFinishGC finish;
     AutoTraceSession session;
-    AutoCopyFreeListToArenas copy;
 
     AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector);
 };
 
 class IncrementalSafety
 {
     const char* reason_;
 
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -318,292 +318,85 @@ const size_t ArenaBitmapWords = ArenaBit
  *   span, and |last| is the address of the last free thing in the span.
  *   Furthermore, the memory pointed to by |last| holds a FreeSpan structure
  *   that points to the next span (which may be empty); this works because
  *   sizeof(FreeSpan) is less than the smallest thingSize.
  */
 class FreeSpan
 {
     friend class ArenaCellIterImpl;
-    friend class CompactFreeSpan;
-    friend class FreeList;
+    friend struct ArenaHeader;
 
-    uintptr_t   first;
-    uintptr_t   last;
+    uint16_t first;
+    uint16_t last;
 
   public:
     // This inits just |first| and |last|; if the span is non-empty it doesn't
     // do anything with the next span stored at |last|.
-    void initBoundsUnchecked(uintptr_t first, uintptr_t last) {
-        this->first = first;
-        this->last = last;
-    }
-
-    void initBounds(uintptr_t first, uintptr_t last) {
-        initBoundsUnchecked(first, last);
-        checkSpan();
+    void initBounds(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
+        checkRange(firstArg, lastArg, aheader);
+        first = firstArg;
+        last = lastArg;
     }
 
     void initAsEmpty() {
         first = 0;
         last = 0;
-        MOZ_ASSERT(isEmpty());
     }
 
     // This sets |first| and |last|, and also sets the next span stored at
     // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
     // an empty span.)
-    void initFinal(uintptr_t firstArg, uintptr_t lastArg, size_t thingSize) {
-        first = firstArg;
-        last = lastArg;
-        FreeSpan* lastSpan = reinterpret_cast<FreeSpan*>(last);
-        lastSpan->initAsEmpty();
-        MOZ_ASSERT(!isEmpty());
-        checkSpan(thingSize);
+    void initFinal(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
+        initBounds(firstArg, lastArg, aheader);
+        FreeSpan* last = nextSpanUnchecked(aheader);
+        last->initAsEmpty();
+        checkSpan(aheader);
     }
 
-    bool isEmpty() const {
-        checkSpan();
+    bool isEmpty(const ArenaHeader* aheader) const {
+        checkSpan(aheader);
         return !first;
     }
 
-    static size_t offsetOfFirst() {
-        return offsetof(FreeSpan, first);
-    }
-
-    static size_t offsetOfLast() {
-        return offsetof(FreeSpan, last);
-    }
-
     // Like nextSpan(), but no checking of the following span is done.
-    FreeSpan* nextSpanUnchecked() const {
-        return reinterpret_cast<FreeSpan*>(last);
-    }
-
-    const FreeSpan* nextSpan() const {
-        MOZ_ASSERT(!isEmpty());
-        return nextSpanUnchecked();
-    }
-
-    uintptr_t arenaAddress() const {
-        MOZ_ASSERT(!isEmpty());
-        return first & ~ArenaMask;
-    }
-
-#ifdef DEBUG
-    bool isWithinArena(uintptr_t arenaAddr) const {
-        MOZ_ASSERT(!(arenaAddr & ArenaMask));
-        MOZ_ASSERT(!isEmpty());
-        return arenaAddress() == arenaAddr;
-    }
-#endif
-
-    size_t length(size_t thingSize) const {
-        checkSpan();
-        MOZ_ASSERT((last - first) % thingSize == 0);
-        return (last - first) / thingSize + 1;
-    }
-
-    bool inFreeList(uintptr_t thing) {
-        for (const FreeSpan* span = this; !span->isEmpty(); span = span->nextSpan()) {
-            /* If the thing comes before the current span, it's not free. */
-            if (thing < span->first)
-                return false;
-
-            /* If we find it before the end of the span, it's free. */
-            if (thing <= span->last)
-                return true;
-        }
-        return false;
-    }
-
-  private:
-    // Some callers can pass in |thingSize| easily, and we can do stronger
-    // checking in that case.
-    void checkSpan(size_t thingSize = 0) const {
-#ifdef DEBUG
-        if (!first || !last) {
-            MOZ_ASSERT(!first && !last);
-            // An empty span.
-            return;
-        }
-
-        // |first| and |last| must be ordered appropriately, belong to the same
-        // arena, and be suitably aligned.
-        MOZ_ASSERT(first <= last);
-        MOZ_ASSERT((first & ~ArenaMask) == (last & ~ArenaMask));
-        MOZ_ASSERT((last - first) % (thingSize ? thingSize : CellSize) == 0);
-
-        // If there's a following span, it must be from the same arena, it must
-        // have a higher address, and the gap must be at least 2*thingSize.
-        FreeSpan* next = reinterpret_cast<FreeSpan*>(last);
-        if (next->first) {
-            MOZ_ASSERT(next->last);
-            MOZ_ASSERT((first & ~ArenaMask) == (next->first & ~ArenaMask));
-            MOZ_ASSERT(thingSize
-                       ? last + 2 * thingSize <= next->first
-                       : last < next->first);
-        }
-#endif
-    }
-};
-
-class CompactFreeSpan
-{
-    uint16_t firstOffset_;
-    uint16_t lastOffset_;
-
-  public:
-    CompactFreeSpan(size_t firstOffset, size_t lastOffset)
-      : firstOffset_(firstOffset)
-      , lastOffset_(lastOffset)
-    {}
-
-    void initAsEmpty() {
-        firstOffset_ = 0;
-        lastOffset_ = 0;
-    }
-
-    bool operator==(const CompactFreeSpan& other) const {
-        return firstOffset_ == other.firstOffset_ &&
-               lastOffset_  == other.lastOffset_;
+    FreeSpan* nextSpanUnchecked(const ArenaHeader* aheader) const {
+        MOZ_ASSERT(aheader && first);
+        return reinterpret_cast<FreeSpan*>(uintptr_t(aheader) + last);
     }
 
-    void compact(FreeSpan span) {
-        if (span.isEmpty()) {
-            initAsEmpty();
-        } else {
-            static_assert(ArenaShift < 16, "Check that we can pack offsets into uint16_t.");
-            uintptr_t arenaAddr = span.arenaAddress();
-            firstOffset_ = span.first - arenaAddr;
-            lastOffset_  = span.last  - arenaAddr;
-        }
-    }
-
-    bool isEmpty() const {
-        MOZ_ASSERT(!!firstOffset_ == !!lastOffset_);
-        return !firstOffset_;
-    }
-
-    FreeSpan decompact(uintptr_t arenaAddr) const {
-        MOZ_ASSERT(!(arenaAddr & ArenaMask));
-        FreeSpan decodedSpan;
-        if (isEmpty()) {
-            decodedSpan.initAsEmpty();
-        } else {
-            MOZ_ASSERT(firstOffset_ <= lastOffset_);
-            MOZ_ASSERT(lastOffset_ < ArenaSize);
-            decodedSpan.initBounds(arenaAddr + firstOffset_, arenaAddr + lastOffset_);
-        }
-        return decodedSpan;
-    }
-};
-
-class FreeList
-{
-    // Although |head| is private, it is exposed to the JITs via the
-    // offsetOf{First,Last}() and addressOfFirstLast() methods below.
-    // Therefore, any change in the representation of |head| will require
-    // updating the relevant JIT code.
-    FreeSpan head;
-
-  public:
-    FreeList() {}
-
-    static size_t offsetOfFirst() {
-        return offsetof(FreeList, head) + offsetof(FreeSpan, first);
-    }
-
-    static size_t offsetOfLast() {
-        return offsetof(FreeList, head) + offsetof(FreeSpan, last);
-    }
-
-    void* addressOfFirst() const {
-        return (void*)&head.first;
+    const FreeSpan* nextSpan(const ArenaHeader* aheader) const {
+        MOZ_ASSERT(!isEmpty(aheader));
+        return nextSpanUnchecked(aheader);
     }
 
-    void* addressOfLast() const {
-        return (void*)&head.last;
-    }
-
-    void initAsEmpty() {
-        head.initAsEmpty();
-    }
-
-    FreeSpan* getHead() { return &head; }
-    void setHead(FreeSpan* span) { head = *span; }
-
-    bool isEmpty() const {
-        return head.isEmpty();
-    }
-
-#ifdef DEBUG
-    uintptr_t arenaAddress() const {
-        MOZ_ASSERT(!isEmpty());
-        return head.arenaAddress();
-    }
-#endif
-
-    ArenaHeader* arenaHeader() const {
-        MOZ_ASSERT(!isEmpty());
-        return reinterpret_cast<ArenaHeader*>(head.arenaAddress());
-    }
-
-#ifdef DEBUG
-    bool isSameNonEmptySpan(const FreeSpan& another) const {
-        MOZ_ASSERT(!isEmpty());
-        MOZ_ASSERT(!another.isEmpty());
-        return head.first == another.first && head.last == another.last;
-    }
-#endif
-
-    MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
-        MOZ_ASSERT(thingSize % CellSize == 0);
-        head.checkSpan(thingSize);
-        uintptr_t thing = head.first;
-        if (thing < head.last) {
-            // We have two or more things in the free list head, so we can do a
-            // simple bump-allocate.
-            head.first = thing + thingSize;
-        } else if (MOZ_LIKELY(thing)) {
-            // We have one thing in the free list head. Use it, but first
-            // update the free list head to point to the subseqent span (which
-            // may be empty).
-            setHead(reinterpret_cast<FreeSpan*>(thing));
-        } else {
-            // The free list head is empty.
-            return nullptr;
-        }
-        head.checkSpan(thingSize);
-        JS_EXTRA_POISON(reinterpret_cast<void*>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
-        MemProfiler::SampleTenured(reinterpret_cast<void*>(thing), thingSize);
-        return reinterpret_cast<TenuredCell*>(thing);
-    }
+    inline void checkSpan(const ArenaHeader* aheader) const;
+    inline void checkRange(uintptr_t first, uintptr_t last, const ArenaHeader*) const;
 };
 
 /* Every arena has a header. */
 struct ArenaHeader
 {
-    friend struct FreeLists;
+    friend struct Arena;
+    friend class ArenaCellIterImpl;
 
     JS::Zone* zone;
 
     /*
      * ArenaHeader::next has two purposes: when unallocated, it points to the
      * next available Arena's header. When allocated, it points to the next
      * arena of the same size class and compartment.
      */
     ArenaHeader* next;
 
   private:
     /*
-     * The first span of free things in the arena. We encode it as a
-     * CompactFreeSpan rather than a FreeSpan to minimize the header size.
+     * The first span of free things in the arena.
      */
-    CompactFreeSpan firstFreeSpan;
+    FreeSpan firstFreeSpan;
 
     /*
      * One of AllocKind constants or AllocKind::LIMIT when the arena does not
      * contain any GC things and is on the list of empty arenas in the GC
      * chunk.
      *
      * We use 8 bits for the allocKind so the compiler can use byte-level memory
      * instructions to access it.
@@ -625,104 +418,144 @@ struct ArenaHeader
      *
      * When sweeping we keep track of which arenas have been allocated since the
      * end of the mark phase.  This allows us to tell whether a pointer to an
      * unmarked object is yet to be finalized or has already been reallocated.
      * We set the allocatedDuringIncremental flag for this and clear it at the
      * end of the sweep phase.
      *
      * To minimize the ArenaHeader size we record the next linkage as
-     * arenaAddress() >> ArenaShift and pack it with the allocKind field and the
-     * flags.
+     * address() >> ArenaShift and pack it with the allocKind field and the flags.
      */
   public:
     size_t       hasDelayedMarking : 1;
     size_t       allocatedDuringIncremental : 1;
     size_t       markOverflow : 1;
     size_t       auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
     static_assert(ArenaShift >= 8 + 1 + 1 + 1,
                   "ArenaHeader::auxNextLink packing assumes that ArenaShift has enough bits to "
                   "cover allocKind and hasDelayedMarking.");
 
+    ArenaHeader() { setAsNotAllocated(); }
+
     inline uintptr_t address() const;
     inline Chunk* chunk() const;
 
     bool allocated() const {
         MOZ_ASSERT(IsAllocKind(AllocKind(allocKind)));
         return IsValidAllocKind(AllocKind(allocKind));
     }
 
+    // This sets |firstFreeSpan| to the Arena's entire valid range, and
+    // also sets the next span stored at |firstFreeSpan.last| as empty.
+    inline void setAsFullyUnused();
+
     void init(JS::Zone* zoneArg, AllocKind kind) {
         MOZ_ASSERT(!allocated());
         MOZ_ASSERT(!markOverflow);
         MOZ_ASSERT(!allocatedDuringIncremental);
         MOZ_ASSERT(!hasDelayedMarking);
         zone = zoneArg;
 
         static_assert(size_t(AllocKind::LIMIT) <= 255,
             "We must be able to fit the allockind into uint8_t.");
         allocKind = size_t(kind);
 
-        /*
-         * The firstFreeSpan is initially marked as empty (and thus the arena
-         * is marked as full). See allocateFromArenaInline().
-         */
-        firstFreeSpan.initAsEmpty();
+        setAsFullyUnused();
     }
 
     void setAsNotAllocated() {
         allocKind = size_t(AllocKind::LIMIT);
         markOverflow = 0;
         allocatedDuringIncremental = 0;
         hasDelayedMarking = 0;
         auxNextLink = 0;
+        firstFreeSpan.initAsEmpty();
     }
 
-    inline uintptr_t arenaAddress() const;
-    inline Arena* getArena();
+    Arena* getArena() { return reinterpret_cast<Arena*>(address()); }
 
     AllocKind getAllocKind() const {
         MOZ_ASSERT(allocated());
         return AllocKind(allocKind);
     }
 
     inline size_t getThingSize() const;
 
     bool hasFreeThings() const {
-        return !firstFreeSpan.isEmpty();
+        return !firstFreeSpan.isEmpty(this);
+    }
+
+    size_t numFreeThings(size_t thingSize) const {
+        firstFreeSpan.checkSpan(this);
+        size_t numFree = 0;
+        const FreeSpan* span = &firstFreeSpan;
+        for (; !span->isEmpty(this); span = span->nextSpan(this))
+            numFree += (span->last - span->first) / thingSize + 1;
+        return numFree;
     }
 
     inline bool isEmpty() const;
 
-    void setAsFullyUsed() {
-        firstFreeSpan.initAsEmpty();
+    static size_t offsetOfFreeSpanFirst() {
+        return offsetof(ArenaHeader, firstFreeSpan) + offsetof(FreeSpan, first);
+    }
+
+    static size_t offsetOfFreeSpanLast() {
+        return offsetof(ArenaHeader, firstFreeSpan) + offsetof(FreeSpan, last);
     }
 
-    inline FreeSpan getFirstFreeSpan() const;
-    inline void setFirstFreeSpan(const FreeSpan* span);
+    bool inFreeList(uintptr_t thing) {
+        uintptr_t base = address();
+        const FreeSpan* span = &firstFreeSpan;
+        for (; !span->isEmpty(this); span = span->nextSpan(this)) {
+            /* If the thing comes before the current span, it's not free. */
+            if (thing < base + span->first)
+                return false;
+
+            /* If we find it before the end of the span, it's free. */
+            if (thing <= base + span->last)
+                return true;
+        }
+        return false;
+    }
 
-#ifdef DEBUG
-    void checkSynchronizedWithFreeList() const;
-#endif
+    MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
+        firstFreeSpan.checkSpan(this);
+        uintptr_t thing = uintptr_t(this) + firstFreeSpan.first;
+        if (firstFreeSpan.first < firstFreeSpan.last) {
+            // We have space for at least two more things, so do a simple bump-allocate.
+            firstFreeSpan.first += thingSize;
+        } else if (MOZ_LIKELY(firstFreeSpan.first)) {
+            // We have one thing left in the span; the memory at |last| holds
+            // the next span (which may be empty).
+            firstFreeSpan = *firstFreeSpan.nextSpan(this);
+        } else {
+            return nullptr; // The span is empty.
+        }
+        firstFreeSpan.checkSpan(this);
+        JS_EXTRA_POISON(reinterpret_cast<void*>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
+        MemProfiler::SampleTenured(reinterpret_cast<void*>(thing), thingSize);
+        return reinterpret_cast<TenuredCell*>(thing);
+    }
 
     inline ArenaHeader* getNextDelayedMarking() const;
     inline void setNextDelayedMarking(ArenaHeader* aheader);
     inline void unsetDelayedMarking();
 
     inline ArenaHeader* getNextAllocDuringSweep() const;
     inline void setNextAllocDuringSweep(ArenaHeader* aheader);
     inline void unsetAllocDuringSweep();
 
     inline void setNextArenaToUpdate(ArenaHeader* aheader);
     inline ArenaHeader* getNextArenaToUpdateAndUnlink();
 
     void unmarkAll();
 
     size_t countUsedCells();
-    size_t countFreeCells();
+    size_t countFreeCells() { return numFreeThings(getThingSize()); }
 };
 static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
               "The hardcoded API zone offset must match the actual offset.");
 
 struct Arena
 {
     /*
      * Layout of an arena:
@@ -778,28 +611,61 @@ struct Arena
     uintptr_t thingsStart(AllocKind thingKind) {
         return address() + firstThingOffset(thingKind);
     }
 
     uintptr_t thingsEnd() {
         return address() + ArenaSize;
     }
 
-    void setAsFullyUnused(AllocKind thingKind);
-
     template <typename T>
     size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
 };
 
 static_assert(sizeof(Arena) == ArenaSize, "The hardcoded arena size must match the struct size.");
 
+inline void
+FreeSpan::checkSpan(const ArenaHeader* aheader) const
+{
+#ifdef DEBUG
+    if (!first) {
+        MOZ_ASSERT(!first && !last);
+        return;
+    }
+
+    checkRange(first, last, aheader);
+
+    // If there's a following span, it must have a higher address,
+    // and the gap must be at least 2 * thingSize.
+    const FreeSpan* next = nextSpanUnchecked(aheader);
+    if (next->first) {
+        checkRange(next->first, next->last, aheader);
+        size_t thingSize = aheader->getThingSize();
+        MOZ_ASSERT(last + 2 * thingSize <= next->first);
+    }
+#endif
+}
+
+inline void
+FreeSpan::checkRange(uintptr_t first, uintptr_t last, const ArenaHeader* aheader) const
+{
+#ifdef DEBUG
+    MOZ_ASSERT(aheader);
+    AllocKind thingKind = aheader->getAllocKind();
+    size_t thingSize = Arena::thingSize(thingKind);
+    MOZ_ASSERT(first <= last);
+    MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
+    MOZ_ASSERT(last <= ArenaSize - thingSize);
+    MOZ_ASSERT((last - first) % thingSize == 0);
+#endif
+}
+
 inline size_t
 ArenaHeader::getThingSize() const
 {
-    MOZ_ASSERT(allocated());
     return Arena::thingSize(getAllocKind());
 }
 
 /*
  * The tail of the chunk info is shared between all chunks in the system, both
  * nursery and tenured. This structure is locatable from any GC pointer by
  * aligning to 1MiB.
  */
@@ -1047,18 +913,17 @@ struct Chunk
     bool isNurseryChunk() const {
         return info.trailer.storeBuffer;
     }
 
     ArenaHeader* allocateArena(JSRuntime* rt, JS::Zone* zone, AllocKind kind,
                                const AutoLockGC& lock);
 
     void releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock);
-    void recycleArena(ArenaHeader* aheader, SortedArenaList& dest, AllocKind thingKind,
-                      size_t thingsPerArena);
+    void recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena);
 
     bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock);
     void decommitAllArenasWithoutUnlocking(const AutoLockGC& lock);
 
     static Chunk* allocate(JSRuntime* rt);
 
   private:
     inline void init(JSRuntime* rt);
@@ -1152,70 +1017,52 @@ ArenaHeader::address() const
 }
 
 inline Chunk*
 ArenaHeader::chunk() const
 {
     return Chunk::fromAddress(address());
 }
 
-inline uintptr_t
-ArenaHeader::arenaAddress() const
-{
-    return address();
-}
-
-inline Arena*
-ArenaHeader::getArena()
-{
-    return reinterpret_cast<Arena*>(arenaAddress());
-}
-
 inline bool
 ArenaHeader::isEmpty() const
 {
     /* Arena is empty if its first span covers the whole arena. */
-    MOZ_ASSERT(allocated());
-    size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
-    size_t lastThingOffset = ArenaSize - getThingSize();
-    const CompactFreeSpan emptyCompactSpan(firstThingOffset, lastThingOffset);
-    return firstFreeSpan == emptyCompactSpan;
+    firstFreeSpan.checkSpan(this);
+    AllocKind kind = getAllocKind();
+    size_t firstThingOffset = Arena::firstThingOffset(kind);
+    size_t lastThingOffset = ArenaSize - Arena::thingSize(kind);
+    return firstFreeSpan.first == firstThingOffset && firstFreeSpan.last == lastThingOffset;
 }
 
-FreeSpan
-ArenaHeader::getFirstFreeSpan() const
+inline void
+ArenaHeader::setAsFullyUnused()
 {
-#ifdef DEBUG
-    checkSynchronizedWithFreeList();
-#endif
-    return firstFreeSpan.decompact(arenaAddress());
-}
-
-void
-ArenaHeader::setFirstFreeSpan(const FreeSpan* span)
-{
-    MOZ_ASSERT_IF(!span->isEmpty(), span->isWithinArena(arenaAddress()));
-    firstFreeSpan.compact(*span);
+    AllocKind kind = getAllocKind();
+    firstFreeSpan.first = Arena::firstThingOffset(kind);
+    firstFreeSpan.last = ArenaSize - Arena::thingSize(kind);
+    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
+    last->initAsEmpty();
 }
 
 inline ArenaHeader*
 ArenaHeader::getNextDelayedMarking() const
 {
     MOZ_ASSERT(hasDelayedMarking);
     return &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
 }
 
 inline void
 ArenaHeader::setNextDelayedMarking(ArenaHeader* aheader)
 {
     MOZ_ASSERT(!(uintptr_t(aheader) & ArenaMask));
     MOZ_ASSERT(!auxNextLink && !hasDelayedMarking);
     hasDelayedMarking = 1;
     if (aheader)
-        auxNextLink = aheader->arenaAddress() >> ArenaShift;
+        auxNextLink = aheader->address() >> ArenaShift;
 }
 
 inline void
 ArenaHeader::unsetDelayedMarking()
 {
     MOZ_ASSERT(hasDelayedMarking);
     hasDelayedMarking = 0;
     auxNextLink = 0;
@@ -1229,17 +1076,17 @@ ArenaHeader::getNextAllocDuringSweep() c
 }
 
 inline void
 ArenaHeader::setNextAllocDuringSweep(ArenaHeader* aheader)
 {
     MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
     allocatedDuringIncremental = 1;
     if (aheader)
-        auxNextLink = aheader->arenaAddress() >> ArenaShift;
+        auxNextLink = aheader->address() >> ArenaShift;
 }
 
 inline void
 ArenaHeader::unsetAllocDuringSweep()
 {
     MOZ_ASSERT(allocatedDuringIncremental);
     allocatedDuringIncremental = 0;
     auxNextLink = 0;
@@ -1254,17 +1101,23 @@ ArenaHeader::getNextArenaToUpdateAndUnli
     return next;
 }
 
 inline void
 ArenaHeader::setNextArenaToUpdate(ArenaHeader* aheader)
 {
     MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
     MOZ_ASSERT(!auxNextLink);
-    auxNextLink = aheader->arenaAddress() >> ArenaShift;
+    auxNextLink = aheader->address() >> ArenaShift;
+}
+
+inline size_t
+ArenaHeader::countUsedCells()
+{
+    return Arena::thingsPerArena(getAllocKind()) - countFreeCells();
 }
 
 static void
 AssertValidColor(const TenuredCell* thing, uint32_t color)
 {
 #ifdef DEBUG
     ArenaHeader* aheader = thing->arenaHeader();
     MOZ_ASSERT(color < aheader->getThingSize() / CellSize);
@@ -1339,25 +1192,19 @@ inline JS::TraceKind
 Cell::getTraceKind() const
 {
     return isTenured() ? asTenured().getTraceKind() : JS::TraceKind::Object;
 }
 
 inline bool
 InFreeList(ArenaHeader* aheader, void* thing)
 {
-    if (!aheader->hasFreeThings())
-        return false;
-
-    FreeSpan firstSpan(aheader->getFirstFreeSpan());
     uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
-
     MOZ_ASSERT(Arena::isAligned(addr, aheader->getThingSize()));
-
-    return firstSpan.inFreeList(addr);
+    return aheader->inFreeList(addr);
 }
 
 /* static */ MOZ_ALWAYS_INLINE bool
 Cell::needWriteBarrierPre(JS::Zone* zone) {
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 /* static */ MOZ_ALWAYS_INLINE TenuredCell*
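
The core of the patch is visible in this file: FreeSpan shrinks from two
pointer-sized words to two uint16_t arena-relative offsets, so the head span
can live permanently in the ArenaHeader, replacing both CompactFreeSpan and
the separate per-zone FreeList copy. (The tail of the doc comment above still
describes |first| and |last| as addresses; after this patch they are offsets.)
A worked example of the encoding, with hypothetical numbers and assuming the
usual 4 KiB arenas:

    // A fully unused arena of 32-byte things whose first thing sits at
    // offset 64 would have
    //   firstFreeSpan.first == 64           (Arena::firstThingOffset(kind))
    //   firstFreeSpan.last  == 4096 - 32    (ArenaSize - thingSize)
    // Offsets convert back to addresses via the arena base (a sketch,
    // ignoring access control):
    uintptr_t base       = aheader->address();
    uintptr_t firstThing = base + aheader->firstFreeSpan.first;  // first free cell
    FreeSpan* next       = reinterpret_cast<FreeSpan*>(base + aheader->firstFreeSpan.last);
    // The next span is stored inside the last free cell itself; with two
    // uint16_t fields, sizeof(FreeSpan) == 4, below the smallest thing size.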
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -224,20 +224,18 @@ js::CheckTracedThing(JSTracer* trc, T* t
                       !zone->isGCMarkingBlack() || zone->isAtomsZone());
 
         MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() || zone->isGCCompacting()));
     }
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
-     * part has not been overwritten, and that the free span list head in the
-     * ArenaHeader may not be synced with the real one in ArenaLists.  Also,
-     * background sweeping may be running and concurrently modifiying the free
-     * list.
+     * part has not been overwritten.  Also, background sweeping may be running
+     * and concurrently modifying the free list.
      */
     MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(),
                   !InFreeList(thing->asTenured().arenaHeader(), thing));
 #endif
 }
 
 template <typename S>
 struct CheckTracedFunctor : public VoidDefaultAdaptor<S> {
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -210,25 +210,19 @@ CompileZone::get(Zone* zone)
 
 const void*
 CompileZone::addressOfNeedsIncrementalBarrier()
 {
     return zone()->addressOfNeedsIncrementalBarrier();
 }
 
 const void*
-CompileZone::addressOfFreeListFirst(gc::AllocKind allocKind)
+CompileZone::addressOfFreeList(gc::AllocKind allocKind)
 {
-    return zone()->arenas.getFreeList(allocKind)->addressOfFirst();
-}
-
-const void*
-CompileZone::addressOfFreeListLast(gc::AllocKind allocKind)
-{
-    return zone()->arenas.getFreeList(allocKind)->addressOfLast();
+    return zone()->arenas.addressOfFreeList(allocKind);
 }
 
 JSCompartment*
 CompileCompartment::compartment()
 {
     return reinterpret_cast<JSCompartment*>(this);
 }
 
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -92,19 +92,17 @@ class CompileZone
 {
     Zone* zone();
 
   public:
     static CompileZone* get(Zone* zone);
 
     const void* addressOfNeedsIncrementalBarrier();
 
-    // arenas.getFreeList(allocKind)
-    const void* addressOfFreeListFirst(gc::AllocKind allocKind);
-    const void* addressOfFreeListLast(gc::AllocKind allocKind);
+    const void* addressOfFreeList(gc::AllocKind allocKind);
 };
 
 class JitCompartment;
 
 class CompileCompartment
 {
     JSCompartment* compartment();
 
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -772,44 +772,53 @@ MacroAssembler::nurseryAllocate(Register
     storePtr(temp, AbsoluteAddress(nursery.addressOfPosition()));
 
     if (nDynamicSlots) {
         computeEffectiveAddress(Address(result, thingSize), temp);
         storePtr(temp, Address(result, NativeObject::offsetOfSlots()));
     }
 }
 
-// Inlined version of FreeList::allocate. This does not fill in slots_.
+// Inlined version of ArenaHeader::allocate. This does not fill in slots_.
 void
 MacroAssembler::freeListAllocate(Register result, Register temp, gc::AllocKind allocKind, Label* fail)
 {
     CompileZone* zone = GetJitContext()->compartment->zone();
     int thingSize = int(gc::Arena::thingSize(allocKind));
 
     Label fallback;
     Label success;
 
-    // Load FreeList::head::first of |zone|'s freeLists for |allocKind|. If
-    // there is no room remaining in the span, fall back to get the next one.
-    loadPtr(AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)), result);
-    branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(zone->addressOfFreeListLast(allocKind)), result, &fallback);
-    computeEffectiveAddress(Address(result, thingSize), temp);
-    storePtr(temp, AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)));
+    // Load the first and last offsets of |zone|'s free list for |allocKind|.
+    // If there is no room remaining in the span, fall back to get the next one.
+    loadPtr(AbsoluteAddress(zone->addressOfFreeList(allocKind)), temp);
+    load16ZeroExtend(Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()), result);
+    load16ZeroExtend(Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanLast()), temp);
+    branch32(Assembler::AboveOrEqual, result, temp, &fallback);
+
+    // Bump the offset for the next allocation.
+    add32(Imm32(thingSize), result);
+    loadPtr(AbsoluteAddress(zone->addressOfFreeList(allocKind)), temp);
+    store16(result, Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()));
+    sub32(Imm32(thingSize), result);
+    addPtr(temp, result); // Turn the offset into a pointer.
     jump(&success);
 
     bind(&fallback);
-    // If there are no FreeSpans left, we bail to finish the allocation. The
-    // interpreter will call |refillFreeLists|, setting up a new FreeList so
-    // that we can continue allocating in the jit.
-    branchPtr(Assembler::Equal, result, ImmPtr(0), fail);
-    // Point the free list head at the subsequent span (which may be empty).
-    loadPtr(Address(result, js::gc::FreeSpan::offsetOfFirst()), temp);
-    storePtr(temp, AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)));
-    loadPtr(Address(result, js::gc::FreeSpan::offsetOfLast()), temp);
-    storePtr(temp, AbsoluteAddress(zone->addressOfFreeListLast(allocKind)));
+    // If there are no free spans left, we bail to finish the allocation. The
+    // interpreter will call the GC allocator to set up a new arena to allocate
+    // from, after which we can resume allocating in the jit.
+    branchTest32(Assembler::Zero, result, result, fail);
+    loadPtr(AbsoluteAddress(zone->addressOfFreeList(allocKind)), temp);
+    addPtr(temp, result); // Turn the offset into a pointer.
+    Push(result);
+    // Update the free list to point to the next span (which may be empty).
+    load32(Address(result, 0), result);
+    store32(result, Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()));
+    Pop(result);
 
     bind(&success);
 }
 
 void
 MacroAssembler::callMallocStub(size_t nbytes, Register result, Label* fail)
 {
     // This register must match the one in JitRuntime::generateMallocStub.
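
The rewritten allocation stub above is easier to audit as C-level pseudocode.
The following is a hand transcription of the emitted loads and stores, not
code from the patch; |zoneFreeListAddr| stands for the address returned by
zone->addressOfFreeList(allocKind):

    // Fast path: one absolute load plus two 16-bit loads.
    ArenaHeader* aheader = *zoneFreeListAddr;        // zone->arenas.freeLists[kind]
    uint32_t first = aheader->firstFreeSpan.first;   // load16ZeroExtend
    uint32_t last  = aheader->firstFreeSpan.last;    // load16ZeroExtend
    if (first < last) {                              // two or more things left
        aheader->firstFreeSpan.first = uint16_t(first + thingSize);  // bump
        return (char*)aheader + first;               // offset -> pointer
    }
    // Fallback: one thing left (first == last) or the span is empty (first == 0).
    if (first == 0)
        return fail();                               // bail; the GC refills the list
    char* result = (char*)aheader + first;           // allocate the final thing
    // The final thing holds the next FreeSpan; the jit copies it with a single
    // 32-bit load/store pair, replacing |first| and |last| at once.
    aheader->firstFreeSpan = *(FreeSpan*)result;
    return result;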
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -297,16 +297,18 @@ const uint32_t Arena::ThingSizes[] = CHE
     sizeof(ObjectGroup),        /* AllocKind::OBJECT_GROUP        */
     sizeof(JSFatInlineString),  /* AllocKind::FAT_INLINE_STRING   */
     sizeof(JSString),           /* AllocKind::STRING              */
     sizeof(JSExternalString),   /* AllocKind::EXTERNAL_STRING     */
     sizeof(JS::Symbol),         /* AllocKind::SYMBOL              */
     sizeof(jit::JitCode),       /* AllocKind::JITCODE             */
 );
 
+ArenaHeader ArenaLists::placeholder;
+
 #undef CHECK_THING_SIZE_INNER
 #undef CHECK_THING_SIZE
 
 #define OFFSET(type) uint32_t(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))
 
 const uint32_t Arena::FirstThingOffsets[] = {
     OFFSET(JSFunction),         /* AllocKind::FUNCTION            */
     OFFSET(FunctionExtended),   /* AllocKind::FUNCTION_EXTENDED   */
@@ -444,49 +446,16 @@ static const FinalizePhase BackgroundFin
 template<>
 JSObject*
 ArenaCellIterImpl::get<JSObject>() const
 {
     MOZ_ASSERT(!done());
     return reinterpret_cast<JSObject*>(getCell());
 }
 
-#ifdef DEBUG
-void
-ArenaHeader::checkSynchronizedWithFreeList() const
-{
-    /*
-     * Do not allow to access the free list when its real head is still stored
-     * in FreeLists and is not synchronized with this one.
-     */
-    MOZ_ASSERT(allocated());
-
-    /*
-     * We can be called from the background finalization thread when the free
-     * list in the zone can mutate at any moment. We cannot do any
-     * checks in this case.
-     */
-    if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gc.onBackgroundThread())
-        return;
-
-    FreeSpan firstSpan = firstFreeSpan.decompact(arenaAddress());
-    if (firstSpan.isEmpty())
-        return;
-    const FreeList* freeList = zone->arenas.getFreeList(getAllocKind());
-    if (freeList->isEmpty() || firstSpan.arenaAddress() != freeList->arenaAddress())
-        return;
-
-    /*
-     * Here this arena has free things, FreeList::lists[thingKind] is not
-     * empty and also points to this arena. Thus they must be the same.
-     */
-    MOZ_ASSERT(freeList->isSameNonEmptySpan(firstSpan));
-}
-#endif
-
 void
 ArenaHeader::unmarkAll()
 {
     uintptr_t* word = chunk()->bitmap.arenaBits(this);
     memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
 }
 
 /* static */ void
@@ -495,66 +464,57 @@ Arena::staticAsserts()
     static_assert(JS_ARRAY_LENGTH(ThingSizes) == size_t(AllocKind::LIMIT),
                   "We haven't defined all thing sizes.");
     static_assert(JS_ARRAY_LENGTH(FirstThingOffsets) == size_t(AllocKind::LIMIT),
                   "We haven't defined all offsets.");
     static_assert(JS_ARRAY_LENGTH(ThingsPerArena) == size_t(AllocKind::LIMIT),
                   "We haven't defined all counts.");
 }
 
-void
-Arena::setAsFullyUnused(AllocKind thingKind)
-{
-    FreeSpan fullSpan;
-    size_t thingSize = Arena::thingSize(thingKind);
-    fullSpan.initFinal(thingsStart(thingKind), thingsEnd() - thingSize, thingSize);
-    aheader.setFirstFreeSpan(&fullSpan);
-}
-
 template<typename T>
 inline size_t
 Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
 {
     /* Enforce requirements on size of T. */
     MOZ_ASSERT(thingSize % CellSize == 0);
     MOZ_ASSERT(thingSize <= 255);
 
     MOZ_ASSERT(aheader.allocated());
     MOZ_ASSERT(thingKind == aheader.getAllocKind());
     MOZ_ASSERT(thingSize == aheader.getThingSize());
     MOZ_ASSERT(!aheader.hasDelayedMarking);
     MOZ_ASSERT(!aheader.markOverflow);
     MOZ_ASSERT(!aheader.allocatedDuringIncremental);
 
-    uintptr_t firstThing = thingsStart(thingKind);
-    uintptr_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
-    uintptr_t lastThing = thingsEnd() - thingSize;
+    uint_fast16_t firstThing = firstThingOffset(thingKind);
+    uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
+    uint_fast16_t lastThing = ArenaSize - thingSize;
 
     FreeSpan newListHead;
     FreeSpan* newListTail = &newListHead;
     size_t nmarked = 0;
 
     if (MOZ_UNLIKELY(MemProfiler::enabled())) {
         for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
             T* t = i.get<T>();
             if (t->asTenured().isMarked())
                 MemProfiler::MarkTenured(reinterpret_cast<void*>(t));
         }
     }
 
     for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
         T* t = i.get<T>();
         if (t->asTenured().isMarked()) {
-            uintptr_t thing = reinterpret_cast<uintptr_t>(t);
+            uint_fast16_t thing = uintptr_t(t) & ArenaMask;
             if (thing != firstThingOrSuccessorOfLastMarkedThing) {
                 // We just finished passing over one or more free things,
                 // so record a new FreeSpan.
-                newListTail->initBoundsUnchecked(firstThingOrSuccessorOfLastMarkedThing,
-                                                 thing - thingSize);
-                newListTail = newListTail->nextSpanUnchecked();
+                newListTail->initBounds(firstThingOrSuccessorOfLastMarkedThing,
+                                        thing - thingSize, &aheader);
+                newListTail = newListTail->nextSpanUnchecked(&aheader);
             }
             firstThingOrSuccessorOfLastMarkedThing = thing + thingSize;
             nmarked++;
         } else {
             t->finalize(fop);
             JS_POISON(t, JS_SWEPT_TENURED_PATTERN, thingSize);
             TraceTenuredFinalize(t);
         }
@@ -563,33 +523,31 @@ Arena::finalize(FreeOp* fop, AllocKind t
     if (nmarked == 0) {
         // Do nothing. The caller will update the arena header appropriately.
         MOZ_ASSERT(newListTail == &newListHead);
         JS_EXTRA_POISON(data, JS_SWEPT_TENURED_PATTERN, sizeof(data));
         return nmarked;
     }
 
     MOZ_ASSERT(firstThingOrSuccessorOfLastMarkedThing != firstThing);
-    uintptr_t lastMarkedThing = firstThingOrSuccessorOfLastMarkedThing - thingSize;
+    uint_fast16_t lastMarkedThing = firstThingOrSuccessorOfLastMarkedThing - thingSize;
     if (lastThing == lastMarkedThing) {
         // If the last thing was marked, we will have already set the bounds of
         // the final span, and we just need to terminate the list.
         newListTail->initAsEmpty();
     } else {
         // Otherwise, end the list with a span that covers the final stretch of free things.
-        newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, thingSize);
-    }
-
+        newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, &aheader);
+    }
+
+    aheader.firstFreeSpan = newListHead;
 #ifdef DEBUG
-    size_t nfree = 0;
-    for (const FreeSpan* span = &newListHead; !span->isEmpty(); span = span->nextSpan())
-        nfree += span->length(thingSize);
+    size_t nfree = aheader.numFreeThings(thingSize);
     MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind));
 #endif
-    aheader.setFirstFreeSpan(&newListHead);
     return nmarked;
 }
 
 // Finalize arenas from src list, releasing empty arenas if keepArenas wasn't
 // specified and inserting the others into the appropriate destination size
 // bins.
 template<typename T>
 static inline bool
@@ -615,17 +573,17 @@ FinalizeTypedArenas(FreeOp* fop,
     while (ArenaHeader* aheader = *src) {
         *src = aheader->next;
         size_t nmarked = aheader->getArena()->finalize<T>(fop, thingKind, thingSize);
         size_t nfree = thingsPerArena - nmarked;
 
         if (nmarked)
             dest.insertAt(aheader, nfree);
         else if (keepArenas == ArenaLists::KEEP_ARENAS)
-            aheader->chunk()->recycleArena(aheader, dest, thingKind, thingsPerArena);
+            aheader->chunk()->recycleArena(aheader, dest, thingsPerArena);
         else
             fop->runtime()->gc.releaseArena(aheader, maybeLock.ref());
 
         budget.step(thingsPerArena);
         if (budget.isOverBudget())
             return false;
     }
 
@@ -969,24 +927,23 @@ Chunk::addArenaToFreeList(JSRuntime* rt,
     ++info.numArenasFree;
     rt->gc.updateOnArenaFree(info);
 }
 
 void
 Chunk::addArenaToDecommittedList(JSRuntime* rt, const ArenaHeader* aheader)
 {
     ++info.numArenasFree;
-    decommittedArenas.set(Chunk::arenaIndex(aheader->arenaAddress()));
-}
-
-void
-Chunk::recycleArena(ArenaHeader* aheader, SortedArenaList& dest, AllocKind thingKind,
-                    size_t thingsPerArena)
-{
-    aheader->getArena()->setAsFullyUnused(thingKind);
+    decommittedArenas.set(Chunk::arenaIndex(aheader->address()));
+}
+
+void
+Chunk::recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena)
+{
+    aheader->setAsFullyUnused();
     dest.insertAt(aheader, thingsPerArena);
 }
 
 void
 Chunk::releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock)
 {
     MOZ_ASSERT(aheader->allocated());
     MOZ_ASSERT(!aheader->hasDelayedMarking);
@@ -1991,21 +1948,24 @@ GCMarker::delayMarkingChildren(const voi
     cell->arenaHeader()->markOverflow = 1;
     delayMarkingArena(cell->arenaHeader());
 }
 
 inline void
 ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
 {
     for (auto i : AllAllocKinds()) {
-        FreeList* freeList = &freeLists[i];
-        if (!freeList->isEmpty()) {
-            ArenaHeader* aheader = freeList->arenaHeader();
-            aheader->allocatedDuringIncremental = true;
-            rt->gc.marker.delayMarkingArena(aheader);
+        ArenaHeader* aheader = freeLists[i];
+        if (aheader != &placeholder) {
+            if (aheader->hasFreeThings()) {
+                aheader->allocatedDuringIncremental = true;
+                rt->gc.marker.delayMarkingArena(aheader);
+            } else {
+                freeLists[i] = &placeholder;
+            }
         }
     }
 }
 
 /* Compacting GC */
 
 bool
 GCRuntime::shouldCompact()
@@ -2058,31 +2018,16 @@ CanRelocateZone(Zone* zone)
 }
 
 static bool
 CanRelocateAllocKind(AllocKind kind)
 {
     return IsObjectAllocKind(kind);
 }
 
-size_t ArenaHeader::countFreeCells()
-{
-    size_t count = 0;
-    size_t thingSize = getThingSize();
-    FreeSpan firstSpan(getFirstFreeSpan());
-    for (const FreeSpan* span = &firstSpan; !span->isEmpty(); span = span->nextSpan())
-        count += span->length(thingSize);
-    return count;
-}
-
-size_t ArenaHeader::countUsedCells()
-{
-    return Arena::thingsPerArena(getAllocKind()) - countFreeCells();
-}
-
 ArenaHeader*
 ArenaList::removeRemainingArenas(ArenaHeader** arenap)
 {
     // This is only ever called to remove arenas that are after the cursor, so
     // we don't need to update it.
 #ifdef DEBUG
     for (ArenaHeader* arena = *arenap; arena; arena = arena->next)
         MOZ_ASSERT(cursorp_ != &arena->next);
@@ -2336,19 +2281,18 @@ ArenaLists::relocateArenas(Zone* zone, A
                            SliceBudget& sliceBudget, gcstats::Statistics& stats)
 {
     // This is only called from the main thread while we are doing a GC, so
     // there is no need to lock.
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
     MOZ_ASSERT(runtime_->gc.isHeapCompacting());
     MOZ_ASSERT(!runtime_->gc.isBackgroundSweeping());
 
-    // Flush all the freeLists back into the arena headers
+    // Clear all the free lists.
     purge();
-    checkEmptyFreeLists();
 
     if (ShouldRelocateAllArenas(reason)) {
         zone->prepareForCompacting();
         for (auto i : AllAllocKinds()) {
             if (CanRelocateAllocKind(i)) {
                 ArenaList& al = arenaLists[i];
                 ArenaHeader* allArenas = al.head();
                 al.clear();
@@ -2374,23 +2318,16 @@ ArenaLists::relocateArenas(Zone* zone, A
             if (toRelocate[i]) {
                 ArenaList& al = arenaLists[i];
                 ArenaHeader* arenas = al.removeRemainingArenas(toRelocate[i]);
                 relocatedListOut = al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
             }
         }
     }
 
-    // When we allocate new locations for cells, we use
-    // allocateFromFreeList(). Reset the free list again so that
-    // AutoCopyFreeListToArenasForGC doesn't complain that the free lists are
-    // different now.
-    purge();
-    checkEmptyFreeLists();
-
     return true;
 }
 
 bool
 GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*& relocatedListOut,
                           SliceBudget& sliceBudget)
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
@@ -2879,24 +2816,21 @@ GCRuntime::releaseRelocatedArenasWithout
     while (arenaList) {
         ArenaHeader* aheader = arenaList;
         arenaList = arenaList->next;
 
         // Clear the mark bits
         aheader->unmarkAll();
 
         // Mark arena as empty
-        AllocKind thingKind = aheader->getAllocKind();
-        size_t thingSize = aheader->getThingSize();
-        Arena* arena = aheader->getArena();
-        FreeSpan fullSpan;
-        fullSpan.initFinal(arena->thingsStart(thingKind), arena->thingsEnd() - thingSize, thingSize);
-        aheader->setFirstFreeSpan(&fullSpan);
+        aheader->setAsFullyUnused();
 
 #if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL)
+        Arena* arena = aheader->getArena();
+        AllocKind thingKind = aheader->getAllocKind();
         JS_POISON(reinterpret_cast<void*>(arena->thingsStart(thingKind)),
                   JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingKind));
 #endif
 
         releaseArena(aheader, lock);
         ++count;
     }
 }
@@ -3155,17 +3089,17 @@ ArenaLists::queueForegroundThingsForSwee
 
 /* static */ void*
 GCRuntime::refillFreeListInGC(Zone* zone, AllocKind thingKind)
 {
     /*
      * Called by compacting GC to refill a free list while we are in a GC.
      */
 
-    MOZ_ASSERT(zone->arenas.freeLists[thingKind].isEmpty());
+    zone->arenas.checkEmptyFreeList(thingKind);
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
     MOZ_ASSERT(rt->isHeapMajorCollecting());
     MOZ_ASSERT(!rt->gc.isBackgroundSweeping());
 
     AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
     return zone->arenas.allocateFromArena(zone, thingKind, maybeStartBackgroundAllocation);
 }
 
@@ -5872,57 +5806,25 @@ AutoTraceSession::~AutoTraceSession()
 
         // Notify any helper threads waiting for the trace session to end.
         HelperThreadState().notifyAll(GlobalHelperThreadState::PRODUCER);
     } else {
         runtime->heapState_ = prevState;
     }
 }
 
-AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime* rt, ZoneSelector selector)
-  : runtime(rt),
-    selector(selector)
-{
-    for (ZonesIter zone(rt, selector); !zone.done(); zone.next())
-        zone->arenas.copyFreeListsToArenas();
-}
-
-AutoCopyFreeListToArenas::~AutoCopyFreeListToArenas()
-{
-    for (ZonesIter zone(runtime, selector); !zone.done(); zone.next())
-        zone->arenas.clearFreeListsInArenas();
-}
-
-class AutoCopyFreeListToArenasForGC
-{
-    JSRuntime* runtime;
-
-  public:
-    explicit AutoCopyFreeListToArenasForGC(JSRuntime* rt) : runtime(rt) {
-        MOZ_ASSERT(rt->currentThreadHasExclusiveAccess());
-        for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
-            zone->arenas.copyFreeListsToArenas();
-    }
-    ~AutoCopyFreeListToArenasForGC() {
-        for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next())
-            zone->arenas.clearFreeListsInArenas();
-    }
-};
-
 void
 GCRuntime::resetIncrementalGC(const char* reason)
 {
     switch (incrementalState) {
       case NO_INCREMENTAL:
         return;
 
       case MARK: {
         /* Cancel any ongoing marking. */
-        AutoCopyFreeListToArenasForGC copy(rt);
-
         marker.reset();
         marker.stop();
         clearBufferedGrayRoots();
 
         for (GCCompartmentsIter c(rt); !c.done(); c.next())
             ResetGrayList(c);
 
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
@@ -6099,17 +6001,16 @@ ShouldCleanUpEverything(JS::gcreason::Re
     return IsShutdownGC(reason) || gckind == GC_SHRINK;
 }
 
 void
 GCRuntime::incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason)
 {
     MOZ_ASSERT(rt->currentThreadHasExclusiveAccess());
 
-    AutoCopyFreeListToArenasForGC copy(rt);
     AutoGCSlice slice(rt);
 
     bool destroyingRuntime = (reason == JS::gcreason::DESTROY_RUNTIME);
 
     gc::State initialState = incrementalState;
 
     bool useZeal = false;
 #ifdef JS_GC_ZEAL
@@ -6867,18 +6768,17 @@ AutoFinishGC::AutoFinishGC(JSRuntime* rt
     }
 
     rt->gc.waitBackgroundSweepEnd();
     rt->gc.nursery.waitBackgroundFreeEnd();
 }
 
 AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector)
   : finish(rt),
-    session(rt),
-    copy(rt, selector)
+    session(rt)
 {
 }
 
 JSCompartment*
 js::NewCompartment(JSContext* cx, Zone* zone, JSPrincipals* principals,
                    const JS::CompartmentOptions& options)
 {
     JSRuntime* rt = cx->runtime();
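
The |ArenaLists::placeholder| defined near the top of this file pairs with
the new default ArenaHeader constructor added in Heap.h: setAsNotAllocated()
leaves firstFreeSpan empty, so a free list entry pointing at the placeholder
simply fails its first allocation. A sketch of the invariant this buys,
ignoring access control (not code from the patch):

    // The placeholder's span is {0, 0}: allocate() takes neither the bump
    // branch (first < last is false) nor the next-span branch (first is 0),
    // so for any thingSize it returns nullptr, which every caller treats as
    // "call into the GC to refill the free list".
    MOZ_ASSERT(!ArenaLists::placeholder.hasFreeThings());
    MOZ_ASSERT(ArenaLists::placeholder.allocate(thingSize) == nullptr);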
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -322,25 +322,22 @@ struct SortedArenaListSegment
     }
 };
 
 /*
  * Arena lists have a head and a cursor. The cursor conceptually lies on arena
  * boundaries, i.e. before the first arena, between two arenas, or after the
  * last arena.
  *
- * Normally the arena following the cursor is the first arena in the list with
- * some free things and all arenas before the cursor are fully allocated. (And
- * if the cursor is at the end of the list, then all the arenas are full.)
- *
- * However, the arena currently being allocated from is considered full while
- * its list of free spans is moved into the freeList. Therefore, during GC or
- * cell enumeration, when an unallocated freeList is moved back to the arena,
- * we can see an arena with some free cells before the cursor.
- *
+ * Arenas are usually sorted in order of increasing free space, with the cursor
+ * following the Arena currently being allocated from. This ordering should not
+ * be treated as an invariant, however, as the free lists may be cleared,
+ * leaving arenas previously used for allocation partially full. Sorting order
+ * is restored during sweeping.
+ *
  * Arenas following the cursor should not be full.
  */
 class ArenaList {
     // The cursor is implemented via an indirect pointer, |cursorp_|, to allow
     // for efficient list insertion at the cursor point and other list
     // manipulations.
     //
     // - If the list is empty: |head| is null, |cursorp_| points to |head|, and
@@ -452,28 +449,36 @@ class ArenaList {
         check();
         return aheader;
     }
 
     // This does two things.
     // - Inserts |a| at the cursor.
     // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
     //   after |a|, if |a| is full.
-    //
     void insertAtCursor(ArenaHeader* a) {
         check();
         a->next = *cursorp_;
         *cursorp_ = a;
         // At this point, the cursor is sitting before |a|. Move it after |a|
         // if necessary.
         if (!a->hasFreeThings())
             cursorp_ = &a->next;
         check();
     }
 
+    // Inserts |a| at the cursor, then moves the cursor past it.
+    void insertBeforeCursor(ArenaHeader* a) {
+        check();
+        a->next = *cursorp_;
+        *cursorp_ = a;
+        cursorp_ = &a->next;
+        check();
+    }
+
     // This inserts |other|, which must be full, at the cursor of |this|.
     ArenaList& insertListWithCursorAtEnd(const ArenaList& other) {
         check();
         other.check();
         MOZ_ASSERT(other.isCursorAtEnd());
         if (other.isCursorAtHead())
             return *this;
         // Insert the full arenas of |other| after those of |this|.
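
The contrast between insertAtCursor() and the new insertBeforeCursor() is
easiest to see on a concrete list. A sketch, where F is a full arena, P a
partially full one, and | the cursor:

    // Start:                   F1 F2 | P1
    // insertAtCursor(a):       F1 F2 | a P1    if a has free things
    //                          F1 F2 a | P1    if a is full
    // insertBeforeCursor(a):   F1 F2 a | P1    unconditionally
    //
    // allocateFromArena() in Allocator.cpp above now uses insertBeforeCursor()
    // for a freshly allocated arena: it still has free things, but it is the
    // arena being allocated from, so it belongs behind the cursor (see the
    // rewritten comment at the top of this class).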
@@ -585,17 +590,22 @@ class ArenaLists
      * For each arena kind its free list is represented as the first span with
      * free things. Initially all the spans are initialized as empty. After we
      * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena header after the initial allocation. When starting
     * the GC we move the head of the list of spans back to the arena, but
     * only for the arena that was not fully allocated.
      */
-    AllAllocKindArray<FreeList> freeLists;
+    AllAllocKindArray<ArenaHeader*> freeLists;
+
+    // Because the JITs can allocate from the free lists, they cannot be null.
+    // We use a placeholder ArenaHeader with an empty span (and no associated
+    // Arena) so the JITs can fall back gracefully.
+    static ArenaHeader placeholder;
 
     AllAllocKindArray<ArenaList> arenaLists;
 
     enum BackgroundFinalizeStateEnum { BFS_DONE, BFS_RUN };
 
     typedef mozilla::Atomic<BackgroundFinalizeStateEnum, mozilla::SequentiallyConsistent>
         BackgroundFinalizeState;
 
@@ -621,38 +631,33 @@ class ArenaLists
     // happen at the beginning of the GC), so that type sweeping can determine
     // which of the object pointers are marked.
     ObjectAllocKindArray<ArenaList> savedObjectArenas;
     ArenaHeader* savedEmptyObjectArenas;
 
   public:
     explicit ArenaLists(JSRuntime* rt) : runtime_(rt) {
         for (auto i : AllAllocKinds())
-            freeLists[i].initAsEmpty();
+            freeLists[i] = &placeholder;
         for (auto i : AllAllocKinds())
             backgroundFinalizeState[i] = BFS_DONE;
         for (auto i : AllAllocKinds())
             arenaListsToSweep[i] = nullptr;
         incrementalSweptArenaKind = AllocKind::LIMIT;
         gcShapeArenasToUpdate = nullptr;
         gcAccessorShapeArenasToUpdate = nullptr;
         gcScriptArenasToUpdate = nullptr;
         gcObjectGroupArenasToUpdate = nullptr;
         savedEmptyObjectArenas = nullptr;
     }
 
     ~ArenaLists();
 
-    static uintptr_t getFreeListOffset(AllocKind thingKind) {
-        uintptr_t offset = offsetof(ArenaLists, freeLists);
-        return offset + size_t(thingKind) * sizeof(FreeList);
-    }
-
-    const FreeList* getFreeList(AllocKind thingKind) const {
-        return &freeLists[thingKind];
+    const void* addressOfFreeList(AllocKind thingKind) const {
+        return reinterpret_cast<const void*>(&freeLists[thingKind]);
     }
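
With freeLists holding plain pointers, the removed offsetof-based
getFreeListOffset() needs no replacement: jit code (see CompileWrappers and
MacroAssembler in this changeset's file list) can bake the slot's absolute
address into the instruction stream and reach the span state with two
dependent loads. A rough sketch of what the emitted fast-path check amounts
to (hypothetical names, not the actual emitted code):

    #include <cstdint>

    // Stand-in header exposing only the field the fast path inspects.
    struct HeaderSlotSketch {
        uint16_t firstFreeOffset;  // Assumed: 0 means no free things.
    };

    // |slot| models the address returned by addressOfFreeList(kind). The
    // placeholder fails the emptiness check exactly like an exhausted
    // arena, so the fast path never needs a null test.
    inline bool canTakeFastPath(const void* slot) {
        const HeaderSlotSketch* header =
            *static_cast<const HeaderSlotSketch* const*>(slot);
        return header->firstFreeOffset != 0;
    }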
 
     ArenaHeader* getFirstArena(AllocKind thingKind) const {
         return arenaLists[thingKind].head();
     }
 
     ArenaHeader* getFirstArenaToSweep(AllocKind thingKind) const {
         return arenaListsToSweep[thingKind];
@@ -695,103 +700,33 @@ class ArenaLists
         return backgroundFinalizeState[kind] == BFS_DONE;
     }
 
     bool needBackgroundFinalizeWait(AllocKind kind) const {
         return backgroundFinalizeState[kind] != BFS_DONE;
     }
 
     /*
-     * Return the free list back to the arena so the GC finalization will not
-     * run the finalizers over uninitialized bytes from free things.
+     * Clear the free lists so we won't try to allocate from swept arenas.
      */
     void purge() {
         for (auto i : AllAllocKinds())
-            purge(i);
-    }
-
-    void purge(AllocKind i) {
-        FreeList* freeList = &freeLists[i];
-        if (!freeList->isEmpty()) {
-            ArenaHeader* aheader = freeList->arenaHeader();
-            aheader->setFirstFreeSpan(freeList->getHead());
-            freeList->initAsEmpty();
-        }
+            freeLists[i] = &placeholder;
     }
 
     inline void prepareForIncrementalGC(JSRuntime* rt);
 
-    /*
-     * Temporarily copy the free list heads to the arenas so the code can see
-     * the proper value in ArenaHeader::freeList when accessing the latter
-     * outside the GC.
-     */
-    void copyFreeListsToArenas() {
-        for (auto i : AllAllocKinds())
-            copyFreeListToArena(i);
-    }
-
-    void copyFreeListToArena(AllocKind thingKind) {
-        FreeList* freeList = &freeLists[thingKind];
-        if (!freeList->isEmpty()) {
-            ArenaHeader* aheader = freeList->arenaHeader();
-            MOZ_ASSERT(!aheader->hasFreeThings());
-            aheader->setFirstFreeSpan(freeList->getHead());
-        }
-    }
-
-    /*
-     * Clear the free lists in arenas that were temporarily set there using
-     * copyToArenas.
-     */
-    void clearFreeListsInArenas() {
-        for (auto i : AllAllocKinds())
-            clearFreeListInArena(i);
-    }
-
-    void clearFreeListInArena(AllocKind kind) {
-        FreeList* freeList = &freeLists[kind];
-        if (!freeList->isEmpty()) {
-            ArenaHeader* aheader = freeList->arenaHeader();
-            MOZ_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
-            aheader->setAsFullyUsed();
-        }
-    }
-
-    /*
-     * Check that the free list is either empty or were synchronized with the
-     * arena using copyToArena().
-     */
-    bool isSynchronizedFreeList(AllocKind kind) {
-        FreeList* freeList = &freeLists[kind];
-        if (freeList->isEmpty())
-            return true;
-        ArenaHeader* aheader = freeList->arenaHeader();
-        if (aheader->hasFreeThings()) {
-            /*
-             * If the arena has a free list, it must be the same as one in
-             * lists.
-             */
-            MOZ_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
-            return true;
-        }
-        return false;
-    }
-
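
All of the machinery deleted above (per-kind purge, copyFreeListsToArenas,
clearFreeListsInArenas, isSynchronizedFreeList) existed only to shuttle a
cached span between a FreeList and its ArenaHeader. With the span resident
in the header, allocation mutates it in place and there is nothing left to
synchronize. A simplified C++ sketch of such in-place allocation (names and
the empty-span encoding are assumptions):

    #include <cstddef>
    #include <cstdint>

    struct InPlaceSpan {
        uint16_t first;  // Offset of the first free thing; 0 when empty.
        uint16_t last;   // Offset of the last free thing in this span.
    };

    // Carve one thing out of a header-resident span. The header is the
    // single source of truth, so sweeping and iteration always see current
    // state and nothing ever needs to be copied back.
    void* allocateInPlace(uintptr_t arenaAddr, InPlaceSpan* span,
                          size_t thingSize) {
        if (span->first == 0)
            return nullptr;                    // Empty: caller must refill.
        uintptr_t thing = arenaAddr + span->first;
        if (span->first < span->last)
            span->first = uint16_t(span->first + thingSize);
        else
            *span = InPlaceSpan{0, 0};         // Used up; the real FreeSpan
                                               // would chain to a next span.
        return reinterpret_cast<void*>(thing);
    }
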
     /* Check if |aheader|'s arena is the one currently being allocated from. */
     bool arenaIsInUse(ArenaHeader* aheader, AllocKind kind) const {
         MOZ_ASSERT(aheader);
-        const FreeList& freeList = freeLists[kind];
-        if (freeList.isEmpty())
-            return false;
-        return aheader == freeList.arenaHeader();
+        return aheader == freeLists[kind];
     }
 
     MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
-        return freeLists[thingKind].allocate(thingSize);
+        return freeLists[thingKind]->allocate(thingSize);
     }
 
     /*
      * Moves all arenas from |fromArenaLists| into |this|.
      */
     void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists);
 
     /* True if the ArenaHeader in question is found in this ArenaLists. */
@@ -800,17 +735,17 @@ class ArenaLists
     void checkEmptyFreeLists() {
 #ifdef DEBUG
         for (auto i : AllAllocKinds())
             checkEmptyFreeList(i);
 #endif
     }
 
     void checkEmptyFreeList(AllocKind kind) {
-        MOZ_ASSERT(freeLists[kind].isEmpty());
+        MOZ_ASSERT(!freeLists[kind]->hasFreeThings());
     }
 
     bool relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason,
                         SliceBudget& sliceBudget, gcstats::Statistics& stats);
 
     void queueForegroundObjectsForSweep(FreeOp* fop);
     void queueForegroundThingsForSweep(FreeOp* fop);
 
@@ -837,20 +772,18 @@ class ArenaLists
     inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
                                  KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr);
     inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
     inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
     inline void mergeSweptArenas(AllocKind thingKind);
 
     TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
                                    AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
-
-    enum ArenaAllocMode { HasFreeThings = true, IsEmpty = false };
-    template <ArenaAllocMode hasFreeThings>
-    TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind);
+    inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader,
+                                               AllocKind kind);
 
     inline void normalizeBackgroundFinalizeState(AllocKind thingKind);
 
     friend class GCRuntime;
     friend class js::Nursery;
     friend class js::TenuringTracer;
 };
 
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -94,90 +94,86 @@ class ArenaCellIterImpl
-    // These three are set in initUnsynchronized().
+    // These three are set in init().
     size_t firstThingOffset;
     size_t thingSize;
 #ifdef DEBUG
     bool isInited;
 #endif
 
     // These three are set in reset() (which is called by init()).
+    ArenaHeader* arenaAddr;
     FreeSpan span;
-    uintptr_t thing;
-    uintptr_t limit;
+    uint_fast16_t thing;
 
     // Upon entry, |thing| may point to any thing, free or used. This moves
     // it forward to the first used thing, which may be |thing| itself.
     void moveForwardIfFree() {
         MOZ_ASSERT(!done());
         MOZ_ASSERT(thing);
         // Note: if |span| is empty, this test will fail, which is what we want
         // -- |span| being empty means that we're past the end of the last free
         // thing, all the remaining things in the arena are used, and we'll
         // never need to move forward.
         if (thing == span.first) {
             thing = span.last + thingSize;
-            span = *span.nextSpan();
+            span = *span.nextSpan(arenaAddr);
         }
     }
 
   public:
     ArenaCellIterImpl()
-      : firstThingOffset(0)     // Squelch
-      , thingSize(0)            //   warnings
-      , limit(0)
+      : firstThingOffset(0)
+      , thingSize(0)
+#ifdef DEBUG
+      , isInited(false)
+#endif
+      , arenaAddr(nullptr)
+      , thing(0)
     {
     }
 
-    void initUnsynchronized(ArenaHeader* aheader) {
+    void init(ArenaHeader* aheader) {
         AllocKind kind = aheader->getAllocKind();
 #ifdef DEBUG
         isInited = true;
 #endif
         firstThingOffset = Arena::firstThingOffset(kind);
         thingSize = Arena::thingSize(kind);
         reset(aheader);
     }
 
-    void init(ArenaHeader* aheader) {
-#ifdef DEBUG
-        AllocKind kind = aheader->getAllocKind();
-        MOZ_ASSERT(aheader->zone->arenas.isSynchronizedFreeList(kind));
-#endif
-        initUnsynchronized(aheader);
-    }
-
     // Use this to move from an Arena of a particular kind to another Arena of
     // the same kind.
     void reset(ArenaHeader* aheader) {
         MOZ_ASSERT(isInited);
-        span = aheader->getFirstFreeSpan();
-        uintptr_t arenaAddr = aheader->arenaAddress();
-        thing = arenaAddr + firstThingOffset;
-        limit = arenaAddr + ArenaSize;
+        arenaAddr = aheader;
+        span = aheader->firstFreeSpan;
+        thing = firstThingOffset;
         moveForwardIfFree();
     }
 
     bool done() const {
-        return thing == limit;
+        MOZ_ASSERT(thing <= ArenaSize);
+        return thing == ArenaSize;
     }
 
     TenuredCell* getCell() const {
         MOZ_ASSERT(!done());
-        return reinterpret_cast<TenuredCell*>(thing);
+        return reinterpret_cast<TenuredCell*>(uintptr_t(arenaAddr) + thing);
     }
 
     template<typename T> T* get() const {
         MOZ_ASSERT(!done());
         return static_cast<T*>(getCell());
     }
 
     void next() {
         MOZ_ASSERT(!done());
         thing += thingSize;
-        if (thing < limit)
+        if (thing < ArenaSize)
             moveForwardIfFree();
     }
 };
 
 template<>
 JSObject*
 ArenaCellIterImpl::get<JSObject>() const;
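
The rewritten iterator keeps a 16-bit offset (|thing|) plus the header
pointer, forming an absolute address only in getCell() and comparing
against the ArenaSize constant in done(). The free-span hop performed by
moveForwardIfFree() can be pictured with this standalone C++ sketch
(stand-in types; span chaining is simplified to a plain pointer):

    #include <cstddef>
    #include <cstdint>

    const size_t kArenaSize = 4096;  // Stand-in for ArenaSize.

    struct IterSpan {
        uint16_t first;              // {0, 0} encodes the empty span and
        uint16_t last;               // never matches a valid offset.
        const IterSpan* next;        // Simplified nextSpan(aheader) link.
    };

    // Visit every used cell by walking offsets and hopping over each free
    // span, mirroring moveForwardIfFree() and next() above.
    void forEachUsedCell(uintptr_t arenaAddr, IterSpan span,
                         size_t firstThingOffset, size_t thingSize,
                         void (*visit)(void*)) {
        size_t thing = firstThingOffset;
        while (thing < kArenaSize) {
            if (thing == span.first) {
                thing = span.last + thingSize;  // Skip the whole free span.
                span = span.next ? *span.next : IterSpan{0, 0, nullptr};
            } else {
                visit(reinterpret_cast<void*>(arenaAddr + thing));
                thing += thingSize;
            }
        }
    }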
 
@@ -189,30 +185,29 @@ class ArenaCellIterUnderGC : public Aren
         init(aheader);
     }
 };
 
 class ArenaCellIterUnderFinalize : public ArenaCellIterImpl
 {
   public:
     explicit ArenaCellIterUnderFinalize(ArenaHeader* aheader) {
-        initUnsynchronized(aheader);
+        init(aheader);
     }
 };
 
 class ZoneCellIterImpl
 {
     ArenaIter arenaIter;
     ArenaCellIterImpl cellIter;
 
   protected:
     ZoneCellIterImpl() {}
 
     void init(JS::Zone* zone, AllocKind kind) {
-        MOZ_ASSERT(zone->arenas.isSynchronizedFreeList(kind));
         arenaIter.init(zone, kind);
         if (!arenaIter.done())
             cellIter.init(arenaIter.get());
     }
 
   public:
     bool done() const {
         return arenaIter.done();
@@ -248,24 +243,19 @@ class ZoneCellIterUnderGC : public ZoneC
         MOZ_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
         init(zone, kind);
     }
 };
 
 class ZoneCellIter : public ZoneCellIterImpl
 {
     JS::AutoAssertNoAlloc noAlloc;
-    ArenaLists* lists;
-    AllocKind kind;
 
   public:
-    ZoneCellIter(JS::Zone* zone, AllocKind kind)
-      : lists(&zone->arenas),
-        kind(kind)
-    {
+    ZoneCellIter(JS::Zone* zone, AllocKind kind) {
         JSRuntime* rt = zone->runtimeFromMainThread();
 
         /*
          * We have a single-threaded runtime, so there's no need to protect
          * against other threads iterating or allocating. However, we do have
          * background finalization; we have to wait for this to finish if it's
          * currently active.
          */
@@ -273,33 +263,21 @@ class ZoneCellIter : public ZoneCellIter
             zone->arenas.needBackgroundFinalizeWait(kind))
         {
             rt->gc.waitBackgroundSweepEnd();
         }
 
         /* Evict the nursery before iterating so we can see all things. */
         rt->gc.evictNursery();
 
-        if (lists->isSynchronizedFreeList(kind)) {
-            lists = nullptr;
-        } else {
-            MOZ_ASSERT(!rt->isHeapBusy());
-            lists->copyFreeListToArena(kind);
-        }
-
         /* Assert that no GCs can occur while a ZoneCellIter is live. */
         noAlloc.disallowAlloc(rt);
 
         init(zone, kind);
     }
-
-    ~ZoneCellIter() {
-        if (lists)
-            lists->clearFreeListInArena(kind);
-    }
 };
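
With the synchronization check gone, ZoneCellIter needs neither the saved
|lists|/|kind| members nor a destructor. A hedged usage sketch (the next()
and get<>() members are assumed from the rest of this class, which this
hunk does not show):

    // Count every script in a zone; note that no free-list copy/clear
    // bracketing remains and no cleanup is left for the destructor.
    size_t countScriptsSketch(JS::Zone* zone) {
        size_t count = 0;
        for (ZoneCellIter iter(zone, AllocKind::SCRIPT); !iter.done();
             iter.next()) {
            count++;
        }
        return count;
    }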
 
 class GCZonesIter
 {
   private:
     ZonesIter zone;
 
   public: