Bug 1276908 - Reimplement whole cell store buffer using a bit vector associated with the arena r=terrence
author Jon Coppeard <jcoppeard@mozilla.com>
Tue, 21 Jun 2016 15:30:35 +0100
changeset 302222 85911372f2765079241357835fe84901b49c24c2
parent 302221 b1c44ce827f75356115509db7a18075247c18d53
child 302223 b8b6dd03d8fd907fcbea09caa44a6a937947e1c7
push id 19727
push user cbook@mozilla.com
push date Wed, 22 Jun 2016 11:46:45 +0000
reviewers terrence
bugs 1276908
milestone 50.0a1
Bug 1276908 - Reimplement whole cell store buffer using a bit vector associated with the arena r=terrence
js/src/gc/Allocator.cpp
js/src/gc/Heap-inl.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Nursery.h
js/src/gc/StoreBuffer-inl.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/jsgc.cpp
js/src/vm/NativeObject.cpp
js/src/vm/NativeObject.h
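
The change replaces the hash-set based whole cell buffer (MonoTypeBuffer<WholeCellEdges>) with one bit vector per arena: an arena that has any cells in the whole cell buffer points at an ArenaCellSet holding one bit per possible cell start, and the store buffer keeps these sets on a singly linked list. The following is a minimal standalone sketch of that idea, not the real SpiderMonkey types; the arena and cell sizes are assumptions and std::bitset stands in for js::BitArray.

    #include <bitset>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Assumed constants, for illustration only.
    constexpr size_t kArenaSize = 4096;
    constexpr size_t kCellSize = 16;
    constexpr uintptr_t kArenaMask = kArenaSize - 1;
    constexpr size_t kArenaCellCount = kArenaSize / kCellSize;

    struct CellSetSketch {
        uintptr_t arenaBase = 0;                  // arena this set describes
        CellSetSketch* next = nullptr;            // link in the store buffer's list
        std::bitset<kArenaCellCount> bits;        // one bit per possible cell start

        // Index of a cell within its arena, in cell-size units.
        static size_t cellIndex(uintptr_t cellAddr) {
            assert((cellAddr & kArenaMask) % kCellSize == 0);
            return (cellAddr & kArenaMask) / kCellSize;
        }

        void putCell(uintptr_t cellAddr) { bits.set(cellIndex(cellAddr)); }
        bool hasCell(uintptr_t cellAddr) const { return bits.test(cellIndex(cellAddr)); }
    };

Membership and insertion become O(1) bit operations keyed off the cell address, which is why the per-object IN_WHOLE_CELL_BUFFER elements flag can be removed further down.
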
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -12,16 +12,18 @@
 #include "gc/GCTrace.h"
 #include "gc/Nursery.h"
 #include "jit/JitCompartment.h"
 #include "vm/Runtime.h"
 #include "vm/String.h"
 
 #include "jsobjinlines.h"
 
+#include "gc/Heap-inl.h"
+
 using namespace js;
 using namespace gc;
 
 template <typename T, AllowGC allowGC /* = CanGC */>
 JSObject*
 js::Allocate(ExclusiveContext* cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap,
              const Class* clasp)
 {
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Heap-inl.h
@@ -0,0 +1,29 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef gc_Heap_inl_h
+#define gc_Heap_inl_h
+
+#include "gc/StoreBuffer.h"
+
+inline void
+js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
+{
+    MOZ_ASSERT(firstFreeSpan.isEmpty());
+    MOZ_ASSERT(!zone);
+    MOZ_ASSERT(!allocated());
+    MOZ_ASSERT(!hasDelayedMarking);
+    MOZ_ASSERT(!allocatedDuringIncremental);
+    MOZ_ASSERT(!markOverflow);
+    MOZ_ASSERT(!auxNextLink);
+
+    zone = zoneArg;
+    allocKind = size_t(kind);
+    setAsFullyUnused();
+    bufferedCells = &ArenaCellSet::Empty;
+}
+
+#endif
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -58,16 +58,17 @@ extern bool
 UnmarkGrayCellRecursively(gc::Cell* cell, JS::TraceKind kind);
 
 extern void
 TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const char* name);
 
 namespace gc {
 
 class Arena;
+class ArenaCellSet;
 class ArenaList;
 class SortedArenaList;
 struct Chunk;
 
 /*
  * This flag allows an allocation site to request a specific heap based upon the
  * estimated lifetime or lifetime requirements of objects allocated from that
  * site.
@@ -323,26 +324,33 @@ class TenuredCell : public Cell
 #ifdef DEBUG
     inline bool isAligned() const;
 #endif
 };
 
 /* Cells are aligned to CellShift, so the largest tagged null pointer is: */
 const uintptr_t LargestTaggedNullCellPointer = (1 << CellShift) - 1;
 
+static MOZ_CONSTEXPR size_t
+DivideAndRoundUp(size_t numerator, size_t divisor) {
+    return (numerator + divisor - 1) / divisor;
+}
+
+const size_t ArenaCellCount = ArenaSize / CellSize;
+static_assert(ArenaSize % CellSize == 0, "Arena size must be a multiple of cell size");
+
 /*
 * The mark bitmap has one bit per GC cell. For multi-cell GC things this
 * wastes space but avoids expensive divisions by the thing's size when
 * accessing the bitmap. In addition, this allows us to use some bits for
 * colored marking during the cycle GC.
  */
-const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
 const size_t ArenaBitmapBits = ArenaCellCount;
-const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
-const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
+const size_t ArenaBitmapBytes = DivideAndRoundUp(ArenaBitmapBits, 8);
+const size_t ArenaBitmapWords = DivideAndRoundUp(ArenaBitmapBits, JS_BITS_PER_WORD);
 
 /*
  * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
  * can take two forms.
  *
  * - In an empty span, |first| and |last| are both zero.
  *
  * - In a non-empty span, |first| is the address of the first free thing in the
@@ -516,41 +524,31 @@ class Arena
     size_t hasDelayedMarking : 1;
     size_t allocatedDuringIncremental : 1;
     size_t markOverflow : 1;
     size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
     static_assert(ArenaShift >= 8 + 1 + 1 + 1,
                   "Arena::auxNextLink packing assumes that ArenaShift has "
                   "enough bits to cover allocKind and hasDelayedMarking.");
 
-    /* Extra field for content-specific data. */
-    void* extra;
+    /*
+     * If non-null, points to an ArenaCellSet that represents the set of cells
+     * in this arena that are in the nursery's store buffer.
+     */
+    ArenaCellSet* bufferedCells;
 
     /*
      * The size of data should be |ArenaSize - offsetof(data)|, but the offset
      * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
      * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
      * obvious. This constant is stored in js/HeapAPI.h.
      */
     uint8_t data[ArenaSize - ArenaHeaderSize];
 
-    void init(JS::Zone* zoneArg, AllocKind kind) {
-        MOZ_ASSERT(firstFreeSpan.isEmpty());
-        MOZ_ASSERT(!zone);
-        MOZ_ASSERT(!allocated());
-        MOZ_ASSERT(!hasDelayedMarking);
-        MOZ_ASSERT(!allocatedDuringIncremental);
-        MOZ_ASSERT(!markOverflow);
-        MOZ_ASSERT(!auxNextLink);
-
-        zone = zoneArg;
-        allocKind = size_t(kind);
-        setAsFullyUnused();
-        extra = nullptr;
-    }
+    void init(JS::Zone* zoneArg, AllocKind kind);
 
     // Sets |firstFreeSpan| to the Arena's entire valid range, and
     // also sets the next span stored at |firstFreeSpan.last| as empty.
     void setAsFullyUnused() {
         AllocKind kind = getAllocKind();
         firstFreeSpan.first = firstThingOffset(kind);
         firstFreeSpan.last = lastThingOffset(kind);
         FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
@@ -560,16 +558,17 @@ class Arena
     void setAsNotAllocated() {
         firstFreeSpan.initAsEmpty();
         zone = nullptr;
         allocKind = size_t(AllocKind::LIMIT);
         hasDelayedMarking = 0;
         allocatedDuringIncremental = 0;
         markOverflow = 0;
         auxNextLink = 0;
+        bufferedCells = nullptr;
     }
 
     uintptr_t address() const {
         checkAddress();
         return uintptr_t(this);
     }
 
     inline void checkAddress() const;
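
ArenaCellCount is now derived directly as ArenaSize / CellSize, and the bitmap byte and word counts use the new DivideAndRoundUp helper so that partial bytes or words still get storage. A standalone restatement of the helper with a couple of worked cases; the values are illustrative, not the real constants:

    #include <cstddef>

    static constexpr size_t DivideAndRoundUp(size_t numerator, size_t divisor) {
        return (numerator + divisor - 1) / divisor;
    }

    // With exact division the result is unchanged; otherwise it rounds up so
    // that every bit is still covered by some byte or word of the bitmap.
    static_assert(DivideAndRoundUp(256, 8) == 32, "exact case: 256 bits -> 32 bytes");
    static_assert(DivideAndRoundUp(260, 8) == 33, "partial byte still gets storage");
    static_assert(DivideAndRoundUp(260, 64) == 5, "260 bits need 5 64-bit words");
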
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -2046,18 +2046,16 @@ js::gc::StoreBuffer::MonoTypeBuffer<T>::
     sinkStore(owner);
     for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
         r.front().trace(mover);
 }
 
 namespace js {
 namespace gc {
 template void
-StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>::trace(StoreBuffer*, TenuringTracer&);
-template void
 StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>::trace(StoreBuffer*, TenuringTracer&);
 template void
 StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>::trace(StoreBuffer*, TenuringTracer&);
 template void
 StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>::trace(StoreBuffer*, TenuringTracer&);
 } // namespace gc
 } // namespace js
 
@@ -2083,25 +2081,21 @@ js::gc::StoreBuffer::SlotsEdge::trace(Te
         int32_t start = Min(uint32_t(start_), obj->slotSpan());
         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
         MOZ_ASSERT(end >= start);
         mover.traceObjectSlots(obj, start, end - start);
     }
 }
 
 void
-js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
+js::gc::StoreBuffer::traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* edge)
 {
     MOZ_ASSERT(edge->isTenured());
-    JS::TraceKind kind = edge->getTraceKind();
     if (kind == JS::TraceKind::Object) {
         JSObject *object = static_cast<JSObject*>(edge);
-        if (object->is<NativeObject>())
-            object->as<NativeObject>().clearInWholeCellBuffer();
-
         mover.traceObject(object);
 
         // Additionally trace the expando object attached to any unboxed plain
         // objects. Baseline and Ion can write properties to the expando while
         // only adding a post barrier to the owning unboxed object. Note that
         // it isn't possible for a nursery unboxed object to have a tenured
         // expando, so that adding a post barrier on the original object will
         // capture any tenured->nursery edges in the expando as well.
@@ -2116,16 +2110,37 @@ js::gc::StoreBuffer::WholeCellEdges::tra
         static_cast<JSScript*>(edge)->traceChildren(&mover);
     else if (kind == JS::TraceKind::JitCode)
         static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
     else
         MOZ_CRASH();
 }
 
 void
+js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
+{
+    for (ArenaCellSet* cells = bufferWholeCell; cells; cells = cells->next) {
+        Arena* arena = cells->arena;
+
+        MOZ_ASSERT(arena->bufferedCells == cells);
+        arena->bufferedCells = &ArenaCellSet::Empty;
+
+        JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
+        for (size_t i = 0; i < ArenaCellCount; i++) {
+            if (cells->hasCell(i)) {
+                auto cell = reinterpret_cast<Cell*>(uintptr_t(arena) + CellSize * i);
+                traceWholeCell(mover, kind, cell);
+            }
+        }
+    }
+
+    bufferWholeCell = nullptr;
+}
+
+void
 js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
 {
     if (!*edge)
         return;
 
     MOZ_ASSERT((*edge)->getTraceKind() == JS::TraceKind::Object);
     mover.traverse(reinterpret_cast<JSObject**>(edge));
 }
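
traceWholeCells above walks the linked list of ArenaCellSets, resets each arena back to the shared Empty sentinel, and visits every set bit, recovering each cell's address as the arena base plus CellSize times the bit index. A simplified sketch of that traversal shape, with a callback standing in for the TenuringTracer and assumed constants:

    #include <bitset>
    #include <cstddef>
    #include <cstdint>
    #include <functional>

    constexpr size_t kArenaCellCount = 256;       // assumed ArenaSize / CellSize
    constexpr size_t kCellSize = 16;              // assumed cell size

    struct SetNode {
        uintptr_t arenaBase;
        SetNode* next;
        std::bitset<kArenaCellCount> bits;
    };

    // Visit every buffered cell, then drop the whole list, mirroring how the
    // real code restores arena->bufferedCells and finally clears bufferWholeCell.
    inline void traceAllWholeCells(SetNode*& head,
                                   const std::function<void(uintptr_t)>& traceCell) {
        for (SetNode* node = head; node; node = node->next) {
            for (size_t i = 0; i < kArenaCellCount; i++) {
                if (node->bits.test(i))
                    traceCell(node->arenaBase + kCellSize * i);  // address from bit index
            }
        }
        head = nullptr;
    }
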
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -213,16 +213,21 @@ class Nursery
     MOZ_ALWAYS_INLINE uintptr_t start() const {
         return heapStart_;
     }
 
     MOZ_ALWAYS_INLINE uintptr_t heapEnd() const {
         return heapEnd_;
     }
 
+    // Free space remaining, not counting chunk trailers.
+    MOZ_ALWAYS_INLINE size_t approxFreeSpace() const {
+        return heapEnd_ - position_;
+    }
+
 #ifdef JS_GC_ZEAL
     void enterZealMode();
     void leaveZealMode();
 #endif
 
   private:
     /*
      * The start and end pointers are stored under the runtime so that we can
--- a/js/src/gc/StoreBuffer-inl.h
+++ b/js/src/gc/StoreBuffer-inl.h
@@ -2,29 +2,74 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_StoreBuffer_inl_h
 #define gc_StoreBuffer_inl_h
 
+#include "gc/StoreBuffer.h"
+
+#include "gc/Heap.h"
+
 namespace js {
 namespace gc {
 
+inline /* static */ size_t
+ArenaCellSet::getCellIndex(const TenuredCell* cell)
+{
+    MOZ_ASSERT((uintptr_t(cell) & ArenaMask) % CellSize == 0);
+    return (uintptr_t(cell) & ArenaMask) / CellSize;
+}
+
+inline /* static */ void
+ArenaCellSet::getWordIndexAndMask(size_t cellIndex, size_t* wordp, uint32_t* maskp)
+{
+    BitArray<ArenaCellCount>::getIndexAndMask(cellIndex, wordp, maskp);
+}
+
+inline bool
+ArenaCellSet::hasCell(size_t cellIndex) const
+{
+    MOZ_ASSERT(cellIndex < ArenaCellCount);
+    return bits.get(cellIndex);
+}
+
+inline void
+ArenaCellSet::putCell(size_t cellIndex)
+{
+    MOZ_ASSERT(cellIndex < ArenaCellCount);
+    bits.set(cellIndex);
+}
+
+inline void
+ArenaCellSet::check() const
+{
+#ifdef DEBUG
+    bool bitsZero = bits.isAllClear();
+    MOZ_ASSERT(isEmpty() == bitsZero);
+    MOZ_ASSERT(isEmpty() == !arena);
+    MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells == this);
+#endif
+}
+
 inline void
 StoreBuffer::putWholeCell(Cell* cell)
 {
     MOZ_ASSERT(cell->isTenured());
 
-    if (cell->getTraceKind() == JS::TraceKind::Object) {
-        JSObject *obj = static_cast<JSObject*>(cell);
-        if (obj->is<NativeObject>())
-            obj->as<NativeObject>().setInWholeCellBuffer();
+    Arena* arena = cell->asTenured().arena();
+    ArenaCellSet* cells = arena->bufferedCells;
+    if (cells->isEmpty()) {
+        cells = AllocateWholeCellSet(arena);
+        if (!cells)
+            return;
     }
 
-    put(bufferWholeCell, WholeCellEdges(cell));
+    cells->putCell(&cell->asTenured());
+    cells->check();
 }
 
 } // namespace gc
 } // namespace js
 
 #endif // gc_StoreBuffer_inl_h
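
putWholeCell now reduces to: find the cell's arena, look at its bufferedCells pointer, and allocate a real set the first time any cell in that arena is buffered. Pointing every arena at a shared Empty sentinel keeps the common path to a single load plus the isEmpty() check. A simplified sketch of that lazy-allocation shape; the names and the plain new are illustrative, since the real set is allocated in the nursery by AllocateWholeCellSet:

    #include <bitset>
    #include <cstddef>

    constexpr size_t kArenaCellCount = 256;           // assumed ArenaSize / CellSize

    struct SetSketch {
        std::bitset<kArenaCellCount> bits;
    };

    // Shared sentinel; never written to, only compared against.
    static SetSketch EmptySentinel;

    struct ArenaSketch {
        SetSketch* bufferedCells = &EmptySentinel;
    };

    inline void putWholeCellSketch(ArenaSketch& arena, size_t cellIndex) {
        if (arena.bufferedCells == &EmptySentinel)    // stands in for cells->isEmpty()
            arena.bufferedCells = new SetSketch();    // real code: AllocateWholeCellSet
        arena.bufferedCells->bits.set(cellIndex);
    }
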
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -1,15 +1,15 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "gc/StoreBuffer.h"
+#include "gc/StoreBuffer-inl.h"
 
 #include "mozilla/Assertions.h"
 
 #include "jscompartment.h"
 
 #include "gc/Statistics.h"
 #include "vm/ArgumentsObject.h"
 #include "vm/Runtime.h"
@@ -40,17 +40,16 @@ bool
 StoreBuffer::enable()
 {
     if (enabled_)
         return true;
 
     if (!bufferVal.init() ||
         !bufferCell.init() ||
         !bufferSlot.init() ||
-        !bufferWholeCell.init() ||
         !bufferGeneric.init())
     {
         return false;
     }
 
     enabled_ = true;
     return true;
 }
@@ -73,18 +72,21 @@ StoreBuffer::clear()
         return;
 
     aboutToOverflow_ = false;
     cancelIonCompilations_ = false;
 
     bufferVal.clear();
     bufferCell.clear();
     bufferSlot.clear();
-    bufferWholeCell.clear();
     bufferGeneric.clear();
+
+    for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
+         set->arena->bufferedCells = nullptr;
+    bufferWholeCell = nullptr;
 }
 
 void
 StoreBuffer::setAboutToOverflow()
 {
     if (!aboutToOverflow_) {
         aboutToOverflow_ = true;
         runtime_->gc.stats.count(gcstats::STAT_STOREBUFFER_OVERFLOW);
@@ -94,16 +96,58 @@ StoreBuffer::setAboutToOverflow()
 
 void
 StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes* sizes)
 {
     sizes->storeBufferVals       += bufferVal.sizeOfExcludingThis(mallocSizeOf);
     sizes->storeBufferCells      += bufferCell.sizeOfExcludingThis(mallocSizeOf);
     sizes->storeBufferSlots      += bufferSlot.sizeOfExcludingThis(mallocSizeOf);
-    sizes->storeBufferWholeCells += bufferWholeCell.sizeOfExcludingThis(mallocSizeOf);
     sizes->storeBufferGenerics   += bufferGeneric.sizeOfExcludingThis(mallocSizeOf);
+
+    for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
+        sizes->storeBufferWholeCells += sizeof(ArenaCellSet);
+}
+
+void
+StoreBuffer::addToWholeCellBuffer(ArenaCellSet* set)
+{
+    set->next = bufferWholeCell;
+    bufferWholeCell = set;
+}
+
+ArenaCellSet ArenaCellSet::Empty(nullptr);
+
+ArenaCellSet::ArenaCellSet(Arena* arena)
+  : arena(arena), next(nullptr)
+{
+    bits.clear(false);
+}
+
+ArenaCellSet*
+js::gc::AllocateWholeCellSet(Arena* arena)
+{
+    Zone* zone = arena->zone;
+    JSRuntime* rt = zone->runtimeFromMainThread();
+    if (!rt->gc.nursery.isEnabled())
+        return nullptr;
+
+    AutoEnterOOMUnsafeRegion oomUnsafe;
+    Nursery& nursery = rt->gc.nursery;
+    void* data = nursery.allocateBuffer(zone, sizeof(ArenaCellSet));
+    if (!data) {
+        oomUnsafe.crash("Failed to allocate WholeCellSet");
+        return nullptr;
+    }
+
+    if (nursery.approxFreeSpace() < ArenaCellSet::NurseryFreeThresholdBytes)
+        rt->gc.storeBuffer.setAboutToOverflow();
+
+    auto cells = static_cast<ArenaCellSet*>(data);
+    new (cells) ArenaCellSet(arena);
+    arena->bufferedCells = cells;
+    rt->gc.storeBuffer.addToWholeCellBuffer(cells);
+    return cells;
 }
 
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
-template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
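
AllocateWholeCellSet places each ArenaCellSet in the nursery itself, so the sets are reclaimed wholesale at the next minor GC rather than freed individually. The NurseryFreeThresholdBytes check is what keeps the buffer bounded: once nursery free space drops below the threshold, the store buffer marks itself about to overflow so a minor GC is requested. A tiny sketch of that heuristic; the threshold matches the patch, the function name is illustrative:

    #include <cstddef>

    constexpr size_t NurseryFreeThresholdBytes = 64 * 1024;

    // Flag the store buffer as about to overflow once the nursery, which also
    // holds the ArenaCellSets themselves, is nearly full.
    inline bool shouldFlagAboutToOverflow(size_t nurseryFreeBytes) {
        return nurseryFreeBytes < NurseryFreeThresholdBytes;
    }
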
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -16,16 +16,18 @@
 
 #include "ds/LifoAlloc.h"
 #include "gc/Nursery.h"
 #include "js/MemoryMetrics.h"
 
 namespace js {
 namespace gc {
 
+class ArenaCellSet;
+
 /*
  * BufferableRef represents an abstract reference for use in the generational
  * GC's remembered set. Entries in the store buffer that cannot be represented
  * with the simple pointer-to-a-pointer scheme must derive from this class and
  * use the generic store buffer interface.
  */
 class BufferableRef
 {
@@ -329,37 +331,16 @@ class StoreBuffer
 
         typedef struct {
             typedef SlotsEdge Lookup;
             static HashNumber hash(const Lookup& l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
             static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; }
         } Hasher;
     };
 
-    struct WholeCellEdges
-    {
-        Cell* edge;
-
-        WholeCellEdges() : edge(nullptr) {}
-        explicit WholeCellEdges(Cell* cell) : edge(cell) {
-            MOZ_ASSERT(edge->isTenured());
-        }
-
-        bool operator==(const WholeCellEdges& other) const { return edge == other.edge; }
-        bool operator!=(const WholeCellEdges& other) const { return edge != other.edge; }
-
-        bool maybeInRememberedSet(const Nursery&) const { return true; }
-
-        void trace(TenuringTracer& mover) const;
-
-        explicit operator bool() const { return edge != nullptr; }
-
-        typedef PointerEdgeHasher<WholeCellEdges> Hasher;
-    };
-
     template <typename Buffer, typename Edge>
     void unput(Buffer& buffer, const Edge& edge) {
         MOZ_ASSERT(!JS::shadow::Runtime::asShadowRuntime(runtime_)->isHeapBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
         if (!isEnabled())
             return;
         mozilla::ReentrancyGuard g(*this);
         buffer.unput(this, edge);
@@ -374,32 +355,32 @@ class StoreBuffer
         mozilla::ReentrancyGuard g(*this);
         if (edge.maybeInRememberedSet(nursery_))
             buffer.put(this, edge);
     }
 
     MonoTypeBuffer<ValueEdge> bufferVal;
     MonoTypeBuffer<CellPtrEdge> bufferCell;
     MonoTypeBuffer<SlotsEdge> bufferSlot;
-    MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
+    ArenaCellSet* bufferWholeCell;
     GenericBuffer bufferGeneric;
     bool cancelIonCompilations_;
 
     JSRuntime* runtime_;
     const Nursery& nursery_;
 
     bool aboutToOverflow_;
     bool enabled_;
 #ifdef DEBUG
     bool mEntered; /* For ReentrancyGuard. */
 #endif
 
   public:
     explicit StoreBuffer(JSRuntime* rt, const Nursery& nursery)
-      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(), bufferGeneric(),
+      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(nullptr), bufferGeneric(),
         cancelIonCompilations_(false), runtime_(rt), nursery_(nursery), aboutToOverflow_(false),
         enabled_(false)
 #ifdef DEBUG
         , mEntered(false)
 #endif
     {
     }
 
@@ -435,21 +416,73 @@ class StoreBuffer
     void setShouldCancelIonCompilations() {
         cancelIonCompilations_ = true;
     }
 
     /* Methods to trace the source of all edges in the store buffer. */
     void traceValues(TenuringTracer& mover)            { bufferVal.trace(this, mover); }
     void traceCells(TenuringTracer& mover)             { bufferCell.trace(this, mover); }
     void traceSlots(TenuringTracer& mover)             { bufferSlot.trace(this, mover); }
-    void traceWholeCells(TenuringTracer& mover)        { bufferWholeCell.trace(this, mover); }
     void traceGenericEntries(JSTracer *trc)            { bufferGeneric.trace(this, trc); }
 
+    void traceWholeCells(TenuringTracer& mover);
+    void traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* cell);
+
     /* For use by our owned buffers and for testing. */
     void setAboutToOverflow();
 
+    void addToWholeCellBuffer(ArenaCellSet* set);
+
     void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes* sizes);
 };
 
+// A set of cells in an arena used to implement the whole cell store buffer.
+class ArenaCellSet
+{
+    friend class StoreBuffer;
+
+    // The arena this relates to.
+    Arena* arena;
+
+    // Pointer to next set forming a linked list.
+    ArenaCellSet* next;
+
+    // Bit vector for each possible cell start position.
+    BitArray<ArenaCellCount> bits;
+
+  public:
+    ArenaCellSet(Arena* arena);
+
+    bool hasCell(const TenuredCell* cell) const {
+        return hasCell(getCellIndex(cell));
+    }
+
+    void putCell(const TenuredCell* cell) {
+        putCell(getCellIndex(cell));
+    }
+
+    bool isEmpty() const {
+        return this == &Empty;
+    }
+
+    bool hasCell(size_t cellIndex) const;
+
+    void putCell(size_t cellIndex);
+
+    void check() const;
+
+    // Sentinel object used for all empty sets.
+    static ArenaCellSet Empty;
+
+    static size_t getCellIndex(const TenuredCell* cell);
+    static void getWordIndexAndMask(size_t cellIndex, size_t* wordp, uint32_t* maskp);
+
+    // Attempt to trigger a minor GC if free space in the nursery (where these
+    // objects are allocated) falls below this threshold.
+    static const size_t NurseryFreeThresholdBytes = 64 * 1024;
+};
+
+ArenaCellSet* AllocateWholeCellSet(Arena* arena);
+
 } /* namespace gc */
 } /* namespace js */
 
 #endif /* gc_StoreBuffer_h */
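
ArenaCellSet::getWordIndexAndMask simply forwards to the underlying BitArray, which splits a cell index into a storage word and a single-bit mask. A sketch of that split, assuming 32-bit storage words; the real width comes from the BitArray implementation:

    #include <cstddef>
    #include <cstdint>

    inline void getWordIndexAndMaskSketch(size_t cellIndex, size_t* wordp, uint32_t* maskp) {
        constexpr size_t bitsPerWord = 32;                    // assumed word width
        *wordp = cellIndex / bitsPerWord;                     // which word holds the bit
        *maskp = uint32_t(1) << (cellIndex % bitsPerWord);    // mask for that bit
    }
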
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1965,16 +1965,17 @@ RelocateCell(Zone* zone, TenuredCell* sr
 
 static void
 RelocateArena(Arena* arena, SliceBudget& sliceBudget)
 {
     MOZ_ASSERT(arena->allocated());
     MOZ_ASSERT(!arena->hasDelayedMarking);
     MOZ_ASSERT(!arena->markOverflow);
     MOZ_ASSERT(!arena->allocatedDuringIncremental);
+    MOZ_ASSERT(arena->bufferedCells->isEmpty());
 
     Zone* zone = arena->zone;
 
     AllocKind thingKind = arena->getAllocKind();
     size_t thingSize = arena->getThingSize();
 
     for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
         RelocateCell(zone, i.getCell(), thingKind, thingSize);
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -87,22 +87,16 @@ ObjectElements::MakeElementsCopyOnWrite(
     static_assert(sizeof(HeapSlot) >= sizeof(GCPtrObject),
                   "there must be enough room for the owner object pointer at "
                   "the end of the elements");
     if (!obj->ensureElements(cx, obj->getDenseInitializedLength() + 1))
         return false;
 
     ObjectElements* header = obj->getElementsHeader();
 
-    // As soon as we have (or may soon have) multiple objects referencing a
-    // single header, it isn't clear which object the "I'm already in the
-    // whole-cell store buffer" bit is describing, so just disable that
-    // optimization.
-    header->clearInWholeCellBuffer();
-
     // Note: this method doesn't update type information to indicate that the
     // elements might be copy on write. Handling this is left to the caller.
     MOZ_ASSERT(!header->isCopyOnWrite());
     header->flags |= COPY_ON_WRITE;
 
     header->ownerObject().init(obj);
     return true;
 }
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -176,24 +176,16 @@ class ObjectElements
         // the shared elements may change (from ints to doubles) without
         // making a copy first.
         COPY_ON_WRITE               = 0x4,
 
         // For TypedArrays only: this TypedArray's storage is mapping shared
         // memory.  This is a static property of the TypedArray, set when it
         // is created and never changed.
         SHARED_MEMORY               = 0x8,
-
-        // Set if the object has already been added to the whole-cell store
-        // buffer, and therefore adding individual elements into the slots store
-        // buffer would be pointless. This is never set for the empty or shared
-        // elements headers, nor if the elements are copy on write; in such
-        // situations it isn't clear *which* object that references this
-        // elements header has already been put in the whole-cell store buffer.
-        IN_WHOLE_CELL_BUFFER        = 0x10,
     };
 
   private:
     friend class ::JSObject;
     friend class ArrayObject;
     friend class NativeObject;
     friend class TenuringTracer;
 
@@ -240,29 +232,16 @@ class ObjectElements
     }
     bool isCopyOnWrite() const {
         return flags & COPY_ON_WRITE;
     }
     void clearCopyOnWrite() {
         MOZ_ASSERT(isCopyOnWrite());
         flags &= ~COPY_ON_WRITE;
     }
-    bool isInWholeCellBuffer() const {
-        return flags & IN_WHOLE_CELL_BUFFER;
-    }
-    void setInWholeCellBuffer() {
-        MOZ_ASSERT(!isSharedMemory());
-        MOZ_ASSERT(!isCopyOnWrite());
-        flags |= IN_WHOLE_CELL_BUFFER;
-    }
-    void clearInWholeCellBuffer() {
-        MOZ_ASSERT(!isSharedMemory());
-        MOZ_ASSERT(!isCopyOnWrite());
-        flags &= ~IN_WHOLE_CELL_BUFFER;
-    }
 
   public:
     MOZ_CONSTEXPR ObjectElements(uint32_t capacity, uint32_t length)
       : flags(0), initializedLength(0), capacity(capacity), length(length)
     {}
 
     enum class SharedMemory {
         IsShared
@@ -476,25 +455,19 @@ class NativeObject : public JSObject
     // marked as shared by giving them an ObjectElements that has the
     // ObjectElements::SHARED_MEMORY flag set.
     void setIsSharedMemory() {
         MOZ_ASSERT(elements_ == emptyObjectElements);
         elements_ = emptyObjectElementsShared;
     }
 
     bool isInWholeCellBuffer() const {
-        return getElementsHeader()->isInWholeCellBuffer();
-    }
-    void setInWholeCellBuffer() {
-        if (!hasEmptyElements() && !isSharedMemory() && !getElementsHeader()->isCopyOnWrite())
-            getElementsHeader()->setInWholeCellBuffer();
-    }
-    void clearInWholeCellBuffer() {
-        if (!hasEmptyElements() && !isSharedMemory() && !getElementsHeader()->isCopyOnWrite())
-            getElementsHeader()->clearInWholeCellBuffer();
+        const gc::TenuredCell* cell = &asTenured();
+        gc::ArenaCellSet* cells = cell->arena()->bufferedCells;
+        return cells && cells->hasCell(cell);
     }
 
   protected:
 #ifdef DEBUG
     void checkShapeConsistency();
 #else
     void checkShapeConsistency() { }
 #endif