Bug 1068223 - Clean up the Cell hierarchy; r=jonco
author Terrence Cole <terrence@mozilla.com>
Wed, 17 Sep 2014 10:32:37 -0700
changeset 205891 f4e226d272443fa3515ae579e4d8fcd7b0cc292a
parent 205890 29ba45e163d874fb94f74002305a72cfc513eb4f
child 205892 495dcc54c2b25829e268197a54611a19098f3293
push id 27507
push user ryanvm@gmail.com
push date Thu, 18 Sep 2014 02:16:54 +0000
reviewers jonco
bugs 1068223
milestone 35.0a1
js/src/gc/Barrier.h
js/src/gc/ForkJoinNursery.cpp
js/src/gc/Heap.h
js/src/gc/Iteration.cpp
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/Tracer.cpp
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/CodeGenerator.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonCode.h
js/src/jit/IonMacroAssembler.cpp
js/src/jit/MIR.cpp
js/src/jit/VMFunctions.cpp
js/src/jsfriendapi.cpp
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsobjinlines.h
js/src/jspropertytree.cpp
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsscriptinlines.h
js/src/jsweakmap.cpp
js/src/proxy/Wrapper.cpp
js/src/vm/ObjectImpl.h
js/src/vm/RegExpObject.cpp
js/src/vm/SelfHosting.cpp
js/src/vm/Shape.h
js/src/vm/String.cpp
js/src/vm/String.h
js/src/vm/Symbol.h
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -185,19 +185,16 @@ namespace types {
 struct TypeObject;
 }
 
 namespace jit {
 class JitCode;
 }
 
 #ifdef DEBUG
-bool
-RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone *shadowZone);
-
 // Barriers can't be triggered during backend Ion compilation, which may run on
 // a helper thread.
 bool
 CurrentThreadIsIonCompiling();
 #endif
 
 bool
 StringIsPermanentAtom(JSString *str);
@@ -228,157 +225,66 @@ template <> struct MapTypeToTraceKind<Sa
 template <> struct MapTypeToTraceKind<ScopeObject>      { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<Shape>            { static const JSGCTraceKind kind = JSTRACE_SHAPE; };
 template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<SharedTypedArrayObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
 template <> struct MapTypeToTraceKind<jit::JitCode>     { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
 template <> struct MapTypeToTraceKind<types::TypeObject>{ static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
 
-template <typename T>
-void
-MarkUnbarriered(JSTracer *trc, T **thingp, const char *name);
-
 // Direct value access used by the write barriers and the jits.
 void
 MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
 
 // These three declarations are also present in gc/Marking.h, via the DeclMarker
 // macro.  Not great, but hard to avoid.
 void
-MarkObjectUnbarriered(JSTracer *trc, JSObject **obj, const char *name);
-void
 MarkStringUnbarriered(JSTracer *trc, JSString **str, const char *name);
 void
 MarkSymbolUnbarriered(JSTracer *trc, JS::Symbol **sym, const char *name);
 
-// Note that some subclasses (e.g. ObjectImpl) specialize some of these
-// methods.
-template <typename T>
-class BarrieredCell : public gc::Cell
-{
-  public:
-    MOZ_ALWAYS_INLINE JS::Zone *zone() const { return tenuredZone(); }
-    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const { return JS::shadow::Zone::asShadowZone(zone()); }
-    MOZ_ALWAYS_INLINE JS::Zone *zoneFromAnyThread() const { return tenuredZoneFromAnyThread(); }
-    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
-        return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
-    }
-
-    static MOZ_ALWAYS_INLINE void readBarrier(T *thing) {
-#ifdef JSGC_INCREMENTAL
-        JS_ASSERT(!CurrentThreadIsIonCompiling());
-        JS_ASSERT(!T::isNullLike(thing));
-        JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
-        if (shadowZone->needsIncrementalBarrier()) {
-            MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
-            T *tmp = thing;
-            js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "read barrier");
-            JS_ASSERT(tmp == thing);
-        }
-        if (JS::GCThingIsMarkedGray(thing))
-            JS::UnmarkGrayGCThingRecursively(thing, MapTypeToTraceKind<T>::kind);
-#endif
-    }
-
-    static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone *zone) {
-#ifdef JSGC_INCREMENTAL
-        return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
-#else
-        return false;
-#endif
-    }
-
-    static MOZ_ALWAYS_INLINE bool isNullLike(T *thing) { return !thing; }
-
-    static MOZ_ALWAYS_INLINE void writeBarrierPre(T *thing) {
-#ifdef JSGC_INCREMENTAL
-        JS_ASSERT(!CurrentThreadIsIonCompiling());
-        if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
-            return;
-
-        JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
-        if (shadowZone->needsIncrementalBarrier()) {
-            MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
-            T *tmp = thing;
-            js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "write barrier");
-            JS_ASSERT(tmp == thing);
-        }
-#endif
-    }
-
-    static void writeBarrierPost(T *thing, void *cellp) {}
-    static void writeBarrierPostRelocate(T *thing, void *cellp) {}
-    static void writeBarrierPostRemove(T *thing, void *cellp) {}
-
-    template <typename S>
-    S *pod_malloc(size_t numElems) {
-        return zone()->template pod_malloc<S>(numElems);
-    }
-    template <typename S>
-    S *pod_calloc(size_t numElems) {
-        return zone()->template pod_calloc<S>(numElems);
-    }
-    template <typename S>
-    S *pod_realloc(S *prior, size_t oldSize, size_t newSize) {
-        return zone()->template pod_realloc<S>(prior, oldSize, newSize);
-    }
-
-    template <typename S, typename U>
-    S *pod_malloc_with_extra(size_t numExtra) {
-        return zone()->template pod_malloc_with_extra<S, U>(numExtra);
-    }
-
-    template <typename S, typename U>
-    S *pod_calloc_with_extra(size_t numExtra) {
-        return zone()->template pod_calloc_with_extra<S, U>(numExtra);
-    }
-};
-
 } // namespace gc
 
-// Note: the following Zone-getting functions must be equivalent to the zone()
-// and shadowZone() functions implemented by the subclasses of BarrieredCell.
-
-static inline JS::shadow::Zone *
-ShadowZoneOfString(JSString *str)
-{
-    return JS::shadow::Zone::asShadowZone(reinterpret_cast<const js::gc::Cell *>(str)->tenuredZone());
+// This context is more basal than the GC things being implemented, so C++ does
+// not know about the inheritance hierarchy yet.
+static inline const gc::TenuredCell *AsTenuredCell(const JSString *str) {
+    return reinterpret_cast<const gc::TenuredCell *>(str);
+}
+static inline const gc::TenuredCell *AsTenuredCell(const JS::Symbol *sym) {
+    return reinterpret_cast<const gc::TenuredCell *>(sym);
 }
 
 JS::Zone *
 ZoneOfObjectFromAnyThread(const JSObject &obj);
 
 static inline JS::shadow::Zone *
 ShadowZoneOfObjectFromAnyThread(JSObject *obj)
 {
     return JS::shadow::Zone::asShadowZone(ZoneOfObjectFromAnyThread(*obj));
 }
 
 static inline JS::shadow::Zone *
 ShadowZoneOfStringFromAnyThread(JSString *str)
 {
-    return JS::shadow::Zone::asShadowZone(
-        reinterpret_cast<const js::gc::Cell *>(str)->tenuredZoneFromAnyThread());
+    return JS::shadow::Zone::asShadowZone(AsTenuredCell(str)->zoneFromAnyThread());
 }
 
 static inline JS::shadow::Zone *
 ShadowZoneOfSymbolFromAnyThread(JS::Symbol *sym)
 {
-    return JS::shadow::Zone::asShadowZone(
-        reinterpret_cast<const js::gc::Cell *>(sym)->tenuredZoneFromAnyThread());
+    return JS::shadow::Zone::asShadowZone(AsTenuredCell(sym)->zoneFromAnyThread());
 }
 
 MOZ_ALWAYS_INLINE JS::Zone *
 ZoneOfValueFromAnyThread(const JS::Value &value)
 {
     JS_ASSERT(value.isMarkable());
     if (value.isObject())
         return ZoneOfObjectFromAnyThread(value.toObject());
-    return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
+    return js::gc::TenuredCell::fromPointer(value.toGCThing())->zoneFromAnyThread();
 }
 
 void
 ValueReadBarrier(const Value &value);
 
 template <typename T>
 struct InternalGCMethods {};
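
Note: the BarrieredCell<T> mixin removed above carried zone accessors, read/write barriers, and pod_* allocation forwarders; the zone and barrier halves reappear on gc::TenuredCell in gc/Heap.h below, and only the AsTenuredCell() casting shims stay here. A minimal sketch of a caller under the new scheme (StringZoneNeedsBarrier is a hypothetical name, written as if inside namespace js):

    static bool
    StringZoneNeedsBarrier(JSString *str)
    {
        // AsTenuredCell() is the one remaining reinterpret_cast shim; past it,
        // everything uses the ordinary TenuredCell accessors from gc/Heap.h.
        return ShadowZoneOfStringFromAnyThread(str)->needsIncrementalBarrier();
    }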
 
--- a/js/src/gc/ForkJoinNursery.cpp
+++ b/js/src/gc/ForkJoinNursery.cpp
@@ -549,17 +549,17 @@ ForkJoinNursery::allocateSlots(JSObject 
 {
     JS_ASSERT(obj);
     JS_ASSERT(nslots > 0);
 
     if (nslots & mozilla::tl::MulOverflowMask<sizeof(HeapSlot)>::value)
         return nullptr;
 
     if (!isInsideNewspace(obj))
-        return obj->pod_malloc<HeapSlot>(nslots);
+        return obj->zone()->pod_malloc<HeapSlot>(nslots);
 
     if (nslots > MaxNurserySlots)
         return allocateHugeSlots(obj, nslots);
 
     size_t size = nslots * sizeof(HeapSlot);
     HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
     if (slots)
         return slots;
@@ -571,17 +571,17 @@ HeapSlot *
 ForkJoinNursery::reallocateSlots(JSObject *obj, HeapSlot *oldSlots,
                                  uint32_t oldCount, uint32_t newCount)
 {
     if (newCount & mozilla::tl::MulOverflowMask<sizeof(HeapSlot)>::value)
         return nullptr;
 
     if (!isInsideNewspace(obj)) {
         JS_ASSERT_IF(oldSlots, !isInsideNewspace(oldSlots));
-        return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
+        return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
     }
 
     if (!isInsideNewspace(oldSlots))
         return reallocateHugeSlots(obj, oldSlots, oldCount, newCount);
 
     // No-op if we're shrinking, we can't make use of the freed portion.
     if (newCount < oldCount)
         return oldSlots;
@@ -621,30 +621,30 @@ ForkJoinNursery::freeSlots(HeapSlot *slo
 }
 
 HeapSlot *
 ForkJoinNursery::allocateHugeSlots(JSObject *obj, size_t nslots)
 {
     if (nslots & mozilla::tl::MulOverflowMask<sizeof(HeapSlot)>::value)
         return nullptr;
 
-    HeapSlot *slots = obj->pod_malloc<HeapSlot>(nslots);
+    HeapSlot *slots = obj->zone()->pod_malloc<HeapSlot>(nslots);
     if (!slots)
         return slots;
 
     // If this put fails, we will only leak the slots.
     (void)hugeSlots[hugeSlotsNew].put(slots);
     return slots;
 }
 
 HeapSlot *
 ForkJoinNursery::reallocateHugeSlots(JSObject *obj, HeapSlot *oldSlots,
                                      uint32_t oldCount, uint32_t newCount)
 {
-    HeapSlot *newSlots = obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
+    HeapSlot *newSlots = obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
     if (!newSlots)
         return newSlots;
 
     if (oldSlots != newSlots) {
         hugeSlots[hugeSlotsNew].remove(oldSlots);
         // If this put fails, we will only leak the slots.
         (void)hugeSlots[hugeSlotsNew].put(newSlots);
     }
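
Note: the pattern in this file generalizes: the pod_malloc/pod_calloc/pod_realloc forwarders that BarrieredCell used to provide on the GC thing itself are gone, so callers name the zone explicitly. A minimal sketch (AllocateTenuredSlots is a hypothetical helper, assuming the surrounding SpiderMonkey headers):

    static js::HeapSlot *
    AllocateTenuredSlots(JSObject *obj, uint32_t nslots)
    {
        // Before this patch: obj->pod_malloc<HeapSlot>(nslots), via the
        // BarrieredCell forwarder; now allocation is requested from the zone.
        return obj->zone()->pod_malloc<js::HeapSlot>(nslots);
    }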
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -15,40 +15,55 @@
 #include <stdint.h>
 
 #include "jspubtd.h"
 #include "jstypes.h"
 #include "jsutil.h"
 
 #include "ds/BitArray.h"
 #include "gc/Memory.h"
+#include "js/GCAPI.h"
 #include "js/HeapAPI.h"
+#include "js/TracingAPI.h"
 
 struct JSCompartment;
 
 struct JSRuntime;
 
 namespace JS {
 namespace shadow {
 struct Runtime;
 }
 }
 
 namespace js {
 
 class FreeOp;
 
+#ifdef DEBUG
+extern bool
+RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone *shadowZone);
+
+// Barriers can't be triggered during backend Ion compilation, which may run on
+// a helper thread.
+extern bool
+CurrentThreadIsIonCompiling();
+#endif
+
 namespace gc {
 
 struct Arena;
 class ArenaList;
 class SortedArenaList;
 struct ArenaHeader;
 struct Chunk;
 
+extern void
+MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind);
+
 /*
  * This flag allows an allocation site to request a specific heap based upon the
  * estimated lifetime or lifetime requirements of objects allocated from that
  * site.
  */
 enum InitialHeap {
     DefaultHeap,
     TenuredHeap
@@ -80,58 +95,129 @@ enum AllocKind {
     FINALIZE_SYMBOL,
     FINALIZE_JITCODE,
     FINALIZE_LAST = FINALIZE_JITCODE
 };
 
 static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
 static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;
 
+static inline JSGCTraceKind
+MapAllocToTraceKind(AllocKind kind)
+{
+    static const JSGCTraceKind map[] = {
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8_BACKGROUND */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12_BACKGROUND */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16 */
+        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16_BACKGROUND */
+        JSTRACE_SCRIPT,     /* FINALIZE_SCRIPT */
+        JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
+        JSTRACE_SHAPE,      /* FINALIZE_SHAPE */
+        JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
+        JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
+        JSTRACE_STRING,     /* FINALIZE_FAT_INLINE_STRING */
+        JSTRACE_STRING,     /* FINALIZE_STRING */
+        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
+        JSTRACE_SYMBOL,     /* FINALIZE_SYMBOL */
+        JSTRACE_JITCODE,    /* FINALIZE_JITCODE */
+    };
+    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
+    return map[kind];
+}
+
 /*
  * This must be an upper bound, but we do not need the least upper bound, so
  * we just exclude non-background objects.
  */
 static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
-/*
- * A GC cell is the base class for all GC things.
- */
+class TenuredCell;
+
+// A GC cell is the base class for all GC things.
 struct Cell
 {
   public:
-    inline ArenaHeader *arenaHeader() const;
-    inline AllocKind tenuredGetAllocKind() const;
-    MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
-    MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
-    MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
-    MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const Cell *src);
+    MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
+    MOZ_ALWAYS_INLINE const TenuredCell *asTenured() const;
+    MOZ_ALWAYS_INLINE TenuredCell *asTenured();
 
     inline JSRuntime *runtimeFromMainThread() const;
     inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const;
-    inline JS::Zone *tenuredZone() const;
-    inline JS::Zone *tenuredZoneFromAnyThread() const;
-    inline bool tenuredIsInsideZone(JS::Zone *zone) const;
 
     // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
     // thread can easily lead to races. Use this method very carefully.
     inline JSRuntime *runtimeFromAnyThread() const;
     inline JS::shadow::Runtime *shadowRuntimeFromAnyThread() const;
 
     inline StoreBuffer *storeBuffer() const;
 
+    static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone *zone);
+
 #ifdef DEBUG
     inline bool isAligned() const;
-    inline bool isTenured() const;
 #endif
 
   protected:
     inline uintptr_t address() const;
     inline Chunk *chunk() const;
 };
 
+// A GC TenuredCell gets behaviors that are valid for things in the Tenured
+// heap, such as access to the arena header and mark bits.
+class TenuredCell : public Cell
+{
+  public:
+    // Construct a TenuredCell from a void*, making various sanity assertions.
+    static MOZ_ALWAYS_INLINE TenuredCell *fromPointer(void *ptr);
+    static MOZ_ALWAYS_INLINE const TenuredCell *fromPointer(const void *ptr);
+
+    // Mark bit management.
+    MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
+    MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
+    MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
+    MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell *src);
+
+    // Note: this is in TenuredCell because ObjectImpl subclasses are sometimes
+    // used tagged.
+    static MOZ_ALWAYS_INLINE bool isNullLike(const Cell *thing) { return !thing; }
+
+    // Access to the arena header.
+    inline ArenaHeader *arenaHeader() const;
+    inline AllocKind getAllocKind() const;
+    inline JS::Zone *zone() const;
+    inline JS::Zone *zoneFromAnyThread() const;
+    inline bool isInsideZone(JS::Zone *zone) const;
+
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const {
+        return JS::shadow::Zone::asShadowZone(zone());
+    }
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
+        return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
+    }
+
+    static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell *thing);
+    static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell *thing);
+
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell *thing, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell *thing, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell *thing, void *cellp);
+
+#ifdef DEBUG
+    inline bool isAligned() const;
+#endif
+};
+
 /*
 * The mark bitmap has one bit per GC cell. For multi-cell GC things this
 * wastes space but lets us avoid expensive divisions by the thing's size when
 * accessing the bitmap. In addition it lets us use some bits for colored
 * marking during the cycle GC.
  */
 const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
 const size_t ArenaBitmapBits = ArenaCellCount;
@@ -757,17 +843,17 @@ struct ChunkBitmap
     }
 
     MOZ_ALWAYS_INLINE void unmark(const Cell *cell, uint32_t color) {
         uintptr_t *word, mask;
         getMarkWordAndMask(cell, color, &word, &mask);
         *word &= ~mask;
     }
 
-    MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const Cell *src, uint32_t color) {
+    MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const TenuredCell *src, uint32_t color) {
         uintptr_t *word, mask;
         getMarkWordAndMask(dst, color, &word, &mask);
         *word = (*word & ~mask) | (src->isMarked(color) ? mask : 0);
     }
 
     void clear() {
         memset((void *)bitmap, 0, sizeof(bitmap));
     }
@@ -1045,31 +1131,36 @@ inline void
 ArenaHeader::unsetAllocDuringSweep()
 {
     JS_ASSERT(allocatedDuringIncremental);
     allocatedDuringIncremental = 0;
     auxNextLink = 0;
 }
 
 static void
-AssertValidColor(const void *thing, uint32_t color)
+AssertValidColor(const TenuredCell *thing, uint32_t color)
 {
 #ifdef DEBUG
-    ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
+    ArenaHeader *aheader = thing->arenaHeader();
     JS_ASSERT(color < aheader->getThingSize() / CellSize);
 #endif
 }
 
-inline ArenaHeader *
-Cell::arenaHeader() const
+MOZ_ALWAYS_INLINE const TenuredCell *
+Cell::asTenured() const
 {
     JS_ASSERT(isTenured());
-    uintptr_t addr = address();
-    addr &= ~ArenaMask;
-    return reinterpret_cast<ArenaHeader *>(addr);
+    return static_cast<const TenuredCell *>(this);
+}
+
+MOZ_ALWAYS_INLINE TenuredCell *
+Cell::asTenured()
+{
+    JS_ASSERT(isTenured());
+    return static_cast<TenuredCell *>(this);
 }
 
 inline JSRuntime *
 Cell::runtimeFromMainThread() const
 {
     JSRuntime *rt = chunk()->info.trailer.runtime;
     JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
     return rt;
@@ -1088,92 +1179,16 @@ Cell::runtimeFromAnyThread() const
 }
 
 inline JS::shadow::Runtime *
 Cell::shadowRuntimeFromAnyThread() const
 {
     return reinterpret_cast<JS::shadow::Runtime*>(runtimeFromAnyThread());
 }
 
-bool
-Cell::isMarked(uint32_t color /* = BLACK */) const
-{
-    JS_ASSERT(isTenured());
-    JS_ASSERT(arenaHeader()->allocated());
-    AssertValidColor(this, color);
-    return chunk()->bitmap.isMarked(this, color);
-}
-
-bool
-Cell::markIfUnmarked(uint32_t color /* = BLACK */) const
-{
-    JS_ASSERT(isTenured());
-    AssertValidColor(this, color);
-    return chunk()->bitmap.markIfUnmarked(this, color);
-}
-
-void
-Cell::unmark(uint32_t color) const
-{
-    JS_ASSERT(isTenured());
-    JS_ASSERT(color != BLACK);
-    AssertValidColor(this, color);
-    chunk()->bitmap.unmark(this, color);
-}
-
-void
-Cell::copyMarkBitsFrom(const Cell *src)
-{
-    JS_ASSERT(isTenured());
-    JS_ASSERT(src->isTenured());
-    ChunkBitmap &bitmap = chunk()->bitmap;
-    bitmap.copyMarkBit(this, src, BLACK);
-    bitmap.copyMarkBit(this, src, GRAY);
-}
-
-JS::Zone *
-Cell::tenuredZone() const
-{
-    JS::Zone *zone = arenaHeader()->zone;
-    JS_ASSERT(CurrentThreadCanAccessZone(zone));
-    JS_ASSERT(isTenured());
-    return zone;
-}
-
-JS::Zone *
-Cell::tenuredZoneFromAnyThread() const
-{
-    JS_ASSERT(isTenured());
-    return arenaHeader()->zone;
-}
-
-bool
-Cell::tenuredIsInsideZone(JS::Zone *zone) const
-{
-    JS_ASSERT(isTenured());
-    return zone == arenaHeader()->zone;
-}
-
-#ifdef DEBUG
-bool
-Cell::isAligned() const
-{
-    return Arena::isAligned(address(), arenaHeader()->getThingSize());
-}
-
-bool
-Cell::isTenured() const
-{
-#ifdef JSGC_GENERATIONAL
-    return !IsInsideNursery(this);
-#endif
-    return true;
-}
-#endif
-
 inline uintptr_t
 Cell::address() const
 {
     uintptr_t addr = uintptr_t(this);
     JS_ASSERT(addr % CellSize == 0);
     JS_ASSERT(Chunk::withinArenasRange(addr));
     return addr;
 }
@@ -1202,19 +1217,181 @@ InFreeList(ArenaHeader *aheader, void *t
     FreeSpan firstSpan(aheader->getFirstFreeSpan());
     uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
 
     JS_ASSERT(Arena::isAligned(addr, aheader->getThingSize()));
 
     return firstSpan.inFreeList(addr);
 }
 
-} /* namespace gc */
+/* static */ MOZ_ALWAYS_INLINE bool
+Cell::needWriteBarrierPre(JS::Zone *zone) {
+#ifdef JSGC_INCREMENTAL
+    return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
+#else
+    return false;
+#endif
+}
+
+/* static */ MOZ_ALWAYS_INLINE TenuredCell *
+TenuredCell::fromPointer(void *ptr)
+{
+    JS_ASSERT(static_cast<TenuredCell *>(ptr)->isTenured());
+    return static_cast<TenuredCell *>(ptr);
+}
+
+/* static */ MOZ_ALWAYS_INLINE const TenuredCell *
+TenuredCell::fromPointer(const void *ptr)
+{
+    JS_ASSERT(static_cast<const TenuredCell *>(ptr)->isTenured());
+    return static_cast<const TenuredCell *>(ptr);
+}
+
+bool
+TenuredCell::isMarked(uint32_t color /* = BLACK */) const
+{
+    JS_ASSERT(arenaHeader()->allocated());
+    AssertValidColor(this, color);
+    return chunk()->bitmap.isMarked(this, color);
+}
 
-gc::AllocKind
-gc::Cell::tenuredGetAllocKind() const
+bool
+TenuredCell::markIfUnmarked(uint32_t color /* = BLACK */) const
+{
+    AssertValidColor(this, color);
+    return chunk()->bitmap.markIfUnmarked(this, color);
+}
+
+void
+TenuredCell::unmark(uint32_t color) const
+{
+    JS_ASSERT(color != BLACK);
+    AssertValidColor(this, color);
+    chunk()->bitmap.unmark(this, color);
+}
+
+void
+TenuredCell::copyMarkBitsFrom(const TenuredCell *src)
+{
+    ChunkBitmap &bitmap = chunk()->bitmap;
+    bitmap.copyMarkBit(this, src, BLACK);
+    bitmap.copyMarkBit(this, src, GRAY);
+}
+
+inline ArenaHeader *
+TenuredCell::arenaHeader() const
+{
+    JS_ASSERT(isTenured());
+    uintptr_t addr = address();
+    addr &= ~ArenaMask;
+    return reinterpret_cast<ArenaHeader *>(addr);
+}
+
+AllocKind
+TenuredCell::getAllocKind() const
 {
     return arenaHeader()->getAllocKind();
 }
 
+JS::Zone *
+TenuredCell::zone() const
+{
+    JS::Zone *zone = arenaHeader()->zone;
+    JS_ASSERT(CurrentThreadCanAccessZone(zone));
+    return zone;
+}
+
+JS::Zone *
+TenuredCell::zoneFromAnyThread() const
+{
+    return arenaHeader()->zone;
+}
+
+bool
+TenuredCell::isInsideZone(JS::Zone *zone) const
+{
+    return zone == arenaHeader()->zone;
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::readBarrier(TenuredCell *thing)
+{
+#ifdef JSGC_INCREMENTAL
+    JS_ASSERT(!CurrentThreadIsIonCompiling());
+    JS_ASSERT(!isNullLike(thing));
+    JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
+    if (shadowZone->needsIncrementalBarrier()) {
+        MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
+        void *tmp = thing;
+        shadowZone->barrierTracer()->setTracingName("read barrier");
+        MarkKind(shadowZone->barrierTracer(), &tmp,
+                         MapAllocToTraceKind(thing->getAllocKind()));
+        JS_ASSERT(tmp == thing);
+    }
+    if (JS::GCThingIsMarkedGray(thing))
+        JS::UnmarkGrayGCThingRecursively(thing, MapAllocToTraceKind(thing->getAllocKind()));
+#endif
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPre(TenuredCell *thing) {
+#ifdef JSGC_INCREMENTAL
+    JS_ASSERT(!CurrentThreadIsIonCompiling());
+    if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
+        return;
+
+    JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
+    if (shadowZone->needsIncrementalBarrier()) {
+        MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
+        void *tmp = thing;
+        shadowZone->barrierTracer()->setTracingName("pre barrier");
+        MarkKind(shadowZone->barrierTracer(), &tmp,
+                         MapAllocToTraceKind(thing->getAllocKind()));
+        JS_ASSERT(tmp == thing);
+    }
+#endif
+}
+
+static MOZ_ALWAYS_INLINE void
+AssertValidToSkipBarrier(TenuredCell *thing)
+{
+    JS_ASSERT(!IsInsideNursery(thing));
+    JS_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JSTRACE_OBJECT);
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPost(TenuredCell *thing, void *cellp)
+{
+    AssertValidToSkipBarrier(thing);
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPostRelocate(TenuredCell *thing, void *cellp)
+{
+    AssertValidToSkipBarrier(thing);
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPostRemove(TenuredCell *thing, void *cellp)
+{
+    AssertValidToSkipBarrier(thing);
+}
+
+#ifdef DEBUG
+bool
+Cell::isAligned() const
+{
+    if (!isTenured())
+        return true;
+    return asTenured()->isAligned();
+}
+
+bool
+TenuredCell::isAligned() const
+{
+    return Arena::isAligned(address(), arenaHeader()->getThingSize());
+}
+#endif
+
+} /* namespace gc */
 } /* namespace js */
 
 #endif /* gc_Heap_h */
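
Note: this is the heart of the patch. Cell keeps only the queries that are valid for nursery and tenured things alike (runtime access, isTenured()), while mark bits, arena access, and zone lookup move to TenuredCell; asTenured() is the checked downcast at the boundary. A minimal sketch of the intended usage (IsMarkedBlackIfTenured is hypothetical, written as if inside namespace js::gc):

    static bool
    IsMarkedBlackIfTenured(Cell *cell)
    {
        if (!cell->isTenured())
            return false;               // nursery things carry no mark bits
        // asTenured() is a static_cast that asserts isTenured() first.
        return cell->asTenured()->isMarked(BLACK);
    }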
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -114,17 +114,17 @@ void
 js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
 {
     zone->runtimeFromMainThread()->gc.evictNursery();
     AutoPrepareForTracing prep(zone->runtimeFromMainThread(), SkipAtoms);
 
     for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
         for (ZoneCellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
             JSObject *obj = i.get<JSObject>();
-            if (obj->isMarked(GRAY))
+            if (obj->asTenured()->isMarked(GRAY))
                 cellCallback(data, obj);
         }
     }
 }
 
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSRuntime *rt, void *data,
                        JSIterateCompartmentCallback compartmentCallback)
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -222,17 +222,17 @@ CheckMarkedThing(JSTracer *trc, T **thin
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
      * ArenaHeader may not be synced with the real one in ArenaLists.
      */
     JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
-                 !InFreeList(thing->arenaHeader(), thing));
+                 !InFreeList(thing->asTenured()->arenaHeader(), thing));
 #endif
 }
 
 /*
  * We only set the maybeAlive flag for objects and scripts. It's assumed that,
  * if a compartment is alive, then it will have at least some live object or
 * script in it. Even if we get this wrong, the worst that will happen is that
  * scheduledForDestruction will be set on the compartment, which will cause some
@@ -442,24 +442,24 @@ IsMarked(T **thingp)
 #endif
     {
         if (IsInsideNursery(*thingp)) {
             Nursery &nursery = rt->gc.nursery;
             return nursery.getForwardedPointer(thingp);
         }
     }
 #endif  // JSGC_GENERATIONAL
-    Zone *zone = (*thingp)->tenuredZone();
+    Zone *zone = (*thingp)->asTenured()->zone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
 #ifdef JSGC_COMPACTING
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
 #endif
-    return (*thingp)->isMarked();
+    return (*thingp)->asTenured()->isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
@@ -487,28 +487,29 @@ IsAboutToBeFinalized(T **thingp)
         if (rt->isHeapMinorCollecting()) {
             if (IsInsideNursery(thing))
                 return !nursery.getForwardedPointer(thingp);
             return false;
         }
     }
 #endif  // JSGC_GENERATIONAL
 
-    Zone *zone = thing->tenuredZone();
+    Zone *zone = thing->asTenured()->zone();
     if (zone->isGCSweeping()) {
         /*
          * We should return false for things that have been allocated during
          * incremental sweeping, but this possibility doesn't occur at the moment
         * because this function is only called at the very start of sweeping a
          * compartment group and during minor gc. Rather than do the extra check,
          * we just assert that it's not necessary.
          */
-        JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
+        JS_ASSERT_IF(!rt->isHeapMinorCollecting(),
+                     !thing->asTenured()->arenaHeader()->allocatedDuringIncremental);
 
-        return !thing->isMarked();
+        return !thing->asTenured()->isMarked();
     }
 #ifdef JSGC_COMPACTING
     else if (zone->isGCCompacting() && IsForwarded(thing)) {
         *thingp = Forwarded(thing);
         return false;
     }
 #endif
 
@@ -537,17 +538,17 @@ UpdateIfRelocated(JSRuntime *rt, T **thi
 
     if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
         rt->gc.nursery.getForwardedPointer(thingp);
         return *thingp;
     }
 #endif  // JSGC_GENERATIONAL
 
 #ifdef JSGC_COMPACTING
-    Zone *zone = (*thingp)->tenuredZone();
+    Zone *zone = (*thingp)->zone();
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
 #endif
     return *thingp;
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
@@ -656,17 +657,18 @@ DeclMarkerImpl(TypeObject, js::types::Ty
 /*** Externally Typed Marking ***/
 
 void
 gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
     DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
-    JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
+    JS_ASSERT_IF(cell->isTenured(),
+                 kind == MapAllocToTraceKind(cell->asTenured()->getAllocKind()));
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
         break;
       case JSTRACE_STRING:
         MarkInternal(trc, reinterpret_cast<JSString **>(thingp));
         break;
       case JSTRACE_SYMBOL:
@@ -938,39 +940,40 @@ ShouldMarkCrossCompartment(JSTracer *trc
 
     uint32_t color = AsGCMarker(trc)->getMarkColor();
     JS_ASSERT(color == BLACK || color == GRAY);
 
     if (IsInsideNursery(cell)) {
         JS_ASSERT(color == BLACK);
         return false;
     }
+    TenuredCell *tenured = cell->asTenured();
 
-    JS::Zone *zone = cell->tenuredZone();
+    JS::Zone *zone = tenured->zone();
     if (color == BLACK) {
         /*
          * Having black->gray edges violates our promise to the cycle
          * collector. This can happen if we're collecting a compartment and it
          * has an edge to an uncollected compartment: it's possible that the
          * source and destination of the cross-compartment edge should be gray,
          * but the source was marked black by the conservative scanner.
          */
-        if (cell->isMarked(GRAY)) {
+        if (tenured->isMarked(GRAY)) {
             JS_ASSERT(!zone->isCollecting());
             trc->runtime()->gc.setFoundBlackGrayEdges();
         }
         return zone->isGCMarking();
     } else {
         if (zone->isGCMarkingBlack()) {
             /*
             * The destination compartment is not being marked gray now,
              * but it will be later, so record the cell so it can be marked gray
              * at the appropriate time.
              */
-            if (!cell->isMarked())
+            if (!tenured->isMarked())
                 DelayCrossCompartmentGrayMarking(src);
             return false;
         }
         return zone->isGCMarkingGray();
     }
 }
 
 void
@@ -1030,17 +1033,17 @@ gc::IsCellAboutToBeFinalized(Cell **thin
     JS_COMPARTMENT_ASSERT_STR(rt, sym)
 
 static void
 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     JS_ASSERT(!IsInsideNursery(thing));
 
-    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
+    if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 /*
  * PushMarkStack for BaseShape unpacks its children directly onto the mark
  * stack. For a pre-barrier between incremental slices, this may result in
  * objects in the nursery getting pushed onto the mark stack. It is safe to
  * ignore these objects because they will be marked by the matching
@@ -1048,27 +1051,27 @@ PushMarkStack(GCMarker *gcmarker, Object
  */
 static void
 MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
 {
     DebugOnly<JSRuntime *> rt = gcmarker->runtime();
     JS_COMPARTMENT_ASSERT(rt, thing);
     JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(thing));
 
-    if (!IsInsideNursery(thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
+    if (!IsInsideNursery(thing) && thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     JS_ASSERT(!IsInsideNursery(thing));
 
-    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
+    if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     JS_ASSERT(!IsInsideNursery(thing));
@@ -1698,17 +1701,17 @@ GCMarker::processMarkStackTop(SliceBudge
                 JS_ASSERT(runtime()->isAtomsZone(str->zone()) || str->zone() == obj->zone());
                 if (str->markIfUnmarked())
                     ScanString(this, str);
             }
         } else if (v.isObject()) {
             JSObject *obj2 = &v.toObject();
             JS_COMPARTMENT_ASSERT(runtime(), obj2);
             JS_ASSERT(obj->compartment() == obj2->compartment());
-            if (obj2->markIfUnmarked(getMarkColor())) {
+            if (obj2->asTenured()->markIfUnmarked(getMarkColor())) {
                 pushValueArray(obj, vp, end);
                 obj = obj2;
                 goto scan_obj;
             }
         } else if (v.isSymbol()) {
             JS::Symbol *sym = v.toSymbol();
             if (!sym->isWellKnownSymbol()) {
                 JS_COMPARTMENT_ASSERT_SYM(runtime(), sym);
@@ -1879,22 +1882,16 @@ js::TraceChildren(JSTracer *trc, void *t
 static void
 AssertNonGrayGCThing(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
     MOZ_ASSERT(!JS::GCThingIsMarkedGray(*thingp));
 }
 #endif
 
 static void
-UnmarkGrayGCThing(void *thing)
-{
-    static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
-}
-
-static void
 UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind);
 
 struct UnmarkGrayTracer : public JSTracer
 {
     /*
      * We set eagerlyTraceWeakMaps to false because the cycle collector will fix
      * up any color mismatches involving weakmaps when it runs.
      */
@@ -1972,17 +1969,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
         JS_TraceChildren(&nongray, thing, kind);
     }
 #endif
 
     if (!JS::GCThingIsMarkedGray(thing))
         return;
 
     UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
-    UnmarkGrayGCThing(thing);
+    TenuredCell::fromPointer(thing)->unmark(js::gc::GRAY);
     tracer->unmarkedAny = true;
 
     /*
      * Trace children of |thing|. If |thing| and its parent are both shapes,
      * |thing| will get saved to mPreviousShape without being traced. The parent
      * will later trace |thing|. This is done to avoid increasing the stack
      * depth during shape tracing. It is safe to do because a shape can only
      * have one child that is a shape.
@@ -2021,17 +2018,17 @@ JS::UnmarkGrayGCThingRecursively(void *t
     if (rt->isHeapBusy())
         return false;
 
     bool unmarkedArg = false;
     if (!IsInsideNursery(static_cast<Cell *>(thing))) {
         if (!JS::GCThingIsMarkedGray(thing))
             return false;
 
-        UnmarkGrayGCThing(thing);
+        TenuredCell::fromPointer(thing)->unmark(js::gc::GRAY);
         unmarkedArg = true;
     }
 
     UnmarkGrayTracer trc(rt);
     JS_TraceChildren(&trc, thing, kind);
 
     return unmarkedArg || trc.unmarkedAny;
 }
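
Note: where this file previously funneled through the file-local UnmarkGrayGCThing() helper, it now calls TenuredCell::fromPointer(thing)->unmark(GRAY) directly; fromPointer() asserts tenuredness before the mark bits are touched. The removed helper, restated against the new API (as if inside namespace js::gc):

    static void
    UnmarkGrayGCThing(void *thing)
    {
        // Gray mark bits exist only for tenured cells; fromPointer() asserts that.
        TenuredCell::fromPointer(thing)->unmark(GRAY);
    }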
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -219,17 +219,17 @@ js::Nursery::allocate(size_t size)
 /* Internally, this function is used to allocate elements as well as slots. */
 HeapSlot *
 js::Nursery::allocateSlots(JSObject *obj, uint32_t nslots)
 {
     JS_ASSERT(obj);
     JS_ASSERT(nslots > 0);
 
     if (!IsInsideNursery(obj))
-        return obj->pod_malloc<HeapSlot>(nslots);
+        return obj->zone()->pod_malloc<HeapSlot>(nslots);
 
     if (nslots > MaxNurserySlots)
         return allocateHugeSlots(obj->zone(), nslots);
 
     size_t size = sizeof(HeapSlot) * nslots;
     HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
     if (slots)
         return slots;
@@ -244,20 +244,20 @@ js::Nursery::allocateElements(JSObject *
     return reinterpret_cast<ObjectElements *>(allocateSlots(obj, nelems));
 }
 
 HeapSlot *
 js::Nursery::reallocateSlots(JSObject *obj, HeapSlot *oldSlots,
                              uint32_t oldCount, uint32_t newCount)
 {
     if (!IsInsideNursery(obj))
-        return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
+        return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
 
     if (!isInside(oldSlots)) {
-        HeapSlot *newSlots = obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
+        HeapSlot *newSlots = obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
         if (newSlots && oldSlots != newSlots) {
             hugeSlots.remove(oldSlots);
             /* If this put fails, we will only leak the slots. */
             (void)hugeSlots.put(newSlots);
         }
         return newSlots;
     }
 
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -528,17 +528,17 @@ GCMarker::reset()
 void
 GCMarker::markDelayedChildren(ArenaHeader *aheader)
 {
     if (aheader->markOverflow) {
         bool always = aheader->allocatedDuringIncremental;
         aheader->markOverflow = 0;
 
         for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
-            Cell *t = i.getCell();
+            TenuredCell *t = i.getCell();
             if (always || t->isMarked()) {
                 t->markIfUnmarked();
                 JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
             }
         }
     } else {
         JS_ASSERT(aheader->allocatedDuringIncremental);
         PushArena(this, aheader);
@@ -582,17 +582,17 @@ GCMarker::markDelayedChildren(SliceBudge
 }
 
 #ifdef DEBUG
 void
 GCMarker::checkZone(void *p)
 {
     JS_ASSERT(started);
     DebugOnly<Cell *> cell = static_cast<Cell *>(p);
-    JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
+    JS_ASSERT_IF(cell->isTenured(), cell->asTenured()->zone()->isCollecting());
 }
 #endif
 
 bool
 GCMarker::hasBufferedGrayRoots() const
 {
     return grayBufferState == GRAY_BUFFER_OK;
 }
@@ -651,17 +651,17 @@ GCMarker::appendGrayRoot(void *thing, JS
 
     GrayRoot root(thing, kind);
 #ifdef DEBUG
     root.debugPrinter = debugPrinter();
     root.debugPrintArg = debugPrintArg();
     root.debugPrintIndex = debugPrintIndex();
 #endif
 
-    Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
+    Zone *zone = TenuredCell::fromPointer(thing)->zone();
     if (zone->isCollecting()) {
         // See the comment on SetMaybeAliveFlag to see why we only do this for
         // objects and scripts. We rely on gray root buffering for this to work,
         // but we only need to worry about uncollected dead compartments during
         // incremental GCs (when we do gray root buffering).
         switch (kind) {
           case JSTRACE_OBJECT:
             static_cast<JSObject *>(thing)->compartment()->maybeAlive = true;
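
Note the type strengthening in markDelayedChildren(): arenas only ever hold tenured things, so ArenaCellIter can yield TenuredCell and the mark-bit calls need no per-cell isTenured() guard. A hedged sketch of that shape (CountMarkedCells is hypothetical, written as if inside namespace js::gc):

    static size_t
    CountMarkedCells(ArenaHeader *aheader)
    {
        size_t marked = 0;
        for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
            TenuredCell *t = i.getCell();   // TenuredCell now, not Cell
            if (t->isMarked())              // no isTenured() check required
                ++marked;
        }
        return marked;
    }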
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -259,17 +259,17 @@ gc::GCRuntime::startVerifyPreBarriers()
 
 oom:
     incrementalState = NO_INCREMENTAL;
     js_delete(trc);
     verifyPreData = nullptr;
 }
 
 static bool
-IsMarkedOrAllocated(Cell *cell)
+IsMarkedOrAllocated(TenuredCell *cell)
 {
     return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
 }
 
 static const uint32_t MAX_VERIFIER_EDGES = 1000;
 
 /*
  * This function is called by EndVerifyBarriers for every heap edge. If the edge
@@ -295,17 +295,17 @@ CheckEdge(JSTracer *jstrc, void **thingp
             return;
         }
     }
 }
 
 static void
 AssertMarkedOrAllocated(const EdgeValue &edge)
 {
-    if (!edge.thing || IsMarkedOrAllocated(static_cast<Cell *>(edge.thing)))
+    if (!edge.thing || IsMarkedOrAllocated(TenuredCell::fromPointer(edge.thing)))
         return;
 
     // Permanent atoms and well-known symbols aren't marked during graph traversal.
     if (edge.kind == JSTRACE_STRING && static_cast<JSString *>(edge.thing)->isPermanentAtom())
         return;
     if (edge.kind == JSTRACE_SYMBOL && static_cast<JS::Symbol *>(edge.thing)->isWellKnownSymbol())
         return;
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -263,16 +263,16 @@ Zone::canCollect()
 }
 
 JS::Zone *
 js::ZoneOfValue(const JS::Value &value)
 {
     JS_ASSERT(value.isMarkable());
     if (value.isObject())
         return value.toObject().zone();
-    return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZone();
+    return js::gc::TenuredCell::fromPointer(value.toGCThing())->zone();
 }
 
 bool
 js::ZonesIter::atAtomsZone(JSRuntime *rt)
 {
     return rt->isAtomsZone(*it);
 }
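
Note: ZoneOfValue leans on the invariant that markable non-object values (strings and symbols) are never nursery-allocated, which is what makes the unconditional TenuredCell::fromPointer() safe here. That invariant written out as an assertion (hypothetical helper):

    static void
    AssertNonObjectValueIsTenured(const JS::Value &v)
    {
        // Objects may live in the nursery; other markable things may not.
        if (v.isMarkable() && !v.isObject())
            JS_ASSERT(static_cast<js::gc::Cell *>(v.toGCThing())->isTenured());
    }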
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -356,17 +356,17 @@ BaselineScript::New(JSScript *jsscript, 
     size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
     size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);
 
     size_t allocBytes = paddedICEntriesSize +
                         paddedPCMappingIndexEntriesSize +
                         paddedPCMappingSize +
                         paddedBytecodeTypesMapSize;
 
-    BaselineScript *script = jsscript->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
+    BaselineScript *script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
     if (!script)
         return nullptr;
     new (script) BaselineScript(prologueOffset, epilogueOffset,
                                 spsPushToggleOffset, postDebugPrologueOffset);
 
     size_t offsetCursor = sizeof(BaselineScript);
     MOZ_ASSERT(offsetCursor == AlignBytes(sizeof(BaselineScript), DataAlignment));
 
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -178,17 +178,17 @@ struct BaselineScript
     // they correspond to, for use by TypeScript::BytecodeTypes.
     uint32_t bytecodeTypeMapOffset_;
 
   public:
     // Do not call directly, use BaselineScript::New. This is public for cx->new_.
     BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                    uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset);
 
-    static BaselineScript *New(JSScript *script, uint32_t prologueOffset,
+    static BaselineScript *New(JSScript *jsscript, uint32_t prologueOffset,
                                uint32_t epilogueOffset, uint32_t postDebugPrologueOffset,
                                uint32_t spsPushToggleOffset, size_t icEntries,
                                size_t pcMappingIndexEntries, size_t pcMappingSize,
                                size_t bytecodeTypeMapEntries);
     static void Trace(JSTracer *trc, BaselineScript *script);
     static void Destroy(FreeOp *fop, BaselineScript *script);
 
     void purgeOptimizedStubs(Zone *zone);
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -4006,17 +4006,17 @@ static const VMFunction NewGCThingParInf
 
 bool
 CodeGenerator::emitAllocateGCThingPar(LInstruction *lir, Register objReg, Register cxReg,
                                       Register tempReg1, Register tempReg2, JSObject *templateObj)
 {
     JS_ASSERT(lir->mirRaw());
     JS_ASSERT(lir->mirRaw()->isInstruction());
 
-    gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
 #ifdef JSGC_FJGENERATIONAL
     OutOfLineCode *ool = oolCallVM(NewGCThingParInfo, lir,
                                    (ArgList(), Imm32(allocKind)), StoreRegisterTo(objReg));
     if (!ool)
         return false;
 #else
     OutOfLineNewGCThingPar *ool = new(alloc()) OutOfLineNewGCThingPar(lir, allocKind, objReg, cxReg);
     if (!ool || !addOutOfLineCode(ool, lir->mirRaw()->toInstruction()))
@@ -4188,17 +4188,17 @@ typedef JSObject *(*NewGCObjectFn)(JSCon
                                    gc::InitialHeap initialHeap);
 static const VMFunction NewGCObjectInfo =
     FunctionInfo<NewGCObjectFn>(js::jit::NewGCObject);
 
 bool
 CodeGenerator::visitCreateThisWithTemplate(LCreateThisWithTemplate *lir)
 {
     JSObject *templateObject = lir->mir()->templateObject();
-    gc::AllocKind allocKind = templateObject->tenuredGetAllocKind();
+    gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
     gc::InitialHeap initialHeap = lir->mir()->initialHeap();
     Register objReg = ToRegister(lir->output());
     Register tempReg = ToRegister(lir->temp());
 
     OutOfLineCode *ool = oolCallVM(NewGCObjectInfo, lir,
                                    (ArgList(), Imm32(allocKind), Imm32(initialHeap)),
                                    StoreRegisterTo(objReg));
     if (!ool)
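
Note: this and the remaining JIT files make the same mechanical substitution: tenuredGetAllocKind() becomes asTenured()->getAllocKind(). The unconditional asTenured() also asserts, in debug builds, the assumption these call sites already made: template objects are never nursery-allocated. Sketch (TemplateObjectAllocKind is a hypothetical helper):

    static js::gc::AllocKind
    TemplateObjectAllocKind(JSObject *templateObj)
    {
        // asTenured() asserts isTenured() before the arena-header lookup.
        return templateObj->asTenured()->getAllocKind();
    }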
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -6755,17 +6755,17 @@ IonBuilder::ensureDefiniteTypeSet(MDefin
 
 static size_t
 NumFixedSlots(JSObject *object)
 {
     // Note: we can't use object->numFixedSlots() here, as this will read the
     // shape and can race with the main thread if we are building off thread.
     // The allocation kind and object class (which goes through the type) can
     // be read freely, however.
-    gc::AllocKind kind = object->tenuredGetAllocKind();
+    gc::AllocKind kind = object->asTenured()->getAllocKind();
     return gc::GetGCKindSlots(kind, object->getClass());
 }
 
 bool
 IonBuilder::getStaticName(JSObject *staticObject, PropertyName *name, bool *psucceeded,
                           MDefinition *lexicalCheck)
 {
     jsid id = NameToId(name);
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -25,17 +25,17 @@ class AsmJSModule;
 
 namespace jit {
 
 class MacroAssembler;
 class CodeOffsetLabel;
 class PatchableBackedge;
 class IonBuilder;
 
-class JitCode : public gc::BarrieredCell<JitCode>
+class JitCode : public gc::TenuredCell
 {
   protected:
     uint8_t *code_;
     ExecutablePool *pool_;
     uint32_t bufferSize_;             // Total buffer size. Does not include headerSize_.
     uint32_t insnSize_;               // Instruction stream size.
     uint32_t dataSize_;               // Size of the read-only data area.
     uint32_t jumpRelocTableBytes_;    // Size of the jump relocation table.
--- a/js/src/jit/IonMacroAssembler.cpp
+++ b/js/src/jit/IonMacroAssembler.cpp
@@ -590,28 +590,28 @@ void
 MacroAssembler::newGCThing(Register result, Register temp, JSObject *templateObj,
                             gc::InitialHeap initialHeap, Label *fail)
 {
     // This method does not initialize the object: if external slots get
     // allocated into |temp|, there is no easy way for us to ensure the caller
     // frees them. Instead just assert this case does not happen.
     JS_ASSERT(!templateObj->numDynamicSlots());
 
-    gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
     JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
 
     allocateObject(result, temp, allocKind, templateObj->numDynamicSlots(), initialHeap, fail);
 }
 
 void
 MacroAssembler::createGCObject(Register obj, Register temp, JSObject *templateObj,
                                gc::InitialHeap initialHeap, Label *fail, bool initFixedSlots)
 {
     uint32_t nDynamicSlots = templateObj->numDynamicSlots();
-    gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
     JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
 
     // Arrays with copy on write elements do not need fixed space for an
     // elements header. The template object, which owns the original elements,
     // might have another allocation kind.
     if (templateObj->denseElementsAreCopyOnWrite())
         allocKind = gc::FINALIZE_OBJECT0_BACKGROUND;
 
@@ -731,17 +731,17 @@ MacroAssembler::newGCTenuredThingPar(Reg
     // tempReg1->head.first = tempReg2;
     storePtr(tempReg2, Address(tempReg1, gc::FreeList::offsetOfFirst()));
 }
 
 void
 MacroAssembler::newGCThingPar(Register result, Register cx, Register tempReg1, Register tempReg2,
                               JSObject *templateObject, Label *fail)
 {
-    gc::AllocKind allocKind = templateObject->tenuredGetAllocKind();
+    gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
     JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
     JS_ASSERT(!templateObject->numDynamicSlots());
 
     newGCThingPar(result, cx, tempReg1, tempReg2, allocKind, fail);
 }
 
 void
 MacroAssembler::newGCStringPar(Register result, Register cx, Register tempReg1, Register tempReg2,
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -3102,17 +3102,17 @@ MObjectState::Copy(TempAllocator &alloc,
 }
 
 bool
 MNewArray::shouldUseVM() const
 {
     JS_ASSERT(count() < JSObject::NELEMENTS_LIMIT);
 
     size_t arraySlots =
-        gc::GetGCKindSlots(templateObject()->tenuredGetAllocKind()) - ObjectElements::VALUES_PER_HEADER;
+        gc::GetGCKindSlots(templateObject()->asTenured()->getAllocKind()) - ObjectElements::VALUES_PER_HEADER;
 
     // Allocate space using the VMCall when mir hints it needs to get allocated
     // immediately, but only when data doesn't fit the available array slots.
     bool allocating = allocatingBehaviour() != NewArray_Unallocating && count() > arraySlots;
 
     return templateObject()->hasSingletonType() || allocating;
 }
 
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1131,41 +1131,41 @@ AssertValidObjectPtr(JSContext *cx, JSOb
     JS_ASSERT(obj->compartment() == cx->compartment());
     JS_ASSERT(obj->runtimeFromMainThread() == cx->runtime());
 
     JS_ASSERT_IF(!obj->hasLazyType(),
                  obj->type()->clasp() == obj->lastProperty()->getObjectClass());
 
     if (obj->isTenured()) {
         JS_ASSERT(obj->isAligned());
-        gc::AllocKind kind = obj->tenuredGetAllocKind();
+        gc::AllocKind kind = obj->asTenured()->getAllocKind();
         JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
-        JS_ASSERT(obj->tenuredZone() == cx->zone());
+        JS_ASSERT(obj->asTenured()->zone() == cx->zone());
     }
 }
 
 void
 AssertValidStringPtr(JSContext *cx, JSString *str)
 {
     // We can't closely inspect strings from another runtime.
     if (str->runtimeFromAnyThread() != cx->runtime()) {
         JS_ASSERT(str->isPermanentAtom());
         return;
     }
 
     if (str->isAtom())
-        JS_ASSERT(cx->runtime()->isAtomsZone(str->tenuredZone()));
+        JS_ASSERT(cx->runtime()->isAtomsZone(str->zone()));
     else
-        JS_ASSERT(str->tenuredZone() == cx->zone());
+        JS_ASSERT(str->zone() == cx->zone());
 
     JS_ASSERT(str->runtimeFromMainThread() == cx->runtime());
     JS_ASSERT(str->isAligned());
     JS_ASSERT(str->length() <= JSString::MAX_LENGTH);
 
-    gc::AllocKind kind = str->tenuredGetAllocKind();
+    gc::AllocKind kind = str->getAllocKind();
     if (str->isFatInline())
         JS_ASSERT(kind == gc::FINALIZE_FAT_INLINE_STRING);
     else if (str->isExternal())
         JS_ASSERT(kind == gc::FINALIZE_EXTERNAL_STRING);
     else if (str->isAtom() || str->isFlat())
         JS_ASSERT(kind == gc::FINALIZE_STRING || kind == gc::FINALIZE_FAT_INLINE_STRING);
     else
         JS_ASSERT(kind == gc::FINALIZE_STRING);
@@ -1173,26 +1173,26 @@ AssertValidStringPtr(JSContext *cx, JSSt
 
 void
 AssertValidSymbolPtr(JSContext *cx, JS::Symbol *sym)
 {
     // We can't closely inspect symbols from another runtime.
     if (sym->runtimeFromAnyThread() != cx->runtime())
         return;
 
-    JS_ASSERT(cx->runtime()->isAtomsZone(sym->tenuredZone()));
+    JS_ASSERT(cx->runtime()->isAtomsZone(sym->zone()));
 
     JS_ASSERT(sym->runtimeFromMainThread() == cx->runtime());
     JS_ASSERT(sym->isAligned());
     if (JSString *desc = sym->description()) {
         JS_ASSERT(desc->isAtom());
         AssertValidStringPtr(cx, desc);
     }
 
-    JS_ASSERT(sym->tenuredGetAllocKind() == gc::FINALIZE_SYMBOL);
+    JS_ASSERT(sym->getAllocKind() == gc::FINALIZE_SYMBOL);
 }
 
 void
 AssertValidValue(JSContext *cx, Value *v)
 {
     if (v->isObject())
         AssertValidObjectPtr(cx, &v->toObject());
     else if (v->isString())
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -639,17 +639,17 @@ js::GCThingTraceKind(void *thing)
 }
 
 JS_FRIEND_API(void)
 js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
 {
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             gc::Cell *thing = e.front().key().wrapped;
-            if (!IsInsideNursery(thing) && thing->isMarked(gc::GRAY))
+            if (thing->isTenured() && thing->asTenured()->isMarked(gc::GRAY))
                 callback(closure, thing);
         }
     }
 }
 
 JS_FRIEND_API(JSObject *)
 js::GetWeakmapKeyDelegate(JSObject *key)
 {
@@ -718,17 +718,17 @@ struct DumpHeapTracer : public JSTracer
                    WeakMapTraceKind weakTraceKind)
       : JSTracer(rt, callback, weakTraceKind), output(fp)
     {}
 };
 
 static char
 MarkDescriptor(void *thing)
 {
-    gc::Cell *cell = static_cast<gc::Cell*>(thing);
+    gc::TenuredCell *cell = gc::TenuredCell::fromPointer(thing);
     if (cell->isMarked(gc::BLACK))
         return cell->isMarked(gc::GRAY) ? 'G' : 'B';
     else
         return cell->isMarked(gc::GRAY) ? 'X' : 'W';
 }
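
For reference, the four characters MarkDescriptor can emit cover every combination of the two mark bits. A minimal self-contained model, with plain booleans standing in for the real mark bitmap: 'B' is black only, 'G' black and gray, 'X' gray only (not expected in practice), 'W' white, i.e. unmarked.

#include <cstdio>

// Toy model of MarkDescriptor: two mark bits -> one descriptor char.
static char MarkChar(bool black, bool gray)
{
    if (black)
        return gray ? 'G' : 'B';
    return gray ? 'X' : 'W';
}

int main()
{
    // Prints "BGXW": black-only, black+gray, gray-only, white.
    printf("%c%c%c%c\n", MarkChar(true, false), MarkChar(true, true),
           MarkChar(false, true), MarkChar(false, false));
    return 0;
}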
 
 static void
 DumpHeapVisitZone(JSRuntime *rt, void *data, Zone *zone)
@@ -963,17 +963,17 @@ JS::IncrementalReferenceBarrier(void *pt
     if (kind == JSTRACE_STRING && StringIsPermanentAtom(static_cast<JSString *>(ptr)))
         return;
 
     gc::Cell *cell = static_cast<gc::Cell *>(ptr);
 
 #ifdef DEBUG
     Zone *zone = kind == JSTRACE_OBJECT
                  ? static_cast<JSObject *>(cell)->zone()
-                 : cell->tenuredZone();
+                 : cell->asTenured()->zone();
     JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
 #endif
 
     if (kind == JSTRACE_OBJECT)
         JSObject::writeBarrierPre(static_cast<JSObject*>(cell));
     else if (kind == JSTRACE_STRING)
         JSString::writeBarrierPre(static_cast<JSString*>(cell));
     else if (kind == JSTRACE_SYMBOL)
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -456,17 +456,17 @@ class JSFunction : public JSObject
 
   private:
     inline js::FunctionExtended *toExtended();
     inline const js::FunctionExtended *toExtended() const;
 
   public:
     inline bool isExtended() const {
         JS_STATIC_ASSERT(FinalizeKind != ExtendedFinalizeKind);
-        JS_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (tenuredGetAllocKind() == ExtendedFinalizeKind));
+        JS_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (asTenured()->getAllocKind() == ExtendedFinalizeKind));
         return !!(flags() & EXTENDED);
     }
 
     /*
      * Accessors for data stored in extended functions. Use setExtendedSlot if
      * the function has already been initialized. Otherwise use
      * initExtendedSlot.
      */
@@ -479,17 +479,17 @@ class JSFunction : public JSObject
     static bool setTypeForScriptedFunction(js::ExclusiveContext *cx, js::HandleFunction fun,
                                            bool singleton = false);
 
     /* GC support. */
     js::gc::AllocKind getAllocKind() const {
         js::gc::AllocKind kind = FinalizeKind;
         if (isExtended())
             kind = ExtendedFinalizeKind;
-        JS_ASSERT_IF(isTenured(), kind == tenuredGetAllocKind());
+        JS_ASSERT_IF(isTenured(), kind == asTenured()->getAllocKind());
         return kind;
     }
 };
 
 extern JSString *
 fun_toStringHelper(JSContext *cx, js::HandleObject obj, unsigned indent);
 
 inline JSFunction::Flags
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -409,16 +409,24 @@ static const AllocKind * const Backgroun
 static const int BackgroundPhaseCount = sizeof(BackgroundPhases) / sizeof(AllocKind*);
 
 static const int BackgroundPhaseLength[] = {
     sizeof(BackgroundPhaseObjects) / sizeof(AllocKind),
     sizeof(BackgroundPhaseStringsAndSymbols) / sizeof(AllocKind),
     sizeof(BackgroundPhaseShapes) / sizeof(AllocKind)
 };
 
+template<>
+JSObject *
+ArenaCellIterImpl::get<JSObject>() const
+{
+    JS_ASSERT(!done());
+    return reinterpret_cast<JSObject *>(getCell());
+}
+
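
The specialization is needed because, after this patch, JSObject derives from Cell rather than TenuredCell, so the generic get<T>(), which static_casts from the iterator's TenuredCell *, no longer compiles for it. A minimal sketch with hypothetical stand-in types:

// Stand-in hierarchy: most GC things still derive from Tenured, but
// Object now hangs directly off the Cell base, as in this patch.
struct Cell {};
struct Tenured : Cell {};
struct Shape : Tenured {};
struct Object : Cell {};

struct Iter {
    Tenured *cur;
    Tenured *getCell() const { return cur; }
    // Generic accessor: fine for types that really derive from Tenured.
    template <typename T> T *get() const { return static_cast<T *>(getCell()); }
};

// static_cast<Object *>(Tenured *) is ill-formed (no inheritance path),
// so the Object case is specialized to cast through the raw address.
template <>
Object *Iter::get<Object>() const
{
    return reinterpret_cast<Object *>(cur);
}

int main()
{
    Object o;
    Iter it = { reinterpret_cast<Tenured *>(&o) };
    return it.get<Object>() == &o ? 0 : 1;
}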
 #ifdef DEBUG
 void
 ArenaHeader::checkSynchronizedWithFreeList() const
 {
     /*
      * Do not allow access to the free list when its real head is still stored
      * in FreeLists and is not synchronized with this one.
      */
@@ -483,17 +491,17 @@ Arena::finalize(FreeOp *fop, AllocKind t
     uintptr_t lastThing = thingsEnd() - thingSize;
 
     FreeSpan newListHead;
     FreeSpan *newListTail = &newListHead;
     size_t nmarked = 0;
 
     for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
         T *t = i.get<T>();
-        if (t->isMarked()) {
+        if (t->asTenured()->isMarked()) {
             uintptr_t thing = reinterpret_cast<uintptr_t>(t);
             if (thing != firstThingOrSuccessorOfLastMarkedThing) {
                 // We just finished passing over one or more free things,
                 // so record a new FreeSpan.
                 newListTail->initBoundsUnchecked(firstThingOrSuccessorOfLastMarkedThing,
                                                  thing - thingSize);
                 newListTail = newListTail->nextSpanUnchecked();
             }
@@ -1866,17 +1874,17 @@ GCMarker::delayMarkingArena(ArenaHeader 
     aheader->setNextDelayedMarking(unmarkedArenaStackTop);
     unmarkedArenaStackTop = aheader;
     markLaterArenas++;
 }
 
 void
 GCMarker::delayMarkingChildren(const void *thing)
 {
-    const Cell *cell = reinterpret_cast<const Cell *>(thing);
+    const TenuredCell *cell = TenuredCell::fromPointer(thing);
     cell->arenaHeader()->markOverflow = 1;
     delayMarkingArena(cell->arenaHeader());
 }
 
 inline void
 ArenaLists::prepareForIncrementalGC(JSRuntime *rt)
 {
     for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
@@ -2076,38 +2084,38 @@ AutoDisableCompactingGC::AutoDisableComp
 }
 
 AutoDisableCompactingGC::~AutoDisableCompactingGC()
 {
     gc.enableCompactingGC();
 }
 
 static void
-ForwardCell(Cell *dest, Cell *src)
+ForwardCell(TenuredCell *dest, TenuredCell *src)
 {
     // Mark a cell as having been relocated and store a forwarding pointer to
     // the new cell.
-    MOZ_ASSERT(src->tenuredZone() == dest->tenuredZone());
+    MOZ_ASSERT(src->zone() == dest->zone());
 
     // Putting the values this way round is a terrible hack to make
     // ObjectImpl::zone() work on forwarded objects.
     MOZ_ASSERT(ObjectImpl::offsetOfShape() == 0);
     uintptr_t *ptr = reinterpret_cast<uintptr_t *>(src);
     ptr[0] = reinterpret_cast<uintptr_t>(dest); // Forwarding address
     ptr[1] = ForwardedCellMagicValue; // Moved!
 }
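
A self-contained model of this forwarding scheme; the magic constant and the IsForwarded/Forwarded helpers below are illustrative stand-ins for the engine's, not its actual definitions:

#include <cassert>
#include <cstdint>

static const uintptr_t kForwardedMagic = 0xF04D; // stand-in for ForwardedCellMagicValue

struct ToyCell { uintptr_t word0, word1; };

static void Forward(ToyCell *dst, ToyCell *src)
{
    uintptr_t *p = reinterpret_cast<uintptr_t *>(src);
    p[0] = reinterpret_cast<uintptr_t>(dst); // forwarding address
    p[1] = kForwardedMagic;                  // "moved!" marker
}

static bool IsForwarded(const ToyCell *c)
{
    return reinterpret_cast<const uintptr_t *>(c)[1] == kForwardedMagic;
}

static ToyCell *Forwarded(ToyCell *c)
{
    assert(IsForwarded(c));
    return reinterpret_cast<ToyCell *>(reinterpret_cast<uintptr_t *>(c)[0]);
}

int main()
{
    ToyCell from = { 1, 2 }, to = { 1, 2 };
    Forward(&to, &from);
    return (IsForwarded(&from) && Forwarded(&from) == &to) ? 0 : 1;
}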
 
 static bool
 ArenaContainsGlobal(ArenaHeader *arena)
 {
     if (arena->getAllocKind() > FINALIZE_OBJECT_LAST)
         return false;
 
     for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
-        JSObject *obj = static_cast<JSObject *>(i.getCell());
+        JSObject *obj = i.get<JSObject>();
         if (obj->is<GlobalObject>())
             return true;
     }
 
     return false;
 }
 
 static bool
@@ -2177,49 +2185,50 @@ ArenaList::pickArenasToRelocate()
 inline bool
 PtrIsInRange(const void *ptr, const void *start, size_t length)
 {
     return uintptr_t(ptr) - uintptr_t(start) < length;
 }
 #endif
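
PtrIsInRange leans on unsigned wraparound: when ptr is below start, the subtraction wraps to a huge value and the single comparison rejects it, so one compare covers both bounds. A standalone demonstration:

#include <cstddef>
#include <cstdint>
#include <cstdio>

static bool InRange(const void *ptr, const void *start, size_t length)
{
    // If ptr < start, the unsigned difference wraps around to a huge
    // value, so this one comparison tests start <= ptr < start + length.
    return uintptr_t(ptr) - uintptr_t(start) < length;
}

int main()
{
    char buf[16];
    uintptr_t b = uintptr_t(buf);
    printf("%d %d %d\n",
           InRange(reinterpret_cast<void *>(b + 4), buf, sizeof buf),   // 1
           InRange(reinterpret_cast<void *>(b - 1), buf, sizeof buf),   // 0
           InRange(reinterpret_cast<void *>(b + 16), buf, sizeof buf)); // 0
    return 0;
}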
 
 static bool
-RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
+RelocateCell(Zone *zone, TenuredCell *src, AllocKind thingKind, size_t thingSize)
 {
     // Allocate a new cell.
-    void *dst = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
-    if (!dst)
-        dst = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
-    if (!dst)
+    void *dstAlloc = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
+    if (!dstAlloc)
+        dstAlloc = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
+    if (!dstAlloc)
         return false;
+    TenuredCell *dst = TenuredCell::fromPointer(dstAlloc);
 
     // Copy source cell contents to destination.
     memcpy(dst, src, thingSize);
 
     if (thingKind <= FINALIZE_OBJECT_LAST) {
-        JSObject *srcObj = static_cast<JSObject *>(src);
-        JSObject *dstObj = static_cast<JSObject *>(dst);
+        JSObject *srcObj = static_cast<JSObject *>(static_cast<Cell *>(src));
+        JSObject *dstObj = static_cast<JSObject *>(static_cast<Cell *>(dst));
 
         // Fixup the pointer to inline object elements if necessary.
         if (srcObj->hasFixedElements())
             dstObj->setFixedElements();
 
         // Call object moved hook if present.
         if (JSObjectMovedOp op = srcObj->getClass()->ext.objectMovedOp)
             op(dstObj, srcObj);
 
         JS_ASSERT_IF(dstObj->isNative(),
                      !PtrIsInRange((const Value*)dstObj->getDenseElements(), src, thingSize));
     }
 
     // Copy the mark bits.
-    static_cast<Cell *>(dst)->copyMarkBitsFrom(src);
+    dst->copyMarkBitsFrom(src);
 
     // Mark source cell as forwarded and leave a pointer to the destination.
-    ForwardCell(static_cast<Cell *>(dst), src);
+    ForwardCell(dst, src);
 
     return true;
 }
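
Condensed, the relocation sequence is: allocate a destination cell, memcpy the contents, fix up self-referential pointers, copy mark state, and forward the source. A toy end-to-end version under those assumptions; malloc stands in for the arena free-list allocation, and the magic value is the same illustrative one as above:

#include <cstdint>
#include <cstdlib>
#include <cstring>

struct Thing {
    uintptr_t a, b; // payload words; reused as the forwarding record once moved
    bool marked;    // in the engine the mark bits live in a side bitmap
};

static Thing *Relocate(Thing *src)
{
    Thing *dst = static_cast<Thing *>(malloc(sizeof(Thing))); // arena alloc stand-in
    if (!dst)
        return nullptr;
    memcpy(dst, src, sizeof(Thing)); // copy cell contents (also copies the mark
                                     // flag here; the engine copies mark bits separately)
    // Mark the source as forwarded: destination address, then "moved" magic.
    uintptr_t *p = reinterpret_cast<uintptr_t *>(src);
    p[0] = reinterpret_cast<uintptr_t>(dst);
    p[1] = 0xF04D;
    return dst;
}

int main()
{
    Thing t = { 7, 8, true };
    Thing *moved = Relocate(&t);
    if (!moved)
        return 1;
    bool ok = moved->a == 7 && moved->marked &&
              reinterpret_cast<uintptr_t *>(&t)[1] == 0xF04D;
    free(moved);
    return ok ? 0 : 1;
}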
 
 static bool
 RelocateArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
@@ -2322,18 +2331,18 @@ GCRuntime::relocateArenas()
     }
 
     return relocatedList;
 }
 
 void
 MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
-    Cell *thing = static_cast<Cell *>(*thingp);
-    Zone *zone = thing->tenuredZoneFromAnyThread();
+    TenuredCell *thing = TenuredCell::fromPointer(*thingp);
+    Zone *zone = thing->zoneFromAnyThread();
     if (!zone->isGCCompacting()) {
         JS_ASSERT(!IsForwarded(thing));
         return;
     }
     JS_ASSERT(CurrentThreadCanAccessZone(zone));
 
     if (IsForwarded(thing)) {
         Cell *dst = Forwarded(thing);
@@ -2369,17 +2378,16 @@ MovingTracer::Sweep(JSTracer *jstrc)
 
     /* Type inference may put more blocks here to free. */
     rt->freeLifoAlloc.freeAll();
 
     /* Clear runtime caches that can contain cell pointers. */
     // TODO: Should possibly just call PurgeRuntime() here.
     rt->newObjectCache.purge();
     rt->nativeIterCache.purge();
-    rt->regExpTestCache.purge();
 }
 
 /*
  * Update the internal pointers in a single cell.
  */
 static void
 UpdateCellPointers(MovingTracer *trc, Cell *cell, JSGCTraceKind traceKind) {
     if (traceKind == JSTRACE_OBJECT) {
@@ -2465,19 +2473,19 @@ GCRuntime::updatePointersToRelocatedCell
 void
 GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
 {
     // Release the relocated arenas, now containing only forwarding pointers
 
 #ifdef DEBUG
     for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) {
         for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
-            Cell *src = i.getCell();
+            TenuredCell *src = i.getCell();
             JS_ASSERT(IsForwarded(src));
-            Cell *dest = Forwarded(src);
+            TenuredCell *dest = Forwarded(src);
             JS_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
             JS_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
         }
     }
 #endif
 
     unsigned count = 0;
     while (relocatedList) {
@@ -3606,24 +3614,24 @@ CompartmentOfCell(Cell *thing, JSGCTrace
     else
         return nullptr;
 }
 
 static void
 CheckCompartmentCallback(JSTracer *trcArg, void **thingp, JSGCTraceKind kind)
 {
     CompartmentCheckTracer *trc = static_cast<CompartmentCheckTracer *>(trcArg);
-    Cell *thing = (Cell *)*thingp;
+    TenuredCell *thing = TenuredCell::fromPointer(*thingp);
 
     JSCompartment *comp = CompartmentOfCell(thing, kind);
     if (comp && trc->compartment) {
         CheckCompartment(trc, comp, thing, kind);
     } else {
-        JS_ASSERT(thing->tenuredZone() == trc->zone ||
-                  trc->runtime()->isAtomsZone(thing->tenuredZone()));
+        JS_ASSERT(thing->zone() == trc->zone ||
+                  trc->runtime()->isAtomsZone(thing->zone()));
     }
 }
 
 void
 GCRuntime::checkForCompartmentMismatches()
 {
     if (disableStrictProxyCheckingCount)
         return;
@@ -4196,39 +4204,39 @@ DropStringWrappers(JSRuntime *rt)
  */
 
 void
 JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
 {
     for (js::WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey::Kind kind = e.front().key().kind;
         JS_ASSERT(kind != CrossCompartmentKey::StringWrapper);
-        Cell *other = e.front().key().wrapped;
+        TenuredCell *other = e.front().key().wrapped->asTenured();
         if (kind == CrossCompartmentKey::ObjectWrapper) {
             /*
             * Add an edge to the wrapped object's compartment if the wrapped
             * object is not marked black, to indicate that the wrapper
             * compartment must not be swept after the wrapped compartment.
              */
             if (!other->isMarked(BLACK) || other->isMarked(GRAY)) {
-                JS::Zone *w = other->tenuredZone();
+                JS::Zone *w = other->zone();
                 if (w->isGCMarking())
                     finder.addEdgeTo(w);
             }
         } else {
             JS_ASSERT(kind == CrossCompartmentKey::DebuggerScript ||
                       kind == CrossCompartmentKey::DebuggerSource ||
                       kind == CrossCompartmentKey::DebuggerObject ||
                       kind == CrossCompartmentKey::DebuggerEnvironment);
             /*
             * Add an edge for debugger object wrappers, to ensure (in conjunction
             * with the call to Debugger::findCompartmentEdges below) that debugger
              * and debuggee objects are always swept in the same group.
              */
-            JS::Zone *w = other->tenuredZone();
+            JS::Zone *w = other->zone();
             if (w->isGCMarking())
                 finder.addEdgeTo(w);
         }
     }
 
     Debugger::findCompartmentEdges(zone(), finder);
 }
 
@@ -4461,21 +4469,21 @@ MarkIncomingCrossCompartmentPointers(JSR
         for (JSObject *src = c->gcIncomingGrayPointers;
              src;
              src = NextIncomingCrossCompartmentPointer(src, unlinkList))
         {
             JSObject *dst = CrossCompartmentPointerReferent(src);
             JS_ASSERT(dst->compartment() == c);
 
             if (color == GRAY) {
-                if (IsObjectMarked(&src) && src->isMarked(GRAY))
+                if (IsObjectMarked(&src) && src->asTenured()->isMarked(GRAY))
                     MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
                                            "cross-compartment gray pointer");
             } else {
-                if (IsObjectMarked(&src) && !src->isMarked(GRAY))
+                if (IsObjectMarked(&src) && !src->asTenured()->isMarked(GRAY))
                     MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
                                            "cross-compartment black pointer");
             }
         }
 
         if (unlinkList)
             c->gcIncomingGrayPointers = nullptr;
     }
@@ -6298,28 +6306,28 @@ AutoDisableProxyCheck::AutoDisableProxyC
 AutoDisableProxyCheck::~AutoDisableProxyCheck()
 {
     gc.enableStrictProxyChecking();
 }
 
 JS_FRIEND_API(void)
 JS::AssertGCThingMustBeTenured(JSObject *obj)
 {
-    JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
-              obj->isTenured());
+    JS_ASSERT(obj->isTenured() &&
+              (!IsNurseryAllocable(obj->asTenured()->getAllocKind()) || obj->getClass()->finalize));
 }
 
 JS_FRIEND_API(void)
 js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
 {
     JS_ASSERT(cell);
     if (IsInsideNursery(cell))
         JS_ASSERT(kind == JSTRACE_OBJECT);
     else
-        JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
+        JS_ASSERT(MapAllocToTraceKind(cell->asTenured()->getAllocKind()) == kind);
 }
 
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -47,47 +47,16 @@ enum State {
     MARK_ROOTS,
     MARK,
     SWEEP,
 #ifdef JSGC_COMPACTING
     COMPACT
 #endif
 };
 
-static inline JSGCTraceKind
-MapAllocToTraceKind(AllocKind kind)
-{
-    static const JSGCTraceKind map[] = {
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8_BACKGROUND */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12_BACKGROUND */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16 */
-        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16_BACKGROUND */
-        JSTRACE_SCRIPT,     /* FINALIZE_SCRIPT */
-        JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
-        JSTRACE_SHAPE,      /* FINALIZE_SHAPE */
-        JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
-        JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
-        JSTRACE_STRING,     /* FINALIZE_FAT_INLINE_STRING */
-        JSTRACE_STRING,     /* FINALIZE_STRING */
-        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
-        JSTRACE_SYMBOL,     /* FINALIZE_SYMBOL */
-        JSTRACE_JITCODE,    /* FINALIZE_JITCODE */
-    };
-    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
-    return map[kind];
-}
-
 /* Return a printable string for the given kind, for diagnostic purposes. */
 const char *
 TraceKindAsAscii(JSGCTraceKind kind);
 
 /* Map from C++ type to finalize kind. JSObject does not have a 1:1 mapping, so must use Arena::thingSize. */
 template <typename T> struct MapTypeToFinalizeKind {};
 template <> struct MapTypeToFinalizeKind<JSScript>          { static const AllocKind kind = FINALIZE_SCRIPT; };
 template <> struct MapTypeToFinalizeKind<LazyScript>        { static const AllocKind kind = FINALIZE_LAZY_SCRIPT; };
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -36,17 +36,17 @@ ThreadSafeContext::isThreadLocal(T thing
     if (cx->nursery().isInsideNewspace(thing))
         return true;
 #endif
 
     // Global invariant
     JS_ASSERT(!IsInsideNursery(thing));
 
     // The thing is not in the nursery, but is it in the private tenured area?
-    if (allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
+    if (allocator_->arenas.containsArena(runtime_, thing->asTenured()->arenaHeader()))
     {
         // GC should be suppressed in preparation for mutating thread local
         // objects, as we don't want to trip any barriers.
         JS_ASSERT(!thing->zoneFromAnyThread()->needsIncrementalBarrier());
         JS_ASSERT(!thing->runtimeFromAnyThread()->needsIncrementalBarrier());
 
         return true;
     }
@@ -87,17 +87,17 @@ inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     JS_ASSERT(thing);
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
     if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
 #endif
-    return MapAllocToTraceKind(cell->tenuredGetAllocKind());
+    return MapAllocToTraceKind(cell->asTenured()->getAllocKind());
 }
 
 inline void
 GCRuntime::poke()
 {
     poked = true;
 
 #ifdef JS_GC_ZEAL
@@ -227,34 +227,38 @@ class ArenaCellIterImpl
         limit = arenaAddr + ArenaSize;
         moveForwardIfFree();
     }
 
     bool done() const {
         return thing == limit;
     }
 
-    Cell *getCell() const {
+    TenuredCell *getCell() const {
         JS_ASSERT(!done());
-        return reinterpret_cast<Cell *>(thing);
+        return reinterpret_cast<TenuredCell *>(thing);
     }
 
     template<typename T> T *get() const {
         JS_ASSERT(!done());
         return static_cast<T *>(getCell());
     }
 
     void next() {
         MOZ_ASSERT(!done());
         thing += thingSize;
         if (thing < limit)
             moveForwardIfFree();
     }
 };
 
+template<>
+JSObject *
+ArenaCellIterImpl::get<JSObject>() const;
+
 class ArenaCellIterUnderGC : public ArenaCellIterImpl
 {
   public:
     explicit ArenaCellIterUnderGC(ArenaHeader *aheader) {
         JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
         init(aheader);
     }
 };
@@ -565,17 +569,17 @@ static inline void
 CheckIncrementalZoneState(ThreadSafeContext *cx, T *t)
 {
 #ifdef DEBUG
     if (!cx->isJSContext())
         return;
 
     Zone *zone = cx->asJSContext()->zone();
     JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
-                 t->arenaHeader()->allocatedDuringIncremental);
+                 t->asTenured()->arenaHeader()->allocatedDuringIncremental);
 #endif
 }
 
 /*
  * Allocate a new GC thing. After a successful allocation the caller must
  * fully initialize the thing before calling any function that can potentially
  * trigger GC. This will ensure that GC tracing never sees junk values stored
  * in the partially initialized thing.
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -2788,21 +2788,21 @@ TypeCompartment::fixObjectType(Exclusive
     Rooted<TaggedProto> objProto(cx, obj->getTaggedProto());
     TypeObject *objType = newTypeObject(cx, &JSObject::class_, objProto);
     if (!objType || !objType->addDefiniteProperties(cx, obj->lastProperty()))
         return;
 
     if (obj->isIndexed())
         objType->setFlags(cx, OBJECT_FLAG_SPARSE_INDEXES);
 
-    ScopedJSFreePtr<jsid> ids(objType->pod_calloc<jsid>(properties.length()));
+    ScopedJSFreePtr<jsid> ids(objType->zone()->pod_calloc<jsid>(properties.length()));
     if (!ids)
         return;
 
-    ScopedJSFreePtr<Type> types(objType->pod_calloc<Type>(properties.length()));
+    ScopedJSFreePtr<Type> types(objType->zone()->pod_calloc<Type>(properties.length()));
     if (!types)
         return;
 
     for (size_t i = 0; i < properties.length(); i++) {
         ids[i] = properties[i].id;
         types[i] = GetValueTypeForTable(obj->getSlot(i));
         if (!objType->unknownProperties())
             objType->addPropertyType(cx, IdToTypeId(ids[i]), types[i]);
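
These hunks show the pattern applied throughout this patch: with the allocator hooks removed from the Cell hierarchy, malloc-backed side arrays are requested from, and charged to, the owning Zone. A toy model of that shape; the byte-count bookkeeping is an assumption for illustration, not the engine's exact accounting:

#include <cstddef>
#include <cstdlib>

// Toy Zone: owns allocation accounting for everything that lives in it.
struct Zone {
    size_t mallocBytes;

    template <typename T>
    T *pod_calloc(size_t count)
    {
        T *p = static_cast<T *>(calloc(count, sizeof(T)));
        if (p)
            mallocBytes += count * sizeof(T); // charge the zone, not the cell
        return p;
    }
};

// Toy GC thing: no allocator methods of its own any more; callers go
// through thing->zone()->pod_calloc<T>(n), as the patch does.
struct TypeObjectToy {
    Zone *zone_;
    Zone *zone() const { return zone_; }
};

int main()
{
    Zone z = { 0 };
    TypeObjectToy t = { &z };
    int *ids = t.zone()->pod_calloc<int>(8);
    bool ok = ids && z.mallocBytes == 8 * sizeof(int);
    free(ids);
    return ok ? 0 : 1;
}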
@@ -3658,17 +3658,17 @@ JSScript::makeTypes(JSContext *cx)
 {
     JS_ASSERT(!types);
 
     AutoEnterAnalysis enter(cx);
 
     unsigned count = TypeScript::NumTypeSets(this);
 
     TypeScript *typeScript = (TypeScript *)
-        pod_calloc<uint8_t>(TypeScript::SizeIncludingTypeArray(count));
+        zone()->pod_calloc<uint8_t>(TypeScript::SizeIncludingTypeArray(count));
     if (!typeScript)
         return false;
 
     types = typeScript;
 
 #ifdef DEBUG
     StackTypeSet *typeArray = typeScript->typeArray();
     for (unsigned i = 0; i < nTypeSets(); i++) {
@@ -3730,17 +3730,17 @@ TypeNewScript::make(JSContext *cx, TypeO
         return;
 
     ScopedJSDeletePtr<TypeNewScript> newScript(cx->new_<TypeNewScript>());
     if (!newScript)
         return;
 
     newScript->fun = fun;
 
-    JSObject **preliminaryObjects = type->pod_calloc<JSObject *>(PRELIMINARY_OBJECT_COUNT);
+    JSObject **preliminaryObjects = type->zone()->pod_calloc<JSObject *>(PRELIMINARY_OBJECT_COUNT);
     if (!preliminaryObjects)
         return;
 
     newScript->preliminaryObjects = preliminaryObjects;
     type->setNewScript(newScript.forget());
 
     gc::TraceTypeNewScript(type);
 }
@@ -3994,17 +3994,17 @@ TypeNewScript::maybeAnalyze(JSContext *c
                 return true;
         }
 
         Initializer done(Initializer::DONE, 0);
 
         if (!initializerVector.append(done))
             return false;
 
-        initializerList = type->pod_calloc<Initializer>(initializerVector.length());
+        initializerList = type->zone()->pod_calloc<Initializer>(initializerVector.length());
         if (!initializerList)
             return false;
         PodCopy(initializerList, initializerVector.begin(), initializerVector.length());
     }
 
     js_free(preliminaryObjects);
     preliminaryObjects = nullptr;
 
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -994,17 +994,17 @@ class TypeNewScript
  * analyzed script attaches constraints to it: the script is querying that
  * property off the object or another which delegates to it, and the analysis
  * information is sensitive to changes in the property's type. Future changes
  * to the property (whether those uncovered by analysis or those occurring
  * in the VM) will treat these properties like those of any other type object.
  */
 
 /* Type information about an object accessed by a script. */
-struct TypeObject : gc::BarrieredCell<TypeObject>
+struct TypeObject : public gc::TenuredCell
 {
   private:
     /* Class shared by object using this type. */
     const Class *clasp_;
 
     /* Prototype shared by objects using this type. */
     HeapPtrObject proto_;
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1769,17 +1769,17 @@ js_NewGenerator(JSContext *cx, const Int
 
     /* Load and compute stack slot counts. */
     Value *stackvp = stackfp->generatorArgsSnapshotBegin();
     unsigned vplen = stackfp->generatorArgsSnapshotEnd() - stackvp;
 
     static_assert(sizeof(InterpreterFrame) % sizeof(HeapValue) == 0,
                   "The Values stored after InterpreterFrame must be aligned.");
     unsigned nvals = vplen + VALUES_PER_STACK_FRAME + stackfp->script()->nslots();
-    JSGenerator *gen = obj->pod_calloc_with_extra<JSGenerator, HeapValue>(nvals);
+    JSGenerator *gen = obj->zone()->pod_calloc_with_extra<JSGenerator, HeapValue>(nvals);
     if (!gen)
         return nullptr;
 
     /* Cut up floatingStack space. */
     HeapValue *genvp = gen->stackSnapshot();
     SetValueRangeToUndefined((Value *)genvp, vplen);
 
     InterpreterFrame *genfp = reinterpret_cast<InterpreterFrame *>(genvp + vplen);
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2044,17 +2044,17 @@ js::DeepCloneObjectLiteral(JSContext *cx
     RootedValue v(cx);
     RootedObject deepObj(cx);
 
     if (obj->is<ArrayObject>()) {
         clone = NewDenseUnallocatedArray(cx, obj->as<ArrayObject>().length(), nullptr, newKind);
     } else {
         // Object literals are tenured by default, as they are held by the JSScript.
         JS_ASSERT(obj->isTenured());
-        AllocKind kind = obj->tenuredGetAllocKind();
+        AllocKind kind = obj->asTenured()->getAllocKind();
         Rooted<TypeObject*> typeObj(cx, obj->getType(cx));
         if (!typeObj)
             return nullptr;
         RootedObject parent(cx, obj->getParent());
         clone = NewObjectWithGivenProto(cx, &JSObject::class_, TaggedProto(typeObj->proto().toObject()),
                                         parent, kind, newKind);
     }
 
@@ -2152,17 +2152,17 @@ js::XDRObjectLiteral(XDRState<mode> *xdr
 
     } else {
         // Code the alloc kind of the object.
         AllocKind kind;
         {
             if (mode == XDR_ENCODE) {
                 JS_ASSERT(obj->getClass() == &JSObject::class_);
                 JS_ASSERT(obj->isTenured());
-                kind = obj->tenuredGetAllocKind();
+                kind = obj->asTenured()->getAllocKind();
             }
 
             if (!xdr->codeEnum32(&kind))
                 return false;
 
             if (mode == XDR_DECODE)
                 obj.set(NewBuiltinClassInstance(cx, &JSObject::class_, kind, js::MaybeSingletonObject));
         }
@@ -2350,17 +2350,17 @@ js::XDRObjectLiteral(XDRState<XDR_ENCODE
 template bool
 js::XDRObjectLiteral(XDRState<XDR_DECODE> *xdr, MutableHandleObject obj);
 
 JSObject *
 js::CloneObjectLiteral(JSContext *cx, HandleObject parent, HandleObject srcObj)
 {
     if (srcObj->getClass() == &JSObject::class_) {
         AllocKind kind = GetBackgroundAllocKind(GuessObjectGCKind(srcObj->numFixedSlots()));
-        JS_ASSERT_IF(srcObj->isTenured(), kind == srcObj->tenuredGetAllocKind());
+        JS_ASSERT_IF(srcObj->isTenured(), kind == srcObj->asTenured()->getAllocKind());
 
         JSObject *proto = cx->global()->getOrCreateObjectPrototype(cx);
         if (!proto)
             return nullptr;
         Rooted<TypeObject*> typeObj(cx, cx->getNewType(&JSObject::class_, TaggedProto(proto)));
         if (!typeObj)
             return nullptr;
 
@@ -2379,17 +2379,18 @@ js::CloneObjectLiteral(JSContext *cx, Ha
 
     RootedId id(cx);
     RootedValue value(cx);
     for (size_t i = 0; i < length; i++) {
         // The only markable values in copy on write arrays are atoms, which
         // can be freely copied between compartments.
         value = srcObj->getDenseElement(i);
         JS_ASSERT_IF(value.isMarkable(),
-                     cx->runtime()->isAtomsZone(value.toGCThing()->tenuredZone()));
+                     value.toGCThing()->isTenured() &&
+                     cx->runtime()->isAtomsZone(value.toGCThing()->asTenured()->zone()));
 
         id = INT_TO_JSID(i);
         if (!JSObject::defineGeneric(cx, res, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
             return nullptr;
     }
 
     if (!ObjectElements::MakeElementsCopyOnWrite(cx, res))
         return nullptr;
@@ -2465,26 +2466,26 @@ JSObject::ReserveForTradeGuts(JSContext 
      * inline slots. The fixed slots will be updated in place during TradeGuts.
      * Non-native objects need to be reshaped according to the new count.
      */
     if (a->isNative()) {
         if (!a->generateOwnShape(cx))
             return false;
     } else {
         reserved.newbshape = EmptyShape::getInitialShape(cx, aClass, aProto, a->getParent(), a->getMetadata(),
-                                                         b->tenuredGetAllocKind());
+                                                         b->asTenured()->getAllocKind());
         if (!reserved.newbshape)
             return false;
     }
     if (b->isNative()) {
         if (!b->generateOwnShape(cx))
             return false;
     } else {
         reserved.newashape = EmptyShape::getInitialShape(cx, bClass, bProto, b->getParent(), b->getMetadata(),
-                                                         a->tenuredGetAllocKind());
+                                                         a->asTenured()->getAllocKind());
         if (!reserved.newashape)
             return false;
     }
 
     /* The avals/bvals vectors hold all original values from the objects. */
 
     if (!reserved.avals.reserve(a->slotSpan()))
         return false;
@@ -2517,23 +2518,23 @@ JSObject::ReserveForTradeGuts(JSContext 
      * if they do not have enough fixed slots to accommodate the slots in the
      * other object.
      */
 
     unsigned adynamic = dynamicSlotsCount(reserved.newafixed, b->slotSpan(), b->getClass());
     unsigned bdynamic = dynamicSlotsCount(reserved.newbfixed, a->slotSpan(), a->getClass());
 
     if (adynamic) {
-        reserved.newaslots = a->pod_malloc<HeapSlot>(adynamic);
+        reserved.newaslots = a->zone()->pod_malloc<HeapSlot>(adynamic);
         if (!reserved.newaslots)
             return false;
         Debug_SetSlotRangeToCrashOnTouch(reserved.newaslots, adynamic);
     }
     if (bdynamic) {
-        reserved.newbslots = b->pod_malloc<HeapSlot>(bdynamic);
+        reserved.newbslots = b->zone()->pod_malloc<HeapSlot>(bdynamic);
         if (!reserved.newbslots)
             return false;
         Debug_SetSlotRangeToCrashOnTouch(reserved.newbslots, bdynamic);
     }
 
     return true;
 }
 
@@ -2674,18 +2675,18 @@ JSObject::TradeGuts(JSContext *cx, JSObj
 #endif
 }
 
 /* Use this method with extreme caution. It trades the guts of two objects. */
 bool
 JSObject::swap(JSContext *cx, HandleObject a, HandleObject b)
 {
     // Ensure the swap doesn't cause a finalizer to be skipped.
-    JS_ASSERT(IsBackgroundFinalized(a->tenuredGetAllocKind()) ==
-              IsBackgroundFinalized(b->tenuredGetAllocKind()));
+    JS_ASSERT(IsBackgroundFinalized(a->asTenured()->getAllocKind()) ==
+              IsBackgroundFinalized(b->asTenured()->getAllocKind()));
     JS_ASSERT(a->compartment() == b->compartment());
 
     unsigned r = NotifyGCPreSwap(a, b);
 
     TradeGutsReserved reserved(cx);
     if (!ReserveForTradeGuts(cx, a, b, reserved)) {
         NotifyGCPostSwap(b, a, r);
         return false;
@@ -3021,17 +3022,17 @@ AllocateSlots(ThreadSafeContext *cx, JSO
 #ifdef JSGC_GENERATIONAL
     if (cx->isJSContext())
         return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(obj, nslots);
 #endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext())
         return cx->asForkJoinContext()->nursery().allocateSlots(obj, nslots);
 #endif
-    return obj->pod_malloc<HeapSlot>(nslots);
+    return obj->zone()->pod_malloc<HeapSlot>(nslots);
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the slot array
 // an attempt will be made to place the array in the tenured area.
 //
 // If this returns null then the old slots will be left alone.
 static HeapSlot *
 ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots,
@@ -3044,17 +3045,17 @@ ReallocateSlots(ThreadSafeContext *cx, J
     }
 #endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext()) {
         return cx->asForkJoinContext()->nursery().reallocateSlots(obj, oldSlots,
                                                                   oldCount, newCount);
     }
 #endif
-    return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
+    return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
 }
 
 /* static */ bool
 JSObject::growSlots(ThreadSafeContext *cx, HandleObject obj, uint32_t oldCount, uint32_t newCount)
 {
     JS_ASSERT(cx->isThreadLocal(obj));
     JS_ASSERT(newCount > oldCount);
     JS_ASSERT_IF(!obj->is<ArrayObject>(), newCount >= SLOT_CAPACITY_MIN);
@@ -3330,17 +3331,17 @@ AllocateElements(ThreadSafeContext *cx, 
     if (cx->isJSContext())
         return cx->asJSContext()->runtime()->gc.nursery.allocateElements(obj, nelems);
 #endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext())
         return cx->asForkJoinContext()->nursery().allocateElements(obj, nelems);
 #endif
 
-    return reinterpret_cast<js::ObjectElements *>(obj->pod_malloc<HeapSlot>(nelems));
+    return reinterpret_cast<js::ObjectElements *>(obj->zone()->pod_malloc<HeapSlot>(nelems));
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the element array
 // an attempt will be made to place the array in the tenured area.
 static ObjectElements *
 ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHeader,
                    uint32_t oldCount, uint32_t newCount)
 {
@@ -3353,18 +3354,18 @@ ReallocateElements(ThreadSafeContext *cx
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext()) {
         return cx->asForkJoinContext()->nursery().reallocateElements(obj, oldHeader,
                                                                      oldCount, newCount);
     }
 #endif
 
     return reinterpret_cast<js::ObjectElements *>(
-            obj->pod_realloc<HeapSlot>(reinterpret_cast<HeapSlot *>(oldHeader),
-                                       oldCount, newCount));
+            obj->zone()->pod_realloc<HeapSlot>(reinterpret_cast<HeapSlot *>(oldHeader),
+                                               oldCount, newCount));
 }
 
 // Round up |reqAllocated| to a good size. Up to 1 Mebi (i.e. 1,048,576) the
 // slot count is usually a power-of-two:
 //
 //   8, 16, 32, 64, ..., 256 Ki, 512 Ki, 1 Mi
 //
 // Beyond that, we use this formula:
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -79,17 +79,17 @@ JSObject::unwatch(JSContext *cx, JS::Han
 
 inline void
 JSObject::finalize(js::FreeOp *fop)
 {
     js::probes::FinalizeObject(this);
 
 #ifdef DEBUG
     JS_ASSERT(isTenured());
-    if (!IsBackgroundFinalized(tenuredGetAllocKind())) {
+    if (!IsBackgroundFinalized(asTenured()->getAllocKind())) {
         /* Assert we're on the main thread. */
         JS_ASSERT(CurrentThreadCanAccessRuntime(fop->runtime()));
     }
 #endif
     const js::Class *clasp = getClass();
     if (clasp->finalize)
         clasp->finalize(fop, this);
 
@@ -1054,17 +1054,17 @@ NewObjectScriptedCall(JSContext *cx, Mut
 static inline JSObject *
 CopyInitializerObject(JSContext *cx, HandleObject baseobj, NewObjectKind newKind = GenericObject)
 {
     JS_ASSERT(baseobj->getClass() == &JSObject::class_);
     JS_ASSERT(!baseobj->inDictionaryMode());
 
     gc::AllocKind allocKind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
     allocKind = gc::GetBackgroundAllocKind(allocKind);
-    JS_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->tenuredGetAllocKind());
+    JS_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->asTenured()->getAllocKind());
     RootedObject obj(cx);
     obj = NewBuiltinClassInstance(cx, &JSObject::class_, allocKind, newKind);
     if (!obj)
         return nullptr;
 
     RootedObject metadata(cx, obj->getMetadata());
     RootedShape lastProp(cx, baseobj->lastProperty());
     if (!JSObject::setLastProperty(cx, obj, lastProp))
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -281,17 +281,17 @@ Shape::fixupDictionaryShapeAfterMovingGC
     // location of a dead object in the nursery. In this case we should never
     // touch it again, so poison it for good measure.
     if (IsInsideNursery(reinterpret_cast<Cell *>(listp))) {
         JS_POISON(reinterpret_cast<void *>(this), JS_SWEPT_TENURED_PATTERN, sizeof(Shape));
         return;
     }
 
     JS_ASSERT(!IsInsideNursery(reinterpret_cast<Cell *>(listp)));
-    AllocKind kind = reinterpret_cast<Cell *>(listp)->tenuredGetAllocKind();
+    AllocKind kind = TenuredCell::fromPointer(listp)->getAllocKind();
     JS_ASSERT(kind == FINALIZE_SHAPE || kind <= FINALIZE_OBJECT_LAST);
     if (kind == FINALIZE_SHAPE) {
         // listp points to the parent field of the next shape.
         Shape *next = reinterpret_cast<Shape *>(uintptr_t(listp) -
                                                 offsetof(Shape, parent));
         listp = &gc::MaybeForwarded(next)->parent;
     } else {
         // listp points to the shape_ field of an object.
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -2917,17 +2917,17 @@ Rebase(JSScript *dst, JSScript *src, T *
 
 JSScript *
 js::CloneScript(JSContext *cx, HandleObject enclosingScope, HandleFunction fun, HandleScript src,
                 NewObjectKind newKind /* = GenericObject */)
 {
     /* NB: Keep this in sync with XDRScript. */
 
     /* Some embeddings are not careful to use ExposeObjectToActiveJS as needed. */
-    JS_ASSERT(!src->sourceObject()->isMarked(gc::GRAY));
+    JS_ASSERT(!src->sourceObject()->asTenured()->isMarked(gc::GRAY));
 
     uint32_t nconsts   = src->hasConsts()   ? src->consts()->length   : 0;
     uint32_t nobjects  = src->hasObjects()  ? src->objects()->length  : 0;
     uint32_t nregexps  = src->hasRegexps()  ? src->regexps()->length  : 0;
     uint32_t ntrynotes = src->hasTrynotes() ? src->trynotes()->length : 0;
     uint32_t nblockscopes = src->hasBlockScopes() ? src->blockScopes()->length : 0;
 
     /* Script data */
@@ -3688,17 +3688,17 @@ LazyScript::CreateRaw(ExclusiveContext *
 
     // Reset runtime flags to obtain a fresh LazyScript.
     p.hasBeenCloned = false;
     p.treatAsRunOnce = false;
 
     size_t bytes = (p.numFreeVariables * sizeof(FreeVariable))
                  + (p.numInnerFunctions * sizeof(HeapPtrFunction));
 
-    ScopedJSFreePtr<uint8_t> table(bytes ? fun->pod_malloc<uint8_t>(bytes) : nullptr);
+    ScopedJSFreePtr<uint8_t> table(bytes ? fun->zone()->pod_malloc<uint8_t>(bytes) : nullptr);
     if (bytes && !table)
         return nullptr;
 
     LazyScript *res = js_NewGCLazyScript(cx);
     if (!res)
         return nullptr;
 
     cx->compartment()->scheduleDelazificationForDebugMode();
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -726,17 +726,17 @@ XDRLazyScript(XDRState<mode> *xdr, Handl
  * Code any constant value.
  */
 template<XDRMode mode>
 bool
 XDRScriptConst(XDRState<mode> *xdr, MutableHandleValue vp);
 
 } /* namespace js */
 
-class JSScript : public js::gc::BarrieredCell<JSScript>
+class JSScript : public js::gc::TenuredCell
 {
     template <js::XDRMode mode>
     friend
     bool
     js::XDRScript(js::XDRState<mode> *xdr, js::HandleObject enclosingScope, js::HandleScript enclosingScript,
                   js::HandleFunction fun, js::MutableHandleScript scriptp);
 
     friend JSScript *
@@ -1282,17 +1282,17 @@ class JSScript : public js::gc::Barriere
     js::jit::IonScript *maybeIonScript() const {
         return ion;
     }
     js::jit::IonScript *const *addressOfIonScript() const {
         return &ion;
     }
     void setIonScript(JSContext *maybecx, js::jit::IonScript *ionScript) {
         if (hasIonScript())
-            js::jit::IonScript::writeBarrierPre(tenuredZone(), ion);
+            js::jit::IonScript::writeBarrierPre(zone(), ion);
         ion = ionScript;
         MOZ_ASSERT_IF(hasIonScript(), hasBaselineScript());
         updateBaselineOrIonRaw(maybecx);
     }
 
     bool hasBaselineScript() const {
         bool res = baseline && baseline != BASELINE_DISABLED_SCRIPT;
         MOZ_ASSERT_IF(!res, !ion || ion == ION_DISABLED_SCRIPT);
@@ -1335,17 +1335,17 @@ class JSScript : public js::gc::Barriere
         JS_ASSERT(hasParallelIonScript());
         return parallelIon;
     }
     js::jit::IonScript *maybeParallelIonScript() const {
         return parallelIon;
     }
     void setParallelIonScript(js::jit::IonScript *ionScript) {
         if (hasParallelIonScript())
-            js::jit::IonScript::writeBarrierPre(tenuredZone(), parallelIon);
+            js::jit::IonScript::writeBarrierPre(zone(), parallelIon);
         parallelIon = ionScript;
     }
 
     static size_t offsetOfBaselineScript() {
         return offsetof(JSScript, baseline);
     }
     static size_t offsetOfIonScript() {
         return offsetof(JSScript, ion);
@@ -1717,17 +1717,17 @@ class AliasedFormalIter
     const Binding &operator*() const { JS_ASSERT(!done()); return *p_; }
     const Binding *operator->() const { JS_ASSERT(!done()); return p_; }
     unsigned frameIndex() const { JS_ASSERT(!done()); return p_ - begin_; }
     unsigned scopeSlot() const { JS_ASSERT(!done()); return slot_; }
 };
 
 // Information about a script which may be (or has been) lazily compiled to
 // bytecode from its source.
-class LazyScript : public gc::BarrieredCell<LazyScript>
+class LazyScript : public gc::TenuredCell
 {
   public:
     class FreeVariable
     {
         // Free variable names are possible tagged JSAtom *s.
         uintptr_t bits_;
 
         static const uintptr_t HOISTED_USE_BIT = 0x1;
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -161,17 +161,17 @@ JSScript::setIsCallsiteClone(JSObject *f
     JS_ASSERT(fun->is<JSFunction>());
     enclosingScopeOrOriginalFunction_ = fun;
 }
 
 inline void
 JSScript::setBaselineScript(JSContext *maybecx, js::jit::BaselineScript *baselineScript)
 {
     if (hasBaselineScript())
-        js::jit::BaselineScript::writeBarrierPre(tenuredZone(), baseline);
+        js::jit::BaselineScript::writeBarrierPre(zone(), baseline);
     MOZ_ASSERT(!hasIonScript());
     baseline = baselineScript;
     updateBaselineOrIonRaw(maybecx);
 }
 
 inline bool
 JSScript::ensureHasAnalyzedArgsUsage(JSContext *cx)
 {
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -171,17 +171,17 @@ ObjectValueMap::findZoneEdges()
      * For unmarked weakmap keys with delegates in a different zone, add a zone
      * edge to ensure that the delegate zone finishes marking after the key
      * zone.
      */
     JS::AutoSuppressGCAnalysis nogc;
     Zone *mapZone = compartment->zone();
     for (Range r = all(); !r.empty(); r.popFront()) {
         JSObject *key = r.front().key();
-        if (key->isMarked(BLACK) && !key->isMarked(GRAY))
+        if (key->asTenured()->isMarked(BLACK) && !key->asTenured()->isMarked(GRAY))
             continue;
         JSWeakmapKeyDelegateOp op = key->getClass()->ext.weakmapKeyDelegateOp;
         if (!op)
             continue;
         JSObject *delegate = op(key);
         if (!delegate)
             continue;
         Zone *delegateZone = delegate->zone();
--- a/js/src/proxy/Wrapper.cpp
+++ b/js/src/proxy/Wrapper.cpp
@@ -161,10 +161,10 @@ bool Wrapper::finalizeInBackground(Value
      * Make the 'background-finalized-ness' of the wrapper the same as the
      * wrapped object, to allow transplanting between them.
      *
      * If the wrapped object is in the nursery then we know it doesn't have a
      * finalizer, and so background finalization is ok.
      */
     if (IsInsideNursery(&priv.toObject()))
         return true;
-    return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
+    return IsBackgroundFinalized(priv.toObject().asTenured()->getAllocKind());
 }
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -344,21 +344,18 @@ IsObjectValueInCompartment(js::Value v, 
  *   the slots and elements.
  *
  * - For non-native objects, slots and elements are both empty.
  *
  * The members of this class are currently protected; in the long run this
  * will change so that some members are private, and only certain methods that
  * act upon them will be protected.
  */
-class ObjectImpl : public gc::BarrieredCell<ObjectImpl>
+class ObjectImpl : public gc::Cell
 {
-    friend Zone *js::gc::BarrieredCell<ObjectImpl>::zone() const;
-    friend Zone *js::gc::BarrieredCell<ObjectImpl>::zoneFromAnyThread() const;
-
   protected:
     /*
      * Shape of the object, encodes the layout of the object's properties and
      * all other information about its structure. See vm/Shape.h.
      */
     HeapPtrShape shape_;
 
     /*
@@ -821,17 +818,18 @@ class ObjectImpl : public gc::BarrieredC
      * an object with the given number of fixed slots and slot span. The slot
      * capacity is not stored explicitly, and the allocated size of the slot
      * array is kept in sync with this count.
      */
     static uint32_t dynamicSlotsCount(uint32_t nfixed, uint32_t span, const Class *clasp);
 
     /* Memory usage functions. */
     size_t tenuredSizeOfThis() const {
-        return js::gc::Arena::thingSize(tenuredGetAllocKind());
+        MOZ_ASSERT(isTenured());
+        return js::gc::Arena::thingSize(asTenured()->getAllocKind());
     }
 
     /* Elements accessors. */
 
     ObjectElements * getElementsHeader() const {
         return ObjectElements::fromElements(elements);
     }
 
@@ -937,17 +935,36 @@ class ObjectImpl : public gc::BarrieredC
     }
 
     /* Access private data for an object with a known number of fixed slots. */
     inline void *getPrivate(uint32_t nfixed) const {
         return privateRef(nfixed);
     }
 
     /* GC Accessors */
+    static const size_t MaxTagBits = 3;
     void setInitialSlots(HeapSlot *newSlots) { slots = newSlots; }
+    static bool isNullLike(const ObjectImpl *obj) { return uintptr_t(obj) < (1 << MaxTagBits); }
+    MOZ_ALWAYS_INLINE JS::Zone *zone() const {
+        return shape_->zone();
+    }
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const {
+        return JS::shadow::Zone::asShadowZone(zone());
+    }
+    MOZ_ALWAYS_INLINE JS::Zone *zoneFromAnyThread() const {
+        return shape_->zoneFromAnyThread();
+    }
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
+        return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
+    }
+    static MOZ_ALWAYS_INLINE void readBarrier(ObjectImpl *obj);
+    static MOZ_ALWAYS_INLINE void writeBarrierPre(ObjectImpl *obj);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(ObjectImpl *obj, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(ObjectImpl *obj, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(ObjectImpl *obj, void *cellp);
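
The new isNullLike generalizes the old IsNullTaggedPointer special case: any pointer value below 1 << MaxTagBits is a tagged sentinel (TypeScript::global's 0x1, for instance), not a real cell, and must be filtered out before the barrier machinery dereferences it. A standalone illustration, with the sentinel value as an assumption:

#include <cstddef>
#include <cstdint>

struct Obj;

static const size_t MaxTagBits = 3;

// Pointers whose value fits in the low MaxTagBits bits are sentinels,
// not real cells, and must never reach the mark/barrier machinery.
static bool IsNullLike(const Obj *obj)
{
    return uintptr_t(obj) < (uintptr_t(1) << MaxTagBits);
}

int main()
{
    Obj *null = nullptr;
    Obj *sentinel = reinterpret_cast<Obj *>(0x1); // illustrative special value
    Obj *real = reinterpret_cast<Obj *>(0x1000);  // stand-in for a heap cell
    return (IsNullLike(null) && IsNullLike(sentinel) && !IsNullLike(real)) ? 0 : 1;
}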
 
     /* JIT Accessors */
     static size_t offsetOfShape() { return offsetof(ObjectImpl, shape_); }
     HeapPtrShape *addressOfShape() { return &shape_; }
 
     static size_t offsetOfType() { return offsetof(ObjectImpl, type_); }
     HeapPtrTypeObject *addressOfType() { return &type_; }
 
@@ -958,88 +975,69 @@ class ObjectImpl : public gc::BarrieredC
 
     static size_t getFixedSlotOffset(size_t slot) {
         return sizeof(ObjectImpl) + slot * sizeof(Value);
     }
     static size_t getPrivateDataOffset(size_t nfixed) { return getFixedSlotOffset(nfixed); }
     static size_t offsetOfSlots() { return offsetof(ObjectImpl, slots); }
 };
 
-namespace gc {
-
-template <>
-MOZ_ALWAYS_INLINE Zone *
-BarrieredCell<ObjectImpl>::zone() const
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::readBarrier(ObjectImpl *obj)
 {
-    const ObjectImpl* obj = static_cast<const ObjectImpl*>(this);
-    JS::Zone *zone = obj->shape_->zone();
-    JS_ASSERT(CurrentThreadCanAccessZone(zone));
-    return zone;
+    if (!isNullLike(obj) && obj->isTenured())
+        obj->asTenured()->readBarrier(obj->asTenured());
 }
 
-template <>
-MOZ_ALWAYS_INLINE Zone *
-BarrieredCell<ObjectImpl>::zoneFromAnyThread() const
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
-    const ObjectImpl* obj = static_cast<const ObjectImpl*>(this);
-    return obj->shape_->zoneFromAnyThread();
+    if (!isNullLike(obj) && obj->isTenured())
+        obj->asTenured()->writeBarrierPre(obj->asTenured());
 }
 
-// TypeScript::global uses 0x1 as a special value.
-template<>
-/* static */ inline bool
-BarrieredCell<ObjectImpl>::isNullLike(ObjectImpl *obj)
-{
-    return IsNullTaggedPointer(obj);
-}
-
-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPost(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
 #ifdef JSGC_GENERATIONAL
     if (IsNullTaggedPointer(obj))
         return;
     JS_ASSERT(obj == *static_cast<ObjectImpl **>(cellp));
     gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<Cell **>(cellp));
+        storeBuffer->putCellFromAnyThread(static_cast<gc::Cell **>(cellp));
 #endif
 }
 
-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPostRelocate(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPostRelocate(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
     JS_ASSERT(obj);
     JS_ASSERT(obj == *static_cast<ObjectImpl **>(cellp));
 #ifdef JSGC_GENERATIONAL
     gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
-        storeBuffer->putRelocatableCellFromAnyThread(static_cast<Cell **>(cellp));
+        storeBuffer->putRelocatableCellFromAnyThread(static_cast<gc::Cell **>(cellp));
 #endif
 }
 
-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPostRemove(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPostRemove(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
     JS_ASSERT(obj);
     JS_ASSERT(obj == *static_cast<ObjectImpl **>(cellp));
 #ifdef JSGC_GENERATIONAL
     obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(
-        static_cast<Cell **>(cellp));
+        static_cast<gc::Cell **>(cellp));
 #endif
 }
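
All of the rewritten ObjectImpl barriers share one shape: reject sentinel pointers, reject nursery things, and only then hand the cell to the TenuredCell machinery. A compact model of that filtering, with a stub standing in for the real barrier body:

#include <cstdint>

struct ToyObj {
    bool tenured;

    bool isTenured() const { return tenured; }

    static bool isNullLike(const ToyObj *o) { return uintptr_t(o) < (1 << 3); }

    // Stub standing in for TenuredCell::writeBarrierPre on the real cell.
    static int barriersRun;
    static void tenuredWriteBarrierPre(ToyObj *) { barriersRun++; }

    static void writeBarrierPre(ToyObj *obj)
    {
        // Sentinel pointers and nursery things have no mark bits to
        // protect, so only tenured cells reach the barrier proper.
        if (!isNullLike(obj) && obj->isTenured())
            tenuredWriteBarrierPre(obj);
    }
};

int ToyObj::barriersRun = 0;

int main()
{
    ToyObj nurseryThing = { false }, tenuredThing = { true };
    ToyObj::writeBarrierPre(nullptr);        // filtered: null-like
    ToyObj::writeBarrierPre(&nurseryThing);  // filtered: not tenured
    ToyObj::writeBarrierPre(&tenuredThing);  // runs
    return ToyObj::barriersRun == 1 ? 0 : 1;
}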
 
-} // namespace gc
-
 inline void
 ObjectImpl::privateWriteBarrierPre(void **oldval)
 {
 #ifdef JSGC_INCREMENTAL
     JS::shadow::Zone *shadowZone = this->shadowZoneFromAnyThread();
     if (shadowZone->needsIncrementalBarrier()) {
         if (*oldval && getClass()->trace)
             getClass()->trace(shadowZone->barrierTracer(), this->asObjectPtr());
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -247,17 +247,17 @@ RegExpObject::trace(JSTracer *trc, JSObj
     // be collected. To detect this we need to test all the following
     // conditions, since:
     //   1. During TraceRuntime, isHeapBusy() is true, but the tracer might not
     //      be a marking tracer.
     //   2. When a write barrier executes, IS_GC_MARKING_TRACER is true, but
     //      isHeapBusy() will be false.
     if (trc->runtime()->isHeapBusy() &&
         IS_GC_MARKING_TRACER(trc) &&
-        !obj->tenuredZone()->isPreservingCode())
+        !obj->asTenured()->zone()->isPreservingCode())
     {
         obj->setPrivate(nullptr);
     } else {
         shared->trace(trc);
     }
 }
 
 const Class RegExpObject::class_ = {
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -1040,17 +1040,17 @@ JSRuntime::initSelfHosting(JSContext *cx
         RootedScript script(cx);
         if (Compile(cx, shg, options, filename, &script))
             ok = Execute(cx, script, *shg.get(), rv.address());
     } else {
         uint32_t srcLen = GetRawScriptsSize();
 
         const unsigned char *compressed = compressedSources;
         uint32_t compressedLen = GetCompressedSize();
-        ScopedJSFreePtr<char> src(selfHostingGlobal_->pod_malloc<char>(srcLen));
+        ScopedJSFreePtr<char> src(selfHostingGlobal_->zone()->pod_malloc<char>(srcLen));
         if (!src || !DecompressString(compressed, compressedLen,
                                       reinterpret_cast<unsigned char *>(src.get()), srcLen))
         {
             return false;
         }
 
         ok = Evaluate(cx, shg, options, src, srcLen, &rv);
     }
@@ -1221,17 +1221,17 @@ CloneObject(JSContext *cx, HandleObject 
         if (!str)
             return nullptr;
         clone = StringObject::create(cx, str);
     } else if (selfHostedObject->is<ArrayObject>()) {
         clone = NewDenseEmptyArray(cx, nullptr, TenuredObject);
     } else {
         JS_ASSERT(selfHostedObject->isNative());
         clone = NewObjectWithGivenProto(cx, selfHostedObject->getClass(), TaggedProto(nullptr), cx->global(),
-                                        selfHostedObject->tenuredGetAllocKind(),
+                                        selfHostedObject->asTenured()->getAllocKind(),
                                         SingletonObject);
     }
     if (!clone)
         return nullptr;
     if (!CloneProperties(cx, selfHostedObject, clone))
         return nullptr;
     return clone;
 }
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -276,17 +276,17 @@ static inline void
 GetterSetterWriteBarrierPostRemove(JSRuntime *rt, JSObject **objp)
 {
 #ifdef JSGC_GENERATIONAL
     JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
     shadowRuntime->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(reinterpret_cast<gc::Cell **>(objp));
 #endif
 }
 
-class BaseShape : public gc::BarrieredCell<BaseShape>
+class BaseShape : public gc::TenuredCell
 {
   public:
     friend class Shape;
     friend struct StackBaseShape;
     friend struct StackShape;
     friend void gc::MergeCompartments(JSCompartment *source, JSCompartment *target);
 
     enum Flag {
@@ -625,17 +625,17 @@ BaseShape::BaseShape(const StackBaseShap
     this->compartment_ = base.compartment;
 }
 
 typedef HashSet<ReadBarrieredUnownedBaseShape,
                 StackBaseShape,
                 SystemAllocPolicy> BaseShapeSet;
 
 
-class Shape : public gc::BarrieredCell<Shape>
+class Shape : public gc::TenuredCell
 {
     friend class ::JSObject;
     friend class ::JSFunction;
     friend class js::Bindings;
     friend class js::Nursery;
     friend class js::gc::ForkJoinNursery;
     friend class js::ObjectImpl;
     friend class js::PropertyTree;
--- a/js/src/vm/String.cpp
+++ b/js/src/vm/String.cpp
@@ -168,17 +168,17 @@ AllocChars(JSString *str, size_t length,
      */
     static const size_t DOUBLING_MAX = 1024 * 1024;
     numChars = numChars > DOUBLING_MAX ? numChars + (numChars / 8) : RoundUpPow2(numChars);
 
     /* Like length, capacity does not include the null char, so take it out. */
     *capacity = numChars - 1;
 
     JS_STATIC_ASSERT(JSString::MAX_LENGTH * sizeof(CharT) < UINT32_MAX);
-    *chars = str->pod_malloc<CharT>(numChars);
+    *chars = str->zone()->pod_malloc<CharT>(numChars);
     return *chars != nullptr;
 }
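
The sizing policy above is: round the request up to a power of two while the buffer is small, then switch to 12.5% headroom past DOUBLING_MAX so very large ropes stop doubling. A standalone sketch of the same curve (RoundUpPow2 here is a simple loop, not the engine's bit-twiddling version):

#include <cstddef>
#include <cstdio>

static size_t RoundUpPow2(size_t x)
{
    size_t p = 1;
    while (p < x)
        p <<= 1;
    return p;
}

// Mirrors AllocChars' sizing: power-of-two doubling up to DOUBLING_MAX
// chars, then linear growth with one-eighth headroom.
static size_t GrowCapacity(size_t numChars)
{
    static const size_t DOUBLING_MAX = 1024 * 1024;
    return numChars > DOUBLING_MAX ? numChars + (numChars / 8)
                                   : RoundUpPow2(numChars);
}

int main()
{
    printf("%zu\n", GrowCapacity(300));             // 512
    printf("%zu\n", GrowCapacity(2 * 1024 * 1024)); // 2 MiB + 256 KiB
    return 0;
}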
 
 bool
 JSRope::copyLatin1CharsZ(ThreadSafeContext *cx, ScopedJSFreePtr<Latin1Char> &out) const
 {
     return copyCharsInternal<Latin1Char>(cx, out, true);
 }
--- a/js/src/vm/String.h
+++ b/js/src/vm/String.h
@@ -131,17 +131,17 @@ static const size_t UINT32_CHAR_BUFFER_L
  *
  * Derived string types can be queried from ancestor types via isX() and
  * retrieved with asX() debug-only-checked casts.
  *
  * The ensureX() operations mutate 'this' in place to effectively make the type
  * be at least X (e.g., ensureLinear will change a JSRope to be a JSFlatString).
  */
 
-class JSString : public js::gc::BarrieredCell<JSString>
+class JSString : public js::gc::TenuredCell
 {
   protected:
     static const size_t NUM_INLINE_CHARS_LATIN1 = 2 * sizeof(void *) / sizeof(char);
     static const size_t NUM_INLINE_CHARS_TWO_BYTE = 2 * sizeof(void *) / sizeof(char16_t);
 
     /* Fields only apply to string types commented on the right. */
     struct Data
     {
@@ -473,18 +473,16 @@ class JSString : public js::gc::Barriere
     static size_t offsetOfFlags() {
         return offsetof(JSString, d.u1.flags);
     }
 
     static size_t offsetOfNonInlineChars() {
         return offsetof(JSString, d.s.u2.nonInlineCharsTwoByte);
     }
 
-    js::gc::AllocKind getAllocKind() const { return tenuredGetAllocKind(); }
-
     static inline js::ThingRootKind rootKind() { return js::THING_ROOT_STRING; }
 
 #ifdef DEBUG
     void dump();
     void dumpCharsNoNewline(FILE *fp=stderr);
 
     template <typename CharT>
     static void dumpChars(const CharT *s, size_t len, FILE *fp=stderr);
@@ -492,26 +490,26 @@ class JSString : public js::gc::Barriere
     bool equals(const char *s);
 #endif
 
     static MOZ_ALWAYS_INLINE void readBarrier(JSString *thing) {
 #ifdef JSGC_INCREMENTAL
         if (thing->isPermanentAtom())
             return;
 
-        js::gc::BarrieredCell<JSString>::readBarrier(thing);
+        TenuredCell::readBarrier(thing);
 #endif
     }
 
     static MOZ_ALWAYS_INLINE void writeBarrierPre(JSString *thing) {
 #ifdef JSGC_INCREMENTAL
         if (isNullLike(thing) || thing->isPermanentAtom())
             return;
 
-        js::gc::BarrieredCell<JSString>::writeBarrierPre(thing);
+        TenuredCell::writeBarrierPre(thing);
 #endif
     }
 
   private:
     JSString() MOZ_DELETE;
     JSString(const JSString &other) MOZ_DELETE;
     void operator=(const JSString &other) MOZ_DELETE;
 };
--- a/js/src/vm/Symbol.h
+++ b/js/src/vm/Symbol.h
@@ -16,17 +16,17 @@
 
 #include "gc/Barrier.h"
 
 #include "js/RootingAPI.h"
 #include "js/TypeDecls.h"
 
 namespace JS {
 
-class Symbol : public js::gc::BarrieredCell<Symbol>
+class Symbol : public js::gc::TenuredCell
 {
   private:
     SymbolCode code_;
     JSAtom *description_;
 
     // The minimum allocation size is sizeof(JSString): 16 bytes on 32-bit
     // architectures and 24 bytes on 64-bit.  8 bytes of padding makes Symbol
     // the minimum size on both.