Bug 751618 - Zone renaming part 8 (r=terrence)
author Bill McCloskey <wmccloskey@mozilla.com>
Sun, 27 Jan 2013 13:51:41 -0800
changeset 129907 cb81443b42ec74c5d51c36ce512a9c504ec92407
parent 129906 1032a553ddfd6b5360b5ec25a28240ab1b26f85b
child 129908 576566ced8f604fd0752b887ca41be407e0e5bc7
push id 2323
push user bbajaj@mozilla.com
push date Mon, 01 Apr 2013 19:47:02 +0000
treeherder mozilla-beta@7712be144d91
reviewers terrence
bugs 751618
milestone 21.0a1
Bug 751618 - Zone renaming part 8 (r=terrence)
js/src/gc/Barrier-inl.h
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/Iteration.cpp
js/src/gc/Marking.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/gc/Verifier.cpp
js/src/ion/CodeGenerator.cpp
js/src/ion/Ion.cpp
js/src/ion/IonCaches.cpp
js/src/ion/IonMacroAssembler.cpp
js/src/ion/IonMacroAssembler.h
js/src/jsapi-tests/tests.h
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jscompartment.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsinfer.cpp
js/src/jsinferinlines.h
js/src/jsiter.cpp
js/src/jsmemorymetrics.cpp
js/src/jsobj.cpp
js/src/jsprobes.cpp
js/src/jsprobes.h
js/src/jspropertytree.cpp
js/src/jsscript.cpp
js/src/jsweakmap.cpp
js/src/methodjit/BaseAssembler.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastBuiltins.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
js/src/methodjit/StubCalls.cpp
js/src/vm/ArgumentsObject-inl.h
js/src/vm/Debugger.cpp
js/src/vm/ObjectImpl.cpp
js/src/vm/RegExpStatics-inl.h
js/src/vm/ScopeObject.cpp
js/src/vm/Stack-inl.h
js/src/vm/Stack.cpp
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -144,17 +144,17 @@ HeapValue::operator=(const HeapValue &v)
 
 inline void
 HeapValue::set(Zone *zone, const Value &v)
 {
 #ifdef DEBUG
     if (value.isMarkable()) {
         js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
         JS_ASSERT(cell->zone() == zone ||
-                  cell->zone() == zone->rt->atomsCompartment);
+                  cell->zone() == zone->rt->atomsCompartment->zone());
     }
 #endif
 
     pre(zone);
     JS_ASSERT(!IsPoisonedValue(v));
     value = v;
     post(zone);
 }
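
Note on the assertion above: a barriered slot may only point into its own zone or into the runtime-wide atoms zone. A minimal self-contained model of that invariant, using hypothetical stand-in types rather than the real js/src headers:

    #include <cassert>

    // Hypothetical, simplified stand-ins for Cell/Zone/Runtime, used only to
    // illustrate the invariant asserted in HeapValue::set above.
    struct Zone;
    struct Runtime { Zone *atomsZone; };   // stands in for rt->atomsCompartment->zone()
    struct Zone { Runtime *rt; };
    struct Cell { Zone *zone_; Zone *zone() const { return zone_; } };

    // A cell stored through a barriered slot in `zone` must live in that zone
    // or in the shared atoms zone.
    bool cellAllowedInZone(const Cell *cell, Zone *zone) {
        return cell->zone() == zone || cell->zone() == zone->rt->atomsZone;
    }

    int main() {
        Runtime rt{};
        Zone atoms{&rt}; rt.atomsZone = &atoms;
        Zone z{&rt};
        Cell inZone{&z}, inAtoms{&atoms};
        assert(cellAllowedInZone(&inZone, &z));
        assert(cellAllowedInZone(&inAtoms, &z));  // atoms are reachable from every zone
    }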
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -259,17 +259,17 @@ class RelocatablePtr : public Encapsulat
     }
     explicit RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
         if (this->value)
             post();
     }
 
     ~RelocatablePtr() {
         if (this->value)
-            relocate(this->value->compartment());
+            relocate(this->value->zone());
     }
 
     RelocatablePtr<T> &operator=(T *v) {
         this->pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v));
         if (v) {
             this->value = v;
             post();
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -762,17 +762,17 @@ struct Chunk
     bool hasAvailableArenas() const {
         return info.numArenasFree != 0;
     }
 
     inline void addToAvailableList(Zone *zone);
     inline void insertToAvailableList(Chunk **insertPoint);
     inline void removeFromAvailableList();
 
-    ArenaHeader *allocateArena(JSCompartment *comp, AllocKind kind);
+    ArenaHeader *allocateArena(JS::Zone *zone, AllocKind kind);
 
     void releaseArena(ArenaHeader *aheader);
 
     static Chunk *allocate(JSRuntime *rt);
 
     /* Must be called with the GC lock taken. */
     static inline void release(JSRuntime *rt, Chunk *chunk);
     static inline void releaseList(JSRuntime *rt, Chunk *chunkListHead);
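
The signature change above is the core of this part of the renaming: arenas are handed out to a Zone rather than to a JSCompartment, so GC-heap accounting (gcBytes, trigger thresholds) lives on the zone. A rough sketch of that ownership model, with simplified stand-in types instead of the real Chunk/ArenaHeader machinery:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Simplified stand-ins; the real allocator hands out fixed-size arenas from chunks.
    constexpr size_t ArenaSize = 4096;

    struct Zone {
        size_t gcBytes = 0;                 // GC heap bytes attributed to this zone
        size_t gcTriggerBytes = 64 * 1024;  // hypothetical trigger threshold
        bool shouldTriggerGC() const { return gcBytes >= gcTriggerBytes; }
    };

    // Models Chunk::allocateArena(JS::Zone *zone, AllocKind kind): the arena is
    // charged to the owning zone, not to an individual compartment.
    void *allocateArena(Zone *zone) {
        zone->gcBytes += ArenaSize;
        return ::operator new(ArenaSize);
    }

    int main() {
        Zone zone;
        std::vector<void *> arenas;
        while (!zone.shouldTriggerGC())
            arenas.push_back(allocateArena(&zone));
        std::cout << "zone reached trigger after " << arenas.size() << " arenas\n";
        for (void *a : arenas)
            ::operator delete(a);
    }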
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -8,16 +8,17 @@
 #include "jsapi.h"
 #include "jscntxt.h"
 #include "jsgc.h"
 #include "jsprf.h"
 
 #include "js/HashTable.h"
 #include "gc/GCInternals.h"
 
+#include "jsobjinlines.h"
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 void
 js::TraceRuntime(JSTracer *trc)
 {
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -114,39 +114,39 @@ IsThingPoisoned(T *thing)
 #endif
 
 template<typename T>
 static inline void
 CheckMarkedThing(JSTracer *trc, T *thing)
 {
     JS_ASSERT(trc);
     JS_ASSERT(thing);
-    JS_ASSERT(thing->compartment());
-    JS_ASSERT(thing->compartment()->rt == trc->runtime);
+    JS_ASSERT(thing->zone());
+    JS_ASSERT(thing->zone()->rt == trc->runtime);
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
 
     DebugOnly<JSRuntime *> rt = trc->runtime;
 
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadCompartments,
                  !thing->compartment()->scheduledForDestruction);
 
 #ifdef DEBUG
     rt->assertValidThread();
 #endif
 
-    JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
+    JS_ASSERT_IF(thing->zone()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
 
     JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
-                 thing->compartment()->isCollecting() ||
+                 thing->zone()->isCollecting() ||
                  thing->compartment() == rt->atomsCompartment);
 
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && ((GCMarker *)trc)->getMarkColor() == GRAY,
-                 thing->compartment()->isGCMarkingGray() ||
+                 thing->zone()->isGCMarkingGray() ||
                  thing->compartment() == rt->atomsCompartment);
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
      * ArenaHeader may not be synced with the real one in ArenaLists.
      */
@@ -171,17 +171,17 @@ MarkInternal(JSTracer *trc, T **thingp)
 
     CheckMarkedThing(trc, thing);
 
     /*
      * Don't mark things outside a compartment if we are in a per-compartment
      * GC.
      */
     if (!trc->callback) {
-        if (thing->compartment()->isGCMarking()) {
+        if (thing->zone()->isGCMarking()) {
             PushMarkStack(AsGCMarker(trc), thing);
             thing->compartment()->maybeAlive = true;
         }
     } else {
         trc->callback(trc, (void **)thingp, GetGCThingTraceKind(thing));
         JS_UNSET_TRACING_LOCATION(trc);
     }
 
@@ -254,29 +254,29 @@ namespace js {
 namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
-    JSCompartment *c = (*thingp)->compartment();
-    if (!c->isCollecting() || c->isGCFinished())
+    Zone *zone = (*thingp)->zone();
+    if (!zone->isCollecting() || zone->isGCFinished())
         return true;
     return (*thingp)->isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
-    if (!(*thingp)->compartment()->isGCSweeping())
+    if (!(*thingp)->zone()->isGCSweeping())
         return false;
     return !(*thingp)->isMarked();
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, EncapsulatedPtr<type> *thing, const char *name)                         \
 {                                                                                                 \
@@ -639,45 +639,45 @@ gc::MarkObjectSlots(JSTracer *trc, JSObj
 }
 
 static bool
 ShouldMarkCrossCompartment(JSTracer *trc, RawObject src, Cell *cell)
 {
     if (!IS_GC_MARKING_TRACER(trc))
         return true;
 
-    JSCompartment *c = cell->compartment();
+    JS::Zone *zone = cell->zone();
     uint32_t color = AsGCMarker(trc)->getMarkColor();
 
     JS_ASSERT(color == BLACK || color == GRAY);
     if (color == BLACK) {
         /*
          * Having black->gray edges violates our promise to the cycle
          * collector. This can happen if we're collecting a compartment and it
          * has an edge to an uncollected compartment: it's possible that the
          * source and destination of the cross-compartment edge should be gray,
          * but the source was marked black by the conservative scanner.
          */
         if (cell->isMarked(GRAY)) {
-            JS_ASSERT(!cell->compartment()->isCollecting());
+            JS_ASSERT(!zone->isCollecting());
             trc->runtime->gcFoundBlackGrayEdges = true;
         }
-        return c->isGCMarking();
+        return zone->isGCMarking();
     } else {
-        if (c->isGCMarkingBlack()) {
+        if (zone->isGCMarkingBlack()) {
             /*
             * The destination compartment is not being marked gray now,
              * but it will be later, so record the cell so it can be marked gray
              * at the appropriate time.
              */
             if (!cell->isMarked())
                 DelayCrossCompartmentGrayMarking(src);
             return false;
         }
-        return c->isGCMarkingGray();
+        return zone->isGCMarkingGray();
     }
 }
 
 void
 gc::MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, RawObject src, JSObject **dst, const char *name)
 {
     if (ShouldMarkCrossCompartment(trc, src, *dst))
         MarkObjectUnbarriered(trc, dst, name);
@@ -724,20 +724,20 @@ bool
 gc::IsCellAboutToBeFinalized(Cell **thingp)
 {
     return IsAboutToBeFinalized<Cell>(thingp);
 }
 
 /*** Push Mark Stack ***/
 
 #define JS_COMPARTMENT_ASSERT(rt, thing)                                \
-    JS_ASSERT((thing)->compartment()->isGCMarking())
+    JS_ASSERT((thing)->zone()->isGCMarking())
 
 #define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
-    JS_ASSERT((thing)->compartment()->isGCMarking() ||                  \
+    JS_ASSERT((thing)->zone()->isGCMarking() ||                         \
               (thing)->compartment() == (rt)->atomsCompartment);
 
 #if JS_HAS_XML_SUPPORT
 static void
 PushMarkStack(GCMarker *gcmarker, JSXML *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
 
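
The IsMarked/IsAboutToBeFinalized changes in this file make liveness queries consult the zone's collection state instead of the compartment's. A minimal model of that logic, with a hypothetical ZoneState enum standing in for the real per-zone GC state:

    #include <cassert>

    enum class ZoneState { NoGC, Mark, Sweep, Finished };

    struct Zone {
        ZoneState state = ZoneState::NoGC;
        bool isCollecting() const { return state != ZoneState::NoGC; }
        bool isGCFinished() const { return state == ZoneState::Finished; }
        bool isGCSweeping() const { return state == ZoneState::Sweep; }
    };

    struct Cell { Zone *zone; bool marked = false; };

    // Mirrors IsMarked(): things in zones that are not collecting (or already
    // finished marking) are treated as live regardless of their mark bit.
    bool isLive(const Cell &c) {
        if (!c.zone->isCollecting() || c.zone->isGCFinished())
            return true;
        return c.marked;
    }

    // Mirrors IsAboutToBeFinalized(): only unmarked things in sweeping zones die.
    bool aboutToBeFinalized(const Cell &c) {
        return c.zone->isGCSweeping() && !c.marked;
    }

    int main() {
        Zone idle, sweeping{ZoneState::Sweep};
        assert(isLive(Cell{&idle, false}));        // uncollected zone: always live
        assert(!isLive(Cell{&sweeping, false}));   // sweeping and unmarked: dead
        assert(aboutToBeFinalized(Cell{&sweeping, false}));
    }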
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -664,17 +664,17 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
 {
     JSRuntime *rt = trc->runtime;
     JS_ASSERT(trc->callback != GCMarker::GrayCallback);
 
     JS_ASSERT(!rt->mainThread.suppressGC);
 
     if (IS_GC_MARKING_TRACER(trc)) {
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
-            if (!c->isCollecting())
+            if (!c->zone()->isCollecting())
                 c->markCrossCompartmentWrappers(trc);
         }
         Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
     }
 
     AutoGCRooter::traceAll(trc);
 
     if (rt->hasContexts()) {
@@ -705,33 +705,33 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
     }
 
     if (rt->scriptAndCountsVector) {
         ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
         for (size_t i = 0; i < vec.length(); i++)
             MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
     }
 
-    if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->isCollecting()) {
+    if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->zone()->isCollecting()) {
         MarkAtoms(trc);
 #ifdef JS_ION
         /* Any Ion wrappers survive until the runtime is being torn down. */
         if (rt->hasContexts())
             ion::IonRuntime::Mark(trc);
 #endif
     }
 
     rt->staticStrings.trace(trc);
 
     for (ContextIter acx(rt); !acx.done(); acx.next())
         acx->mark(trc);
 
     /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (IS_GC_MARKING_TRACER(trc) && !c->isCollecting())
+        if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
             continue;
 
         if (IS_GC_MARKING_TRACER(trc) && c->isPreservingCode()) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_TYPES);
             c->markTypes(trc);
         }
 
         /* During a GC, these are treated as weak pointers. */
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -362,16 +362,17 @@ Statistics::formatData(StatisticsSeriali
     double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
 
     ss.beginObject(NULL);
     if (ss.isJSON())
         ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
     ss.appendDecimal("Total Time", "ms", t(total));
     ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
     ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
+    ss.appendNumber("Total Zones", "%d", "", zoneCount);
     ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
     ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
     ss.appendDecimal("SCC Sweep Total", "ms", t(sccTotal));
     ss.appendDecimal("SCC Sweep Max Pause", "ms", t(sccLongest));
     if (slices.length() > 1 || ss.isJSON())
         ss.appendDecimal("Max Pause", "ms", t(longest));
     else
         ss.appendString("Reason", ExplainReason(slices[0].reason));
@@ -442,16 +443,17 @@ Statistics::formatJSON(uint64_t timestam
 
 Statistics::Statistics(JSRuntime *rt)
   : runtime(rt),
     startupTime(PRMJ_Now()),
     fp(NULL),
     fullFormat(false),
     gcDepth(0),
     collectedCount(0),
+    zoneCount(0),
     compartmentCount(0),
     nonincrementalReason(NULL),
     preBytes(0),
     phaseNestingDepth(0)
 {
     PodArrayZero(phaseTotals);
     PodArrayZero(counts);
 
@@ -542,17 +544,17 @@ Statistics::endGC()
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
         int64_t total, longest;
         gcDuration(&total, &longest);
 
         int64_t sccTotal, sccLongest;
         sccDurations(&sccTotal, &sccLongest);
 
-        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == compartmentCount ? 0 : 1);
+        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == zoneCount ? 0 : 1);
         (*cb)(JS_TELEMETRY_GC_MS, t(total));
         (*cb)(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
         (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
         (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
         (*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS]));
         (*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY]));
         (*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
         (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
@@ -563,34 +565,36 @@ Statistics::endGC()
         (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
         printStats();
 }
 
 void
-Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason)
+Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
+                       gcreason::Reason reason)
 {
     this->collectedCount = collectedCount;
+    this->zoneCount = zoneCount;
     this->compartmentCount = compartmentCount;
 
     bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (first)
         beginGC();
 
     SliceData data(reason, PRMJ_Now(), gc::GetPageFaultCount());
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
 
     // Slice callbacks should only fire for the outermost level
     if (++gcDepth == 1) {
-        bool wasFullGC = collectedCount == compartmentCount;
+        bool wasFullGC = collectedCount == zoneCount;
         if (GCSliceCallback cb = runtime->gcSliceCallback)
             (*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
     }
 }
 
 void
 Statistics::endSlice()
 {
@@ -603,17 +607,17 @@ Statistics::endSlice()
     }
 
     bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
-        bool wasFullGC = collectedCount == compartmentCount;
+        bool wasFullGC = collectedCount == zoneCount;
         if (GCSliceCallback cb = runtime->gcSliceCallback)
             (*cb)(runtime, last ? GC_CYCLE_END : GC_SLICE_END, GCDescription(!wasFullGC));
     }
 
     /* Do this after the slice callback since it uses these values. */
     if (last)
         PodArrayZero(counts);
 }
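
A small note on the statistics change: a GC now counts as "full" when every zone was collected, while compartment counts are still reported separately. A trivial sketch of that bookkeeping, with hypothetical names rather than the real Statistics class:

    #include <cassert>

    // Hypothetical slice-summary struct mirroring the counters added above.
    struct SliceSummary {
        int collectedCount;   // zones actually collected this cycle
        int zoneCount;        // total zones in the runtime
        int compartmentCount; // still reported, but no longer defines "full"
    };

    bool wasFullGC(const SliceSummary &s) {
        return s.collectedCount == s.zoneCount;
    }

    int main() {
        // Two compartments sharing one zone: collecting that single zone is a full GC.
        assert(wasFullGC({1, 1, 2}));
        // Three zones, only one collected: not a full GC.
        assert(!wasFullGC({1, 3, 5}));
    }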
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -81,17 +81,17 @@ class StatisticsSerializer;
 
 struct Statistics {
     Statistics(JSRuntime *rt);
     ~Statistics();
 
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
-    void beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason);
+    void beginSlice(int collectedCount, int zoneCount, int compartmentCount, gcreason::Reason reason);
     void endSlice();
 
     void reset(const char *reason) { slices.back().resetReason = reason; }
     void nonincremental(const char *reason) { nonincrementalReason = reason; }
 
     void count(Stat s) {
         JS_ASSERT(s < STAT_LIMIT);
         counts[s]++;
@@ -113,16 +113,17 @@ struct Statistics {
 
     /*
      * GCs can't really nest, but a second GC can be triggered from within the
      * JSGC_END callback.
      */
     int gcDepth;
 
     int collectedCount;
+    int zoneCount;
     int compartmentCount;
     const char *nonincrementalReason;
 
     struct SliceData {
         SliceData(gcreason::Reason reason, int64_t start, size_t startFaults)
           : reason(reason), resetReason(NULL), start(start), startFaults(startFaults)
         {
             PodArrayZero(phaseTimes);
@@ -172,22 +173,23 @@ struct Statistics {
     void printStats();
     bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
 
     double computeMMU(int64_t resolution);
 };
 
 struct AutoGCSlice
 {
-    AutoGCSlice(Statistics &stats, int collectedCount, int compartmentCount, gcreason::Reason reason
+    AutoGCSlice(Statistics &stats, int collectedCount, int zoneCount, int compartmentCount,
+                gcreason::Reason reason
                 MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
       : stats(stats)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-        stats.beginSlice(collectedCount, compartmentCount, reason);
+        stats.beginSlice(collectedCount, zoneCount, compartmentCount, reason);
     }
     ~AutoGCSlice() { stats.endSlice(); }
 
     Statistics &stats;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoPhase
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -12,16 +12,17 @@
 #include "jsutil.h"
 #include "jswatchpoint.h"
 
 #include "mozilla/Util.h"
 
 #include "js/HashTable.h"
 #include "gc/GCInternals.h"
 
+#include "jsobjinlines.h"
 #include "jsgcinlines.h"
 
 #ifdef MOZ_VALGRIND
 # include <valgrind/memcheck.h>
 #endif
 
 using namespace js;
 using namespace js::gc;
@@ -501,20 +502,22 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
         }
 
         node = NextNode(node);
     }
 
     rt->gcVerifyPreData = trc;
     rt->gcIncrementalState = MARK;
     rt->gcMarker.start();
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt); !c.done(); c.next())
         PurgeJITCaches(c);
-        c->setNeedsBarrier(true, JSCompartment::UpdateIon);
-        c->allocator.arenas.purge();
+
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        zone->setNeedsBarrier(true, Zone::UpdateIon);
+        zone->allocator.arenas.purge();
     }
 
     return;
 
 oom:
     rt->gcIncrementalState = NO_INCREMENTAL;
     trc->~VerifyPreTracer();
     js_free(trc);
@@ -576,23 +579,25 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
     VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData;
 
     if (!trc)
         return;
 
     bool compartmentCreated = false;
 
     /* We need to disable barriers before tracing, which may invoke barriers. */
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (!c->needsBarrier())
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (!zone->needsBarrier())
             compartmentCreated = true;
 
+        zone->setNeedsBarrier(false, Zone::UpdateIon);
+    }
+
+    for (CompartmentsIter c(rt); !c.done(); c.next())
         PurgeJITCaches(c);
-        c->setNeedsBarrier(false, JSCompartment::UpdateIon);
-    }
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
     JS_ASSERT(trc->number == rt->gcNumber);
     rt->gcNumber++;
 
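
Note how the verifier now runs two loops where it used to run one: JIT caches are still purged per compartment, while the needs-barrier flag and arena purging move to the zone. A compilable sketch of that split in iteration granularity (simplified container types; the real code uses CompartmentsIter and ZonesIter):

    #include <iostream>
    #include <vector>

    struct Zone {
        bool needsBarrier = false;
        void setNeedsBarrier(bool b) { needsBarrier = b; }
    };

    struct Compartment {
        Zone *zone;
        void purgeJITCaches() { std::cout << "purged a compartment's JIT caches\n"; }
    };

    int main() {
        Zone z1, z2;
        std::vector<Compartment> compartments{{&z1}, {&z1}, {&z2}};
        std::vector<Zone *> zones{&z1, &z2};

        // Per-compartment work: each compartment owns its own JIT caches.
        for (Compartment &c : compartments)
            c.purgeJITCaches();

        // Per-zone work: barriers are a zone-wide property, toggled exactly once
        // per zone even when several compartments share it.
        for (Zone *zone : zones)
            zone->setNeedsBarrier(true);
    }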
--- a/js/src/ion/CodeGenerator.cpp
+++ b/js/src/ion/CodeGenerator.cpp
@@ -3676,17 +3676,17 @@ CodeGenerator::link()
     JS_ASSERT(graph.mir().numScripts() > 0);
     ionScript->copyScriptEntries(graph.mir().scripts());
 
     linkAbsoluteLabels();
 
     // The correct state for prebarriers is unknown until the end of compilation,
     // since a GC can occur during code generation. All barriers are emitted
     // off-by-default, and are toggled on here if necessary.
-    if (cx->compartment->needsBarrier())
+    if (cx->zone()->needsBarrier())
         ionScript->toggleBarriers(true);
 
     return true;
 }
 
 // An out-of-line path to convert a boxed int32 to a double.
 class OutOfLineUnboxDouble : public OutOfLineCodeBase<CodeGenerator>
 {
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -1813,23 +1813,23 @@ InvalidateActivation(FreeOp *fop, uint8_
         // instructions after the call) in to capture an appropriate
         // snapshot after the call occurs.
 
         ionScript->incref();
 
         const SafepointIndex *si = ionScript->getSafepointIndex(it.returnAddressToFp());
         IonCode *ionCode = ionScript->method();
 
-        JSCompartment *compartment = script->compartment();
-        if (compartment->needsBarrier()) {
+        JS::Zone *zone = script->zone();
+        if (zone->needsBarrier()) {
             // We're about to remove edges from the JSScript to gcthings
             // embedded in the IonCode. Perform one final trace of the
             // IonCode for the incremental GC, as it must know about
             // those edges.
-            ionCode->trace(compartment->barrierTracer());
+            ionCode->trace(zone->barrierTracer());
         }
         ionCode->setInvalidated();
 
         // Write the delta (from the return address offset to the
         // IonScript pointer embedded into the invalidation epilogue)
         // where the safepointed call instruction used to be. We rely on
         // the call sequence causing the safepoint being >= the size of
         // a uint32, which is checked during safepoint index
@@ -1922,23 +1922,23 @@ ion::Invalidate(types::TypeCompartment &
           case types::CompilerOutput::ParallelIon:
             executionMode = ParallelExecution;
             break;
         }
         JS_ASSERT(co.isValid());
         UnrootedScript script = co.script;
         IonScript *ionScript = GetIonScript(script, executionMode);
 
-        JSCompartment *compartment = script->compartment();
-        if (compartment->needsBarrier()) {
+        Zone *zone = script->zone();
+        if (zone->needsBarrier()) {
             // We're about to remove edges from the JSScript to gcthings
             // embedded in the IonScript. Perform one final trace of the
             // IonScript for the incremental GC, as it must know about
             // those edges.
-            IonScript::Trace(compartment->barrierTracer(), ionScript);
+            IonScript::Trace(zone->barrierTracer(), ionScript);
         }
 
         ionScript->decref(fop);
         SetIonScript(script, executionMode, NULL);
         co.invalidate();
 
         // Wait for the scripts to get warm again before doing another
         // compile, unless we are recompiling *because* a script got hot.
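
The Ion invalidation hunks preserve an incremental-GC subtlety: before the edges from a script to its jitcode are cut, the code is traced one last time through the zone's barrier tracer, so the incremental marker still sees those edges. A hedged, stand-alone sketch of that trace-before-removal pattern (stand-in types, not the real IonScript API):

    #include <iostream>

    struct Tracer { void trace(const char *what) { std::cout << "traced " << what << "\n"; } };

    struct Zone {
        bool needsBarrier_ = true;   // true while an incremental GC is marking
        Tracer tracer;
        bool needsBarrier() const { return needsBarrier_; }
        Tracer *barrierTracer() { return &tracer; }
    };

    struct JitCode { void trace(Tracer *trc) { trc->trace("jitcode edges"); } };

    // Mirrors the invalidation path: if the zone is marking incrementally,
    // trace the code once more before severing the script -> jitcode edge.
    void invalidate(Zone *zone, JitCode *code) {
        if (zone->needsBarrier())
            code->trace(zone->barrierTracer());
        // ...now it is safe to drop the reference; the marker has seen the edges.
    }

    int main() {
        Zone zone;
        JitCode code;
        invalidate(&zone, &code);
    }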
--- a/js/src/ion/IonCaches.cpp
+++ b/js/src/ion/IonCaches.cpp
@@ -1061,27 +1061,27 @@ IonCacheSetProperty::attachNativeExistin
                                 Address(object(), JSObject::offsetOfShape()),
                                 ImmGCPtr(obj->lastProperty()),
                                 &exit_);
     masm.bind(&exit_);
 
     if (obj->isFixedSlot(shape->slot())) {
         Address addr(object(), JSObject::getFixedSlotOffset(shape->slot()));
 
-        if (cx->compartment->needsBarrier())
+        if (cx->zone()->needsBarrier())
             masm.callPreBarrier(addr, MIRType_Value);
 
         masm.storeConstantOrRegister(value(), addr);
     } else {
         Register slotsReg = object();
         masm.loadPtr(Address(object(), JSObject::offsetOfSlots()), slotsReg);
 
         Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
 
-        if (cx->compartment->needsBarrier())
+        if (cx->zone()->needsBarrier())
             masm.callPreBarrier(addr, MIRType_Value);
 
         masm.storeConstantOrRegister(value(), addr);
     }
 
     RepatchLabel rejoin_;
     CodeOffsetJump rejoinOffset = masm.jumpWithPatch(&rejoin_);
     masm.bind(&rejoin_);
@@ -1331,17 +1331,17 @@ IonCacheSetProperty::attachNativeAdding(
 
         proto = proto->getProto();
     }
 
     masm.pop(object());     // restore object reg
 
     /* Changing object shape.  Write the object's new shape. */
     Address shapeAddr(object(), JSObject::offsetOfShape());
-    if (cx->compartment->needsBarrier())
+    if (cx->zone()->needsBarrier())
         masm.callPreBarrier(shapeAddr, MIRType_Shape);
     masm.storePtr(ImmGCPtr(newShape), shapeAddr);
 
     /* Set the value on the object. */
     if (obj->isFixedSlot(propShape->slot())) {
         Address addr(object(), JSObject::getFixedSlotOffset(propShape->slot()));
         masm.storeConstantOrRegister(value(), addr);
     } else {
--- a/js/src/ion/IonMacroAssembler.cpp
+++ b/js/src/ion/IonMacroAssembler.cpp
@@ -303,31 +303,31 @@ MacroAssembler::newGCThing(const Registe
     // Inlined equivalent of js::gc::NewGCThing() without failure case handling.
 
     gc::AllocKind allocKind = templateObject->getAllocKind();
     JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
     int thingSize = (int)gc::Arena::thingSize(allocKind);
 
     JS_ASSERT(!templateObject->hasDynamicElements());
 
-    JSCompartment *compartment = GetIonContext()->compartment;
+    Zone *zone = GetIonContext()->compartment->zone();
 
 #ifdef JS_GC_ZEAL
     // Don't execute the inline path if gcZeal is active.
-    movePtr(ImmWord(compartment->rt), result);
+    movePtr(ImmWord(zone->rt), result);
     loadPtr(Address(result, offsetof(JSRuntime, gcZeal_)), result);
     branch32(Assembler::NotEqual, result, Imm32(0), fail);
 #endif
 
     // Inline FreeSpan::allocate.
     // There is always exactly one FreeSpan per allocKind per JSCompartment.
     // If a FreeSpan is replaced, its members are updated in the freeLists table,
     // which the code below always re-reads.
     gc::FreeSpan *list = const_cast<gc::FreeSpan *>
-                         (compartment->allocator.arenas.getFreeList(allocKind));
+                         (zone->allocator.arenas.getFreeList(allocKind));
     loadPtr(AbsoluteAddress(&list->first), result);
     branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result, fail);
 
     addPtr(Imm32(thingSize), result);
     storePtr(result, AbsoluteAddress(&list->first));
     subPtr(Imm32(thingSize), result);
 }
 
--- a/js/src/ion/IonMacroAssembler.h
+++ b/js/src/ion/IonMacroAssembler.h
@@ -377,19 +377,19 @@ class MacroAssembler : public MacroAssem
         if (key.isRegister())
             branch32(cond, length, key.reg(), label);
         else
             branch32(cond, length, Imm32(key.constant()), label);
     }
 
     void branchTestNeedsBarrier(Condition cond, const Register &scratch, Label *label) {
         JS_ASSERT(cond == Zero || cond == NonZero);
-        JSCompartment *comp = GetIonContext()->compartment;
-        movePtr(ImmWord(comp), scratch);
-        Address needsBarrierAddr(scratch, JSCompartment::OffsetOfNeedsBarrier());
+        JS::Zone *zone = GetIonContext()->compartment->zone();
+        movePtr(ImmWord(zone), scratch);
+        Address needsBarrierAddr(scratch, JS::Zone::OffsetOfNeedsBarrier());
         branchTest32(cond, needsBarrierAddr, Imm32(0x1), label);
     }
 
     template <typename T>
     void callPreBarrier(const T &address, MIRType type) {
         JS_ASSERT(type == MIRType_Value ||
                   type == MIRType_String ||
                   type == MIRType_Object ||
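
branchTestNeedsBarrier above now bakes the Zone pointer into the jitcode and tests a flag at a fixed offset within it (JS::Zone::OffsetOfNeedsBarrier). The C++ equivalent of what that emitted test does at runtime, sketched with a hypothetical Zone layout and offsetof:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical zone layout; only the flag's offset matters to the jitcode.
    struct Zone {
        uint32_t needsBarrier_ = 0;
        static constexpr size_t offsetOfNeedsBarrier() { return offsetof(Zone, needsBarrier_); }
    };

    // What the emitted branchTest32 amounts to: load the word at
    // (zone pointer + offset) and test its low bit.
    bool needsBarrierAt(const void *zonePtr) {
        auto addr = reinterpret_cast<const uint8_t *>(zonePtr) + Zone::offsetOfNeedsBarrier();
        return (*reinterpret_cast<const uint32_t *>(addr) & 0x1) != 0;
    }

    int main() {
        Zone zone;
        assert(!needsBarrierAt(&zone));
        zone.needsBarrier_ = 1;        // an incremental GC slice begins marking
        assert(needsBarrierAt(&zone));
    }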
--- a/js/src/jsapi-tests/tests.h
+++ b/js/src/jsapi-tests/tests.h
@@ -8,16 +8,17 @@
 #include "mozilla/Util.h"
 
 #include "jsapi.h"
 #include "jsprvtd.h"
 #include "jsalloc.h"
 
 // For js::gc::AutoSuppressGC
 #include "jsgc.h"
+#include "jsobjinlines.h"
 #include "jsgcinlines.h"
 
 #include "js/Vector.h"
 
 #include <errno.h>
 #include <string.h>
 #include <stdio.h>
 #include <stdlib.h>
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1377,17 +1377,18 @@ FreeOp::free_(void *p)
 
 struct JSContext : js::ContextFriendFields,
                    public mozilla::LinkedListElement<JSContext>
 {
     explicit JSContext(JSRuntime *rt);
     JSContext *thisDuringConstruction() { return this; }
     ~JSContext();
 
-    js::PerThreadData& mainThread() { return runtime->mainThread; }
+    inline JS::Zone *zone();
+    js::PerThreadData &mainThread() { return runtime->mainThread; }
 
   private:
     /* See JSContext::findVersion. */
     JSVersion           defaultVersion;      /* script compilation version */
     JSVersion           versionOverride;     /* supercedes defaultVersion when valid */
     bool                hasVersionOverride;
 
     /* Exception state -- the exception member is a GC root by definition. */
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -602,9 +602,15 @@ JSContext::leaveCompartment(JSCompartmen
         compartment = oldCompartment;
     else
         compartment = defaultCompartmentObject_->compartment();
 
     if (throwing)
         wrapPendingException();
 }
 
+inline JS::Zone *
+JSContext::zone()
+{
+    return compartment->zone();
+}
+
 #endif /* jscntxtinlines_h___ */
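
The new JSContext::zone() accessor is just a forward through the current compartment; every compartment belongs to exactly one zone. A minimal stand-alone model of that relationship (simplified types, not the real jscntxt.h):

    #include <cassert>

    struct Zone {};
    struct Compartment {
        Zone *zone_;
        Zone *zone() const { return zone_; }
    };
    struct Context {
        Compartment *compartment;
        Zone *zone() const { return compartment->zone(); }  // mirrors JSContext::zone()
    };

    int main() {
        Zone zone;
        Compartment comp{&zone};
        Context cx{&comp};
        assert(cx.zone() == &zone);  // cx->zone() is always cx->compartment->zone()
    }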
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -369,17 +369,17 @@ JSCompartment::wrap(JSContext *cx, Value
             return false;
         RootedString wrapped(cx, js_NewStringCopyN<CanGC>(cx, str->chars().get(), str->length()));
         if (!wrapped)
             return false;
         vp->setString(wrapped);
         if (!putWrapper(orig, *vp))
             return false;
 
-        if (str->compartment()->isGCMarking()) {
+        if (str->zone()->isGCMarking()) {
             /*
              * All string wrappers are dropped when collection starts, but we
              * just created a new one.  Mark the wrapped string to stop it being
              * finalized, because if it was then the pointer in this
              * compartment's wrapper map would be left dangling.
              */
             JSString *tmp = str;
             MarkStringUnbarriered(&rt->gcMarker, &tmp, "wrapped string");
@@ -517,17 +517,17 @@ JSCompartment::wrap(JSContext *cx, AutoI
 /*
  * This method marks pointers that cross compartment boundaries. It should be
  * called only for per-compartment GCs, since full GCs naturally follow pointers
  * across compartments.
  */
 void
 JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
 {
-    JS_ASSERT(!isCollecting());
+    JS_ASSERT(!zone()->isCollecting());
 
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         Value v = e.front().value;
         if (e.front().key.kind == CrossCompartmentKey::ObjectWrapper) {
             JSObject *wrapper = &v.toObject();
 
             /*
              * We have a cross-compartment wrapper. Its private pointer may
@@ -810,20 +810,19 @@ JSCompartment::setGCMaxMallocBytes(size_
      */
     gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
     resetGCMallocBytes();
 }
 
 void
 JSCompartment::onTooMuchMalloc()
 {
-    TriggerZoneGC(this, gcreason::TOO_MUCH_MALLOC);
+    TriggerZoneGC(zone(), gcreason::TOO_MUCH_MALLOC);
 }
 
-
 bool
 JSCompartment::hasScriptsOnStack()
 {
     for (AllFramesIter afi(rt); !afi.done(); ++afi) {
 #ifdef JS_ION
         // If this is an Ion frame, check the IonActivation instead
         if (afi.isIon())
             continue;
@@ -903,17 +902,17 @@ JSCompartment::updateForDebugMode(FreeOp
     // It suffices to do a garbage collection cycle or to finish the
     // ongoing GC cycle. The necessary cleanup happens in
     // JSCompartment::sweep.
     //
     // dmgc makes sure we can't forget to GC, but it is also important not
     // to run any scripts in this compartment until the dmgc is destroyed.
     // That is the caller's responsibility.
     if (!rt->isHeapBusy())
-        dmgc.scheduleGC(this);
+        dmgc.scheduleGC(zone());
 #endif
 }
 
 bool
 JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
 {
     AutoDebugModeGC dmgc(cx->runtime);
     return addDebuggee(cx, global, dmgc);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -130,37 +130,37 @@ JS_FRIEND_API(void)
 JS::PrepareZoneForGC(Zone *zone)
 {
     zone->scheduleGC();
 }
 
 JS_FRIEND_API(void)
 JS::PrepareForFullGC(JSRuntime *rt)
 {
-    for (CompartmentsIter c(rt); !c.done(); c.next())
-        c->scheduleGC();
+    for (ZonesIter zone(rt); !zone.done(); zone.next())
+        zone->scheduleGC();
 }
 
 JS_FRIEND_API(void)
 JS::PrepareForIncrementalGC(JSRuntime *rt)
 {
     if (!IsIncrementalGCInProgress(rt))
         return;
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (c->wasGCStarted())
-            PrepareZoneForGC(c);
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (zone->wasGCStarted())
+            PrepareZoneForGC(zone);
     }
 }
 
 JS_FRIEND_API(bool)
 JS::IsGCScheduled(JSRuntime *rt)
 {
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (c->isGCScheduled())
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (zone->isGCScheduled())
             return true;
     }
 
     return false;
 }
 
 JS_FRIEND_API(void)
 JS::SkipZoneForGC(Zone *zone)
@@ -213,27 +213,27 @@ JS_SetCompartmentPrincipals(JSCompartmen
     // Clear out the old principals, if any.
     if (compartment->principals) {
         JS_DropPrincipals(compartment->rt, compartment->principals);
         compartment->principals = NULL;
         // We'd like to assert that our new principals is always same-origin
         // with the old one, but JSPrincipals doesn't give us a way to do that.
         // But we can at least assert that we're not switching between system
         // and non-system.
-        JS_ASSERT(compartment->isSystemCompartment == isSystem);
+        JS_ASSERT(compartment->zone()->isSystemCompartment == isSystem);
     }
 
     // Set up the new principals.
     if (principals) {
         JS_HoldPrincipals(principals);
         compartment->principals = principals;
     }
 
     // Update the system flag.
-    compartment->isSystemCompartment = isSystem;
+    compartment->zone()->isSystemCompartment = isSystem;
 }
 
 JS_FRIEND_API(JSBool)
 JS_WrapPropertyDescriptor(JSContext *cx, js::PropertyDescriptor *desc)
 {
     return cx->compartment->wrap(cx, desc);
 }
 
@@ -313,17 +313,17 @@ AutoSwitchCompartment::~AutoSwitchCompar
 {
     /* The old compartment may have been destroyed, so we can't use cx->setCompartment. */
     cx->compartment = oldCompartment;
 }
 
 JS_FRIEND_API(bool)
 js::IsSystemCompartment(const JSCompartment *c)
 {
-    return c->isSystemCompartment;
+    return c->zone()->isSystemCompartment;
 }
 
 JS_FRIEND_API(bool)
 js::IsAtomsCompartment(const JSCompartment *c)
 {
     return c == c->rt->atomsCompartment;
 }
 
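
The friend-API changes make GC scheduling a per-zone decision: PrepareForFullGC schedules every zone, and IsGCScheduled asks each zone rather than each compartment. A compilable sketch of those two loops over a hypothetical runtime:

    #include <cassert>
    #include <vector>

    struct Zone { bool gcScheduled = false; void scheduleGC() { gcScheduled = true; } };
    struct Runtime { std::vector<Zone> zones; };

    // Mirrors JS::PrepareForFullGC: every zone gets scheduled.
    void prepareForFullGC(Runtime &rt) {
        for (Zone &zone : rt.zones)
            zone.scheduleGC();
    }

    // Mirrors JS::IsGCScheduled: true if any zone is scheduled.
    bool isGCScheduled(const Runtime &rt) {
        for (const Zone &zone : rt.zones)
            if (zone.gcScheduled)
                return true;
        return false;
    }

    int main() {
        Runtime rt{{Zone{}, Zone{}, Zone{}}};
        assert(!isGCScheduled(rt));
        prepareForFullGC(rt);
        assert(isGCScheduled(rt));
    }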
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1498,19 +1498,19 @@ RunLastDitchGC(JSContext *cx, JS::Zone *
 
     return NULL;
 }
 
 template <AllowGC allowGC>
 /* static */ void *
 ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
 {
-    JS_ASSERT(cx->compartment->allocator.arenas.freeLists[thingKind].isEmpty());
-
-    Zone *zone = cx->compartment;
+    JS_ASSERT(cx->zone()->allocator.arenas.freeLists[thingKind].isEmpty());
+
+    Zone *zone = cx->zone();
     JSRuntime *rt = zone->rt;
     JS_ASSERT(!rt->isHeapBusy());
 
     bool runGC = rt->gcIncrementalState != NO_INCREMENTAL &&
                  zone->gcBytes > zone->gcTriggerBytes &&
                  allowGC;
     for (;;) {
         if (JS_UNLIKELY(runGC)) {
@@ -1982,17 +1982,17 @@ js::TriggerZoneGC(Zone *zone, gcreason::
     if (rt->isHeapBusy())
         return;
 
     if (rt->gcZeal() == ZealAllocValue) {
         TriggerGC(rt, reason);
         return;
     }
 
-    if (zone == rt->atomsCompartment) {
+    if (zone == rt->atomsCompartment->zone()) {
         /* We can't do a zone GC of the atoms compartment. */
         TriggerGC(rt, reason);
         return;
     }
 
     PrepareZoneForGC(zone);
     TriggerOperationCallback(rt, reason);
 }
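
TriggerZoneGC keeps an important special case: the atoms compartment's zone is shared by every other zone, so it can never be collected on its own; a trigger there escalates to a full-runtime GC. A small sketch of that decision, with a hypothetical trigger function and simplified types:

    #include <cassert>

    struct Zone { bool scheduled = false; };

    struct Runtime {
        Zone atomsZone;              // stands in for rt->atomsCompartment->zone()
        bool fullGCTriggered = false;
    };

    // Mirrors js::TriggerZoneGC: a zone GC on the atoms zone is not allowed,
    // because atoms are reachable from every zone; fall back to a full GC.
    void triggerZoneGC(Runtime &rt, Zone &zone) {
        if (&zone == &rt.atomsZone) {
            rt.fullGCTriggered = true;
            return;
        }
        zone.scheduled = true;
    }

    int main() {
        Runtime rt;
        Zone ordinary;
        triggerZoneGC(rt, ordinary);
        assert(ordinary.scheduled && !rt.fullGCTriggered);
        triggerZoneGC(rt, rt.atomsZone);
        assert(rt.fullGCTriggered);  // atoms zone escalates to a full GC
    }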
@@ -2010,17 +2010,17 @@ js::MaybeGC(JSContext *cx)
     }
 
     if (rt->gcIsNeeded) {
         GCSlice(rt, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     double factor = rt->gcHighFrequencyGC ? 0.75 : 0.9;
-    Zone *zone = cx->compartment;
+    Zone *zone = cx->zone();
     if (zone->gcBytes > 1024 * 1024 &&
         zone->gcBytes >= factor * zone->gcTriggerBytes &&
         rt->gcIncrementalState == NO_INCREMENTAL &&
         !rt->gcHelperThread.sweeping())
     {
         PrepareZoneForGC(zone);
         GCSlice(rt, GC_NORMAL, gcreason::MAYBEGC);
         return;
@@ -2534,19 +2534,19 @@ SweepCompartments(FreeOp *fop, bool last
     JSCompartment **write = read;
     JS_ASSERT(rt->compartments.length() >= 1);
     JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
 
     while (read < end) {
         JSCompartment *compartment = *read++;
 
         if (!compartment->hold && compartment->wasGCStarted() &&
-            (compartment->allocator.arenas.arenaListsAreEmpty() || lastGC))
+            (compartment->zone()->allocator.arenas.arenaListsAreEmpty() || lastGC))
         {
-            compartment->allocator.arenas.checkEmptyFreeLists();
+            compartment->zone()->allocator.arenas.checkEmptyFreeLists();
             if (callback)
                 callback(fop, compartment);
             if (compartment->principals)
                 JS_DropPrincipals(rt, compartment->principals);
             fop->delete_(compartment);
             continue;
         }
         *write++ = compartment;
@@ -2678,39 +2678,39 @@ BeginMarkPhase(JSRuntime *rt)
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         /* Assert that compartment state is as we expect */
         JS_ASSERT(!zone->isCollecting());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
 
         /* Set up which compartments will be collected. */
         if (zone->isGCScheduled()) {
-            if (zone != rt->atomsCompartment) {
+            if (zone != rt->atomsCompartment->zone()) {
                 any = true;
-                zone->setGCState(JSCompartment::Mark);
+                zone->setGCState(Zone::Mark);
             }
         } else {
             rt->gcIsFull = false;
         }
     }
 
     /* Check that at least one compartment is scheduled for collection. */
     if (!any)
         return false;
 
     /*
      * Atoms are not in the cross-compartment map. So if there are any
      * compartments that are not being collected, we are not allowed to collect
      * atoms. Otherwise, the non-collected compartments could contain pointers
      * to atoms that we would miss.
      */
-    JSCompartment *atomsComp = rt->atomsCompartment;
-    if (atomsComp->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms) {
-        JS_ASSERT(!atomsComp->isCollecting());
-        atomsComp->setGCState(JSCompartment::Mark);
+    Zone *atomsZone = rt->atomsCompartment->zone();
+    if (atomsZone->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms) {
+        JS_ASSERT(!atomsZone->isCollecting());
+        atomsZone->setGCState(Zone::Mark);
     }
 
     /*
      * At the end of each incremental slice, we call prepareForIncrementalGC,
      * which marks objects in all arenas that we're currently allocating
      * into. This can cause leaks if unreachable objects are in these
      * arenas. This purge call ensures that we only mark arenas that have had
      * allocations after the incremental GC started.
@@ -3014,25 +3014,25 @@ js::gc::MarkingValidator::nonIncremental
 
     {
         gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_SWEEP);
         MarkAllWeakReferences(runtime, gcstats::PHASE_SWEEP_MARK_WEAK);
 
         /* Update compartment state for gray marking. */
         for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
             JS_ASSERT(zone->isGCMarkingBlack());
-            zone->setGCState(JSCompartment::MarkGray);
+            zone->setGCState(Zone::MarkGray);
         }
 
         MarkAllGrayReferences(runtime);
 
         /* Restore compartment state. */
         for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
             JS_ASSERT(zone->isGCMarkingGray());
-            zone->setGCState(JSCompartment::Mark);
+            zone->setGCState(Zone::Mark);
         }
     }
 
     /* Take a copy of the non-incremental mark state and restore the original. */
     for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) {
         Chunk *chunk = r.front();
         ChunkBitmap *bitmap = &chunk->bitmap;
         ChunkBitmap *entry = map.lookup(chunk)->value;
@@ -3202,17 +3202,17 @@ JSCompartment::findOutgoingEdgesFromComp
         }
 
 #ifdef DEBUG
         JSObject *wrapper = &e.front().value.toObject();
         JS_ASSERT_IF(IsFunctionProxy(wrapper), &GetProxyCall(wrapper).toObject() == other);
 #endif
     }
 
-    Debugger::findCompartmentEdges(this, finder);
+    Debugger::findCompartmentEdges(zone(), finder);
 }
 
 void
 JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
 {
     /*
      * Any compartment may have a pointer to an atom in the atoms
      * compartment, and these aren't in the cross compartment map.
@@ -3251,18 +3251,18 @@ GetNextZoneGroup(JSRuntime *rt)
     if (!rt->gcIsIncremental)
         ComponentFinder<Zone>::mergeGroups(rt->gcCurrentZoneGroup);
 
     if (rt->gcAbortSweepAfterCurrentGroup) {
         JS_ASSERT(!rt->gcIsIncremental);
         for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(!zone->gcNextGraphComponent);
             JS_ASSERT(zone->isGCMarking());
-            zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
-            zone->setGCState(JSCompartment::NoGC);
+            zone->setNeedsBarrier(false, Zone::UpdateIon);
+            zone->setGCState(Zone::NoGC);
         }
 
         for (GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
             ArrayBufferObject::resetArrayBufferList(comp);
             ResetGrayList(comp);
             comp->gcGrayRoots.clearAndFree();
         }
 
@@ -3390,18 +3390,18 @@ MarkIncomingCrossCompartmentPointers(JSR
         gcstats::PHASE_SWEEP_MARK_INCOMING_BLACK,
         gcstats::PHASE_SWEEP_MARK_INCOMING_GRAY
     };
     gcstats::AutoPhase ap1(rt->gcStats, statsPhases[color]);
 
     bool unlinkList = color == GRAY;
 
     for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-        JS_ASSERT_IF(color == GRAY, c->isGCMarkingGray());
-        JS_ASSERT_IF(color == BLACK, c->isGCMarkingBlack());
+        JS_ASSERT_IF(color == GRAY, c->zone()->isGCMarkingGray());
+        JS_ASSERT_IF(color == BLACK, c->zone()->isGCMarkingBlack());
         JS_ASSERT_IF(c->gcIncomingGrayPointers, IsGrayListObject(c->gcIncomingGrayPointers));
 
         for (RawObject src = c->gcIncomingGrayPointers;
              src;
              src = NextIncomingCrossCompartmentPointer(src, unlinkList)) {
 
             Cell *dst = CrossCompartmentPointerReferent(src);
             JS_ASSERT(dst->compartment() == c);
@@ -3523,31 +3523,31 @@ EndMarkingZoneGroup(JSRuntime *rt)
     /*
      * Change state of current group to MarkGray to restrict marking to this
      * group.  Note that there may be pointers to the atoms compartment, and
      * these will be marked through, as they are not marked with
      * MarkCrossCompartmentXXX.
      */
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         JS_ASSERT(zone->isGCMarkingBlack());
-        zone->setGCState(JSCompartment::MarkGray);
+        zone->setGCState(Zone::MarkGray);
     }
 
     /* Mark incoming gray pointers from previously swept compartments. */
     rt->gcMarker.setMarkColorGray();
     MarkIncomingCrossCompartmentPointers(rt, GRAY);
     rt->gcMarker.setMarkColorBlack();
 
     /* Mark gray roots and mark transitively inside the current compartment group. */
     MarkGrayReferencesInCurrentGroup(rt);
 
     /* Restore marking state. */
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         JS_ASSERT(zone->isGCMarkingGray());
-        zone->setGCState(JSCompartment::Mark);
+        zone->setGCState(Zone::Mark);
     }
 
     JS_ASSERT(rt->gcMarker.isDrained());
 }
 
 static void
 BeginSweepingZoneGroup(JSRuntime *rt)
 {
@@ -3555,22 +3555,22 @@ BeginSweepingZoneGroup(JSRuntime *rt)
      * Begin sweeping the group of zones in gcCurrentZoneGroup,
      * performing actions that must be done before yielding to caller.
      */
 
     bool sweepingAtoms = false;
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         /* Set the GC state to sweeping. */
         JS_ASSERT(zone->isGCMarking());
-        zone->setGCState(JSCompartment::Sweep);
+        zone->setGCState(Zone::Sweep);
 
         /* Purge the ArenaLists before sweeping. */
         zone->allocator.arenas.purge();
 
-        if (zone == rt->atomsCompartment)
+        if (zone == rt->atomsCompartment->zone())
             sweepingAtoms = true;
     }
 
     ValidateIncrementalMarking(rt);
 
     FreeOp fop(rt, rt->gcSweepOnBackgroundThread);
 
     {
@@ -3647,17 +3647,17 @@ BeginSweepingZoneGroup(JSRuntime *rt)
 }
 
 static void
 EndSweepingZoneGroup(JSRuntime *rt)
 {
     /* Update the GC state for compartments we have swept and unlink the list. */
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         JS_ASSERT(zone->isGCSweeping());
-        zone->setGCState(JSCompartment::Finished);
+        zone->setGCState(Zone::Finished);
     }
 
     /* Reset the list of arenas marked as being allocated during sweep phase. */
     while (ArenaHeader *arena = rt->gcArenasAllocatedDuringSweep) {
         rt->gcArenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
         arena->unsetAllocDuringSweep();
     }
 }
@@ -3767,34 +3767,34 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
     JS_ASSERT(rt->gcMarker.isDrained());
     rt->gcMarker.stop();
 
     /*
      * Recalculate whether GC was full or not as this may have changed due to
      * newly created compartments.  Can only change from full to not full.
      */
     if (rt->gcIsFull) {
-        for (CompartmentsIter c(rt); !c.done(); c.next()) {
-            if (!c->isCollecting()) {
+        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+            if (!zone->isCollecting()) {
                 rt->gcIsFull = false;
                 break;
             }
         }
     }
 
     /*
      * If we found any black->gray edges during marking, we completely clear the
     * mark bits of all uncollected compartments, or if a reset has occurred, compartments that
      * will no longer be collected. This is safe, although it may
      * prevent the cycle collector from collecting some dead objects.
      */
     if (rt->gcFoundBlackGrayEdges) {
-        for (CompartmentsIter c(rt); !c.done(); c.next()) {
-            if (!c->isCollecting())
-                c->allocator.arenas.unmarkAll();
+        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+            if (!zone->isCollecting())
+                zone->allocator.arenas.unmarkAll();
         }
     }
 
 #ifdef DEBUG
     PropertyTree::dumpShapes(rt);
 #endif
 
     {
@@ -3858,42 +3858,46 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
 
         rt->freeLifoAlloc.freeAll();
 
         /* Ensure the compartments get swept if it's the last GC. */
         if (lastGC)
             SweepCompartments(&fop, lastGC);
     }
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        c->setGCLastBytes(c->gcBytes, gckind);
-        if (c->isCollecting()) {
-            JS_ASSERT(c->isGCFinished());
-            c->setGCState(JSCompartment::NoGC);
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        zone->setGCLastBytes(zone->gcBytes, gckind);
+        if (zone->isCollecting()) {
+            JS_ASSERT(zone->isGCFinished());
+            zone->setGCState(Zone::NoGC);
         }
 
 #ifdef DEBUG
-        JS_ASSERT(!c->isCollecting());
-        JS_ASSERT(!c->wasGCStarted());
-
+        JS_ASSERT(!zone->isCollecting());
+        JS_ASSERT(!zone->wasGCStarted());
+
+        for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
+            JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
+                         !rt->gcSweepOnBackgroundThread,
+                         !zone->allocator.arenas.arenaListsToSweep[i]);
+        }
+#endif
+    }
+
+#ifdef DEBUG
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
         JS_ASSERT(!c->gcIncomingGrayPointers);
         JS_ASSERT(!c->gcLiveArrayBuffers);
 
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key.kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value.get().toObject());
         }
-
-        for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
-            JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
-                         !rt->gcSweepOnBackgroundThread,
-                         !c->allocator.arenas.arenaListsToSweep[i]);
-        }
+    }
 #endif
-    }
 
     FinishMarkingValidation(rt);
 
     rt->gcLastGCTime = PRMJ_Now();
 }
 
 /* ...while this class is to be used only for garbage collection. */
 class AutoGCSession : AutoTraceSession {
@@ -3937,35 +3941,36 @@ AutoGCSession::~AutoGCSession()
     runtime->gcChunkAllocationSinceLastGC = false;
 
 #ifdef JS_GC_ZEAL
     /* Keeping these around after a GC is dangerous. */
     runtime->gcSelectedForMarking.clearAndFree();
 #endif
 
     /* Clear gcMallocBytes for all compartments */
-    for (CompartmentsIter c(runtime); !c.done(); c.next()) {
+    for (CompartmentsIter c(runtime); !c.done(); c.next())
         c->resetGCMallocBytes();
-        c->unscheduleGC();
-    }
+
+    for (ZonesIter zone(runtime); !zone.done(); zone.next())
+        zone->unscheduleGC();
 
     runtime->resetGCMallocBytes();
 }
 
 AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt)
   : runtime(rt)
 {
-    for (CompartmentsIter c(rt); !c.done(); c.next())
-        c->allocator.arenas.copyFreeListsToArenas();
+    for (ZonesIter zone(rt); !zone.done(); zone.next())
+        zone->allocator.arenas.copyFreeListsToArenas();
 }
 
 AutoCopyFreeListToArenas::~AutoCopyFreeListToArenas()
 {
-    for (CompartmentsIter c(runtime); !c.done(); c.next())
-        c->allocator.arenas.clearFreeListsInArenas();
+    for (ZonesIter zone(runtime); !zone.done(); zone.next())
+        zone->allocator.arenas.clearFreeListsInArenas();
 }
 
 static void
 IncrementalCollectSlice(JSRuntime *rt,
                         int64_t budget,
                         gcreason::Reason gcReason,
                         JSGCInvocationKind gcKind);
 
@@ -3985,18 +3990,18 @@ ResetIncrementalGC(JSRuntime *rt, const 
 
         for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
             ArrayBufferObject::resetArrayBufferList(c);
             ResetGrayList(c);
         }
 
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(zone->isGCMarking());
-            zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
-            zone->setGCState(JSCompartment::NoGC);
+            zone->setNeedsBarrier(false, Zone::UpdateIon);
+            zone->setGCState(Zone::NoGC);
         }
 
         rt->gcIncrementalState = NO_INCREMENTAL;
 
         JS_ASSERT(!rt->gcStrictCompartmentChecking);
 
         break;
       }
@@ -4059,32 +4064,32 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
         /*
          * Clear needsBarrier early so we don't do any write barriers during
          * GC. We don't need to update the Ion barriers (which is expensive)
          * because Ion code doesn't run during GC. If need be, we'll update the
          * Ion barriers in ~AutoGCSlice.
          */
         if (zone->isGCMarking()) {
             JS_ASSERT(zone->needsBarrier());
-            zone->setNeedsBarrier(false, JSCompartment::DontUpdateIon);
+            zone->setNeedsBarrier(false, Zone::DontUpdateIon);
         } else {
             JS_ASSERT(!zone->needsBarrier());
         }
     }
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
     for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
-            zone->setNeedsBarrier(true, JSCompartment::UpdateIon);
+            zone->setNeedsBarrier(true, Zone::UpdateIon);
             zone->allocator.arenas.prepareForIncrementalGC(runtime);
         } else {
-            zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
+            zone->setNeedsBarrier(false, Zone::UpdateIon);
         }
     }
 }
 
 static void
 PushZealSelectedObjects(JSRuntime *rt)
 {
 #ifdef JS_GC_ZEAL
@@ -4255,31 +4260,34 @@ BudgetIncrementalGC(JSRuntime *rt, int64
     }
 
     if (rt->isTooMuchMalloc()) {
         *budget = SliceBudget::Unlimited;
         rt->gcStats.nonincremental("malloc bytes trigger");
     }
 
     bool reset = false;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (c->gcBytes >= c->gcTriggerBytes) {
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (zone->gcBytes >= zone->gcTriggerBytes) {
             *budget = SliceBudget::Unlimited;
             rt->gcStats.nonincremental("allocation trigger");
         }
 
+        if (rt->gcIncrementalState != NO_INCREMENTAL &&
+            zone->isGCScheduled() != zone->wasGCStarted())
+        {
+            reset = true;
+        }
+    }
+
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
         if (c->isTooMuchMalloc()) {
             *budget = SliceBudget::Unlimited;
             rt->gcStats.nonincremental("malloc bytes trigger");
         }
-
-        if (rt->gcIncrementalState != NO_INCREMENTAL &&
-            c->isGCScheduled() != c->wasGCStarted()) {
-            reset = true;
-        }
     }
 
     if (reset)
         ResetIncrementalGC(rt, "compartment change");
 }
 
 /*
  * GC, repeatedly if necessary, until we think we have not created any new
@@ -4289,18 +4297,18 @@ BudgetIncrementalGC(JSRuntime *rt, int64
  */
 static JS_NEVER_INLINE void
 GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
     /* If we attempt to invoke the GC while we are running in the GC, assert. */
     AutoAssertNoGC nogc;
 
 #ifdef DEBUG
-    for (CompartmentsIter c(rt); !c.done(); c.next())
-        JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, c->isGCScheduled());
+    for (ZonesIter zone(rt); !zone.done(); zone.next())
+        JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, zone->isGCScheduled());
 #endif
 
     /*
      * Don't GC if we are reporting an OOM or in an interactive debugging
      * session.
      */
     if (rt->mainThread.suppressGC)
         return;
@@ -4413,34 +4421,38 @@ Collect(JSRuntime *rt, bool incremental,
             if (restartPostVerifier)
                 StartVerifyPostBarriers(runtime);
         }
     } av(rt, isShutdown);
 #endif
 
     RecordNativeStackTopForGC(rt);
 
+    int zoneCount = 0;
     int compartmentCount = 0;
     int collectedCount = 0;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         if (rt->gcMode == JSGC_MODE_GLOBAL)
-            c->scheduleGC();
+            zone->scheduleGC();
 
         /* This is a heuristic to avoid resets. */
-        if (rt->gcIncrementalState != NO_INCREMENTAL && c->needsBarrier())
-            c->scheduleGC();
-
-        compartmentCount++;
-        if (c->isGCScheduled())
+        if (rt->gcIncrementalState != NO_INCREMENTAL && zone->needsBarrier())
+            zone->scheduleGC();
+
+        zoneCount++;
+        if (zone->isGCScheduled())
             collectedCount++;
     }
 
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        compartmentCount++;
+
     rt->gcShouldCleanUpEverything = ShouldCleanUpEverything(rt, reason, gckind);
 
-    gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
+    gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, zoneCount, compartmentCount, reason);
 
     do {
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context). Invoke the callback regardless.
          */
         if (rt->gcIncrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_BEGIN);
@@ -4493,44 +4505,44 @@ js::GCSlice(JSRuntime *rt, JSGCInvocatio
 void
 js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
     AssertCanGC();
     Collect(rt, true, SliceBudget::Unlimited, gckind, reason);
 }
 
 static bool
-CompartmentsSelected(JSRuntime *rt)
+ZonesSelected(JSRuntime *rt)
 {
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (c->isGCScheduled())
+    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (zone->isGCScheduled())
             return true;
     }
     return false;
 }
 
 void
 js::GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount)
 {
     AssertCanGC();
     int64_t budget = limit ? SliceBudget::WorkBudget(objCount) : SliceBudget::Unlimited;
-    if (!CompartmentsSelected(rt)) {
+    if (!ZonesSelected(rt)) {
         if (IsIncrementalGCInProgress(rt))
             PrepareForIncrementalGC(rt);
         else
             PrepareForFullGC(rt);
     }
     Collect(rt, true, budget, GC_NORMAL, gcreason::DEBUG_GC);
 }
 
 /* Schedule a full GC unless a compartment will already be collected. */
 void
 js::PrepareForDebugGC(JSRuntime *rt)
 {
-    if (!CompartmentsSelected(rt))
+    if (!ZonesSelected(rt))
         PrepareForFullGC(rt);
 }
 
 JS_FRIEND_API(void)
 JS::ShrinkGCBuffers(JSRuntime *rt)
 {
     AutoLockGC lock(rt);
     JS_ASSERT(!rt->isHeapBusy());
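
For context, the BudgetIncrementalGC hunk above moves the reset check from compartments to zones: an in-progress incremental GC is reset as soon as the set of scheduled zones no longer matches the set of zones the collection actually started on. A minimal standalone sketch of that heuristic follows, using simplified stand-in Zone fields rather than the real SpiderMonkey declarations.

    #include <cstdio>
    #include <vector>

    // Simplified stand-ins for the per-zone GC state.
    struct Zone {
        bool gcScheduled;   // plays the role of zone->isGCScheduled()
        bool gcStarted;     // plays the role of zone->wasGCStarted()
    };

    // True if an in-progress incremental GC must be reset because the
    // scheduled-zone set changed after the collection started.
    static bool MustResetIncrementalGC(const std::vector<Zone> &zones, bool gcInProgress)
    {
        if (!gcInProgress)
            return false;
        for (const Zone &zone : zones) {
            if (zone.gcScheduled != zone.gcStarted)
                return true;
        }
        return false;
    }

    int main()
    {
        std::vector<Zone> zones = { {true, true}, {true, false} };
        std::printf("reset needed: %d\n", MustResetIncrementalGC(zones, true));
        return 0;
    }
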
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -454,17 +454,17 @@ struct ArenaLists {
      */
     void *parallelAllocate(JSCompartment *comp, AllocKind thingKind, size_t thingSize);
 
   private:
     inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
     inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
     inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
 
-    inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
+    inline void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
 };
 
 /*
  * Initial allocation size for data structures holding chunks is set to hold
  * chunks with total capacity of 16MB to avoid buffer resizes during browser
  * startup.
  */
 const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -500,27 +500,30 @@ NewGCThing(JSContext *cx, js::gc::AllocK
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC() && allowGC)
         js::gc::RunDebugGC(cx);
 #endif
 
     if (allowGC)
         MaybeCheckStackRoots(cx, /* relax = */ false);
 
-    JSCompartment *comp = cx->compartment;
-    T *t = static_cast<T *>(comp->allocator.arenas.allocateFromFreeList(kind, thingSize));
+    JS::Zone *zone = cx->zone();
+    T *t = static_cast<T *>(zone->allocator.arenas.allocateFromFreeList(kind, thingSize));
     if (!t)
         t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));
 
-    JS_ASSERT_IF(t && comp->wasGCStarted() && (comp->isGCMarking() || comp->isGCSweeping()),
+    JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
                  t->arenaHeader()->allocatedDuringIncremental);
 
 #if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
-    if (cx->runtime->gcVerifyPostData && IsNurseryAllocable(kind) && !IsAtomsCompartment(comp))
-        comp->gcNursery.insertPointer(t);
+    if (cx->runtime->gcVerifyPostData && IsNurseryAllocable(kind)
+        && !IsAtomsCompartment(cx->compartment))
+    {
+        zone->gcNursery.insertPointer(t);
+    }
 #endif
 
     return t;
 }
 
 /*
  * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
  * that they are live. Use of this class is highly discouraged. Please carefully
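
The NewGCThing hunk above switches allocation from the compartment's arenas to the zone's. A minimal standalone sketch of the fast path it relies on, with a simplified stand-in free list instead of the real ArenaLists, could look like this:

    #include <cstddef>
    #include <cstdio>

    // Simplified stand-in for a zone-owned bump-allocation free list.
    struct FreeList {
        char *cursor;
        char *end;
    };

    struct Zone {
        FreeList freeList;
    };

    // Mirrors the shape of NewGCThing above: bump-allocate from the zone's
    // free list and fall back when it is exhausted.
    static void *AllocateFromZone(Zone *zone, size_t thingSize)
    {
        FreeList &list = zone->freeList;
        if (list.cursor + thingSize <= list.end) {
            void *thing = list.cursor;
            list.cursor += thingSize;
            return thing;
        }
        return nullptr; // the real code would call ArenaLists::refillFreeList here
    }

    int main()
    {
        static char arena[64];
        Zone zone = { { arena, arena + sizeof(arena) } };
        std::printf("allocated: %p\n", AllocateFromZone(&zone, 16));
        return 0;
    }
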
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -6224,17 +6224,17 @@ JSCompartment::getLazyType(JSContext *cx
 /////////////////////////////////////////////////////////////////////
 // Tracing
 /////////////////////////////////////////////////////////////////////
 
 void
 TypeSet::sweep(JSCompartment *compartment)
 {
     JS_ASSERT(!purged());
-    JS_ASSERT(compartment->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweeping());
 
     /*
      * Purge references to type objects that are no longer live. Type sets hold
      * only weak references. For type sets containing more than one object,
      * live entries in the object hash need to be copied to the compartment's
      * new arena.
      */
     unsigned objectCount = baseObjectCount();
@@ -6304,17 +6304,17 @@ TypeObject::sweep(FreeOp *fop)
          * as code gets reanalyzed.
          */
         clearProperties();
 
         return;
     }
 
     JSCompartment *compartment = this->compartment();
-    JS_ASSERT(compartment->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweeping());
 
     if (!isMarked()) {
         if (newScript)
             fop->free_(newScript);
         return;
     }
 
     /*
@@ -6388,26 +6388,26 @@ struct SweepTypeObjectOp
         TypeObject *object = static_cast<TypeObject *>(cell);
         object->sweep(fop);
     }
 };
 
 void
 SweepTypeObjects(FreeOp *fop, JSCompartment *compartment)
 {
-    JS_ASSERT(compartment->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweeping());
     SweepTypeObjectOp op(fop);
     gc::ForEachArenaAndCell(compartment, gc::FINALIZE_TYPE_OBJECT, gc::EmptyArenaOp, op);
 }
 
 void
 TypeCompartment::sweep(FreeOp *fop)
 {
     JSCompartment *compartment = this->compartment();
-    JS_ASSERT(compartment->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweeping());
 
     SweepTypeObjects(fop, compartment);
 
     /*
      * Iterate through the array/object type tables and remove all entries
      * referencing collected data. These tables only hold weak references.
      */
 
@@ -6534,17 +6534,17 @@ TypeCompartment::sweepCompilerOutputs(Fr
     }
 }
 
 void
 JSCompartment::sweepNewTypeObjectTable(TypeObjectSet &table)
 {
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
 
-    JS_ASSERT(isGCSweeping());
+    JS_ASSERT(zone()->isGCSweeping());
     if (table.initialized()) {
         for (TypeObjectSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObject *type = e.front();
             if (IsTypeObjectAboutToBeFinalized(&type))
                 e.removeFront();
             else if (type != e.front())
                 e.rekeyFront(TypeObjectSet::Lookup(type->clasp, type->proto.get()), type);
         }
@@ -6565,17 +6565,17 @@ TypeCompartment::~TypeCompartment()
     if (allocationSiteTable)
         js_delete(allocationSiteTable);
 }
 
 /* static */ void
 TypeScript::Sweep(FreeOp *fop, RawScript script)
 {
     JSCompartment *compartment = script->compartment();
-    JS_ASSERT(compartment->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweeping());
     JS_ASSERT(compartment->types.inferenceEnabled);
 
     unsigned num = NumTypeSets(script);
     TypeSet *typeArray = script->types->typeArray();
 
     /* Remove constraints and references to dead objects from the persistent type sets. */
     for (unsigned i = 0; i < num; i++)
         typeArray[i].sweep(compartment);
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -415,17 +415,17 @@ struct AutoEnterCompilation
     }
 
     bool init(UnrootedScript script, bool constructing, unsigned chunkIndex)
     {
         CompilerOutput co;
         co.script = script;
         co.setKind(kind);
         co.constructing = constructing;
-        co.barriers = cx->compartment->compileBarriers();
+        co.barriers = cx->zone()->compileBarriers();
         co.chunkIndex = chunkIndex;
 
        // This flag is used to prevent adding the current compiled script to
        // the list of compiler outputs which should be invalidated.  This is
        // necessary because running some analysis might discard the script
        // itself, which can happen when the monitored value does not reflect
        // the types propagated by type inference.
        // the list of compiler outputs which should be invalidated.  This is
        // necessary because running some analysis might discard the script
        // itself, which can happen when the monitored value does not reflect
        // the types propagated by type inference.
         co.pendingRecompilation = true;
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1364,19 +1364,19 @@ MarkGeneratorFrame(JSTracer *trc, JSGene
                    HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
                    HeapValueify(gen->regs.sp),
                    "Generator Floating Stack");
 }
 
 static void
 GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
 {
-    JSCompartment *comp = cx->compartment;
-    if (comp->needsBarrier())
-        MarkGeneratorFrame(comp->barrierTracer(), gen);
+    JS::Zone *zone = cx->zone();
+    if (zone->needsBarrier())
+        MarkGeneratorFrame(zone->barrierTracer(), gen);
 }
 
 /*
  * Only mark generator frames/slots when the generator is not active on the
  * stack or closed. Barriers when copying onto the stack or closing preserve
  * gc invariants.
  */
 bool
--- a/js/src/jsmemorymetrics.cpp
+++ b/js/src/jsmemorymetrics.cpp
@@ -329,26 +329,26 @@ JS::GetExplicitNonHeapForRuntime(JSRunti
     return int64_t(n);
 }
 
 JS_PUBLIC_API(size_t)
 JS::SystemCompartmentCount(const JSRuntime *rt)
 {
     size_t n = 0;
     for (size_t i = 0; i < rt->compartments.length(); i++) {
-        if (rt->compartments[i]->isSystemCompartment)
+        if (rt->compartments[i]->zone()->isSystemCompartment)
             ++n;
     }
     return n;
 }
 
 JS_PUBLIC_API(size_t)
 JS::UserCompartmentCount(const JSRuntime *rt)
 {
     size_t n = 0;
     for (size_t i = 0; i < rt->compartments.length(); i++) {
-        if (!rt->compartments[i]->isSystemCompartment)
+        if (!rt->compartments[i]->zone()->isSystemCompartment)
             ++n;
     }
     return n;
 }
 
 #endif // JS_THREADSAFE
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1764,20 +1764,20 @@ JSObject::ReserveForTradeGuts(JSContext 
      */
 
 #ifdef JSGC_INCREMENTAL
     /*
      * We need a write barrier here. If |a| was marked and |b| was not, then
      * after the swap, |b|'s guts would never be marked. The write barrier
      * solves this.
      */
-    JSCompartment *comp = a->compartment();
-    if (comp->needsBarrier()) {
-        MarkChildren(comp->barrierTracer(), a);
-        MarkChildren(comp->barrierTracer(), b);
+    JS::Zone *zone = a->zone();
+    if (zone->needsBarrier()) {
+        MarkChildren(zone->barrierTracer(), a);
+        MarkChildren(zone->barrierTracer(), b);
     }
 #endif
 
     /*
      * Swap prototypes and classes on the two objects, so that TradeGuts can
      * preserve the types of the two objects.
      */
     Class *aClass = a->getClass();
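
The ReserveForTradeGuts hunk above keys the pre-swap write barrier on the zone. A standalone sketch of the idea, with stand-in Zone and object types rather than the real MarkChildren/barrierTracer machinery:

    #include <cstdio>

    // Simplified stand-ins for the incremental-marking state.
    struct Zone {
        bool needsBarrier;
    };

    struct Obj {
        Zone *zone;
        bool childrenMarked;
    };

    static void MarkChildren(Obj *obj)
    {
        obj->childrenMarked = true;
    }

    // Before swapping the guts of |a| and |b|, mark both objects' children if
    // incremental marking is active, so nothing reachable is missed after the swap.
    static void PreSwapBarrier(Obj *a, Obj *b)
    {
        if (a->zone->needsBarrier) {
            MarkChildren(a);
            MarkChildren(b);
        }
    }

    int main()
    {
        Zone zone = { true };
        Obj a = { &zone, false }, b = { &zone, false };
        PreSwapBarrier(&a, &b);
        std::printf("a marked: %d, b marked: %d\n", a.childrenMarked, b.childrenMarked);
        return 0;
    }
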
--- a/js/src/jsprobes.cpp
+++ b/js/src/jsprobes.cpp
@@ -434,15 +434,15 @@ Probes::ETWStartExecution(UnrootedScript
 bool
 Probes::ETWStopExecution(UnrootedScript script)
 {
     int lineno = script ? script->lineno : -1;
     return EventWriteEvtExecuteDone(ScriptFilename(script), lineno) == ERROR_SUCCESS;
 }
 
 bool
-Probes::ETWResizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize)
+Probes::ETWResizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize)
 {
-    return EventWriteEvtHeapResize(reinterpret_cast<uint64_t>(compartment),
+    return EventWriteEvtHeapResize(reinterpret_cast<uint64_t>(zone),
                                    oldSize, newSize) == ERROR_SUCCESS;
 }
 
 #endif
--- a/js/src/jsprobes.h
+++ b/js/src/jsprobes.h
@@ -99,17 +99,17 @@ bool exitScript(JSContext *, UnrootedScr
 
 /* Executing a script */
 bool startExecution(UnrootedScript script);
 
 /* Script has completed execution */
 bool stopExecution(UnrootedScript script);
 
 /* Heap has been resized */
-bool resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize);
+bool resizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize);
 
 /*
  * Object has been created. |obj| must exist (its class and size are read)
  */
 bool createObject(JSContext *cx, JSObject *obj);
 
 /* Resize events are being tracked. */
 bool objectResizeActive();
@@ -366,22 +366,22 @@ Probes::exitScript(JSContext *cx, Unroot
         (fp != NULL && fp->hasPushedSPSFrame()))
     {
         rt->spsProfiler.exit(cx, script, maybeFun);
     }
     return ok;
 }
 
 inline bool
-Probes::resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize)
+Probes::resizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize)
 {
     bool ok = true;
 
 #ifdef MOZ_ETW
-    if (ProfilingActive && !ETWResizeHeap(compartment, oldSize, newSize))
+    if (ProfilingActive && !ETWResizeHeap(zone, oldSize, newSize))
         ok = false;
 #endif
 
     return ok;
 }
 
 #ifdef INCLUDE_MOZILLA_DTRACE
 static const char *ObjectClassname(JSObject *obj) {
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -154,26 +154,26 @@ PropertyTree::getChild(JSContext *cx, Sh
             if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
                 shape = *p;
         } else {
             /* If kidp->isNull(), we always insert. */
         }
 
 #ifdef JSGC_INCREMENTAL
         if (shape) {
-            JSCompartment *comp = shape->compartment();
-            if (comp->needsBarrier()) {
+            JS::Zone *zone = shape->zone();
+            if (zone->needsBarrier()) {
                 /*
                  * We need a read barrier for the shape tree, since these are weak
                  * pointers.
                  */
                 Shape *tmp = shape;
-                MarkShapeUnbarriered(comp->barrierTracer(), &tmp, "read barrier");
+                MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
                 JS_ASSERT(tmp == shape);
-            } else if (comp->isGCSweeping() && !shape->isMarked() &&
+            } else if (zone->isGCSweeping() && !shape->isMarked() &&
                        !shape->arenaHeader()->allocatedDuringIncremental)
             {
                 /*
                  * The shape we've found is unreachable and due to be finalized, so
                  * remove our weak reference to it and don't use it.
                  */
                 JS_ASSERT(parent_->isMarked());
                 parent_->removeChild(shape);
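
The PropertyTree::getChild hunk above keeps the weak shape-tree read barrier but asks the zone, not the compartment, whether barriers are needed or sweeping is under way. A simplified standalone sketch of that read-barrier-or-drop decision, with stand-in types:

    #include <cstdio>

    // Simplified stand-ins; the real code uses MarkShapeUnbarriered and the
    // arena's allocatedDuringIncremental bit.
    struct Zone {
        bool needsBarrier;
        bool sweeping;
    };

    struct Shape {
        Zone *zone;
        bool marked;
    };

    // Read barrier over a weak shape-tree edge: keep the child alive while
    // marking, or drop it if the sweeper is about to finalize it.
    static Shape *ReadWeakChild(Shape *child)
    {
        if (!child)
            return nullptr;
        if (child->zone->needsBarrier) {
            child->marked = true;      // stands in for MarkShapeUnbarriered
            return child;
        }
        if (child->zone->sweeping && !child->marked)
            return nullptr;            // stands in for removing the weak edge
        return child;
    }

    int main()
    {
        Zone zone = { false, true };
        Shape dead = { &zone, false };
        std::printf("child survives: %d\n", ReadWeakChild(&dead) != nullptr);
        return 0;
    }
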
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -2596,17 +2596,17 @@ JSScript::clearTraps(FreeOp *fop)
 void
 JSScript::markChildren(JSTracer *trc)
 {
     // NOTE: this JSScript may be partially initialized at this point.  E.g. we
     // may have created it and partially initialized it with
     // JSScript::Create(), but not yet finished initializing it with
     // fullyInitFromEmitter() or fullyInitTrivial().
 
-    JS_ASSERT_IF(trc->runtime->gcStrictCompartmentChecking, compartment()->isCollecting());
+    JS_ASSERT_IF(trc->runtime->gcStrictCompartmentChecking, zone()->isCollecting());
 
     for (uint32_t i = 0; i < natoms; ++i) {
         if (atoms[i])
             MarkString(trc, &atoms[i], "atom");
     }
 
     if (hasObjects()) {
         ObjectArray *objarray = objects();
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -303,17 +303,17 @@ WeakMap_set_impl(JSContext *cx, CallArgs
     }
 
     JS_ASSERT(key->compartment() == thisObj->compartment());
     JS_ASSERT_IF(value.isObject(), value.toObject().compartment() == thisObj->compartment());
     if (!map->put(key, value)) {
         JS_ReportOutOfMemory(cx);
         return false;
     }
-    HashTableWriteBarrierPost(cx->compartment, map, key);
+    HashTableWriteBarrierPost(cx->zone(), map, key);
 
     args.rval().setUndefined();
     return true;
 }
 
 JSBool
 WeakMap_set(JSContext *cx, unsigned argc, Value *vp)
 {
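
The WeakMap_set hunk above now passes the zone to the hash-table post barrier. As a rough illustration only (the remembered set below is a stand-in assumption, not the real HashTableWriteBarrierPost), a generational post barrier records the inserted edge so a later minor GC can revisit it:

    #include <cstdio>
    #include <utility>
    #include <vector>

    // Stand-in key and zone-owned remembered set.
    struct Key { int id; };

    struct Zone {
        std::vector<std::pair<void *, Key *> > rememberedEntries;
    };

    // Post barrier after inserting |key| into |table|: remember the edge so a
    // later minor collection can re-find (and update) a nursery key.
    static void HashTablePostBarrier(Zone *zone, void *table, Key *key)
    {
        zone->rememberedEntries.push_back(std::make_pair(table, key));
    }

    int main()
    {
        Zone zone;
        Key key = { 7 };
        int table = 0; // stands in for the real hash table object
        HashTablePostBarrier(&zone, &table, &key);
        std::printf("remembered edges: %zu\n", zone.rememberedEntries.size());
        return 0;
    }
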
--- a/js/src/methodjit/BaseAssembler.h
+++ b/js/src/methodjit/BaseAssembler.h
@@ -1360,17 +1360,17 @@ static const JSC::MacroAssembler::Regist
             return jump();
 #endif
 
         /*
          * Inline FreeSpan::allocate. Only the case where the current freelist
          * span is not empty is handled.
          */
         gc::FreeSpan *list = const_cast<gc::FreeSpan *>
-                             (cx->compartment->allocator.arenas.getFreeList(allocKind));
+                             (cx->zone()->allocator.arenas.getFreeList(allocKind));
         loadPtr(&list->first, result);
 
         Jump jump = branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result);
 
         addPtr(Imm32(thingSize), result);
         storePtr(result, &list->first);
 
         /*
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -141,17 +141,17 @@ mjit::Compiler::compile()
                         TraceLogging::JM_COMPILE_STOP,
                         outerScript);
 #endif
 
     CompileStatus status = performCompilation();
     if (status != Compile_Okay && status != Compile_Retry) {
         if (!outerScript->ensureHasMJITInfo(cx))
             return Compile_Error;
-        JSScript::JITScriptHandle *jith = outerScript->jitHandle(isConstructing, cx->compartment->compileBarriers());
+        JSScript::JITScriptHandle *jith = outerScript->jitHandle(isConstructing, cx->zone()->compileBarriers());
         JSScript::ReleaseCode(cx->runtime->defaultFreeOp(), jith);
         jith->setUnjittable();
 
         if (outerScript->function()) {
             outerScript->uninlineable = true;
             types::MarkTypeObjectFlags(cx, outerScript->function(),
                                        types::OBJECT_FLAG_UNINLINEABLE);
         }
@@ -1010,17 +1010,17 @@ mjit::CanMethodJIT(JSContext *cx, JSScri
 
     if (IonGetsFirstChance(cx, script, pc, request)) {
         if (script->hasIonScript())
             script->incUseCount();
         return Compile_Skipped;
     }
 
     if (script->hasMJITInfo()) {
-        JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->compartment->compileBarriers());
+        JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->zone()->compileBarriers());
         if (jith->isUnjittable())
             return Compile_Abort;
     }
 
     if (!cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
         (cx->typeInferenceEnabled()
          ? script->incUseCount() <= INFER_USES_BEFORE_COMPILE
          : script->incUseCount() <= USES_BEFORE_COMPILE))
@@ -1035,17 +1035,17 @@ mjit::CanMethodJIT(JSContext *cx, JSScri
     if (construct && !script->nslots)
         script->nslots++;
 
     uint64_t gcNumber = cx->runtime->gcNumber;
 
     if (!script->ensureHasMJITInfo(cx))
         return Compile_Error;
 
-    JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->compartment->compileBarriers());
+    JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->zone()->compileBarriers());
 
     JITScript *jit;
     if (jith->isEmpty()) {
         jit = MakeJITScript(cx, script);
         if (!jit)
             return Compile_Error;
 
         // Script analysis can trigger GC, watch in case compileBarriers() changed.
@@ -5649,17 +5649,17 @@ mjit::Compiler::jsop_setprop(HandlePrope
             uint32_t slot = propertyTypes->definiteSlot();
             RegisterID reg = frame.tempRegForData(lhs);
             frame.pinReg(reg);
             bool isObject = lhs->isTypeKnown();
             MaybeJump notObject;
             if (!isObject)
                 notObject = frame.testObject(Assembler::NotEqual, lhs);
 #ifdef JSGC_INCREMENTAL_MJ
-            if (cx->compartment->compileBarriers() && propertyTypes->needsBarrier(cx)) {
+            if (cx->zone()->compileBarriers() && propertyTypes->needsBarrier(cx)) {
                 /* Write barrier. */
                 Jump j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
                 stubcc.linkExit(j, Uses(0));
                 stubcc.leave();
                 stubcc.masm.addPtr(Imm32(JSObject::getFixedSlotOffset(slot)),
                                    reg, Registers::ArgReg1);
                 OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
                 stubcc.rejoin(Changes(0));
@@ -5682,17 +5682,17 @@ mjit::Compiler::jsop_setprop(HandlePrope
         }
     }
 
     if (script_->hasScriptCounts)
         bumpPropCount(PC, PCCounts::PROP_OTHER);
 
 #ifdef JSGC_INCREMENTAL_MJ
     /* Write barrier. We don't have type information for JSOP_SETNAME. */
-    if (cx->compartment->compileBarriers() &&
+    if (cx->zone()->compileBarriers() &&
         (!types || JSOp(*PC) == JSOP_SETNAME || types->propertyNeedsBarrier(cx, id)))
     {
         jsop_setprop_slow(name);
         return true;
     }
 #endif
 
     PICGenInfo pic(ic::PICInfo::SET, PC);
@@ -6058,17 +6058,17 @@ mjit::Compiler::jsop_aliasedArg(unsigned
     int32_t argsOff = ArgumentsData::offsetOfArgs() + arg * sizeof(Value);
     masm.addPtr(Imm32(argsOff), reg, reg);
     if (get) {
         FrameEntry *fe = frame.getArg(arg);
         JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
         frame.push(Address(reg), type, true /* = reuseBase */);
     } else {
 #ifdef JSGC_INCREMENTAL_MJ
-        if (cx->compartment->compileBarriers()) {
+        if (cx->zone()->compileBarriers()) {
             /* Write barrier. */
             stubcc.linkExit(masm.testGCThing(Address(reg)), Uses(0));
             stubcc.leave();
             stubcc.masm.move(reg, Registers::ArgReg1);
             OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
             stubcc.rejoin(Changes(0));
         }
 #endif
@@ -6101,17 +6101,17 @@ mjit::Compiler::jsop_aliasedVar(ScopeCoo
         frame.pushRegs(typeReg, dataReg, type);
         BarrierState barrier = testBarrier(typeReg, dataReg,
                                            /* testUndefined = */ false,
                                            /* testReturn */ false,
                                            /* force */ true);
         finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
     } else {
 #ifdef JSGC_INCREMENTAL_MJ
-        if (cx->compartment->compileBarriers()) {
+        if (cx->zone()->compileBarriers()) {
             /* Write barrier. */
             stubcc.linkExit(masm.testGCThing(addr), Uses(0));
             stubcc.leave();
             stubcc.masm.addPtr(Imm32(addr.offset), addr.base, Registers::ArgReg1);
             OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
             stubcc.rejoin(Changes(0));
         }
 #endif
@@ -6256,17 +6256,17 @@ mjit::Compiler::iter(unsigned flags)
                                       ImmPtr(js::emptyObjectElements));
     stubcc.linkExit(hasElements, Uses(1));
 
 #ifdef JSGC_INCREMENTAL_MJ
     /*
      * Write barrier for stores to the iterator. We only need to take a write
      * barrier if NativeIterator::obj is actually going to change.
      */
-    if (cx->compartment->compileBarriers()) {
+    if (cx->zone()->compileBarriers()) {
         Jump j = masm.branchPtr(Assembler::NotEqual,
                                 Address(nireg, offsetof(NativeIterator, obj)), reg);
         stubcc.linkExit(j, Uses(1));
     }
 #endif
 
     /* Found a match with the most recent iterator. Hooray! */
 
@@ -6667,17 +6667,17 @@ mjit::Compiler::jsop_setgname(HandleProp
             shape->writable() && shape->hasSlot() &&
             !types->isOwnProperty(cx, globalObj->getType(cx), true))
         {
             watchGlobalReallocation();
             HeapSlot *value = &globalObj->getSlotRef(shape->slot());
             RegisterID reg = frame.allocReg();
 #ifdef JSGC_INCREMENTAL_MJ
             /* Write barrier. */
-            if (cx->compartment->compileBarriers() && types->needsBarrier(cx)) {
+            if (cx->zone()->compileBarriers() && types->needsBarrier(cx)) {
                 stubcc.linkExit(masm.jump(), Uses(0));
                 stubcc.leave();
                 stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
                 OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                 stubcc.rejoin(Changes(0));
             }
 #endif
             masm.move(ImmPtr(value), reg);
@@ -6685,17 +6685,17 @@ mjit::Compiler::jsop_setgname(HandleProp
             frame.shimmy(1);
             frame.freeReg(reg);
             return true;
         }
     }
 
 #ifdef JSGC_INCREMENTAL_MJ
     /* Write barrier. */
-    if (cx->compartment->compileBarriers()) {
+    if (cx->zone()->compileBarriers()) {
         jsop_setgname_slow(name);
         return true;
     }
 #endif
 
 #if defined JS_MONOIC
     FrameEntry *objFe = frame.peek(-2);
     FrameEntry *fe = frame.peek(-1);
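
Throughout the method JIT hunks above, cx->zone()->compileBarriers() decides whether barrier stubs (or a slow path) get compiled in. A trivial standalone sketch of that gating decision, with a stand-in Zone flag:

    #include <cstdio>

    // Stand-in for the per-zone flag consulted at compile time; the real code
    // asks zone->compileBarriers().
    struct Zone {
        bool compileBarriers;
    };

    enum StorePath { FAST_PATH, SLOW_PATH_WITH_BARRIER };

    // If barriers may be needed while the compiled code runs, route the store
    // through a barriered path; otherwise keep the fast path.
    static StorePath ChooseStorePath(const Zone &zone, bool propertyNeedsBarrier)
    {
        if (zone.compileBarriers && propertyNeedsBarrier)
            return SLOW_PATH_WITH_BARRIER;
        return FAST_PATH;
    }

    int main()
    {
        Zone zone = { true };
        std::printf("path: %d\n", static_cast<int>(ChooseStorePath(zone, true)));
        return 0;
    }
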
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -490,17 +490,17 @@ private:
             return PC;
         ActiveFrame *scan = a;
         while (scan && scan->parent != outer)
             scan = static_cast<ActiveFrame *>(scan->parent);
         return scan->parentPC;
     }
 
     JITScript *outerJIT() {
-        return outerScript->getJIT(isConstructing, cx->compartment->compileBarriers());
+        return outerScript->getJIT(isConstructing, cx->zone()->compileBarriers());
     }
 
     ChunkDescriptor &outerChunkRef() {
         return outerJIT()->chunkDescriptor(chunkIndex);
     }
 
     bool bytecodeInChunk(jsbytecode *pc) {
         return (unsigned(pc - outerScript->code) >= outerChunk.begin)
--- a/js/src/methodjit/FastBuiltins.cpp
+++ b/js/src/methodjit/FastBuiltins.cpp
@@ -473,17 +473,17 @@ CompileStatus
 mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool isArrayPop)
 {
     /* Filter out silly cases. */
     if (thisValue->isConstant())
         return Compile_InlineAbort;
 
 #ifdef JSGC_INCREMENTAL_MJ
     /* Write barrier. */
-    if (cx->compartment->compileBarriers())
+    if (cx->zone()->compileBarriers())
         return Compile_InlineAbort;
 #endif
 
     RegisterID objReg = frame.tempRegForData(thisValue);
     frame.pinReg(objReg);
 
     RegisterID lengthReg = frame.allocReg();
     RegisterID slotsReg = frame.allocReg();
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -969,17 +969,17 @@ mjit::Compiler::jsop_setelem_dense()
 #ifdef JSGC_INCREMENTAL_MJ
     /*
      * Write barrier.
      * We skip over the barrier if we incremented initializedLength above,
      * because in that case the slot we're overwriting was previously
      * undefined.
      */
     types::StackTypeSet *types = frame.extra(obj).types;
-    if (cx->compartment->compileBarriers() && (!types || types->propertyNeedsBarrier(cx, JSID_VOID))) {
+    if (cx->zone()->compileBarriers() && (!types || types->propertyNeedsBarrier(cx, JSID_VOID))) {
         Label barrierStart = stubcc.masm.label();
         stubcc.linkExitDirect(masm.jump(), barrierStart);
 
         /*
          * The sync call below can potentially clobber key.reg() and slotsReg.
          * We pin key.reg() to avoid it being clobbered. If |hoisted| is true,
          * we can also pin slotsReg. If not, then slotsReg is owned by the
         * compiler and we save it manually to VMFrame::scratch.
@@ -1366,17 +1366,17 @@ mjit::Compiler::jsop_setelem(bool popGua
 
     if (id->isType(JSVAL_TYPE_DOUBLE) || !globalObj) {
         jsop_setelem_slow();
         return true;
     }
 
 #ifdef JSGC_INCREMENTAL_MJ
     // Write barrier.
-    if (cx->compartment->compileBarriers()) {
+    if (cx->zone()->compileBarriers()) {
         jsop_setelem_slow();
         return true;
     }
 #endif
 
     SetElementICInfo ic;
 
     // One by one, check if the most important stack entries have registers,
@@ -2477,17 +2477,17 @@ void
 mjit::Compiler::jsop_initprop()
 {
     FrameEntry *obj = frame.peek(-2);
     FrameEntry *fe = frame.peek(-1);
     PropertyName *name = script_->getName(GET_UINT32_INDEX(PC));
 
     RootedObject baseobj(cx, frame.extra(obj).initObject);
 
-    if (!baseobj || monitored(PC) || cx->compartment->compileBarriers()) {
+    if (!baseobj || monitored(PC) || cx->zone()->compileBarriers()) {
         if (monitored(PC) && script_ == outerScript)
             monitoredBytecodes.append(PC - script_->code);
 
         prepareStubCall(Uses(2));
         masm.move(ImmPtr(name), Registers::ArgReg1);
         INLINE_STUBCALL(stubs::InitProp, REJOIN_FALLTHROUGH);
         return;
     }
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -365,17 +365,17 @@ UncachedInlineCall(VMFrame &f, InitialFr
     /* Finish the handoff to the new frame regs. */
     PreserveRegsGuard regsGuard(cx, regs);
 
     /*
      * If newscript was successfully compiled, run it. Skip for calls which
      * will be constructing a new type object for 'this'.
      */
     if (!newType) {
-        if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing(), cx->compartment->compileBarriers())) {
+        if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing(), cx->zone()->compileBarriers())) {
             if (jit->invokeEntry) {
                 *pret = jit->invokeEntry;
 
                 /* Restore the old fp around and let the JIT code repush the new fp. */
                 regs.popFrame((Value *) regs.fp());
                 return true;
             }
         }
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -1099,17 +1099,17 @@ CheckStackAndEnterMethodJIT(JSContext *c
 
     return EnterMethodJIT(cx, fp, code, stackLimit, partial);
 }
 
 JaegerStatus
 mjit::JaegerShot(JSContext *cx, bool partial)
 {
     StackFrame *fp = cx->fp();
-    JITScript *jit = fp->script()->getJIT(fp->isConstructing(), cx->compartment->compileBarriers());
+    JITScript *jit = fp->script()->getJIT(fp->isConstructing(), cx->zone()->compileBarriers());
 
     JS_ASSERT(cx->regs().pc == fp->script()->code);
 
 #if JS_TRACE_LOGGING
     AutoTraceLog logger(TraceLogging::defaultLogger(),
                         TraceLogging::JM_START,
                         TraceLogging::JM_STOP,
                         fp->script().unsafeGet());
@@ -1329,18 +1329,18 @@ JITScript::destroyChunk(FreeOp *fop, uns
         // Invalidates the CompilerOutput of the chunk.
         types::TypeCompartment &types = script->compartment()->types;
         desc.chunk->recompileInfo.compilerOutput(types)->invalidate();
 
         /*
          * Write barrier: Before we destroy the chunk, trace through the objects
          * it holds.
          */
-        if (script->compartment()->needsBarrier())
-            desc.chunk->trace(script->compartment()->barrierTracer());
+        if (script->zone()->needsBarrier())
+            desc.chunk->trace(script->zone()->barrierTracer());
 
         Probes::discardMJITCode(fop, this, desc.chunk, desc.chunk->code.m_code.executableAddress());
         fop->delete_(desc.chunk);
         desc.chunk = NULL;
 
         CrossChunkEdge *edges = this->edges();
         for (unsigned i = 0; i < nedges; i++) {
             CrossChunkEdge &edge = edges[i];
@@ -1381,17 +1381,17 @@ JITScript::trace(JSTracer *trc)
         if (desc.chunk)
             desc.chunk->trace(trc);
     }
 }
 
 static ic::PICInfo *
 GetPIC(JSContext *cx, JSScript *script, jsbytecode *pc, bool constructing)
 {
-    JITScript *jit = script->getJIT(constructing, cx->compartment->needsBarrier());
+    JITScript *jit = script->getJIT(constructing, cx->zone()->needsBarrier());
     if (!jit)
         return NULL;
 
     JITChunk *chunk = jit->chunk(pc);
     if (!chunk)
         return NULL;
 
     ic::PICInfo *pics = chunk->pics();
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -869,17 +869,17 @@ class CallCompiler : public BaseCompiler
 
         // Test that:
         // - script->mJITInfo is not NULL
         // - script->mJITInfo->jitHandle{Ctor,Normal}->value is neither NULL nor UNJITTABLE, and
         // - script->mJITInfo->jitHandle{Ctor,Normal}->value->arityCheckEntry is not NULL.
         masm.loadPtr(Address(t0, JSScript::offsetOfMJITInfo()), t0);
         Jump hasNoJitInfo = masm.branchPtr(Assembler::Equal, t0, ImmPtr(NULL));
         size_t offset = JSScript::JITScriptSet::jitHandleOffset(callingNew,
-                                                                f.cx->compartment->compileBarriers());
+                                                                f.cx->zone()->compileBarriers());
         masm.loadPtr(Address(t0, offset), t0);
         Jump hasNoJitCode = masm.branchPtr(Assembler::BelowOrEqual, t0,
                                            ImmPtr(JSScript::JITScriptHandle::UNJITTABLE));
 
         masm.loadPtr(Address(t0, offsetof(JITScript, arityCheckEntry)), t0);
 
         Jump hasCode = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(0));
 
@@ -957,17 +957,17 @@ class CallCompiler : public BaseCompiler
         repatch.relink(ic.lastOolJump(), cs);
 
         return true;
     }
 
     bool patchInlinePath(JSScript *script, JSObject *obj)
     {
         JS_ASSERT(ic.frameSize.isStatic());
-        JITScript *jit = script->getJIT(callingNew, f.cx->compartment->compileBarriers());
+        JITScript *jit = script->getJIT(callingNew, f.cx->zone()->compileBarriers());
 
         /* Very fast path. */
         Repatcher repatch(f.chunk());
 
         /*
          * Use the arguments check entry if this is a monitored call, we might
          * not have accounted for all possible argument types.
          */
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -559,17 +559,17 @@ class SetPropCompiler : public PICStubCo
             if (obj->numDynamicSlots() != slots)
                 return disable("insufficient slot capacity");
 
 #ifdef JSGC_INCREMENTAL_MJ
             /*
              * Since we're changing the object's shape, we need a write
              * barrier. Taking the slow path is the easiest way to get one.
              */
-            if (cx->compartment->compileBarriers())
+            if (cx->zone()->compileBarriers())
                 return disable("ADDPROP write barrier required");
 #endif
 
             if (pic.typeMonitored && !updateMonitoredTypes())
                 return Lookup_Uncacheable;
 
             return generateStub(initialShape, shape, true);
         }
@@ -2841,17 +2841,17 @@ bool
 SetElementIC::shouldUpdate(VMFrame &f)
 {
     if (!hit) {
         hit = true;
         spew(f, "ignored", "first hit");
         return false;
     }
 #ifdef JSGC_INCREMENTAL_MJ
-    JS_ASSERT(!f.cx->compartment->compileBarriers());
+    JS_ASSERT(!f.cx->zone()->compileBarriers());
 #endif
     JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
     return true;
 }
 
 template<JSBool strict>
 void JS_FASTCALL
 ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -1744,26 +1744,26 @@ stubs::ConvertToTypedFloat(JSContext *cx
         vp->setDouble(d);
     }
 }
 
 void JS_FASTCALL
 stubs::WriteBarrier(VMFrame &f, Value *addr)
 {
 #ifdef JS_GC_ZEAL
-    if (!f.cx->compartment->needsBarrier())
+    if (!f.cx->zone()->needsBarrier())
         return;
 #endif
-    gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), addr, "write barrier");
+    gc::MarkValueUnbarriered(f.cx->zone()->barrierTracer(), addr, "write barrier");
 }
 
 void JS_FASTCALL
 stubs::GCThingWriteBarrier(VMFrame &f, Value *addr)
 {
 #ifdef JS_GC_ZEAL
-    if (!f.cx->compartment->needsBarrier())
+    if (!f.cx->zone()->needsBarrier())
         return;
 #endif
 
     gc::Cell *cell = (gc::Cell *)addr->toGCThing();
     if (cell && !cell->isMarked())
-        gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), addr, "write barrier");
+        gc::MarkValueUnbarriered(f.cx->zone()->barrierTracer(), addr, "write barrier");
 }
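
The StubCalls hunk above routes the JM write-barrier stubs through the zone's barrier tracer; GCThingWriteBarrier additionally skips cells that are already marked. A standalone sketch of that pre-barrier shape, with stand-in Zone and Cell types:

    #include <cstdio>

    // Simplified stand-ins; the real stubs call MarkValueUnbarriered with the
    // zone's barrier tracer.
    struct Zone {
        bool needsBarrier;
    };

    struct Cell {
        bool marked;
    };

    // Pre-barrier on an overwrite: mark the old referent so incremental marking
    // never loses it, skipping cells that are already marked.
    static void GCThingWriteBarrier(Zone *zone, Cell *oldReferent)
    {
        if (!zone->needsBarrier)
            return;
        if (oldReferent && !oldReferent->marked)
            oldReferent->marked = true;
    }

    int main()
    {
        Zone zone = { true };
        Cell cell = { false };
        GCThingWriteBarrier(&zone, &cell);
        std::printf("old referent marked: %d\n", cell.marked);
        return 0;
    }
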
--- a/js/src/vm/ArgumentsObject-inl.h
+++ b/js/src/vm/ArgumentsObject-inl.h
@@ -154,14 +154,14 @@ inline const Value &
 NormalArgumentsObject::callee() const
 {
     return data()->callee;
 }
 
 inline void
 NormalArgumentsObject::clearCallee()
 {
-    data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
+    data()->callee.set(zone(), MagicValue(JS_OVERWRITTEN_CALLEE));
 }
 
 } /* namespace js */
 
 #endif /* ArgumentsObject_inl_h___ */
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -693,17 +693,17 @@ Debugger::wrapDebuggeeValue(JSContext *c
             if (!dobj)
                 return false;
             dobj->setPrivateGCThing(obj);
             dobj->setReservedSlot(JSSLOT_DEBUGOBJECT_OWNER, ObjectValue(*object));
             if (!objects.relookupOrAdd(p, obj, dobj)) {
                 js_ReportOutOfMemory(cx);
                 return false;
             }
-            HashTableWriteBarrierPost(cx->compartment, &objects, obj);
+            HashTableWriteBarrierPost(cx->zone(), &objects, obj);
 
             if (obj->compartment() != object->compartment()) {
                 CrossCompartmentKey key(CrossCompartmentKey::DebuggerObject, object, obj);
                 if (!object->compartment()->putWrapper(key, ObjectValue(*dobj))) {
                     objects.remove(obj);
                     js_ReportOutOfMemory(cx);
                     return false;
                 }
@@ -1413,17 +1413,17 @@ Debugger::markCrossCompartmentDebuggerOb
 {
     JSRuntime *rt = tracer->runtime;
 
     /*
      * Mark all objects in comp that are referents of Debugger.Objects in other
      * compartments.
      */
     for (Debugger *dbg = rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext()) {
-        if (!dbg->object->compartment()->isCollecting())
+        if (!dbg->object->zone()->isCollecting())
             dbg->markKeysInCompartment(tracer);
     }
 }
 
 /*
  * This method has two tasks:
  *   1. Mark Debugger objects that are unreachable except for debugger hooks that
  *      may yet be called.
@@ -1463,17 +1463,17 @@ Debugger::markAllIteratively(GCMarker *t
 
                 /*
                  * dbg is a Debugger with at least one debuggee. Check three things:
                  *   - dbg is actually in a compartment that is being marked
                  *   - it isn't already marked
                  *   - it actually has hooks that might be called
                  */
                 HeapPtrObject &dbgobj = dbg->toJSObjectRef();
-                if (!dbgobj->compartment()->isGCMarking())
+                if (!dbgobj->zone()->isGCMarking())
                     continue;
 
                 bool dbgMarked = IsObjectMarked(&dbgobj);
                 if (!dbgMarked && dbg->hasAnyLiveHooks()) {
                     /*
                      * obj could be reachable only via its live, enabled
                      * debugger hooks, which may yet be called.
                      */
--- a/js/src/vm/ObjectImpl.cpp
+++ b/js/src/vm/ObjectImpl.cpp
@@ -214,35 +214,35 @@ js::ObjectImpl::checkShapeConsistency()
         }
     }
 }
 #endif
 
 void
 js::ObjectImpl::initSlotRange(uint32_t start, const Value *vector, uint32_t length)
 {
-    JSCompartment *comp = compartment();
+    JS::Zone *zone = this->zone();
     HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
     getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
     for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
-        sp->init(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+        sp->init(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
     for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
-        sp->init(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+        sp->init(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
 }
 
 void
 js::ObjectImpl::copySlotRange(uint32_t start, const Value *vector, uint32_t length)
 {
-    JSCompartment *comp = compartment();
+    JS::Zone *zone = this->zone();
     HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
     getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
     for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
-        sp->set(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+        sp->set(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
     for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
-        sp->set(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+        sp->set(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
 }
 
 #ifdef DEBUG
 bool
 js::ObjectImpl::slotInRange(uint32_t slot, SentinelAllowed sentinel) const
 {
     uint32_t capacity = numFixedSlots() + numDynamicSlots();
     if (sentinel == SENTINEL_ALLOWED)
--- a/js/src/vm/RegExpStatics-inl.h
+++ b/js/src/vm/RegExpStatics-inl.h
@@ -428,17 +428,17 @@ RegExpStatics::restore()
 
 inline void
 RegExpStatics::updateLazily(JSContext *cx, JSLinearString *input,
                             RegExpShared *shared, size_t lastIndex)
 {
     JS_ASSERT(input && shared);
     aboutToWrite();
 
-    BarrieredSetPair<JSString, JSLinearString>(cx->compartment,
+    BarrieredSetPair<JSString, JSLinearString>(cx->zone(),
                                                pendingInput, input,
                                                matchesInput, input);
     if (regexp.initialized())
         regexp.release();
     regexp.init(*shared);
 
     this->lastIndex = lastIndex;
     pendingLazyEvaluation = true;
@@ -450,17 +450,17 @@ RegExpStatics::updateFromMatchPairs(JSCo
     JS_ASSERT(input);
     aboutToWrite();
 
     /* Unset all lazy state. */
     pendingLazyEvaluation = false;
     this->regexp.release();
     this->lastIndex = size_t(-1);
 
-    BarrieredSetPair<JSString, JSLinearString>(cx->compartment,
+    BarrieredSetPair<JSString, JSLinearString>(cx->zone(),
                                                pendingInput, input,
                                                matchesInput, input);
 
     if (!matches.initArrayFrom(newPairs)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1682,17 +1682,17 @@ DebugScopes::addDebugScope(JSContext *cx
         return false;
 
     JS_ASSERT(!scopes->proxiedScopes.has(&scope));
     if (!scopes->proxiedScopes.put(&scope, &debugScope)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
-    HashTableWriteBarrierPost(cx->compartment, &scopes->proxiedScopes, &scope);
+    HashTableWriteBarrierPost(cx->zone(), &scopes->proxiedScopes, &scope);
     return true;
 }
 
 DebugScopeObject *
 DebugScopes::hasDebugScope(JSContext *cx, const ScopeIter &si)
 {
     JS_ASSERT(!si.hasScopeObject());
 
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -71,17 +71,17 @@ StackFrame::compartment() const
     return scopeChain()->compartment();
 }
 
 #ifdef JS_METHODJIT
 inline mjit::JITScript *
 StackFrame::jit()
 {
     AutoAssertNoGC nogc;
-    return script()->getJIT(isConstructing(), script()->compartment()->compileBarriers());
+    return script()->getJIT(isConstructing(), script()->zone()->compileBarriers());
 }
 #endif
 
 inline void
 StackFrame::initPrev(JSContext *cx)
 {
     JS_ASSERT(flags_ & HAS_PREVPC);
     if (FrameRegs *regs = cx->maybeRegs()) {
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -187,17 +187,18 @@ StackFrame::maybeSuspendedGenerator(JSRu
 }
 
 jsbytecode *
 StackFrame::prevpcSlow(InlinedSite **pinlined)
 {
     JS_ASSERT(!(flags_ & HAS_PREVPC));
 #if defined(JS_METHODJIT) && defined(JS_MONOIC)
     StackFrame *p = prev();
-    mjit::JITScript *jit = p->script()->getJIT(p->isConstructing(), p->compartment()->compileBarriers());
+    mjit::JITScript *jit = p->script()->getJIT(p->isConstructing(),
+                                               p->compartment()->zone()->compileBarriers());
     prevpc_ = jit->nativeToPC(ncode_, &prevInline_);
     flags_ |= HAS_PREVPC;
     if (pinlined)
         *pinlined = prevInline_;
     return prevpc_;
 #else
     JS_NOT_REACHED("Unknown PC for frame");
     return NULL;