Bug 841059 - Do not use the ArenaHeader for zone/compartment on JSObject; r=billm
author Terrence Cole <terrence@mozilla.com>
Wed, 13 Feb 2013 10:39:19 -0800
changeset 132298 7e658b893b5eabf4cbdd681e52f7ef1398f3f842
parent 132297 bb82ad6cfd67008fd4947bd468ca91349d1f4c95
child 132299 f447e09d5a0c31eca52a219f60f1d4d751ba3db6
push id unknown
push user unknown
push date unknown
reviewers billm
bugs 841059
milestone 22.0a1
Bug 841059 - Do not use the ArenaHeader for zone/compartment on JSObject; r=billm
js/src/gc/Barrier-inl.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/ion/IonCode.h
js/src/jscntxt.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsinfer.h
js/src/jsscript.h
js/src/jswrapper.cpp
js/src/vm/ObjectImpl-inl.h
js/src/vm/ObjectImpl.h
js/src/vm/Shape.h
js/src/vm/String.h
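
In outline: under generational GC a JSObject may be allocated in the nursery, where there is no ArenaHeader to consult, so objects can no longer get their zone from Cell::zone(). The patch renames that accessor to Cell::tenuredZone() (which asserts tenure), derives an object's zone from its shape instead (shapes are always tenured), and adds a ZoneOfValue() helper for Value call sites. A simplified sketch of the two lookup paths; the real definitions are in gc/Heap.h and vm/ObjectImpl-inl.h below:

    // Simplified sketch of the accessors this patch introduces.
    Zone *
    js::gc::Cell::tenuredZone() const
    {
        JS_ASSERT(isTenured());      // nursery things have no ArenaHeader
        return arenaHeader()->zone;  // the zone is recorded on the arena
    }

    JS_ALWAYS_INLINE JS::Zone *
    js::ObjectImpl::zone() const
    {
        // An object's shape is always tenured, even when the object itself
        // is in the nursery, so route the zone lookup through it.
        return shape_->zone();
    }
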
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -66,20 +66,18 @@ EncapsulatedValue::operator=(const Encap
     value = v.get();
     return *this;
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
-    if (value.isMarkable()) {
-        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
-        writeBarrierPre(cell->zone(), value);
-    }
+    if (value.isMarkable())
+        writeBarrierPre(ZoneOfValue(value), value);
 #endif
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
     if (zone->needsBarrier()) {
@@ -167,19 +165,18 @@ HeapValue::operator=(const HeapValue &v)
     return *this;
 }
 
 inline void
 HeapValue::set(Zone *zone, const Value &v)
 {
 #ifdef DEBUG
     if (value.isMarkable()) {
-        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
-        JS_ASSERT(cell->zone() == zone ||
-                  cell->zone() == zone->rt->atomsCompartment->zone());
+        JS_ASSERT(ZoneOfValue(value) == zone ||
+                  ZoneOfValue(value) == zone->rt->atomsCompartment->zone());
     }
 #endif
 
     pre(zone);
     JS_ASSERT(!IsPoisonedValue(v));
     value = v;
     post(zone->rt);
 }
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -86,17 +86,17 @@ struct Cell
 {
     inline ArenaHeader *arenaHeader() const;
     inline AllocKind getAllocKind() const;
     MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
 
     inline JSRuntime *runtime() const;
-    inline Zone *zone() const;
+    inline Zone *tenuredZone() const;
 
 #ifdef DEBUG
     inline bool isAligned() const;
     bool isTenured() const;
 #endif
 
   protected:
     inline uintptr_t address() const;
@@ -976,17 +976,17 @@ Cell::unmark(uint32_t color) const
 {
     JS_ASSERT(isTenured());
     JS_ASSERT(color != BLACK);
     AssertValidColor(this, color);
     chunk()->bitmap.unmark(this, color);
 }
 
 Zone *
-Cell::zone() const
+Cell::tenuredZone() const
 {
     JS_ASSERT(isTenured());
     return arenaHeader()->zone;
 }
 
 #ifdef DEBUG
 bool
 Cell::isAligned() const
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -248,29 +248,29 @@ namespace js {
 namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
-    Zone *zone = (*thingp)->zone();
+    Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
     return (*thingp)->isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
-    if (!(*thingp)->zone()->isGCSweeping())
+    if (!(*thingp)->tenuredZone()->isGCSweeping())
         return false;
     return !(*thingp)->isMarked();
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, EncapsulatedPtr<type> *thing, const char *name)                         \
 {                                                                                                 \
@@ -626,17 +626,17 @@ gc::MarkObjectSlots(JSTracer *trc, JSObj
 }
 
 static bool
 ShouldMarkCrossCompartment(JSTracer *trc, RawObject src, Cell *cell)
 {
     if (!IS_GC_MARKING_TRACER(trc))
         return true;
 
-    JS::Zone *zone = cell->zone();
+    JS::Zone *zone = cell->tenuredZone();
     uint32_t color = AsGCMarker(trc)->getMarkColor();
 
     JS_ASSERT(color == BLACK || color == GRAY);
     if (color == BLACK) {
         /*
          * Having black->gray edges violates our promise to the cycle
          * collector. This can happen if we're collecting a compartment and it
          * has an edge to an uncollected compartment: it's possible that the
@@ -1616,12 +1616,12 @@ JS::UnmarkGrayGCThingRecursively(void *t
 {
     JS_ASSERT(kind != JSTRACE_SHAPE);
 
     if (!JS::GCThingIsMarkedGray(thing))
         return;
 
     UnmarkGrayGCThing(thing);
 
-    JSRuntime *rt = static_cast<Cell *>(thing)->zone()->rt;
+    JSRuntime *rt = static_cast<Cell *>(thing)->runtime();
     UnmarkGrayTracer trc(rt);
     JS_TraceChildren(&trc, thing, kind);
 }
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -749,25 +749,23 @@ js::gc::EndVerifyPostBarriers(JSRuntime 
 
     if (rt->gcStoreBuffer.hasOverflowed())
         goto oom;
 
     if (!rt->gcStoreBuffer.coalesceForVerification())
         goto oom;
 
     /* Walk the heap. */
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (IsAtomsCompartment(c))
-            continue;
-
-        if (c->watchpointMap)
-            c->watchpointMap->markAll(trc);
-
+    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+        if (comp->watchpointMap)
+            comp->watchpointMap->markAll(trc);
+    }
+    for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         for (size_t kind = 0; kind < FINALIZE_LIMIT; ++kind) {
-            for (CellIterUnderGC cells(c, AllocKind(kind)); !cells.done(); cells.next()) {
+            for (CellIterUnderGC cells(zone, AllocKind(kind)); !cells.done(); cells.next()) {
                 Cell *src = cells.getCell();
                 if (!rt->gcVerifierNursery.isInside(src))
                     JS_TraceChildren(trc, src, MapAllocToTraceKind(AllocKind(kind)));
             }
         }
     }
 
 oom:
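
A note on the verifier walk above: arenas (and hence cells) belong to zones, not compartments, so the single compartment loop splits in two. Compartment-level state (the watchpoint maps) is still visited per compartment, while the cell walk moves to GCZoneGroupIter over the zones in the current GC zone group; the explicit IsAtomsCompartment() skip disappears, presumably because the zone-group iteration already excludes the atoms zone (that reading is inferred from the patch, not stated in it).
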
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -25,17 +25,17 @@ using namespace js;
 using namespace js::gc;
 
 JS::Zone::Zone(JSRuntime *rt)
   : rt(rt),
     allocator(this),
     hold(false),
 #ifdef JSGC_GENERATIONAL
     gcNursery(),
-    gcStoreBuffer(&gcNursery),
+    gcStoreBuffer(rt),
 #endif
     ionUsingBarriers_(false),
     active(false),
     gcScheduled(false),
     gcState(NoGC),
     gcPreserveCode(false),
     gcBytes(0),
     gcTriggerBytes(0),
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -108,17 +108,17 @@ struct Zone : private JS::shadow::Zone, 
     JSRuntime                    *rt;
     js::Allocator                allocator;
 
     js::CompartmentVector        compartments;
 
     bool                         hold;
 
 #ifdef JSGC_GENERATIONAL
-    js::gc::Nursery              gcNursery;
+    js::gc::VerifierNursery      gcNursery;
     js::gc::StoreBuffer          gcStoreBuffer;
 #endif
 
   private:
     bool                         ionUsingBarriers_;
   public:
 
     bool                         active;  // GC flag, whether there are active frames
--- a/js/src/ion/IonCode.h
+++ b/js/src/ion/IonCode.h
@@ -126,16 +126,17 @@ class IonCode : public gc::Cell
     }
 
     // Allocates a new IonCode object which will be managed by the GC. If no
     // object can be allocated, NULL is returned. On failure, |pool| is
     // automatically released, so the code may be freed.
     static IonCode *New(JSContext *cx, uint8_t *code, uint32_t bufferSize, JSC::ExecutablePool *pool);
 
   public:
+    JS::Zone *zone() const { return tenuredZone(); }
     static void readBarrier(IonCode *code);
     static void writeBarrierPre(IonCode *code);
     static void writeBarrierPost(IonCode *code, void *addr);
     static inline ThingRootKind rootKind() { return THING_ROOT_ION_CODE; }
 };
 
 class SnapshotWriter;
 class SafepointWriter;
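
The zone() shim added to IonCode here recurs throughout the patch: JSScript, types::TypeObject, BaseShape, Shape, and JSString each gain the same one-liner,

    JS::Zone *zone() const { return tenuredZone(); }

which is safe because none of these Cell subtypes is ever nursery-allocated; only ObjectImpl (below) overrides zone() to go through its shape instead.
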
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -253,17 +253,17 @@ JSRuntime::createJaegerRuntime(JSContext
 
 void
 JSCompartment::sweepCallsiteClones()
 {
     if (callsiteClones.initialized()) {
         for (CallsiteCloneTable::Enum e(callsiteClones); !e.empty(); e.popFront()) {
             CallsiteCloneKey key = e.front().key;
             JSFunction *fun = e.front().value;
-            if (!key.script->isMarked() || !fun->isMarked())
+            if (!IsScriptMarked(&key.script) || !IsObjectMarked(&fun))
                 e.removeFront();
         }
     }
 }
 
 RawFunction
 js::CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun, HandleScript script, jsbytecode *pc)
 {
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -898,17 +898,19 @@ JS::IncrementalObjectBarrier(JSObject *o
 
 JS_FRIEND_API(void)
 JS::IncrementalReferenceBarrier(void *ptr, JSGCTraceKind kind)
 {
     if (!ptr)
         return;
 
     gc::Cell *cell = static_cast<gc::Cell *>(ptr);
-    Zone *zone = cell->zone();
+    Zone *zone = kind == JSTRACE_OBJECT
+                 ? static_cast<JSObject *>(cell)->zone()
+                 : cell->tenuredZone();
 
     JS_ASSERT(!zone->rt->isHeapBusy());
 
     AutoMarkInDeadZone amn(zone);
 
     if (kind == JSTRACE_OBJECT)
         JSObject::writeBarrierPre(static_cast<JSObject*>(cell));
     else if (kind == JSTRACE_STRING)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1838,17 +1838,18 @@ GCMarker::markDelayedChildren(SliceBudge
     return true;
 }
 
 #ifdef DEBUG
 void
 GCMarker::checkZone(void *p)
 {
     JS_ASSERT(started);
-    JS_ASSERT(static_cast<Cell *>(p)->zone()->isCollecting());
+    DebugOnly<Cell *> cell = static_cast<Cell *>(p);
+    JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
 }
 #endif
 
 bool
 GCMarker::hasBufferedGrayRoots() const
 {
     return !grayFailed;
 }
@@ -1910,17 +1911,17 @@ GCMarker::appendGrayRoot(void *thing, JS
 
     GrayRoot root(thing, kind);
 #ifdef DEBUG
     root.debugPrinter = debugPrinter;
     root.debugPrintArg = debugPrintArg;
     root.debugPrintIndex = debugPrintIndex;
 #endif
 
-    Zone *zone = static_cast<Cell *>(thing)->zone();
+    Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
     if (zone->isCollecting()) {
         zone->maybeAlive = true;
         if (!zone->gcGrayRoots.append(root)) {
             grayFailed = true;
             resetBufferedGrayRoots();
         }
     }
 }
@@ -2721,18 +2722,18 @@ CheckCompartmentCallback(JSTracer *trcAr
 {
     CompartmentCheckTracer *trc = static_cast<CompartmentCheckTracer *>(trcArg);
     Cell *thing = (Cell *)*thingp;
 
     JSCompartment *comp = CompartmentOfCell(thing, kind);
     if (comp && trc->compartment) {
         CheckCompartment(trc, comp, thing, kind);
     } else {
-        JS_ASSERT(thing->zone() == trc->zone ||
-                  thing->zone() == trc->runtime->atomsCompartment->zone());
+        JS_ASSERT(thing->tenuredZone() == trc->zone ||
+                  thing->tenuredZone() == trc->runtime->atomsCompartment->zone());
     }
 }
 
 static void
 CheckForCompartmentMismatches(JSRuntime *rt)
 {
     if (rt->gcDisableStrictProxyCheckingCount)
         return;
@@ -2902,17 +2903,17 @@ BeginMarkPhase(JSRuntime *rt)
      * zones will be cleaned up. See AutoMarkInDeadZone and
      * AutoMaybeTouchDeadZones for details.
      */
 
     /* Set the maybeAlive flag based on cross-compartment edges. */
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             Cell *dst = e.front().key.wrapped;
-            dst->zone()->maybeAlive = true;
+            dst->tenuredZone()->maybeAlive = true;
         }
     }
 
     /*
      * For black roots, code in gc/Marking.cpp will already have set maybeAlive
      * during MarkRuntime.
      */
 
@@ -3279,30 +3280,30 @@ JSCompartment::findOutgoingEdges(Compone
         Cell *other = e.front().key.wrapped;
         if (kind == CrossCompartmentKey::ObjectWrapper) {
             /*
              * Add an edge to the wrapped object's compartment if the wrapped
              * object is not marked black, to indicate that the wrapper
              * compartment must not be swept after the wrapped compartment.
              */
             if (!other->isMarked(BLACK) || other->isMarked(GRAY)) {
-                JS::Zone *w = other->zone();
+                JS::Zone *w = other->tenuredZone();
                 if (w->isGCMarking())
                     finder.addEdgeTo(w);
             }
         } else {
             JS_ASSERT(kind == CrossCompartmentKey::DebuggerScript ||
                       kind == CrossCompartmentKey::DebuggerObject ||
                       kind == CrossCompartmentKey::DebuggerEnvironment);
             /*
              * Add an edge for debugger object wrappers, to ensure (in conjunction
              * with the call to Debugger::findCompartmentEdges below) that debugger
              * and debuggee objects are always swept in the same group.
              */
-            JS::Zone *w = other->zone();
+            JS::Zone *w = other->tenuredZone();
             if (w->isGCMarking())
                 finder.addEdgeTo(w);
         }
 
 #ifdef DEBUG
         JSObject *wrapper = &e.front().value.toObject();
         JS_ASSERT_IF(IsFunctionProxy(wrapper), &GetProxyCall(wrapper).toObject() == other);
 #endif
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -1077,16 +1077,18 @@ struct TypeObject : gc::Cell
 
     /*
      * Type objects don't have explicit finalizers. Memory owned by a type
      * object pending deletion is released when weak references are swept
      * from all the compartment's type objects.
      */
     void finalize(FreeOp *fop) {}
 
+    JS::Zone *zone() const { return tenuredZone(); }
+
     static inline void writeBarrierPre(TypeObject *type);
     static inline void writeBarrierPost(TypeObject *type, void *addr);
     static inline void readBarrier(TypeObject *type);
 
     static inline ThingRootKind rootKind() { return THING_ROOT_TYPE_OBJECT; }
 
   private:
     inline uint32_t basePropertyCount() const;
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -947,16 +947,18 @@ class JSScript : public js::gc::Cell
     bool stepModeEnabled() { return hasDebugScript && !!debugScript()->stepMode; }
 
 #ifdef DEBUG
     uint32_t stepModeCount() { return hasDebugScript ? (debugScript()->stepMode & stepCountMask) : 0; }
 #endif
 
     void finalize(js::FreeOp *fop);
 
+    JS::Zone *zone() const { return tenuredZone(); }
+
     static inline void writeBarrierPre(js::RawScript script);
     static inline void writeBarrierPost(js::RawScript script, void *addr);
 
     static inline js::ThingRootKind rootKind() { return js::THING_ROOT_SCRIPT; }
 
     static JSPrincipals *normalizeOriginPrincipals(JSPrincipals *principals,
                                                    JSPrincipals *originPrincipals) {
         return originPrincipals ? originPrincipals : principals;
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -805,18 +805,18 @@ js::IsDeadProxyObject(RawObject obj)
     return IsProxy(obj) && GetProxyHandler(obj) == &DeadObjectProxy::singleton;
 }
 
 static void
 NukeSlot(JSObject *wrapper, uint32_t slot, Value v)
 {
     Value old = wrapper->getSlot(slot);
     if (old.isMarkable()) {
-        Cell *cell = static_cast<Cell *>(old.toGCThing());
-        AutoMarkInDeadZone amd(cell->zone());
+        Zone *zone = ZoneOfValue(old);
+        AutoMarkInDeadZone amd(zone);
         wrapper->setReservedSlot(slot, v);
     } else {
         wrapper->setReservedSlot(slot, v);
     }
 }
 
 void
 js::NukeCrossCompartmentWrapper(JSContext *cx, JSObject *wrapper)
--- a/js/src/vm/ObjectImpl-inl.h
+++ b/js/src/vm/ObjectImpl-inl.h
@@ -312,16 +312,31 @@ js::ObjectImpl::dynamicSlotsCount(uint32
 }
 
 inline size_t
 js::ObjectImpl::sizeOfThis() const
 {
     return js::gc::Arena::thingSize(getAllocKind());
 }
 
+JS_ALWAYS_INLINE JS::Zone *
+js::ObjectImpl::zone() const
+{
+    return shape_->zone();
+}
+
+JS_ALWAYS_INLINE JS::Zone *
+ZoneOfValue(const JS::Value &value)
+{
+    JS_ASSERT(value.isMarkable());
+    if (value.isObject())
+        return value.toObject().zone();
+    return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZone();
+}
+
 /* static */ inline void
 js::ObjectImpl::readBarrier(ObjectImpl *obj)
 {
 #ifdef JSGC_INCREMENTAL
     Zone *zone = obj->zone();
     if (zone->needsBarrier()) {
         MOZ_ASSERT(!zone->rt->isHeapBusy());
         JSObject *tmp = obj->asObjectPtr();
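
ZoneOfValue() centralizes the pattern the earlier call sites (Barrier-inl.h, jswrapper.cpp) spelled out by hand: object values go through JSObject::zone(), everything else is known to be tenured. A hypothetical caller, shown only to illustrate the dispatch (InvalidateSlot and its body are invented for this sketch; writeBarrierPre(Zone *, const Value &) is the overload from Barrier-inl.h above):

    // Hypothetical call site, not part of the patch.
    static void
    InvalidateSlot(JSObject *obj, uint32_t slot)
    {
        JS::Value old = obj->getSlot(slot);
        if (old.isMarkable()) {
            // Safe even when 'old' holds a nursery object: the zone comes
            // from the object's tenured shape, never from an ArenaHeader.
            js::EncapsulatedValue::writeBarrierPre(ZoneOfValue(old), old);
        }
        obj->setReservedSlot(slot, JS::UndefinedValue());
    }
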
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -1365,16 +1365,17 @@ class ObjectImpl : public gc::Cell
         return elements != emptyObjectElements && elements != fixedElements();
     }
 
     inline bool hasEmptyElements() const {
         return elements == emptyObjectElements;
     }
 
     /* GC support. */
+    JS_ALWAYS_INLINE Zone *zone() const;
     static inline ThingRootKind rootKind() { return THING_ROOT_OBJECT; }
     static inline void readBarrier(ObjectImpl *obj);
     static inline void writeBarrierPre(ObjectImpl *obj);
     static inline void writeBarrierPost(ObjectImpl *obj, void *addr);
     inline void privateWriteBarrierPre(void **oldval);
     inline void privateWriteBarrierPost(void **pprivate);
     void markChildren(JSTracer *trc);
 
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -328,16 +328,17 @@ class BaseShape : public js::gc::Cell
     bool hasTable() const { JS_ASSERT_IF(table_, isOwned()); return table_ != NULL; }
     ShapeTable &table() const { JS_ASSERT(table_ && isOwned()); return *table_; }
     void setTable(ShapeTable *table) { JS_ASSERT(isOwned()); table_ = table; }
 
     uint32_t slotSpan() const { JS_ASSERT(isOwned()); return slotSpan_; }
     void setSlotSpan(uint32_t slotSpan) { JS_ASSERT(isOwned()); slotSpan_ = slotSpan; }
 
     JSCompartment *compartment() const { return compartment_; }
+    JS::Zone *zone() const { return tenuredZone(); }
 
     /* Lookup base shapes from the compartment's baseShapes table. */
     static UnownedBaseShape* getUnowned(JSContext *cx, const StackBaseShape &base);
 
     /* Get the canonical base shape. */
     inline UnownedBaseShape* unowned();
 
     /* Get the canonical base shape for an owned one. */
@@ -811,16 +812,18 @@ class Shape : public js::gc::Cell
     void dump(JSContext *cx, FILE *fp) const;
     void dumpSubtree(JSContext *cx, int level, FILE *fp) const;
 #endif
 
     void sweep();
     void finalize(FreeOp *fop);
     void removeChild(RawShape child);
 
+    JS::Zone *zone() const { return tenuredZone(); }
+
     static inline void writeBarrierPre(RawShape shape);
     static inline void writeBarrierPost(RawShape shape, void *addr);
 
     /*
      * All weak references need a read barrier for incremental GC. This getter
      * method implements the read barrier. It's used to obtain initial shapes
      * from the compartment.
      */
--- a/js/src/vm/String.h
+++ b/js/src/vm/String.h
@@ -409,16 +409,18 @@ class JSString : public js::gc::Cell
     static size_t offsetOfLengthAndFlags() {
         return offsetof(JSString, d.lengthAndFlags);
     }
 
     static size_t offsetOfChars() {
         return offsetof(JSString, d.u1.chars);
     }
 
+    JS::Zone *zone() const { return tenuredZone(); }
+
     static inline void writeBarrierPre(JSString *str);
     static inline void writeBarrierPost(JSString *str, void *addr);
     static inline bool needWriteBarrierPre(JS::Zone *zone);
     static inline void readBarrier(JSString *str);
 
     static inline js::ThingRootKind rootKind() { return js::THING_ROOT_STRING; }
 
 #ifdef DEBUG