Bug 852802 - Add incremental needsBarrier to the runtime and check it first; r=billm
(backed out by b839ca9cf702)
author: Terrence Cole <terrence@mozilla.com>
date: Tue, 19 Mar 2013 17:59:42 -0700
changeset: 126479:3af927a8260cea080dd734115c415f20c764654c
parent: 126467:123017ccf036c2d0bc6f459481420640e8456718
child: 126480:574122faa48533b8aca098d2bf4da279079a6da9
push id: 24485
push user: ryanvm@gmail.com
push date: Thu, 28 Mar 2013 12:31:20 +0000
reviewers: billm
bugs: 852802
milestone: 22.0a1
js/public/HeapAPI.h
js/src/gc/Barrier-inl.h
js/src/gc/Heap.h
js/src/gc/Verifier.cpp
js/src/ion/Ion.cpp
js/src/jscntxt.h
js/src/jsgc.cpp
js/src/jsinferinlines.h
js/src/jspubtd.h
js/src/jsscriptinlines.h
js/src/vm/ObjectImpl-inl.h
js/src/vm/Shape-inl.h
js/src/vm/String-inl.h
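
The pattern the patch applies throughout, distilled into a minimal standalone sketch: check a single runtime-wide flag before touching the per-zone flag. Runtime and Zone below are hypothetical stand-ins for JSRuntime and JS::Zone, not the real types.

    #include <vector>

    // Hypothetical stand-ins for JSRuntime and JS::Zone; only the flag
    // layout introduced by this patch is modeled.
    struct Zone {
        bool needsBarrier_ = false;
    };

    struct Runtime {
        // Cached OR of every zone's needsBarrier_, so barriers can bail
        // out after a single load when no incremental GC is active.
        bool needsBarrier_ = false;
        std::vector<Zone*> zones;

        void setZoneBarrier(Zone* zone, bool needs) {
            zone->needsBarrier_ = needs;
            // Invariant from AssertNeedsBarrierFlagsConsistent below: the
            // runtime flag is true iff any zone's flag is true.
            bool any = false;
            for (Zone* z : zones)
                any |= z->needsBarrier_;
            needsBarrier_ = any;
        }
    };

    // The shape every writeBarrierPre in this patch takes on.
    inline bool barrierNeeded(Runtime* rt, Zone* zone) {
        if (!rt->needsBarrier_)    // cheap early exit: no GC in progress
            return false;
        return zone->needsBarrier_;
    }

Every writeBarrierPre changed below follows barrierNeeded's shape: one load of the runtime flag short-circuits the barrier entirely when no incremental GC or pre-barrier verifier is active.
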
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -2,16 +2,18 @@
  */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef js_heap_api_h___
 #define js_heap_api_h___
 
+#include "jspubtd.h"
+
 /* These values are private to the JS engine. */
 namespace js {
 namespace gc {
 
 /*
  * Page size must be static to support our arena pointer optimizations, so we
  * are forced to support each platform with non-4096 pages as a special case.
  * Note: The freelist supports a maximum arena shift of 15.
@@ -38,16 +40,17 @@ const size_t ChunkMask = ChunkSize - 1;
 
 const size_t CellShift = 3;
 const size_t CellSize = size_t(1) << CellShift;
 const size_t CellMask = CellSize - 1;
 
 /* These are magic constants derived from actual offsets in gc/Heap.h. */
 const size_t ChunkMarkBitmapOffset = 1032368;
 const size_t ChunkMarkBitmapBits = 129024;
+const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
 
 /*
  * Live objects are marked black. How many other additional colors are available
  * depends on the size of the GCThing. Objects marked gray are eligible for
  * cycle collection.
  */
 static const uint32_t BLACK = 0;
 static const uint32_t GRAY = 1;
@@ -84,16 +87,25 @@ static JS_ALWAYS_INLINE uintptr_t *
 GetGCThingMarkBitmap(const void *thing)
 {
     uintptr_t addr = uintptr_t(thing);
     addr &= ~js::gc::ChunkMask;
     addr |= js::gc::ChunkMarkBitmapOffset;
     return reinterpret_cast<uintptr_t *>(addr);
 }
 
+static JS_ALWAYS_INLINE JS::shadow::Runtime *
+GetGCThingRuntime(const void *thing)
+{
+    uintptr_t addr = uintptr_t(thing);
+    addr &= ~js::gc::ChunkMask;
+    addr |= js::gc::ChunkRuntimeOffset;
+    return reinterpret_cast<JS::shadow::Runtime *>(addr);
+}
+
 static JS_ALWAYS_INLINE void
 GetGCThingMarkWordAndMask(const void *thing, uint32_t color,
                           uintptr_t **wordp, uintptr_t *maskp)
 {
     uintptr_t addr = uintptr_t(thing);
     size_t bit = (addr & js::gc::ChunkMask) / js::gc::CellSize + color;
     JS_ASSERT(bit < js::gc::ChunkMarkBitmapBits);
     uintptr_t *bitmap = GetGCThingMarkBitmap(thing);
@@ -134,15 +146,18 @@ GCThingIsMarkedGray(void *thing)
     uintptr_t *word, mask;
     js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
     return *word & mask;
 }
 
 static JS_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnGCThing(void *thing, JSGCTraceKind kind)
 {
+    shadow::Runtime *rt = js::gc::GetGCThingRuntime(thing);
+    if (!rt->needsBarrier_)
+        return false;
     js::Zone *zone = GetGCThingZone(thing);
     return reinterpret_cast<shadow::Zone *>(zone)->needsBarrier_;
 }
 
 } /* namespace JS */
 
 #endif /* js_heap_api_h___ */
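
A worked example of the address arithmetic behind GetGCThingRuntime, assuming (as in this era of the engine) 1 MiB chunks on a 64-bit platform; the addresses are made up:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    // Assumed values: 1 MiB GC chunks, 64-bit pointers, runtime pointer
    // stored in the chunk's last word.
    const size_t ChunkShift = 20;
    const size_t ChunkSize = size_t(1) << ChunkShift;
    const size_t ChunkMask = ChunkSize - 1;
    const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);

    int main() {
        // A made-up interior pointer to a GC thing in a chunk based at
        // 0x7f0000100000 (which is ChunkSize-aligned).
        uintptr_t thing = 0x7f0000100000 + 0x4a130;

        uintptr_t chunkBase = thing & ~ChunkMask;          // clear low 20 bits
        uintptr_t runtimeSlot = chunkBase | ChunkRuntimeOffset;

        assert(chunkBase == 0x7f0000100000);
        printf("runtime pointer lives at %#zx\n", (size_t)runtimeSlot);
        return 0;
    }

Because every GC thing is allocated inside a ChunkSize-aligned chunk, and the chunk stores its owning runtime in its last word, two bit operations turn any interior pointer into the location of the runtime pointer; the only memory access is the final load.
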
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -66,26 +66,27 @@ EncapsulatedValue::operator=(const Encap
     value = v.get();
     return *this;
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
-    if (value.isMarkable())
+    if (value.isMarkable() && runtime(value)->needsBarrier())
         writeBarrierPre(ZoneOfValue(value), value);
 #endif
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
     if (zone->needsBarrier()) {
+        JS_ASSERT_IF(value.isMarkable(), runtime(value)->needsBarrier());
         Value tmp(value);
         js::gc::MarkValueUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == value);
     }
 #endif
 }
 
 inline void
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -820,16 +820,17 @@ struct Chunk
     /* Unlink and return the freeArenasHead. */
     inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);
 
     inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
 };
 
 JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
 JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapOffset == offsetof(Chunk, bitmap));
+JS_STATIC_ASSERT(js::gc::ChunkRuntimeOffset == offsetof(Chunk, info) + offsetof(ChunkInfo, runtime));
 
 inline uintptr_t
 ArenaHeader::address() const
 {
     uintptr_t addr = reinterpret_cast<uintptr_t>(this);
     JS_ASSERT(!(addr & ArenaMask));
     JS_ASSERT(Chunk::withinArenasRange(addr));
     return addr;
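
This JS_STATIC_ASSERT keeps the hard-coded ChunkRuntimeOffset in the public HeapAPI.h honest: the public header cannot see the private Chunk layout, so it ships a magic constant, and the private header verifies it at compile time. The idiom in miniature, with hypothetical names:

    #include <stddef.h>

    // Public-header side: a hard-coded layout constant.
    const size_t InfoOffset = 24;

    // Private-header side: the real types...
    struct Info { void* runtime; };
    struct Block {
        char payload[24];
        Info info;
    };

    // ...and a compile-time check that fails the build if either side
    // drifts, just as the assert above ties ChunkRuntimeOffset to
    // offsetof(Chunk, info) + offsetof(ChunkInfo, runtime).
    static_assert(InfoOffset == offsetof(Block, info),
                  "public offset constant is out of sync with Block layout");
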
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -529,16 +529,17 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
 
         node = NextNode(node);
     }
 
     rt->gcVerifyPreData = trc;
     rt->gcIncrementalState = MARK;
     rt->gcMarker.start();
 
+    rt->setNeedsBarrier(true);
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         PurgeJITCaches(zone);
         zone->setNeedsBarrier(true, Zone::UpdateIon);
         zone->allocator.arenas.purge();
     }
 
     return;
 
@@ -612,16 +613,17 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
     /* We need to disable barriers before tracing, which may invoke barriers. */
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         if (!zone->needsBarrier())
             compartmentCreated = true;
 
         zone->setNeedsBarrier(false, Zone::UpdateIon);
         PurgeJITCaches(zone);
     }
+    rt->setNeedsBarrier(false);
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
     JS_ASSERT(trc->number == rt->gcNumber);
     rt->gcNumber++;
 
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -451,17 +451,17 @@ IonCode::readBarrier(IonCode *code)
         MarkIonCodeUnbarriered(zone->barrierTracer(), &code, "ioncode read barrier");
 #endif
 }
 
 void
 IonCode::writeBarrierPre(IonCode *code)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!code)
+    if (!code || !code->runtime()->needsBarrier())
         return;
 
     Zone *zone = code->zone();
     if (zone->needsBarrier())
         MarkIonCodeUnbarriered(zone->barrierTracer(), &code, "ioncode write barrier");
 #endif
 }
 
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -627,31 +627,37 @@ class MarkingValidator;
 } // namespace gc
 
 class JS_FRIEND_API(AutoEnterPolicy);
 
 typedef Vector<JS::Zone *, 1, SystemAllocPolicy> ZoneVector;
 
 } // namespace js
 
-struct JSRuntime : js::RuntimeFriendFields,
+struct JSRuntime : private JS::shadow::Runtime,
                    public js::MallocProvider<JSRuntime>
 {
     /*
      * Per-thread data for the main thread that is associated with
      * this JSRuntime, as opposed to any worker threads used in
      * parallel sections.  See definition of |PerThreadData| struct
      * above for more details.
      *
      * NB: This field is statically asserted to be at offset
-     * sizeof(RuntimeFriendFields). See
+     * sizeof(js::shadow::Runtime). See
      * PerThreadDataFriendFields::getMainThread.
      */
     js::PerThreadData   mainThread;
 
+    /*
+     * If non-zero, we have been asked to call the operation callback as soon
+     * as possible.
+     */
+    volatile int32_t    interrupt;
+
     /* Default compartment. */
     JSCompartment       *atomsCompartment;
 
     /* Embedders can use this zone however they wish. */
     JS::Zone            *systemZone;
 
     /* List of compartments and zones (protected by the GC lock). */
     js::ZoneVector      zones;
@@ -1078,16 +1084,24 @@ struct JSRuntime : js::RuntimeFriendFiel
   private:
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from gcMaxMallocBytes down to zero.
      */
     volatile ptrdiff_t  gcMallocBytes;
 
   public:
+    void setNeedsBarrier(bool needs) {
+        needsBarrier_ = needs;
+    }
+
+    bool needsBarrier() const {
+        return needsBarrier_;
+    }
+
     /*
      * The trace operations to trace embedding-specific GC roots. One is for
      * tracing through black roots and the other is for tracing through gray
      * roots. The black/gray distinction is only relevant to the cycle
      * collector.
      */
     JSTraceDataOp       gcBlackRootsTraceOp;
     void                *gcBlackRootsData;
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3194,16 +3194,27 @@ FinishMarkingValidation(JSRuntime *rt)
 {
 #ifdef DEBUG
     js_delete(rt->gcMarkingValidator);
     rt->gcMarkingValidator = NULL;
 #endif
 }
 
 static void
+AssertNeedsBarrierFlagsConsistent(JSRuntime *rt)
+{
+#ifdef DEBUG
+    bool anyNeedsBarrier = false;
+    for (ZonesIter zone(rt); !zone.done(); zone.next())
+        anyNeedsBarrier |= zone->needsBarrier();
+    JS_ASSERT(rt->needsBarrier() == anyNeedsBarrier);
+#endif
+}
+
+static void
 DropStringWrappers(JSRuntime *rt)
 {
     /*
      * String "wrappers" are dropped on GC because their presence would require
      * us to sweep the wrappers in all compartments every time we sweep a
      * compartment group.
      */
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
@@ -3320,16 +3331,18 @@ GetNextZoneGroup(JSRuntime *rt)
         JS_ASSERT(!rt->gcIsIncremental);
         for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(!zone->gcNextGraphComponent);
             JS_ASSERT(zone->isGCMarking());
             zone->setNeedsBarrier(false, Zone::UpdateIon);
             zone->setGCState(Zone::NoGC);
             zone->gcGrayRoots.clearAndFree();
         }
+        rt->setNeedsBarrier(false);
+        AssertNeedsBarrierFlagsConsistent(rt);
 
         for (GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
             ArrayBufferObject::resetArrayBufferList(comp);
             ResetGrayList(comp);
         }
 
         rt->gcAbortSweepAfterCurrentGroup = false;
         rt->gcCurrentZoneGroup = NULL;
@@ -4085,16 +4098,18 @@ ResetIncrementalGC(JSRuntime *rt, const 
             ResetGrayList(c);
         }
 
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(zone->isGCMarking());
             zone->setNeedsBarrier(false, Zone::UpdateIon);
             zone->setGCState(Zone::NoGC);
         }
+        rt->setNeedsBarrier(false);
+        AssertNeedsBarrierFlagsConsistent(rt);
 
         rt->gcIncrementalState = NO_INCREMENTAL;
 
         JS_ASSERT(!rt->gcStrictCompartmentChecking);
 
         break;
       }
 
@@ -4161,29 +4176,35 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
          */
         if (zone->isGCMarking()) {
             JS_ASSERT(zone->needsBarrier());
             zone->setNeedsBarrier(false, Zone::DontUpdateIon);
         } else {
             JS_ASSERT(!zone->needsBarrier());
         }
     }
+    rt->setNeedsBarrier(false);
+    AssertNeedsBarrierFlagsConsistent(rt);
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
+    bool haveBarriers = false;
     for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
             zone->setNeedsBarrier(true, Zone::UpdateIon);
             zone->allocator.arenas.prepareForIncrementalGC(runtime);
+            haveBarriers = true;
         } else {
             zone->setNeedsBarrier(false, Zone::UpdateIon);
         }
     }
+    runtime->setNeedsBarrier(haveBarriers);
+    AssertNeedsBarrierFlagsConsistent(runtime);
 }
 
 static void
 PushZealSelectedObjects(JSRuntime *rt)
 {
 #ifdef JS_GC_ZEAL
     /* Push selected objects onto the mark stack and clear the list. */
     for (JSObject **obj = rt->gcSelectedForMarking.begin();
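
AutoGCSlice reduced to an RAII sketch, reusing the hypothetical Runtime/Zone stand-ins from the first example: barriers go off on entry so the slice itself runs barrier-free, and on exit they are re-armed only for zones still marking, with the runtime flag recomputed as the aggregate.

    // Hypothetical stand-ins again; `marking` plays the role of
    // Zone::isGCMarking().
    struct Zone {
        bool needsBarrier_ = false;
        bool marking = false;
    };

    struct Runtime {
        bool needsBarrier_ = false;
        Zone zones[4];
    };

    class AutoGCSlice {
        Runtime* rt;
      public:
        explicit AutoGCSlice(Runtime* rt) : rt(rt) {
            // The slice itself must not trigger pre-barriers.
            for (Zone& z : rt->zones)
                z.needsBarrier_ = false;
            rt->needsBarrier_ = false;
        }
        ~AutoGCSlice() {
            // Re-arm barriers only for zones still being marked and
            // cache the aggregate in the runtime-wide flag.
            bool any = false;
            for (Zone& z : rt->zones) {
                z.needsBarrier_ = z.marking;
                any |= z.marking;
            }
            rt->needsBarrier_ = any;
        }
    };
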
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -1659,17 +1659,17 @@ TypeObject::getProperty(unsigned i)
     }
     return propertySet[i];
 }
 
 inline void
 TypeObject::writeBarrierPre(TypeObject *type)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!type)
+    if (!type || !type->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = type->zone();
     if (zone->needsBarrier()) {
         TypeObject *tmp = type;
         MarkTypeObjectUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == type);
     }
@@ -1693,17 +1693,17 @@ TypeObject::readBarrier(TypeObject *type
     }
 #endif
 }
 
 inline void
 TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!newScript)
+    if (!newScript || !newScript->fun->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = newScript->fun->zone();
     if (zone->needsBarrier()) {
         MarkObject(zone->barrierTracer(), &newScript->fun, "write barrier");
         MarkShape(zone->barrierTracer(), &newScript->shape, "write barrier");
     }
 #endif
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -209,16 +209,30 @@ typedef struct JSString                 
 typedef struct PRCallOnceType    JSCallOnceType;
 #else
 typedef JSBool                   JSCallOnceType;
 #endif
 typedef JSBool                 (*JSInitCallback)(void);
 
 #ifdef __cplusplus
 
+namespace JS {
+namespace shadow {
+
+struct Runtime
+{
+    /* Set to true when any zone's needsBarrier_ flag is set. */
+    bool needsBarrier_;
+
+    Runtime() : needsBarrier_(false) {}
+};
+
+} /* namespace shadow */
+} /* namespace JS */
+
 namespace js {
 
 class Allocator;
 
 class SkipRoot;
 
 enum ThingRootKind
 {
@@ -295,40 +309,25 @@ struct ContextFriendFields {
      * suppress false positives which occur when a rooting analysis treats the
      * location as holding a relocatable pointer, but have no other effect on
      * GC behavior.
      */
     SkipRoot *skipGCRooters;
 #endif
 };
 
-struct RuntimeFriendFields {
-    /*
-     * If non-zero, we were been asked to call the operation callback as soon
-     * as possible.
-     */
-    volatile int32_t    interrupt;
-
-    RuntimeFriendFields()
-      : interrupt(0) { }
-
-    static const RuntimeFriendFields *get(const JSRuntime *rt) {
-        return reinterpret_cast<const RuntimeFriendFields *>(rt);
-    }
-};
-
 class PerThreadData;
 
 struct PerThreadDataFriendFields
 {
   private:
     // Note: this type only exists to permit us to derive the offset of
     // the perThread data within the real JSRuntime* type in a portable
     // way.
-    struct RuntimeDummy : RuntimeFriendFields
+    struct RuntimeDummy : JS::shadow::Runtime
     {
         struct PerThreadDummy {
             void *field1;
             uintptr_t field2;
 #ifdef DEBUG
             uint64_t field3;
 #endif
         } mainThread;
@@ -363,24 +362,24 @@ struct PerThreadDataFriendFields
 
     static const size_t RuntimeMainThreadOffset = offsetof(RuntimeDummy, mainThread);
 
     static inline PerThreadDataFriendFields *get(js::PerThreadData *pt) {
         return reinterpret_cast<PerThreadDataFriendFields *>(pt);
     }
 
     static inline PerThreadDataFriendFields *getMainThread(JSRuntime *rt) {
-        // mainThread must always appear directly after |RuntimeFriendFields|.
+        // mainThread must always appear directly after |JS::shadow::Runtime|.
         // Tested by a JS_STATIC_ASSERT in |jsfriendapi.cpp|
         return reinterpret_cast<PerThreadDataFriendFields *>(
             reinterpret_cast<char*>(rt) + RuntimeMainThreadOffset);
     }
 
     static inline const PerThreadDataFriendFields *getMainThread(const JSRuntime *rt) {
-        // mainThread must always appear directly after |RuntimeFriendFields|.
+        // mainThread must always appear directly after |JS::shadow::Runtime|.
         // Tested by a JS_STATIC_ASSERT in |jsfriendapi.cpp|
         return reinterpret_cast<const PerThreadDataFriendFields *>(
             reinterpret_cast<const char*>(rt) + RuntimeMainThreadOffset);
     }
 };
 
 } /* namespace js */
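
The shadow-struct idiom that JS::shadow::Runtime relies on, in a minimal standalone form (Shadow and Engine are illustrative names): a public struct mirrors the leading fields of a private type, and because the real type derives from it first, code holding only an opaque pointer can reinterpret_cast to read those fields without seeing the full definition.

    #include <assert.h>

    // Public header: a shadow of the private type's leading fields.
    struct Shadow {
        bool needsBarrier_;
        Shadow() : needsBarrier_(false) {}
    };

    // Private header: the real type derives from the shadow first, so
    // the shared fields sit at the front of every instance.
    struct Engine : private Shadow {
        int internalState;
        Engine() : internalState(0) {}
        bool needsBarrier() const { return needsBarrier_; }
    };

    // Friend code holding only an opaque pointer can still read the flag.
    inline bool EngineNeedsBarrier(const void* engine) {
        return reinterpret_cast<const Shadow*>(engine)->needsBarrier_;
    }

    int main() {
        Engine e;
        assert(!EngineNeedsBarrier(&e));
        return 0;
    }

The cast is safe only because the shadow base lands at offset zero, which compilers do in practice for this shape of hierarchy; the JS_STATIC_ASSERT in jsfriendapi.cpp mentioned above plays the same watchdog role for mainThread's offset.
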
 
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -175,17 +175,17 @@ JSScript::destroyMJITInfo(js::FreeOp *fo
     mJITInfo = NULL;
 }
 #endif /* JS_METHODJIT */
 
 inline void
 JSScript::writeBarrierPre(js::RawScript script)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!script)
+    if (!script || !script->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = script->zone();
     if (zone->needsBarrier()) {
         JS_ASSERT(!zone->rt->isHeapBusy());
         js::RawScript tmp = script;
         MarkScriptUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == script);
--- a/js/src/vm/ObjectImpl-inl.h
+++ b/js/src/vm/ObjectImpl-inl.h
@@ -381,17 +381,17 @@ js::ObjectImpl::privateWriteBarrierPost(
 /* static */ inline void
 js::ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
 #ifdef JSGC_INCREMENTAL
     /*
      * This would normally be a null test, but TypeScript::global uses 0x1 as a
      * special value.
      */
-    if (IsNullTaggedPointer(obj))
+    if (IsNullTaggedPointer(obj) || !obj->runtime()->needsBarrier())
         return;
 
     Zone *zone = obj->zone();
     if (zone->needsBarrier()) {
         MOZ_ASSERT(!zone->rt->isHeapBusy());
         JSObject *tmp = obj->asObjectPtr();
         MarkObjectUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         MOZ_ASSERT(tmp == obj->asObjectPtr());
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -401,17 +401,17 @@ EmptyShape::EmptyShape(RawUnownedBaseSha
     if (!getObjectClass()->isNative())
         flags |= NON_NATIVE;
 }
 
 inline void
 Shape::writeBarrierPre(RawShape shape)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!shape)
+    if (!shape || !shape->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = shape->zone();
     if (zone->needsBarrier()) {
         RawShape tmp = shape;
         MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == shape);
     }
@@ -444,17 +444,17 @@ Shape::markChildren(JSTracer *trc)
     if (parent)
         MarkShape(trc, &parent, "parent");
 }
 
 inline void
 BaseShape::writeBarrierPre(RawBaseShape base)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!base)
+    if (!base || !base->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = base->zone();
     if (zone->needsBarrier()) {
         RawBaseShape tmp = base;
         MarkBaseShapeUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == base);
     }
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -102,17 +102,17 @@ StringWriteBarrierPostRemove(JSRuntime *
 }
 
 } /* namespace js */
 
 inline void
 JSString::writeBarrierPre(JSString *str)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!str)
+    if (!str || !str->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = str->zone();
     if (zone->needsBarrier()) {
         JSString *tmp = str;
         MarkStringUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == str);
     }