Bug 1171780 - We no longer need to cast out of barriered types in GC; r=jonco
authorTerrence Cole <terrence@mozilla.com>
Fri, 05 Jun 2015 09:01:12 -0700
changeset 248078 2fd6e854642e13862e03894b99cbc66da2afdc2f
parent 248077 3e6ee3df29aa7ac0a3db30d5dffcbd31a7da55c8
child 248079 71b2118a180ca29631bb839bdcd9b2e859d74a30
push id: 28888
push user: kwierso@gmail.com
push date: Thu, 11 Jun 2015 01:29:45 +0000
treeherder: mozilla-central@04c057942da4 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jonco
bugs: 1171780
milestone41.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1171780 - We no longer need to cast out of barriered types in GC; r=jonco
js/public/HeapAPI.h
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/StoreBuffer.h
js/src/jspubtd.h
js/src/jsweakmap.cpp
js/src/jsweakmap.h
js/src/vm/Runtime.h
js/src/vm/TypeInference.cpp
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -383,17 +383,17 @@ GCThingIsMarkedGray(GCCellPtr thing)
 namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnTenuredGCThing(JS::shadow::Runtime* rt, const JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing);
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
-    if (!rt->needsIncrementalBarrier())
+    if (rt->isHeapBusy())
         return false;
     JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 /*
  * Create an object providing access to the garbage collector's internal notion
  * of the current state of memory (both GC heap memory and GCthing-controlled
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -831,25 +831,11 @@ class HeapSlotArray
 #ifdef DEBUG
         return allowWrite_;
 #else
         return true;
 #endif
     }
 };
 
-/*
- * Operations on a Heap thing inside the GC need to strip the barriers from
- * pointer operations. This template helps do that in contexts where the type
- * is templatized.
- */
-template <typename T> struct Unbarriered {};
-template <typename S> struct Unbarriered< PreBarriered<S> > { typedef S* type; };
-template <typename S> struct Unbarriered< RelocatablePtr<S> > { typedef S* type; };
-template <> struct Unbarriered<PreBarrieredValue> { typedef Value type; };
-template <> struct Unbarriered<RelocatableValue> { typedef Value type; };
-template <typename S> struct Unbarriered< DefaultHasher< PreBarriered<S> > > {
-    typedef DefaultHasher<S*> type;
-};
-
 } /* namespace js */
 
 #endif /* gc_Barrier_h */
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -1427,17 +1427,17 @@ TenuredCell::isInsideZone(JS::Zone* zone
     return zone == arenaHeader()->zone;
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 TenuredCell::readBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
     MOZ_ASSERT(!isNullLike(thing));
-    if (!thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
+    if (thing->shadowRuntimeFromAnyThread()->isHeapBusy())
         return;
 
     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
     if (shadowZone->needsIncrementalBarrier()) {
         MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == thing);
@@ -1445,17 +1445,17 @@ TenuredCell::readBarrier(TenuredCell* th
     if (thing->isMarked(GRAY))
         UnmarkGrayCellRecursively(thing, thing->getTraceKind());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 TenuredCell::writeBarrierPre(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-    if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
+    if (isNullLike(thing) || thing->shadowRuntimeFromAnyThread()->isHeapBusy())
         return;
 
     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
     if (shadowZone->needsIncrementalBarrier()) {
         MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
         Cell* tmp = thing;
         TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp, "pre barrier");
         MOZ_ASSERT(tmp == thing);
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -326,16 +326,18 @@ class StoreBuffer
 
       private:
         TraceCallback callback;
         Key* key;
         void* data;
     };
 
     bool isOkayToUseBuffer() const {
+        MOZ_ASSERT(!JS::shadow::Runtime::asShadowRuntime(runtime_)->isHeapBusy());
+
         /*
          * Disabled store buffers may not have a valid state; e.g. when stored
          * inline in the ChunkTrailer.
          */
         if (!isEnabled())
             return false;
 
         /*
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -158,19 +158,17 @@ struct Runtime
     js::gc::StoreBuffer* gcStoreBufferPtr_;
 
   public:
     Runtime()
       : heapState_(JS::HeapState::Idle)
       , gcStoreBufferPtr_(nullptr)
     {}
 
-    bool needsIncrementalBarrier() const {
-        return heapState_ == JS::HeapState::Idle;
-    }
+    bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }
 
     js::gc::StoreBuffer* gcStoreBufferPtr() { return gcStoreBufferPtr_; }
 
     static JS::shadow::Runtime* asShadowRuntime(JSRuntime* rt) {
         return reinterpret_cast<JS::shadow::Runtime*>(rt);
     }
 
   protected:
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -343,17 +343,17 @@ TryPreserveReflector(JSContext* cx, Hand
 }
 
 static inline void
 WeakMapPostWriteBarrier(JSRuntime* rt, ObjectValueMap* weakMap, JSObject* key)
 {
     // Strip the barriers from the type before inserting into the store buffer.
     // This will automatically ensure that barriers do not fire during GC.
     if (key && IsInsideNursery(key))
-        rt->gc.storeBuffer.putGeneric(UnbarrieredRef(weakMap, key));
+        rt->gc.storeBuffer.putGeneric(gc::HashKeyRef<ObjectValueMap, JSObject*>(weakMap, key));
 }
 
 static MOZ_ALWAYS_INLINE bool
 SetWeakMapEntryInternal(JSContext* cx, Handle<WeakMapObject*> mapObj,
                         HandleObject key, HandleValue value)
 {
     ObjectValueMap* map = mapObj->getMap();
     if (!map) {
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -253,60 +253,33 @@ class WeakMap : public HashMap<Key, Valu
                 tracer->trace(memberOf,
                               JS::GCCellPtr(r.front().key()),
                               JS::GCCellPtr(r.front().value()));
             }
         }
     }
 
     /* Rekey an entry when moved, ensuring we do not trigger barriers. */
-    void entryMoved(Enum& eArg, const Key& k) {
-        typedef typename HashMap<typename Unbarriered<Key>::type,
-                                 typename Unbarriered<Value>::type,
-                                 typename Unbarriered<HashPolicy>::type,
-                                 RuntimeAllocPolicy>::Enum UnbarrieredEnum;
-        UnbarrieredEnum& e = reinterpret_cast<UnbarrieredEnum&>(eArg);
-        e.rekeyFront(reinterpret_cast<const typename Unbarriered<Key>::type&>(k));
+    void entryMoved(Enum& e, const Key& k) {
+        e.rekeyFront(k);
     }
 
 protected:
     void assertEntriesNotAboutToBeFinalized() {
 #if DEBUG
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
             Key k(r.front().key());
             MOZ_ASSERT(!gc::IsAboutToBeFinalized(&k));
             MOZ_ASSERT(!gc::IsAboutToBeFinalized(&r.front().value()));
             MOZ_ASSERT(k == r.front().key());
         }
 #endif
     }
 };
 
-/*
- * At times, you will need to ignore barriers when accessing WeakMap entries.
- * Localize the templatized casting craziness here.
- */
-template <class Key, class Value>
-static inline gc::HashKeyRef<HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy>, Key>
-UnbarrieredRef(WeakMap<PreBarriered<Key>, RelocatablePtr<Value>>* map, Key key)
-{
-    /*
-     * Some compilers complain about instantiating the WeakMap class for
-     * unbarriered type arguments, so we cast to a HashMap instead. Because of
-     * WeakMap's multiple inheritance, we need to do this in two stages, first
-     * to the HashMap base class and then to the unbarriered version.
-     */
-
-    typedef typename WeakMap<PreBarriered<Key>, RelocatablePtr<Value>>::Base BaseMap;
-    auto baseMap = static_cast<BaseMap*>(map);
-    typedef HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy> UnbarrieredMap;
-    typedef gc::HashKeyRef<UnbarrieredMap, Key> UnbarrieredKeyRef;
-    return UnbarrieredKeyRef(reinterpret_cast<UnbarrieredMap*>(baseMap), key);
-}
-
 /* WeakMap methods exposed so they can be installed in the self-hosting global. */
 
 extern JSObject*
 InitBareWeakMapCtor(JSContext* cx, js::HandleObject obj);
 
 extern bool
 WeakMap_has(JSContext* cx, unsigned argc, Value* vp);
 
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -998,17 +998,16 @@ struct JSRuntime : public JS::shadow::Ru
 #endif
 
     /* Garbage collector state, used by jsgc.c. */
     js::gc::GCRuntime   gc;
 
     /* Garbage collector state has been sucessfully initialized. */
     bool                gcInitialized;
 
-    bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }
     bool isHeapMajorCollecting() const { return heapState_ == JS::HeapState::MajorCollecting; }
     bool isHeapMinorCollecting() const { return heapState_ == JS::HeapState::MinorCollecting; }
     bool isHeapCollecting() const { return isHeapMinorCollecting() || isHeapMajorCollecting(); }
 
     int gcZeal() { return gc.zeal(); }
 
     void lockGC() {
         assertCanLock(js::GCLock);
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -3361,17 +3361,17 @@ PreliminaryObjectArrayWithTemplate::trac
         TraceEdge(trc, &shape_, "PreliminaryObjectArrayWithTemplate_shape");
 }
 
 /* static */ void
 PreliminaryObjectArrayWithTemplate::writeBarrierPre(PreliminaryObjectArrayWithTemplate* objects)
 {
     Shape* shape = objects->shape();
 
-    if (!shape || !shape->runtimeFromAnyThread()->needsIncrementalBarrier())
+    if (!shape || shape->runtimeFromAnyThread()->isHeapBusy())
         return;
 
     JS::Zone* zone = shape->zoneFromAnyThread();
     if (zone->needsIncrementalBarrier())
         objects->trace(zone->barrierTracer());
 }
 
 // Return whether shape consists entirely of plain data properties.
@@ -3913,17 +3913,17 @@ TypeNewScript::trace(JSTracer* trc)
 
     if (initializedGroup_)
         TraceEdge(trc, &initializedGroup_, "TypeNewScript_initializedGroup");
 }
 
 /* static */ void
 TypeNewScript::writeBarrierPre(TypeNewScript* newScript)
 {
-    if (!newScript->function()->runtimeFromAnyThread()->needsIncrementalBarrier())
+    if (newScript->function()->runtimeFromAnyThread()->isHeapBusy())
         return;
 
     JS::Zone* zone = newScript->function()->zoneFromAnyThread();
     if (zone->needsIncrementalBarrier())
         newScript->trace(zone->barrierTracer());
 }
 
 void