Bug 1209704 - Part 3: Share tracing accessors between all barrier classes; r=jonco
author Terrence Cole <terrence@mozilla.com>
Thu, 01 Oct 2015 14:06:55 -0700
changeset 265808 d36103a859acec5e091d0c2160cd81d3dff27ad4
parent 265807 b297bcfe050499da98ed6de2bfff66e58fdb48a0
child 265809 02e6762578c6ea76b5642839b68e8cb414eb5634
push id 29470
push user philringnalda@gmail.com
push date Sat, 03 Oct 2015 22:38:52 +0000
treeherder mozilla-central@2e62c35afb2c
reviewers jonco
bugs 1209704
milestone 44.0a1
js/src/builtin/TypedObject.h
js/src/gc/Barrier.h
js/src/gc/Marking.cpp
js/src/jit/BaselineIC.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsobj.cpp
js/src/vm/ScopeObject.cpp
--- a/js/src/builtin/TypedObject.h
+++ b/js/src/builtin/TypedObject.h
@@ -588,17 +588,17 @@ class TypedObject : public JSObject
     // User-accessible constructor (`new TypeDescriptor(...)`). Note that the
     // callee here is the type descriptor.
     static bool construct(JSContext* cx, unsigned argc, Value* vp);
 
     /* Accessors for self hosted code. */
     static bool GetBuffer(JSContext* cx, unsigned argc, Value* vp);
     static bool GetByteOffset(JSContext* cx, unsigned argc, Value* vp);
 
-    Shape** addressOfShapeFromGC() { return shape_.unsafeGet(); }
+    Shape** addressOfShapeFromGC() { return shape_.unsafeUnbarrieredForTracing(); }
 };
 
 typedef Handle<TypedObject*> HandleTypedObject;
 
 class OutlineTypedObject : public TypedObject
 {
     // The object which owns the data this object points to. Because this
     // pointer is managed in tandem with |data|, this is not a HeapPtr and
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -328,16 +328,22 @@ class BarrieredBase : public BarrieredBa
     }
 
     // Storage for all barrier classes. |value| must be a GC thing reference
     // type: either a direct pointer to a GC thing or a supported tagged
     // pointer that can reference GC things, such as JS::Value or jsid. Nested
     // barrier types are NOT supported. See assertTypeConstraints.
     T value;
 
+  public:
+    // Note: this is public because C++ cannot friend a specific template instantiation.
+    // Friending the generic template leads to a number of unintended consequences,
+    // including template resolution ambiguity and a circular dependency with Tracing.h.
+    T* unsafeUnbarrieredForTracing() { return &value; }
+
   private:
 #ifdef DEBUG
     // Static type assertions about T must be moved out of line to avoid
     // circular dependencies between Barrier classes and GC memory definitions.
     void assertTypeConstraints() const;
 #endif
 };
 
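The comment above is the crux of this part: the accessor is public because restricting friendship to the tracing code is impractical. A minimal sketch of the constraint, with hypothetical names standing in for the real Tracing.h API:

    // All names below are illustrative stand-ins, not SpiderMonkey's.
    struct Tracer;
    template <typename T> class Barriered;

    // Befriending the single instantiation TraceEdge<T> compiles only if this
    // declaration is already visible -- for Barrier.h that would mean pulling
    // the Tracing.h API into the header, a circular include.
    template <typename T> void TraceEdge(Tracer*, Barriered<T>*);

    template <typename T>
    class Barriered {
        // One option: friend exactly one instantiation (requires the
        // declaration above, hence the cycle).
        friend void TraceEdge<T>(Tracer*, Barriered<T>*);
        // The other option, friending the generic template, grants access to
        // every instantiation and can make overload resolution ambiguous:
        //     template <typename U> friend void TraceEdge(Tracer*, Barriered<U>*);
      protected:
        T value;
    };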
@@ -348,28 +354,24 @@ class WriteBarrieredBase : public Barrie
   protected:
     // WriteBarrieredBase is not directly instantiable.
     explicit WriteBarrieredBase(T v) : BarrieredBase<T>(v) {}
 
   public:
     DECLARE_POINTER_COMPARISON_OPS(T);
     DECLARE_POINTER_CONSTREF_OPS(T);
 
-    /* Use this if the automatic coercion to T isn't working. */
+    // Use this if the automatic coercion to T isn't working.
     const T& get() const { return this->value; }
 
-    /*
-     * Use these if you want to change the value without invoking barriers.
-     * Obviously this is dangerous unless you know the barrier is not needed.
-     */
-    T* unsafeGet() { return &this->value; }
-    const T* unsafeGet() const { return &this->value; }
+    // Use this if you want to change the value without invoking barriers.
+    // Obviously this is dangerous unless you know the barrier is not needed.
     void unsafeSet(T v) { this->value = v; }
 
-    /* For users who need to manually barrier the raw types. */
+    // For users who need to manually barrier the raw types.
     static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
 
   protected:
     void pre() { InternalGCMethods<T>::preBarrier(this->value); }
     void post(T prev, T next) { InternalGCMethods<T>::postBarrier(&this->value, prev, next); }
 };
 
 /*
@@ -529,20 +531,16 @@ class RelocatablePtr : public WriteBarri
 // Base class for barriered pointer types that intercept only reads.
 template <typename T>
 class ReadBarrieredBase : public BarrieredBase<T>
 {
   protected:
     // ReadBarrieredBase is not directly instantiable.
     explicit ReadBarrieredBase(T v) : BarrieredBase<T>(v) {}
 
-  public:
-    // For use by the GC.
-    T* unsafeGet() { return &this->value; }
-
   protected:
     void read() const { InternalGCMethods<T>::readBarrier(this->value); }
 };
 
 // Incremental GC requires that weak pointers have read barriers. This is mostly
 // an issue for empty shapes stored in JSCompartment. The problem happens when,
 // during an incremental GC, some JS code stores one of the compartment's empty
 // shapes into an object already marked black. Normally, this would not be a
@@ -550,38 +548,37 @@ class ReadBarrieredBase : public Barrier
 // when the GC started. However, since this is a weak pointer, it isn't. So we
 // may collect the empty shape even though a live object points to it. To fix
 // this, we mark these empty shapes black whenever they get read out.
 template <class T>
 class ReadBarriered : public ReadBarrieredBase<T>
 {
   public:
     ReadBarriered() : ReadBarrieredBase<T>(GCMethods<T>::initial()) {}
-    explicit ReadBarriered(T v) : ReadBarrieredBase<T>(v) {}
+    explicit ReadBarriered(const T& v) : ReadBarrieredBase<T>(v) {}
 
-    T get() const {
+    const T get() const {
         if (!InternalGCMethods<T>::isMarkable(this->value))
             return GCMethods<T>::initial();
         this->read();
         return this->value;
     }
 
-    T unbarrieredGet() const {
+    const T unbarrieredGet() const {
         return this->value;
     }
 
-    operator T() const { return get(); }
+    operator const T() const { return get(); }
 
-    T& operator*() const { return *get(); }
-    T operator->() const { return get(); }
+    const T& operator*() const { return *get(); }
+    const T operator->() const { return get(); }
 
-    T* unsafeGet() { return &this->value; }
     T const* unsafeGet() const { return &this->value; }
 
-    void set(T v) { this->value = v; }
+    void set(const T& v) { this->value = v; }
 };
 
 // Add Value operations to all Barrier types. Note, this must be defined before
 // HeapSlot for HeapSlot's base to get these operations.
 template <>
 class BarrieredBaseMixins<JS::Value> : public ValueOperations<WriteBarrieredBase<JS::Value>>
 {};
 
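The weak-shape hazard described in the comment above is what ReadBarriered::get() guards against. A simplified model of that hazard, with hypothetical types in place of the real shapes and marking machinery:

    // Illustrative model only: the real barrier calls
    // InternalGCMethods<T>::readBarrier rather than marking directly.
    struct FakeShape { bool markedBlack = false; };

    struct WeakShapeRef {
        FakeShape* value = nullptr;
        bool incrementalGCActive = false;

        FakeShape* get() {
            // Read barrier: anything read out of a weak slot during an
            // incremental GC may be stored into an already-black object,
            // so mark it now rather than collect a live shape later.
            if (value && incrementalGCActive)
                value->markedBlack = true;
            return value;
        }
    };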
@@ -631,18 +628,16 @@ class HeapSlot : public WriteBarrieredBa
         post(owner, kind, slot, v);
     }
 
     /* For users who need to manually barrier the raw types. */
     static void writeBarrierPost(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
         reinterpret_cast<HeapSlot*>(const_cast<Value*>(&target))->post(owner, kind, slot, target);
     }
 
-    Value* unsafeGet() { return &value; }
-
   private:
     void post(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
         MOZ_ASSERT(preconditionForWriteBarrierPost(owner, kind, slot, target));
         if (this->value.isObject()) {
             gc::Cell* cell = reinterpret_cast<gc::Cell*>(&this->value.toObject());
             if (cell->storeBuffer())
                 cell->storeBuffer()->putSlot(owner, kind, slot, 1);
         }
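For orientation, the pre()/post() hooks retained in WriteBarrieredBase above are what every barriered store funnels through. A simplified sketch of the assignment path (hypothetical subclass, not the real HeapPtr):

    template <typename T>
    class BarrieredPtrSketch : public WriteBarrieredBase<T> {
      public:
        explicit BarrieredPtrSketch(T v) : WriteBarrieredBase<T>(v) {}

        BarrieredPtrSketch& operator=(T v) {
            this->pre();          // incremental-marking barrier on the old value
            T prev = this->value;
            this->value = v;
            this->post(prev, v);  // generational (store buffer) barrier on the new value
            return *this;
        }
    };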
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -387,17 +387,17 @@ template <typename T> void DispatchToTra
 template <typename T> T DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name);
 template <typename T> void DoMarking(GCMarker* gcmarker, T* thing);
 template <typename T> void DoMarking(GCMarker* gcmarker, T thing);
 
 template <typename T>
 void
 js::TraceEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
 {
-    DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
+    DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
 }
 
 template <typename T>
 void
 js::TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name)
 {
     DispatchToTracer(trc, ConvertToBase(thingp), name);
 }
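These two entry points divide the callers cleanly: barriered fields go through TraceEdge, which now peels the barrier via unsafeUnbarrieredForTracing() internally, while raw pointers whose owner barriers them by hand use TraceManuallyBarrieredEdge. A usage sketch (class and fields are hypothetical):

    struct MyThing {
        HeapPtrShape shape_;  // write-barriered member (assumed typedef)
        JSScript* script_;    // raw pointer, barriered manually by MyThing

        void trace(JSTracer* trc) {
            js::TraceEdge(trc, &shape_, "shape");
            js::TraceManuallyBarrieredEdge(trc, &script_, "script");
        }
    };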
@@ -421,17 +421,17 @@ js::TraceNullableRoot(JSTracer* trc, T* 
 
 template <typename T>
 void
 js::TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec, const char* name)
 {
     JS::AutoTracingIndex index(trc);
     for (auto i : MakeRange(len)) {
         if (InternalGCMethods<T>::isMarkable(vec[i].get()))
-            DispatchToTracer(trc, ConvertToBase(vec[i].unsafeGet()), name);
+            DispatchToTracer(trc, ConvertToBase(vec[i].unsafeUnbarrieredForTracing()), name);
         ++index;
     }
 }
 
 template <typename T>
 void
 js::TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name)
 {
@@ -469,17 +469,17 @@ template void js::TraceManuallyBarriered
                                                                         JSScript**, const char*);
 
 template <typename T>
 void
 js::TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, WriteBarrieredBase<T>* dst,
                               const char* name)
 {
     if (ShouldMarkCrossCompartment(trc, src, dst->get()))
-        DispatchToTracer(trc, dst->unsafeGet(), name);
+        DispatchToTracer(trc, dst->unsafeUnbarrieredForTracing(), name);
 }
 template void js::TraceCrossCompartmentEdge<Value>(JSTracer*, JSObject*,
                                                    WriteBarrieredBase<Value>*, const char*);
 
 template <typename T>
 void
 js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name)
 {
@@ -1889,18 +1889,18 @@ js::gc::StoreBuffer::SlotsEdge::trace(Te
 
     if (IsInsideNursery(obj))
         return;
 
     if (kind() == ElementKind) {
         int32_t initLen = obj->getDenseInitializedLength();
         int32_t clampedStart = Min(start_, initLen);
         int32_t clampedEnd = Min(start_ + count_, initLen);
-        mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)->unsafeGet(),
-                         clampedEnd - clampedStart);
+        mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
+                            ->unsafeUnbarrieredForTracing(), clampedEnd - clampedStart);
     } else {
         int32_t start = Min(uint32_t(start_), obj->slotSpan());
         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
         MOZ_ASSERT(end >= start);
         mover.traceObjectSlots(obj, start, end - start);
     }
 }
 
@@ -2027,35 +2027,35 @@ js::TenuringTracer::traceObject(JSObject
         return;
 
     // Note: the contents of copy on write elements pointers are filled in
     // during parsing and cannot contain nursery pointers.
     if (!nobj->hasEmptyElements() &&
         !nobj->denseElementsAreCopyOnWrite() &&
         ObjectDenseElementsMayBeMarkable(nobj))
     {
-        Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeGet();
+        Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeUnbarrieredForTracing();
         traceSlots(elems, elems + nobj->getDenseInitializedLength());
     }
 
     traceObjectSlots(nobj, 0, nobj->slotSpan());
 }
 
 void
 js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length)
 {
     HeapSlot* fixedStart;
     HeapSlot* fixedEnd;
     HeapSlot* dynStart;
     HeapSlot* dynEnd;
     nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
     if (fixedStart)
-        traceSlots(fixedStart->unsafeGet(), fixedEnd->unsafeGet());
+        traceSlots(fixedStart->unsafeUnbarrieredForTracing(), fixedEnd->unsafeUnbarrieredForTracing());
     if (dynStart)
-        traceSlots(dynStart->unsafeGet(), dynEnd->unsafeGet());
+        traceSlots(dynStart->unsafeUnbarrieredForTracing(), dynEnd->unsafeUnbarrieredForTracing());
 }
 
 void
 js::TenuringTracer::traceSlots(Value* vp, Value* end)
 {
     for (; vp != end; ++vp)
         traverse(vp);
 }
@@ -2320,45 +2320,45 @@ IsMarkedUnbarriered(T* thingp)
 {
     return IsMarkedInternal(ConvertToBase(thingp));
 }
 
 template <typename T>
 bool
 IsMarked(WriteBarrieredBase<T>* thingp)
 {
-    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+    return IsMarkedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 template <typename T>
 bool
 IsMarked(ReadBarrieredBase<T>* thingp)
 {
-    return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+    return IsMarkedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 template <typename T>
 bool
 IsAboutToBeFinalizedUnbarriered(T* thingp)
 {
     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
 }
 
 template <typename T>
 bool
 IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp)
 {
-    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 template <typename T>
 bool
 IsAboutToBeFinalized(ReadBarrieredBase<T>* thingp)
 {
-    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+    return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 // Instantiate a copy of the Tracing templates for each derived type.
 #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
     template bool IsMarkedUnbarriered<type>(type*); \
     template bool IsMarked<type>(WriteBarrieredBase<type>*); \
     template bool IsMarked<type>(ReadBarrieredBase<type>*); \
     template bool IsAboutToBeFinalizedUnbarriered<type>(type*); \
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -11100,17 +11100,17 @@ ICGetElemNativePrototypeCallStub<T>::ICG
 template <class T>
 /* static */ ICGetElem_NativePrototypeCallNative<T>*
 ICGetElem_NativePrototypeCallNative<T>::Clone(JSContext* cx,
                                               ICStubSpace* space,
                                               ICStub* firstMonitorStub,
                                               ICGetElem_NativePrototypeCallNative<T>& other)
 {
     return ICStub::New<ICGetElem_NativePrototypeCallNative<T>>(cx, space, other.jitCode(),
-                firstMonitorStub, other.receiverGuard(), other.key().unsafeGet(), other.accessType(),
+                firstMonitorStub, other.receiverGuard(), &other.key().get(), other.accessType(),
                 other.needsAtomize(), other.getter(), other.pcOffset_, other.holder(),
                 other.holderShape());
 }
 
 template ICGetElem_NativePrototypeCallNative<JS::Symbol*>*
 ICGetElem_NativePrototypeCallNative<JS::Symbol*>::Clone(JSContext*, ICStubSpace*, ICStub*,
                                           ICGetElem_NativePrototypeCallNative<JS::Symbol*>&);
 template ICGetElem_NativePrototypeCallNative<js::PropertyName*>*
@@ -11120,17 +11120,17 @@ ICGetElem_NativePrototypeCallNative<js::
 template <class T>
 /* static */ ICGetElem_NativePrototypeCallScripted<T>*
 ICGetElem_NativePrototypeCallScripted<T>::Clone(JSContext* cx,
                                                 ICStubSpace* space,
                                                 ICStub* firstMonitorStub,
                                                 ICGetElem_NativePrototypeCallScripted<T>& other)
 {
     return ICStub::New<ICGetElem_NativePrototypeCallScripted<T>>(cx, space, other.jitCode(),
-                firstMonitorStub, other.receiverGuard(), other.key().unsafeGet(), other.accessType(),
+                firstMonitorStub, other.receiverGuard(), &other.key().get(), other.accessType(),
                 other.needsAtomize(), other.getter(), other.pcOffset_, other.holder(),
                 other.holderShape());
 }
 
 template ICGetElem_NativePrototypeCallScripted<JS::Symbol*>*
 ICGetElem_NativePrototypeCallScripted<JS::Symbol*>::Clone(JSContext*, ICStubSpace*, ICStub*,
                                         ICGetElem_NativePrototypeCallScripted<JS::Symbol*>&);
 template ICGetElem_NativePrototypeCallScripted<js::PropertyName*>*
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -531,31 +531,31 @@ JSCompartment::trace(JSTracer* trc)
     savedStacks_.trace(trc);
 }
 
 void
 JSCompartment::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime traceOrMark)
 {
     if (objectMetadataState.is<PendingMetadata>()) {
         TraceRoot(trc,
-                  objectMetadataState.as<PendingMetadata>().unsafeGet(),
+                  objectMetadataState.as<PendingMetadata>().unsafeUnbarrieredForTracing(),
                   "on-stack object pending metadata");
     }
 
     if (!trc->runtime()->isHeapMinorCollecting()) {
         // JIT code and the global are never nursery allocated, so we only need
         // to trace them when not doing a minor collection.
 
         if (jitCompartment_)
             jitCompartment_->mark(trc, this);
 
         // If a compartment is on-stack, we mark its global so that
         // JSContext::global() remains valid.
         if (enterCompartmentDepth && global_.unbarrieredGet())
-            TraceRoot(trc, global_.unsafeGet(), "on-stack compartment global");
+            TraceRoot(trc, global_.unsafeUnbarrieredForTracing(), "on-stack compartment global");
     }
 
     // Nothing below here needs to be treated as a root if we aren't marking
     // this zone for a collection.
     if (traceOrMark == js::gc::GCRuntime::MarkRuntime && !zone()->isCollecting())
         return;
 
     // During a GC, these are treated as weak pointers.
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -194,17 +194,17 @@ class MOZ_RAII AutoSetNewObjectMetadata 
 
     AutoSetNewObjectMetadata(const AutoSetNewObjectMetadata& aOther) = delete;
     void operator=(const AutoSetNewObjectMetadata& aOther) = delete;
 
   protected:
     virtual void trace(JSTracer* trc) override {
         if (prevState_.is<PendingMetadata>()) {
             TraceRoot(trc,
-                      prevState_.as<PendingMetadata>().unsafeGet(),
+                      prevState_.as<PendingMetadata>().unsafeUnbarrieredForTracing(),
                       "Object pending metadata");
         }
     }
 
   public:
     explicit AutoSetNewObjectMetadata(ExclusiveContext* ecx MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
     ~AutoSetNewObjectMetadata();
 };
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -3708,17 +3708,18 @@ JSObject::traceChildren(JSTracer* trc)
 
         TraceEdge(trc, &nobj->shape_, "shape");
 
         {
             GetObjectSlotNameFunctor func(nobj);
             JS::AutoTracingDetails ctx(trc, func);
             JS::AutoTracingIndex index(trc);
             for (uint32_t i = 0; i < nobj->slotSpan(); ++i) {
-                TraceManuallyBarrieredEdge(trc, nobj->getSlotRef(i).unsafeGet(), "object slot");
+                TraceManuallyBarrieredEdge(trc, nobj->getSlotRef(i).unsafeUnbarrieredForTracing(),
+                                           "object slot");
                 ++index;
             }
         }
 
         do {
             if (nobj->denseElementsAreCopyOnWrite()) {
                 HeapPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
                 if (owner != nobj) {
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -2101,18 +2101,17 @@ DebugScopes::mark(JSTracer* trc)
 void
 DebugScopes::sweep(JSRuntime* rt)
 {
     /*
      * missingScopes points to debug scopes weakly so that debug scopes can be
      * released more eagerly.
      */
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
-        DebugScopeObject** debugScope = e.front().value().unsafeGet();
-        if (IsAboutToBeFinalizedUnbarriered(debugScope)) {
+        if (IsAboutToBeFinalized(&e.front().value())) {
             /*
              * Note that onPopCall and onPopBlock rely on missingScopes to find
              * scope objects that we synthesized for the debugger's sake, and
              * clean up the synthetic scope objects' entries in liveScopes. So
             * if we remove an entry from missingScopes here, we must also
              * remove the corresponding liveScopes entry.
              *
              * Since the DebugScopeObject is the only thing using its scope
@@ -2120,17 +2119,17 @@ DebugScopes::sweep(JSRuntime* rt)
             * that the synthetic SO is also about to be finalized, and thus
              * the loop below will take care of things. But complex GC behavior
              * means that marks are only conservative approximations of
              * liveness; we should assume that anything could be marked.
              *
              * Thus, we must explicitly remove the entries from both liveScopes
              * and missingScopes here.
              */
-            liveScopes.remove(&(*debugScope)->scope());
+            liveScopes.remove(&e.front().value()->scope());
             e.removeFront();
         } else {
             MissingScopeKey key = e.front().key();
             if (IsForwarded(key.staticScope())) {
                 key.updateStaticScope(Forwarded(key.staticScope()));
                 e.rekeyFront(key);
             }
         }
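With the accessor shared across barrier classes, weak-table sweeps like the one above can hand the barriered slot straight to the GC liveness predicates instead of detouring through a raw pointer. The resulting pattern, sketched for an arbitrary weak table (map type hypothetical):

    for (WeakThingMap::Enum e(weakThings); !e.empty(); e.popFront()) {
        // The predicate takes the barriered slot directly; no unsafeGet() detour.
        if (IsAboutToBeFinalized(&e.front().value()))
            e.removeFront();  // drop entries whose value is about to die
    }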