Bug 989414 - Convert BarrieredPtr to take a T* as template parameter instead of T; r=jonco
author      Terrence Cole <terrence@mozilla.com>
date        Fri, 25 Apr 2014 11:02:44 -0700
changeset   200074 bb7af0cf48d6e47899b30f8a84d522e5b878a245
parent      200073 d1e4a93e5b6c6cd5ebdae9dbf51162456f9bee3f
child       200075 5c288c892ea606e034b2a5a31a08e6ac99f4eaaa
push id     3741
push user   asasaki@mozilla.com
push date   Mon, 21 Jul 2014 20:25:18 +0000
treeherder  mozilla-beta@4d6f46f5af68
reviewers   jonco
bugs        989414
milestone   32.0a1
Bug 989414 - Convert BarrieredPtr to take a T* as template parameter instead of T; r=jonco
js/src/frontend/ParseMaps.h
js/src/gc/Barrier.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/jit/AsmJSModule.h
js/src/jit/BaselineJIT.h
js/src/jit/IonCode.h
js/src/jsinfer.h
js/src/jsiter.cpp
js/src/jsiter.h
js/src/jsobj.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/vm/RegExpStatics.h
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/Shape.h
js/src/vm/String.h
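
Note on the caller-visible effect of this change: the barrier wrapper classes are now parameterized on the full pointer type, so a field that used to be declared HeapPtr<JSObject> is now written HeapPtr<JSObject*>, usually via a typedef such as HeapPtrObject added in Barrier.h. The sketch below illustrates this with stand-in types; ToyObject and ToyHeapPtr are hypothetical and not part of the patch, and the barriers are elided.

// Stand-in for a GC-managed class that provides the static barrier hooks.
struct ToyObject {
    static void writeBarrierPre(ToyObject *) {}
};

// Stand-in for js::HeapPtr after this patch: T is the pointer type itself.
template <typename T>
class ToyHeapPtr {
    T value;
  public:
    ToyHeapPtr() : value(nullptr) {}
    ToyHeapPtr &operator=(T v) { value = v; return *this; }  // real class also runs barriers
    operator T() const { return value; }
};

typedef ToyHeapPtr<ToyObject *> ToyHeapPtrObject;  // mirrors the new HeapPtrObject typedef

struct Container {
    ToyHeapPtrObject obj;  // previously spelled HeapPtr<JSObject>, now HeapPtr<JSObject*>
};

int main() {
    ToyObject o;
    Container c;
    c.obj = &o;              // use sites read the same as before
    ToyObject *raw = c.obj;  // implicit conversion back to the raw pointer
    (void) raw;
    return 0;
}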
--- a/js/src/frontend/ParseMaps.h
+++ b/js/src/frontend/ParseMaps.h
@@ -32,17 +32,17 @@ typedef InlineMap<JSAtom *, DefinitionSi
 typedef InlineMap<JSAtom *, DefinitionList, 24> AtomDefnListMap;
 
 /*
  * For all unmapped atoms recorded in al, add a mapping from the atom's index
  * to its address. map->length must already be set to the number of atoms in
  * the list and map->vector must point to pre-allocated memory.
  */
 void
-InitAtomMap(AtomIndexMap *indices, HeapPtr<JSAtom> *atoms);
+InitAtomMap(AtomIndexMap *indices, HeapPtrAtom *atoms);
 
 /*
  * A pool that permits the reuse of the backing storage for the defn, index, or
  * defn-or-header (multi) maps.
  *
  * The pool owns all the maps that are given out, and is responsible for
  * relinquishing all resources when |purgeAll| is triggered.
  */
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -145,16 +145,20 @@
  *  |
  * RelocatablePtr         provides pre- and post-barriers and is relocatable
  *
  * These classes are designed to be used by the internals of the JS engine.
  * Barriers designed to be used externally are provided in
  * js/public/RootingAPI.h.
  */
 
+class JSAtom;
+class JSFlatString;
+class JSLinearString;
+
 namespace js {
 
 class PropertyName;
 
 #ifdef DEBUG
 bool
 RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone *shadowZone);
 #endif
@@ -280,89 +284,97 @@ MOZ_ALWAYS_INLINE JS::Zone *
 ZoneOfValueFromAnyThread(const JS::Value &value)
 {
     JS_ASSERT(value.isMarkable());
     if (value.isObject())
         return ZoneOfObjectFromAnyThread(value.toObject());
     return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
 }
 
+template <typename T>
+struct InternalGCMethods {};
+
+template <typename T>
+struct InternalGCMethods<T *>
+{
+    static void preBarrier(T *v) { T::writeBarrierPre(v); }
+#ifdef JSGC_GENERATIONAL
+    static void postBarrier(T **vp) { T::writeBarrierPost(*vp, vp); }
+    static void postBarrierRelocate(T **vp) { T::writeBarrierPostRelocate(*vp, vp); }
+    static void postBarrierRemove(T **vp) { T::writeBarrierPostRemove(*vp, vp); }
+#endif
+};
+
 /*
  * Base class for barriered pointer types.
  */
-template <class T, typename Unioned = uintptr_t>
+template <class T>
 class BarrieredPtr
 {
   protected:
-    union {
-        T *value;
-        Unioned other;
-    };
+    T value;
 
-    BarrieredPtr(T *v) : value(v) {}
+    BarrieredPtr(T v) : value(v) {}
     ~BarrieredPtr() { pre(); }
 
   public:
-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
     }
 
-    /* Use this if the automatic coercion to T* isn't working. */
-    T *get() const { return value; }
+    /* Use this if the automatic coercion to T isn't working. */
+    T get() const { return value; }
 
     /*
      * Use these if you want to change the value without invoking the barrier.
      * Obviously this is dangerous unless you know the barrier is not needed.
      */
-    T **unsafeGet() { return &value; }
-    void unsafeSet(T *v) { value = v; }
-
-    Unioned *unsafeGetUnioned() { return &other; }
+    T *unsafeGet() { return &value; }
+    void unsafeSet(T v) { value = v; }
 
-    T &operator*() const { return *value; }
-    T *operator->() const { return value; }
+    T operator->() const { return value; }
 
-    operator T*() const { return value; }
+    operator T() const { return value; }
 
   protected:
-    void pre() { T::writeBarrierPre(value); }
+    void pre() { InternalGCMethods<T>::preBarrier(value); }
 };
 
 /*
  * EncapsulatedPtr only automatically handles pre-barriers. Post-barriers must
  * be manually implemented when using this class. HeapPtr and RelocatablePtr
  * should be used in all cases that do not require explicit low-level control
  * of moving behavior, e.g. for HashMap keys.
  */
-template <class T, typename Unioned = uintptr_t>
-class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class EncapsulatedPtr : public BarrieredPtr<T>
 {
   public:
-    EncapsulatedPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    EncapsulatedPtr(T *v) : BarrieredPtr<T, Unioned>(v) {}
-    explicit EncapsulatedPtr(const EncapsulatedPtr<T, Unioned> &v)
-      : BarrieredPtr<T, Unioned>(v.value) {}
+    EncapsulatedPtr() : BarrieredPtr<T>(nullptr) {}
+    EncapsulatedPtr(T v) : BarrieredPtr<T>(v) {}
+    explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v)
+      : BarrieredPtr<T>(v.value) {}
 
     /* Use to set the pointer to nullptr. */
     void clear() {
         this->pre();
         this->value = nullptr;
     }
 
-    EncapsulatedPtr<T, Unioned> &operator=(T *v) {
+    EncapsulatedPtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         return *this;
     }
 
-    EncapsulatedPtr<T, Unioned> &operator=(const EncapsulatedPtr<T> &v) {
+    EncapsulatedPtr<T> &operator=(const EncapsulatedPtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         this->value = v.value;
         return *this;
     }
 };
 
 /*
  * A pre- and post-barriered heap pointer, for use inside the JS engine.
  *
@@ -370,66 +382,70 @@ class EncapsulatedPtr : public Barriered
  * external interface and implements substantially different semantics.
  *
  * The post-barriers implemented by this class are faster than those
  * implemented by RelocatablePtr<T> or JS::Heap<T> at the cost of not
  * automatically handling deletion or movement. It should generally only be
  * stored in memory that has GC lifetime. HeapPtr must not be used in contexts
  * where it may be implicitly moved or deleted, e.g. most containers.
  */
-template <class T, class Unioned = uintptr_t>
-class HeapPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class HeapPtr : public BarrieredPtr<T>
 {
   public:
-    HeapPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    explicit HeapPtr(T *v) : BarrieredPtr<T, Unioned>(v) { post(); }
-    explicit HeapPtr(const HeapPtr<T, Unioned> &v) : BarrieredPtr<T, Unioned>(v) { post(); }
+    HeapPtr() : BarrieredPtr<T>(nullptr) {}
+    explicit HeapPtr(T v) : BarrieredPtr<T>(v) { post(); }
+    explicit HeapPtr(const HeapPtr<T> &v) : BarrieredPtr<T>(v) { post(); }
 
-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         post();
     }
 
-    HeapPtr<T, Unioned> &operator=(T *v) {
+    HeapPtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         post();
         return *this;
     }
 
-    HeapPtr<T, Unioned> &operator=(const HeapPtr<T, Unioned> &v) {
+    HeapPtr<T> &operator=(const HeapPtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         this->value = v.value;
         post();
         return *this;
     }
 
   protected:
-    void post() { T::writeBarrierPost(this->value, (void *)&this->value); }
+    void post() {
+#ifdef JSGC_GENERATIONAL
+        InternalGCMethods<T>::postBarrier(&this->value);
+#endif
+    }
 
     /* Make this friend so it can access pre() and post(). */
     template <class T1, class T2>
     friend inline void
     BarrieredSetPair(Zone *zone,
-                     HeapPtr<T1> &v1, T1 *val1,
-                     HeapPtr<T2> &v2, T2 *val2);
+                     HeapPtr<T1*> &v1, T1 *val1,
+                     HeapPtr<T2*> &v2, T2 *val2);
 
   private:
     /*
      * Unlike RelocatablePtr<T>, HeapPtr<T> must be managed with GC lifetimes.
      * Specifically, the memory used by the pointer itself must be live until
      * at least the next minor GC. For that reason, move semantics are invalid
      * and are deleted here. Please note that not all containers support move
      * semantics, so this does not completely prevent invalid uses.
      */
     HeapPtr(HeapPtr<T> &&) MOZ_DELETE;
-    HeapPtr<T, Unioned> &operator=(HeapPtr<T, Unioned> &&) MOZ_DELETE;
+    HeapPtr<T> &operator=(HeapPtr<T> &&) MOZ_DELETE;
 };
 
 /*
  * FixedHeapPtr is designed for one very narrow case: replacing immutable raw
  * pointers to GC-managed things, implicitly converting to a handle type for
  * ease of use.  Pointers encapsulated by this type must:
  *
  *   be immutable (no incremental write barriers),
@@ -464,17 +480,17 @@ class FixedHeapPtr
  * i.e. in C++ containers.  It is, however, somewhat slower, so should only be
  * used in contexts where this ability is necessary.
  */
 template <class T>
 class RelocatablePtr : public BarrieredPtr<T>
 {
   public:
     RelocatablePtr() : BarrieredPtr<T>(nullptr) {}
-    explicit RelocatablePtr(T *v) : BarrieredPtr<T>(v) {
+    explicit RelocatablePtr(T v) : BarrieredPtr<T>(v) {
         if (v)
             post();
     }
 
     /*
      * For RelocatablePtr, move semantics are equivalent to copy semantics. In
      * C++, a copy constructor taking const-ref is the way to get a single
      * function that will be used for both lvalue and rvalue copies, so we can
@@ -485,124 +501,142 @@ class RelocatablePtr : public BarrieredP
             post();
     }
 
     ~RelocatablePtr() {
         if (this->value)
             relocate();
     }
 
-    RelocatablePtr<T> &operator=(T *v) {
+    RelocatablePtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         if (v) {
             this->value = v;
             post();
         } else if (this->value) {
             relocate();
             this->value = v;
         }
         return *this;
     }
 
     RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         if (v.value) {
             this->value = v.value;
             post();
         } else if (this->value) {
             relocate();
             this->value = v;
         }
         return *this;
     }
 
   protected:
     void post() {
 #ifdef JSGC_GENERATIONAL
         JS_ASSERT(this->value);
-        T::writeBarrierPostRelocate(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRelocate(&this->value);
 #endif
     }
 
     void relocate() {
 #ifdef JSGC_GENERATIONAL
         JS_ASSERT(this->value);
-        T::writeBarrierPostRemove(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRemove(&this->value);
 #endif
     }
 };
 
 /*
  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
  * barriers with only one branch to check if we're in an incremental GC.
  */
 template <class T1, class T2>
 static inline void
 BarrieredSetPair(Zone *zone,
-                 HeapPtr<T1> &v1, T1 *val1,
-                 HeapPtr<T2> &v2, T2 *val2)
+                 HeapPtr<T1*> &v1, T1 *val1,
+                 HeapPtr<T2*> &v2, T2 *val2)
 {
     if (T1::needWriteBarrierPre(zone)) {
         v1.pre();
         v2.pre();
     }
     v1.unsafeSet(val1);
     v2.unsafeSet(val2);
     v1.post();
     v2.post();
 }
 
+class ArrayBufferObject;
+class NestedScopeObject;
 class Shape;
 class BaseShape;
-namespace types { struct TypeObject; }
+class UnownedBaseShape;
+namespace jit {
+class JitCode;
+}
+namespace types {
+struct TypeObject;
+struct TypeObjectAddendum;
+}
 
-typedef BarrieredPtr<JSObject> BarrieredPtrObject;
-typedef BarrieredPtr<JSScript> BarrieredPtrScript;
+typedef BarrieredPtr<JSObject*> BarrieredPtrObject;
+typedef BarrieredPtr<JSScript*> BarrieredPtrScript;
 
-typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
-typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<JSObject*> EncapsulatedPtrObject;
+typedef EncapsulatedPtr<JSScript*> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<jit::JitCode*> EncapsulatedPtrJitCode;
 
-typedef RelocatablePtr<JSObject> RelocatablePtrObject;
-typedef RelocatablePtr<JSScript> RelocatablePtrScript;
+typedef RelocatablePtr<JSObject*> RelocatablePtrObject;
+typedef RelocatablePtr<JSScript*> RelocatablePtrScript;
+typedef RelocatablePtr<NestedScopeObject*> RelocatablePtrNestedScopeObject;
 
-typedef HeapPtr<JSObject> HeapPtrObject;
-typedef HeapPtr<JSFunction> HeapPtrFunction;
-typedef HeapPtr<JSString> HeapPtrString;
-typedef HeapPtr<PropertyName> HeapPtrPropertyName;
-typedef HeapPtr<JSScript> HeapPtrScript;
-typedef HeapPtr<Shape> HeapPtrShape;
-typedef HeapPtr<BaseShape> HeapPtrBaseShape;
-typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
+typedef HeapPtr<ArrayBufferObject*> HeapPtrArrayBufferObject;
+typedef HeapPtr<JSObject*> HeapPtrObject;
+typedef HeapPtr<JSFunction*> HeapPtrFunction;
+typedef HeapPtr<JSAtom*> HeapPtrAtom;
+typedef HeapPtr<JSString*> HeapPtrString;
+typedef HeapPtr<JSFlatString*> HeapPtrFlatString;
+typedef HeapPtr<JSLinearString*> HeapPtrLinearString;
+typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
+typedef HeapPtr<JSScript*> HeapPtrScript;
+typedef HeapPtr<Shape*> HeapPtrShape;
+typedef HeapPtr<BaseShape*> HeapPtrBaseShape;
+typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
+typedef HeapPtr<types::TypeObject*> HeapPtrTypeObject;
+typedef HeapPtr<types::TypeObjectAddendum*> HeapPtrTypeObjectAddendum;
+typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;
 
 /* Useful for hashtables with a HeapPtr as key. */
 
 template <class T>
 struct HeapPtrHasher
 {
     typedef HeapPtr<T> Key;
-    typedef T *Lookup;
+    typedef T Lookup;
 
-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
     static bool match(const Key &k, Lookup l) { return k.get() == l; }
     static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 /* Specialized hashing policy for HeapPtrs. */
 template <class T>
 struct DefaultHasher< HeapPtr<T> > : HeapPtrHasher<T> { };
 
 template <class T>
 struct EncapsulatedPtrHasher
 {
     typedef EncapsulatedPtr<T> Key;
-    typedef T *Lookup;
+    typedef T Lookup;
 
-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
     static bool match(const Key &k, Lookup l) { return k.get() == l; }
     static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 template <class T>
 struct DefaultHasher< EncapsulatedPtr<T> > : EncapsulatedPtrHasher<T> { };
 
 bool
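
The new InternalGCMethods<T *> partial specialization above is what allows the wrappers to take the pointer type: BarrieredPtr no longer calls T::writeBarrierPre directly, but routes through InternalGCMethods, which peels the '*' off the template argument and forwards to the pointee's static hooks. A minimal compilable sketch of that dispatch, using a hypothetical MyGCThing and a renamed copy of the trait:

#include <cstdio>

struct MyGCThing {
    static void writeBarrierPre(MyGCThing *thing) {
        std::printf("pre-barrier on %p\n", static_cast<void *>(thing));
    }
};

// Mirrors the InternalGCMethods<T *> specialization from Barrier.h: the
// partial specialization recovers the pointee type and forwards to its
// static barrier hook.
template <typename T> struct InternalGCMethodsSketch {};
template <typename T> struct InternalGCMethodsSketch<T *> {
    static void preBarrier(T *v) { T::writeBarrierPre(v); }
};

int main() {
    MyGCThing thing;
    // BarrieredPtr<MyGCThing *>::pre() boils down to this call:
    InternalGCMethodsSketch<MyGCThing *>::preBarrier(&thing);
    return 0;
}

This indirection presumably also leaves room for later specializations of InternalGCMethods for non-pointer GC types, since the wrapper itself no longer assumes its template parameter is a pointee class.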
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -258,17 +258,17 @@ void
 MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
 {
     trc->setTracingName(name);
     MarkInternal(trc, thingp);
 }
 
 template <typename T>
 static void
-Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name)
+Mark(JSTracer *trc, BarrieredPtr<T*> *thing, const char *name)
 {
     trc->setTracingName(name);
     MarkInternal(trc, thing->unsafeGet());
 }
 
 void
 MarkPermanentAtom(JSTracer *trc, JSAtom *atom, const char *name)
 {
@@ -301,17 +301,17 @@ MarkRoot(JSTracer *trc, T **thingp, cons
 {
     JS_ROOT_MARKING_ASSERT(trc);
     trc->setTracingName(name);
     MarkInternal(trc, thingp);
 }
 
 template <typename T>
 static void
-MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
+MarkRange(JSTracer *trc, size_t len, HeapPtr<T*> *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         if (vec[i].get()) {
             trc->setTracingIndex(name, i);
             MarkInternal(trc, vec[i].unsafeGet());
         }
     }
 }
@@ -397,40 +397,40 @@ UpdateIfRelocated(JSRuntime *rt, T **thi
     if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp))
         rt->gcNursery.getForwardedPointer(thingp);
 #endif
     return *thingp;
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
-Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name)                            \
+Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name)                           \
 {                                                                                                 \
     Mark<type>(trc, thing, name);                                                                 \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
 Mark##base##Root(JSTracer *trc, type **thingp, const char *name)                                  \
 {                                                                                                 \
     MarkRoot<type>(trc, thingp, name);                                                            \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
 Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name)                           \
 {                                                                                                 \
     MarkUnbarriered<type>(trc, thingp, name);                                                     \
 }                                                                                                 \
                                                                                                   \
-/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */                         \
+/* Explicitly instantiate MarkUnbarriered<type*>. It is referenced from */                        \
 /* other translation units and the instantiation might otherwise get */                           \
 /* inlined away. */                                                                               \
 template void MarkUnbarriered<type>(JSTracer *, type **, const char *);                           \
                                                                                                   \
 void                                                                                              \
-Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name)                \
+Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *vec, const char *name)               \
 {                                                                                                 \
     MarkRange<type>(trc, len, vec, name);                                                         \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
 Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name)                    \
 {                                                                                                 \
     MarkRootRange<type>(trc, len, vec, name);                                                     \
@@ -438,35 +438,35 @@ Mark##base##RootRange(JSTracer *trc, siz
                                                                                                   \
 bool                                                                                              \
 Is##base##Marked(type **thingp)                                                                   \
 {                                                                                                 \
     return IsMarked<type>(thingp);                                                                \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
-Is##base##Marked(BarrieredPtr<type> *thingp)                                                      \
+Is##base##Marked(BarrieredPtr<type*> *thingp)                                                     \
 {                                                                                                 \
     return IsMarked<type>(thingp->unsafeGet());                                                   \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
 Is##base##AboutToBeFinalized(type **thingp)                                                       \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp);                                                    \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
-Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp)                                          \
+Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp)                                         \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
 }                                                                                                 \
                                                                                                   \
 type *                                                                                            \
-Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp)                              \
+Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp)                             \
 {                                                                                                 \
     return UpdateIfRelocated<type>(rt, thingp->unsafeGet());                                      \
 }                                                                                                 \
                                                                                                   \
 type *                                                                                            \
 Update##base##IfRelocated(JSRuntime *rt, type **thingp)                                           \
 {                                                                                                 \
     return UpdateIfRelocated<type>(rt, thingp);                                                   \
@@ -822,23 +822,16 @@ gc::MarkCrossCompartmentSlot(JSTracer *t
 {
     if (dst->isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)dst->toGCThing()))
         MarkSlot(trc, dst, name);
 }
 
 /*** Special Marking ***/
 
 void
-gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name)
-{
-    trc->setTracingName(name);
-    MarkInternal(trc, thingp->unsafeGet());
-}
-
-void
 gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
 {
     trc->setTracingName(name);
     MarkValueInternal(trc, v);
 }
 
 bool
 gc::IsCellMarked(Cell **thingp)
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -22,17 +22,17 @@ class BaseShape;
 class DebugScopeObject;
 struct GCMarker;
 class GlobalObject;
 class LazyScript;
 class ScopeObject;
 class Shape;
 class UnownedBaseShape;
 
-template<class, typename> class HeapPtr;
+template<class> class HeapPtr;
 
 namespace jit {
 class JitCode;
 class IonScript;
 class VMFunction;
 }
 
 namespace types {
@@ -44,20 +44,20 @@ namespace gc {
 /*** Object Marking ***/
 
 /*
  * These functions expose marking functionality for all of the different GC
  * thing kinds. For each GC thing, there are several variants. As an example,
  * these are the variants generated for JSObject. They are listed from most to
  * least desirable for use:
  *
- * MarkObject(JSTracer *trc, const HeapPtr<JSObject> &thing, const char *name);
+ * MarkObject(JSTracer *trc, const HeapPtrObject &thing, const char *name);
  *     This function should be used for marking JSObjects, in preference to all
- *     others below. Use it when you have HeapPtr<JSObject>, which
- *     automatically implements write barriers.
+ *     others below. Use it when you have HeapPtrObject, which automatically
+ *     implements write barriers.
  *
  * MarkObjectRoot(JSTracer *trc, JSObject *thing, const char *name);
  *     This function is only valid during the root marking phase of GC (i.e.,
  *     when MarkRuntime is on the stack).
  *
  * MarkObjectUnbarriered(JSTracer *trc, JSObject *thing, const char *name);
  *     Like MarkObject, this function can be called at any time. It is more
  *     forgiving, since it doesn't demand a HeapPtr as an argument. Its use
@@ -83,26 +83,26 @@ namespace gc {
  *
  * UpdateObjectIfRelocated(JSObject **thingp);
  *     In some circumstances -- e.g. optional weak marking -- it is necessary
  *     to look at the pointer before marking it strongly or weakly. In these
  *     cases, the following must be called to update the pointer before use.
  */
 
 #define DeclMarker(base, type)                                                                    \
-void Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name);                      \
+void Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name);                     \
 void Mark##base##Root(JSTracer *trc, type **thingp, const char *name);                            \
 void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name);                     \
-void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name);        \
+void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const char *name);       \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);            \
 bool Is##base##Marked(type **thingp);                                                             \
-bool Is##base##Marked(BarrieredPtr<type> *thingp);                                                \
+bool Is##base##Marked(BarrieredPtr<type*> *thingp);                                               \
 bool Is##base##AboutToBeFinalized(type **thingp);                                                 \
-bool Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp);                                    \
-type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp);                       \
+bool Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp);                                   \
+type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp);                      \
 type *Update##base##IfRelocated(JSRuntime *rt, type **thingp);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(BaseShape, UnownedBaseShape)
 DeclMarker(JitCode, jit::JitCode)
 DeclMarker(Object, ArgumentsObject)
 DeclMarker(Object, ArrayBufferObject)
 DeclMarker(Object, ArrayBufferViewObject)
@@ -235,23 +235,16 @@ MarkCrossCompartmentScriptUnbarriered(JS
  */
 void
 MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst_slot, const char *name);
 
 
 /*** Special Cases ***/
 
 /*
- * The unioned HeapPtr stored in script->globalObj needs special treatment to
- * typecheck correctly.
- */
-void
-MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name);
-
-/*
  * MarkChildren<JSObject> is exposed solely for preWriteBarrier on
  * JSObject::TradeGuts. It should not be considered external interface.
  */
 void
 MarkChildren(JSTracer *trc, JSObject *obj);
 
 /*
  * Trace through the shape and any shapes it contains to mark
@@ -285,17 +278,17 @@ Mark(JSTracer *trc, BarrieredPtrObject *
 
 inline void
 Mark(JSTracer *trc, BarrieredPtrScript *o, const char *name)
 {
     MarkScript(trc, o, name);
 }
 
 inline void
-Mark(JSTracer *trc, HeapPtr<jit::JitCode> *code, const char *name)
+Mark(JSTracer *trc, HeapPtrJitCode *code, const char *name)
 {
     MarkJitCode(trc, code, name);
 }
 
 /* For use by WeakMap's HashKeyRef instantiation. */
 inline void
 Mark(JSTracer *trc, JSObject **objp, const char *name)
 {
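
For reference, this is roughly what the updated DeclMarker macro declares for one of the listed pairs, DeclMarker(Object, ArgumentsObject), after this patch; the forward declarations are added here only to keep the sketch self-contained and are not part of the change.

class JSTracer;
struct JSRuntime;

namespace js {
class ArgumentsObject;
template <class T> class BarrieredPtr;
template <class T> class HeapPtr;

namespace gc {

void MarkObject(JSTracer *trc, BarrieredPtr<ArgumentsObject*> *thing, const char *name);
void MarkObjectRoot(JSTracer *trc, ArgumentsObject **thingp, const char *name);
void MarkObjectUnbarriered(JSTracer *trc, ArgumentsObject **thingp, const char *name);
void MarkObjectRange(JSTracer *trc, size_t len, HeapPtr<ArgumentsObject*> *thing, const char *name);
void MarkObjectRootRange(JSTracer *trc, size_t len, ArgumentsObject **thing, const char *name);
bool IsObjectMarked(ArgumentsObject **thingp);
bool IsObjectMarked(BarrieredPtr<ArgumentsObject*> *thingp);
bool IsObjectAboutToBeFinalized(ArgumentsObject **thingp);
bool IsObjectAboutToBeFinalized(BarrieredPtr<ArgumentsObject*> *thingp);
ArgumentsObject *UpdateObjectIfRelocated(JSRuntime *rt, BarrieredPtr<ArgumentsObject*> *thingp);
ArgumentsObject *UpdateObjectIfRelocated(JSRuntime *rt, ArgumentsObject **thingp);

} // namespace gc
} // namespace js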
--- a/js/src/jit/AsmJSModule.h
+++ b/js/src/jit/AsmJSModule.h
@@ -450,17 +450,17 @@ class AsmJSModule
     } pod;
 
     uint8_t *                             code_;
     uint8_t *                             interruptExit_;
 
     StaticLinkData                        staticLinkData_;
     bool                                  dynamicallyLinked_;
     bool                                  loadedFromCache_;
-    HeapPtr<ArrayBufferObject>            maybeHeap_;
+    HeapPtrArrayBufferObject              maybeHeap_;
 
     // The next two fields need to be kept out of the Pod as they depend on the
     // position of the module within the ScriptSource and thus aren't invariant
     // with caching.
     uint32_t                              funcStart_;
     uint32_t                              offsetToEndOfUseAsm_;
 
     ScriptSource *                        scriptSource_;
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -102,17 +102,17 @@ struct BaselineScript
 
     // Limit the locals on a given script so that stack check on baseline frames
     // doesn't overflow a uint32_t value.
     // (MAX_JSSCRIPT_SLOTS * sizeof(Value)) must fit within a uint32_t.
     static const uint32_t MAX_JSSCRIPT_SLOTS = 0xffffu;
 
   private:
     // Code pointer containing the actual method.
-    HeapPtr<JitCode> method_;
+    HeapPtrJitCode method_;
 
     // For heavyweight scripts, template objects to use for the call object and
     // decl env object (linked via the call object's enclosing scope).
     HeapPtrObject templateScope_;
 
     // Allocated space for fallback stubs.
     FallbackICStubSpace fallbackStubSpace_;
 
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -164,20 +164,20 @@ struct DependentAsmJSModuleExit
     { }
 };
 
 // An IonScript attaches Ion-generated information to a JSScript.
 struct IonScript
 {
   private:
     // Code pointer containing the actual method.
-    EncapsulatedPtr<JitCode> method_;
+    EncapsulatedPtrJitCode method_;
 
     // Deoptimization table used by this method.
-    EncapsulatedPtr<JitCode> deoptTable_;
+    EncapsulatedPtrJitCode deoptTable_;
 
     // Entrypoint for OSR, or nullptr.
     jsbytecode *osrPc_;
 
     // Offset to OSR entrypoint from method_->raw(), or 0.
     uint32_t osrEntryOffset_;
 
     // Offset to entrypoint skipping type arg check from method_->raw().
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -982,17 +982,17 @@ struct TypeObject : gc::BarrieredCell<Ty
      * additional information to a type object:
      *
      * - `TypeNewScript`: If addendum is a `TypeNewScript`, it
      *   indicates that objects of this type have always been
      *   constructed using 'new' on the specified script, which adds
      *   some number of properties to the object in a definite order
      *   before the object escapes.
      */
-    HeapPtr<TypeObjectAddendum> addendum;
+    HeapPtrTypeObjectAddendum addendum;
   public:
 
     TypeObjectFlags flags() const {
         return flags_;
     }
 
     void addFlags(TypeObjectFlags flags) {
         flags_ |= flags;
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -50,17 +50,17 @@ using mozilla::PodZero;
 
 typedef Rooted<PropertyIteratorObject*> RootedPropertyIteratorObject;
 
 static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2_BACKGROUND;
 
 void
 NativeIterator::mark(JSTracer *trc)
 {
-    for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
+    for (HeapPtrFlatString *str = begin(); str < end(); str++)
         MarkString(trc, str, "prop");
     if (obj)
         MarkObject(trc, &obj, "obj");
 
     // The SuppressDeletedPropertyHelper loop can GC, so make sure that if the
     // GC removes any elements from the list, it won't remove this one.
     if (iterObj_)
         MarkObjectUnbarriered(trc, &iterObj_, "iterObj");
@@ -421,17 +421,17 @@ NativeIterator::allocateIterator(JSConte
     size_t plength = props.length();
     NativeIterator *ni = (NativeIterator *)
         cx->malloc_(sizeof(NativeIterator)
                     + plength * sizeof(JSString *)
                     + slength * sizeof(Shape *));
     if (!ni)
         return nullptr;
     AutoValueVector strings(cx);
-    ni->props_array = ni->props_cursor = (HeapPtr<JSFlatString> *) (ni + 1);
+    ni->props_array = ni->props_cursor = (HeapPtrFlatString *) (ni + 1);
     ni->props_end = ni->props_array + plength;
     if (plength) {
         for (size_t i = 0; i < plength; i++) {
             JSFlatString *str = IdToString(cx, props[i]);
             if (!str || !strings.append(StringValue(str)))
                 return nullptr;
             ni->props_array[i].init(str);
         }
@@ -1039,19 +1039,19 @@ SuppressDeletedPropertyHelper(JSContext 
     NativeIterator *enumeratorList = cx->compartment()->enumerators;
     NativeIterator *ni = enumeratorList->next();
 
     while (ni != enumeratorList) {
       again:
         /* This only works for identified suppressed keys, not values. */
         if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
             /* Check whether id is still to come. */
-            HeapPtr<JSFlatString> *props_cursor = ni->current();
-            HeapPtr<JSFlatString> *props_end = ni->end();
-            for (HeapPtr<JSFlatString> *idp = props_cursor; idp < props_end; ++idp) {
+            HeapPtrFlatString *props_cursor = ni->current();
+            HeapPtrFlatString *props_end = ni->end();
+            for (HeapPtrFlatString *idp = props_cursor; idp < props_end; ++idp) {
                 if (predicate(*idp)) {
                     /*
                      * Check whether another property along the prototype chain
                      * became visible as a result of this deletion.
                      */
                     RootedObject proto(cx);
                     if (!JSObject::getProto(cx, obj, &proto))
                         return false;
@@ -1086,17 +1086,17 @@ SuppressDeletedPropertyHelper(JSContext 
                     /*
                      * No property along the prototype chain stepped in to take the
                      * property's place, so go ahead and delete id from the list.
                      * If it is the next property to be enumerated, just skip it.
                      */
                     if (idp == props_cursor) {
                         ni->incCursor();
                     } else {
-                        for (HeapPtr<JSFlatString> *p = idp; p + 1 != props_end; p++)
+                        for (HeapPtrFlatString *p = idp; p + 1 != props_end; p++)
                             *p = *(p + 1);
                         ni->props_end = ni->end() - 1;
 
                         /*
                          * This invokes the pre barrier on this element, since
                          * it's no longer going to be marked, and ensures that
                          * any existing remembered set entry will be dropped.
                          */
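
A side note on the allocateIterator hunk above: the freshly malloc'd HeapPtrFlatString slots are populated with init() rather than assignment because, per Barrier.h, operator= first runs the pre-barrier on the old value, which here would be uninitialized memory, whereas init() only stores the new pointer. A toy illustration with hypothetical stand-in types (the real code does the same cast-and-init dance):

#include <cstddef>
#include <cstdlib>

struct ToyString {
    static void writeBarrierPre(ToyString *) { /* would mark the old target */ }
};

// Simplified stand-in for HeapPtrFlatString: init() skips the pre-barrier.
template <typename T>
class ToyBarriered {
    T value;
  public:
    void init(T v) { value = v; }  // store only: safe on raw memory
    ToyBarriered &operator=(T v) {
        // the real class runs the pre-barrier on the *old* value here, which
        // must not happen while 'value' is still uninitialized garbage
        value = v;
        return *this;
    }
};

int main() {
    // Mimic NativeIterator::allocateIterator: the pointer array is carved
    // out of one raw allocation and then initialized slot by slot.
    const std::size_t plength = 4;
    void *raw = std::malloc(plength * sizeof(ToyBarriered<ToyString *>));
    if (!raw)
        return 1;
    ToyBarriered<ToyString *> *props = static_cast<ToyBarriered<ToyString *> *>(raw);
    ToyString str;
    for (std::size_t i = 0; i < plength; i++)
        props[i].init(&str);
    std::free(raw);
    return 0;
}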
--- a/js/src/jsiter.h
+++ b/js/src/jsiter.h
@@ -26,50 +26,50 @@
 #define JSITER_UNREUSABLE   0x2000
 
 namespace js {
 
 struct NativeIterator
 {
     HeapPtrObject obj;                  // Object being iterated.
     JSObject *iterObj_;                 // Internal iterator object.
-    HeapPtr<JSFlatString> *props_array;
-    HeapPtr<JSFlatString> *props_cursor;
-    HeapPtr<JSFlatString> *props_end;
+    HeapPtrFlatString *props_array;
+    HeapPtrFlatString *props_cursor;
+    HeapPtrFlatString *props_end;
     Shape **shapes_array;
     uint32_t shapes_length;
     uint32_t shapes_key;
     uint32_t flags;
 
   private:
     /* While in compartment->enumerators, these form a doubly linked list. */
     NativeIterator *next_;
     NativeIterator *prev_;
 
   public:
     bool isKeyIter() const {
         return (flags & JSITER_FOREACH) == 0;
     }
 
-    inline HeapPtr<JSFlatString> *begin() const {
+    inline HeapPtrFlatString *begin() const {
         return props_array;
     }
 
-    inline HeapPtr<JSFlatString> *end() const {
+    inline HeapPtrFlatString *end() const {
         return props_end;
     }
 
     size_t numKeys() const {
         return end() - begin();
     }
 
     JSObject *iterObj() const {
         return iterObj_;
     }
-    HeapPtr<JSFlatString> *current() const {
+    HeapPtrFlatString *current() const {
         JS_ASSERT(props_cursor < props_end);
         return props_cursor;
     }
 
     NativeIterator *next() {
         return next_;
     }
 
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -433,17 +433,17 @@ class JSObject : public js::ObjectImpl
      * Constructs a new, unique shape for the object.
      */
     static inline bool setSingletonType(js::ExclusiveContext *cx, js::HandleObject obj);
 
     // uninlinedGetType() is the same as getType(), but not inlined.
     inline js::types::TypeObject* getType(JSContext *cx);
     js::types::TypeObject* uninlinedGetType(JSContext *cx);
 
-    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
+    const js::HeapPtrTypeObject &typeFromGC() const {
         /* Direct field access for use by GC. */
         return type_;
     }
 
     /*
      * We allow the prototype of an object to be lazily computed if the object
      * is a proxy. In the lazy case, we store (JSObject *)0x1 in the proto field
      * of the object's TypeObject. We offer three ways of getting the prototype:
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -846,17 +846,17 @@ js::XDRScript(XDRState<mode> *xdr, Handl
     }
 
     /*
      * Here looping from 0-to-length to xdr objects is essential to ensure that
      * all references to enclosing blocks (via FindScopeObjectIndex below) happen
      * after the enclosing block has been XDR'd.
      */
     for (i = 0; i != nobjects; ++i) {
-        HeapPtr<JSObject> *objp = &script->objects()->vector[i];
+        HeapPtrObject *objp = &script->objects()->vector[i];
         XDRClassKind classk;
 
         if (mode == XDR_ENCODE) {
             JSObject *obj = *objp;
             if (obj->is<BlockObject>())
                 classk = CK_BlockObject;
             else if (obj->is<StaticWithObject>())
                 classk = CK_WithObject;
@@ -2355,23 +2355,23 @@ JSScript::partiallyInit(ExclusiveContext
         JS_ASSERT(reinterpret_cast<uintptr_t>(cursor) % sizeof(jsval) == 0);
         script->consts()->length = nconsts;
         script->consts()->vector = (HeapValue *)cursor;
         cursor += nconsts * sizeof(script->consts()->vector[0]);
     }
 
     if (nobjects != 0) {
         script->objects()->length = nobjects;
-        script->objects()->vector = (HeapPtr<JSObject> *)cursor;
+        script->objects()->vector = (HeapPtrObject *)cursor;
         cursor += nobjects * sizeof(script->objects()->vector[0]);
     }
 
     if (nregexps != 0) {
         script->regexps()->length = nregexps;
-        script->regexps()->vector = (HeapPtr<JSObject> *)cursor;
+        script->regexps()->vector = (HeapPtrObject *)cursor;
         cursor += nregexps * sizeof(script->regexps()->vector[0]);
     }
 
     if (ntrynotes != 0) {
         script->trynotes()->length = ntrynotes;
         script->trynotes()->vector = reinterpret_cast<JSTryNote *>(cursor);
         size_t vectorSize = ntrynotes * sizeof(script->trynotes()->vector[0]);
 #ifdef DEBUG
@@ -3021,23 +3021,23 @@ js::CloneScript(JSContext *cx, HandleObj
 
     if (nconsts != 0) {
         HeapValue *vector = Rebase<HeapValue>(dst, src, src->consts()->vector);
         dst->consts()->vector = vector;
         for (unsigned i = 0; i < nconsts; ++i)
             JS_ASSERT_IF(vector[i].isMarkable(), vector[i].toString()->isAtom());
     }
     if (nobjects != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->objects()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->objects()->vector);
         dst->objects()->vector = vector;
         for (unsigned i = 0; i < nobjects; ++i)
             vector[i].init(objects[i]);
     }
     if (nregexps != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->regexps()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->regexps()->vector);
         dst->regexps()->vector = vector;
         for (unsigned i = 0; i < nregexps; ++i)
             vector[i].init(regexps[i]);
     }
     if (ntrynotes != 0)
         dst->trynotes()->vector = Rebase<JSTryNote>(dst, src, src->trynotes()->vector);
     if (nblockscopes != 0)
         dst->blockScopes()->vector = Rebase<BlockScopeNote>(dst, src, src->blockScopes()->vector);
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -175,17 +175,17 @@ typedef InternalHandle<Bindings *> Inter
  * both function and top-level scripts (the latter is needed to track names in
  * strict mode eval code, to give such code its own lexical environment).
  */
 class Bindings
 {
     friend class BindingIter;
     friend class AliasedFormalIter;
 
-    HeapPtr<Shape> callObjShape_;
+    HeapPtrShape callObjShape_;
     uintptr_t bindingArrayAndFlag_;
     uint16_t numArgs_;
     uint16_t numBlockScoped_;
     uint32_t numVars_;
 
     /*
      * During parsing, bindings are allocated out of a temporary LifoAlloc.
      * After parsing, a JSScript object is created and the bindings are
--- a/js/src/vm/RegExpStatics.h
+++ b/js/src/vm/RegExpStatics.h
@@ -15,29 +15,29 @@
 namespace js {
 
 class GlobalObject;
 
 class RegExpStatics
 {
     /* The latest RegExp output, set after execution. */
     VectorMatchPairs        matches;
-    HeapPtr<JSLinearString> matchesInput;
+    HeapPtrLinearString     matchesInput;
 
     /*
      * The previous RegExp input, used to resolve lazy state.
      * A raw RegExpShared cannot be stored because it may be in
      * a different compartment via evalcx().
      */
-    HeapPtr<JSAtom>         lazySource;
+    HeapPtrAtom             lazySource;
     RegExpFlag              lazyFlags;
     size_t                  lazyIndex;
 
     /* The latest RegExp input, set before execution. */
-    HeapPtr<JSString>       pendingInput;
+    HeapPtrString           pendingInput;
     RegExpFlag              flags;
 
     /*
      * If true, |matchesInput| and the |lazy*| fields may be used
      * to replay the last executed RegExp, and |matches| is invalid.
      */
     bool                    pendingLazyEvaluation;
 
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1558,17 +1558,17 @@ js_IsDebugScopeSlow(ProxyObject *proxy)
     JS_ASSERT(proxy->hasClass(&ProxyObject::uncallableClass_));
     return proxy->handler() == &DebugScopeProxy::singleton;
 }
 
 /*****************************************************************************/
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
-                                           const EncapsulatedPtr<JSObject> &key)
+                                           const EncapsulatedPtrObject &key)
 {
 #ifdef JSGC_GENERATIONAL
     /*
      * Strip the barriers from the type before inserting into the store buffer.
      * This will automatically ensure that barriers do not fire during GC.
      *
      * Some compilers complain about instantiating the WeakMap class for
      * unbarriered type arguments, so we cast to a HashMap instead.  Because of
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -721,18 +721,18 @@ class ScopeIterKey
 };
 
 class ScopeIterVal
 {
     friend class ScopeIter;
     friend class DebugScopes;
 
     AbstractFramePtr frame_;
-    RelocatablePtr<JSObject> cur_;
-    RelocatablePtr<NestedScopeObject> staticScope_;
+    RelocatablePtrObject cur_;
+    RelocatablePtrNestedScopeObject staticScope_;
     ScopeIter::Type type_;
     bool hasScopeObject_;
 
     static void staticAsserts();
 
   public:
     ScopeIterVal(const ScopeIter &si)
       : frame_(si.frame()), cur_(si.cur_), staticScope_(si.staticScope_), type_(si.type_),
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -597,17 +597,17 @@ class BaseShape : public gc::BarrieredCe
 
     union {
         StrictPropertyOp rawSetter;     /* setter hook for shape */
         JSObject        *setterObj;     /* user-defined callable "set" object or
                                            null if shape->hasSetterValue() */
     };
 
     /* For owned BaseShapes, the canonical unowned BaseShape. */
-    HeapPtr<UnownedBaseShape> unowned_;
+    HeapPtrUnownedBaseShape unowned_;
 
     /* For owned BaseShapes, the shape's shape table. */
     ShapeTable       *table_;
 
     BaseShape(const BaseShape &base) MOZ_DELETE;
 
   public:
     void finalize(FreeOp *fop);
--- a/js/src/vm/String.h
+++ b/js/src/vm/String.h
@@ -920,18 +920,16 @@ class PropertyName : public JSAtom
 JS_STATIC_ASSERT(sizeof(PropertyName) == sizeof(JSString));
 
 static MOZ_ALWAYS_INLINE jsid
 NameToId(PropertyName *name)
 {
     return NON_INTEGER_ATOM_TO_JSID(name);
 }
 
-typedef HeapPtr<JSAtom> HeapPtrAtom;
-
 class AutoNameVector : public AutoVectorRooter<PropertyName *>
 {
     typedef AutoVectorRooter<PropertyName *> BaseType;
   public:
     explicit AutoNameVector(JSContext *cx
                             MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
         : AutoVectorRooter<PropertyName *>(cx, NAMEVECTOR)
     {