Bug 1547677 - Rename the ReadBarriered wrapper type to WeakHeapPtr and remove WeakRef r=sfink
author: Jon Coppeard <jcoppeard@mozilla.com>
date: Mon, 29 Apr 2019 11:30:29 +0100
changeset 530923 c6640ffe809f4129e436205d8c5146f4bb90b059
parent 530922 f210be8c82a050a4ea525db1568e7c137d9038b1
child 530924 e4210db75a7f22d639f59fe5955de6bf62582df9
child 531097 c9918832d469c13bb49f895ca2f7562a3f7c19df
push id: 11265
push user: ffxbld-merge
push date: Mon, 13 May 2019 10:53:39 +0000
treeherder: mozilla-beta@77e0fe8dbdd3
reviewers: sfink
bugs: 1547677
milestone: 68.0a1
Bug 1547677 - Rename the ReadBarriered wrapper type to WeakHeapPtr and remove WeakRef r=sfink

I removed WeakRef: it was just an alias of the type now named WeakHeapPtr, and having two names for it is more confusing than using one name for both uses (the difference is only whether you trace the edge with TraceWeakEdge or sweep it in a more manual fashion).

Differential Revision: https://phabricator.services.mozilla.com/D29180
Differential Revision: https://phabricator.services.mozilla.com/D29337
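Editorial note: for readers comparing the two idioms the message mentions, here is a minimal sketch (SpiderMonkey-internal API; the WeakEdgeHolder struct is hypothetical) of the same WeakHeapPtr field handled either by tracing or by manual sweeping, mirroring testGCWeakRef.cpp and JitRealm::sweep in the diff below:

  #include "gc/Barrier.h"   // js::WeakHeapPtr
  #include "gc/Marking.h"   // js::gc::IsAboutToBeFinalized
  #include "gc/Tracer.h"    // js::TraceWeakEdge

  struct WeakEdgeHolder {
    js::WeakHeapPtr<JSObject*> weak;

    // Style 1: trace the edge; the tracer nulls it out if the target dies.
    void trace(JSTracer* trc) { js::TraceWeakEdge(trc, &weak, "weak"); }

    // Style 2: sweep the edge manually, as JitRealm::sweep does below.
    void sweep() {
      if (weak && js::gc::IsAboutToBeFinalized(&weak)) {
        weak.set(nullptr);
      }
    }
  };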
js/src/gc/Barrier.h
js/src/gc/Marking-inl.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/NurseryAwareHashMap.h
js/src/gc/Policy.h
js/src/gc/Tracer.h
js/src/gc/WeakMap-inl.h
js/src/jit/Ion.cpp
js/src/jit/JitRealm.h
js/src/jsapi-tests/testGCWeakRef.cpp
js/src/vm/ArgumentsObject.cpp
js/src/vm/ArrayBufferObject.h
js/src/vm/AtomsTable.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/EnvironmentObject.cpp
js/src/vm/EnvironmentObject.h
js/src/vm/GlobalObject.h
js/src/vm/JSAtom.cpp
js/src/vm/JSScript.h
js/src/vm/ObjectGroup.cpp
js/src/vm/ObjectGroup.h
js/src/vm/Realm.h
js/src/vm/RegExpShared.h
js/src/vm/SavedFrame.h
js/src/vm/SavedStacks.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/SymbolType.h
js/src/wasm/WasmInstance.h
js/src/wasm/WasmJS.h
js/src/wasm/WasmTable.h
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -54,17 +54,17 @@
  *
  * PreBarriered  Provides a pre-barrier but not a post-barrier. Necessary when
  *               generational GC updates are handled manually, e.g. for hash
  *               table keys that don't use MovableCellHasher.
  *
  * HeapSlot      Provides pre and post-barriers, optimised for use in JSObject
  *               slots and elements.
  *
- * ReadBarriered Provides read and post-write barriers, for use with weak
+ * WeakHeapPtr   Provides read and post-write barriers, for use with weak
  *               pointers.
  *
  * The following classes are implemented in js/RootingAPI.h (in the JS
  * namespace):
  *
  * Heap          General wrapper for external clients. Like HeapPtr but also
  *               handles cycle collector concerns. Most external clients should
  *               use this.
@@ -83,17 +83,17 @@
  *     Is your pointer weak or strong?
  *       Strong =>
  *         Do you want automatic handling of nursery pointers?
  *           Yes, of course =>
  *             Can your object be destroyed outside of a GC?
  *               Yes => Use HeapPtr<T>
  *               No => Use GCPtr<T> (optimization)
  *           No, I'll do this myself => Use PreBarriered<T>
- *       Weak => Use ReadBarriered<T>
+ *       Weak => Use WeakHeapPtr<T>
  *   No, it's external =>
  *     Can your pointer refer to nursery objects?
  *       Yes => Use JS::Heap<T>
  *       Never => Use JS::TenuredHeap<T> (optimization)
  *
  * Write barriers
  * ==============
  *
@@ -236,36 +236,36 @@
  * the "obj->field.init(value)" method instead of "obj->field = value". We use
  * the init naming idiom in many places to signify that a field is being
  * assigned for the first time.
  *
  * This file implements the following hierarchy of classes:
  *
  * BarrieredBase             base class of all barriers
  *  |  |
- *  | WriteBarrieredBase     base class which provides common write operations
+ *  | WriteBarriered         base class which provides common write operations
  *  |  |  |  |  |
  *  |  |  |  | PreBarriered  provides pre-barriers only
  *  |  |  |  |
  *  |  |  | GCPtr            provides pre- and post-barriers
  *  |  |  |
  *  |  | HeapPtr             provides pre- and post-barriers; is relocatable
  *  |  |                     and deletable for use inside C++ managed memory
  *  |  |
  *  | HeapSlot               similar to GCPtr, but tailored to slots storage
  *  |
- * ReadBarrieredBase         base class which provides common read operations
+ * ReadBarriered             base class which provides common read operations
  *  |
- * ReadBarriered             provides read barriers only
+ * WeakHeapPtr               provides read barriers only
  *
  *
  * The implementation of the barrier logic is implemented on T::writeBarrier.*,
  * via:
  *
- * WriteBarrieredBase<T>::pre
+ * WriteBarriered<T>::pre
  *  -> InternalBarrierMethods<T*>::preBarrier
  *      -> T::writeBarrierPre
  *  -> InternalBarrierMethods<Value>::preBarrier
  *  -> InternalBarrierMethods<jsid>::preBarrier
  *      -> InternalBarrierMethods<T*>::preBarrier
  *          -> T::writeBarrierPre
  *
  * GCPtr<T>::post and HeapPtr<T>::post
@@ -420,24 +420,24 @@ class MOZ_NON_MEMMOVABLE BarrieredBase {
   // instantiation. Friending to the generic template leads to a number of
   // unintended consequences, including template resolution ambiguity and a
   // circular dependency with Tracing.h.
   T* unsafeUnbarrieredForTracing() { return &value; }
 };
 
 // Base class for barriered pointer types that intercept only writes.
 template <class T>
-class WriteBarrieredBase
+class WriteBarriered
     : public BarrieredBase<T>,
-      public WrappedPtrOperations<T, WriteBarrieredBase<T>> {
+      public WrappedPtrOperations<T, WriteBarriered<T>> {
  protected:
   using BarrieredBase<T>::value;
 
-  // WriteBarrieredBase is not directly instantiable.
-  explicit WriteBarrieredBase(const T& v) : BarrieredBase<T>(v) {}
+  // WriteBarriered is not directly instantiable.
+  explicit WriteBarriered(const T& v) : BarrieredBase<T>(v) {}
 
  public:
   using ElementType = T;
 
   DECLARE_POINTER_CONSTREF_OPS(T);
 
   // Use this if the automatic coercion to T isn't working.
   const T& get() const { return this->value; }
@@ -463,26 +463,26 @@ class WriteBarrieredBase
  * manually implemented when using this class. GCPtr and HeapPtr should be used
  * in all cases that do not require explicit low-level control of moving
  * behavior.
  *
  * This class is useful for example for HashMap keys where automatically
  * updating a moved nursery pointer would break the hash table.
  */
 template <class T>
-class PreBarriered : public WriteBarrieredBase<T> {
+class PreBarriered : public WriteBarriered<T> {
  public:
-  PreBarriered() : WriteBarrieredBase<T>(JS::SafelyInitialized<T>()) {}
+  PreBarriered() : WriteBarriered<T>(JS::SafelyInitialized<T>()) {}
   /*
    * Allow implicit construction for use in generic contexts, such as
    * DebuggerWeakMap::markKeys.
    */
-  MOZ_IMPLICIT PreBarriered(const T& v) : WriteBarrieredBase<T>(v) {}
+  MOZ_IMPLICIT PreBarriered(const T& v) : WriteBarriered<T>(v) {}
   explicit PreBarriered(const PreBarriered<T>& v)
-      : WriteBarrieredBase<T>(v.value) {}
+      : WriteBarriered<T>(v.value) {}
   ~PreBarriered() { this->pre(); }
 
   void init(const T& v) { this->value = v; }
 
   /* Use to set the pointer to nullptr. */
   void clear() {
     this->pre();
     this->value = nullptr;
@@ -505,23 +505,23 @@ class PreBarriered : public WriteBarrier
  * used in contexts where it may be implicitly moved or deleted, e.g. most
  * containers.
  *
  * The post-barriers implemented by this class are faster than those
  * implemented by js::HeapPtr<T> or JS::Heap<T> at the cost of not
  * automatically handling deletion or movement.
  */
 template <class T>
-class GCPtr : public WriteBarrieredBase<T> {
+class GCPtr : public WriteBarriered<T> {
  public:
-  GCPtr() : WriteBarrieredBase<T>(JS::SafelyInitialized<T>()) {}
-  explicit GCPtr(const T& v) : WriteBarrieredBase<T>(v) {
+  GCPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>()) {}
+  explicit GCPtr(const T& v) : WriteBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), v);
   }
-  explicit GCPtr(const GCPtr<T>& v) : WriteBarrieredBase<T>(v) {
+  explicit GCPtr(const GCPtr<T>& v) : WriteBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), v);
   }
 #ifdef DEBUG
   ~GCPtr() {
     // No barriers are necessary as this only happens when we are sweeping
     // or after GCManagedDeletePolicy has triggered the barriers for us
     // and cleared the pointer.
     //
@@ -582,32 +582,32 @@ class GCPtr : public WriteBarrieredBase<
  *
  * Obviously, JSObjects, JSStrings, and the like get tenured and compacted, so
  * whatever pointers they contain get relocated, in the sense used here.
  * However, since the GC itself is moving those values, it takes care of its
  * internal pointers to those pointers itself. HeapPtr is only necessary
  * when the relocation would otherwise occur without the GC's knowledge.
  */
 template <class T>
-class HeapPtr : public WriteBarrieredBase<T> {
+class HeapPtr : public WriteBarriered<T> {
  public:
-  HeapPtr() : WriteBarrieredBase<T>(JS::SafelyInitialized<T>()) {}
+  HeapPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>()) {}
 
   // Implicitly adding barriers is a reasonable default.
-  MOZ_IMPLICIT HeapPtr(const T& v) : WriteBarrieredBase<T>(v) {
+  MOZ_IMPLICIT HeapPtr(const T& v) : WriteBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), this->value);
   }
 
   /*
    * For HeapPtr, move semantics are equivalent to copy semantics. In
    * C++, a copy constructor taking const-ref is the way to get a single
    * function that will be used for both lvalue and rvalue copies, so we can
    * simply omit the rvalue variant.
    */
-  MOZ_IMPLICIT HeapPtr(const HeapPtr<T>& v) : WriteBarrieredBase<T>(v) {
+  MOZ_IMPLICIT HeapPtr(const HeapPtr<T>& v) : WriteBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), this->value);
   }
 
   ~HeapPtr() {
     this->pre();
     this->post(this->value, JS::SafelyInitialized<T>());
   }
 
@@ -636,64 +636,64 @@ class HeapPtr : public WriteBarrieredBas
     T tmp = this->value;
     this->value = v;
     this->post(tmp, this->value);
   }
 };
 
 // Base class for barriered pointer types that intercept reads and writes.
 template <typename T>
-class ReadBarrieredBase : public BarrieredBase<T> {
+class ReadBarriered : public BarrieredBase<T> {
  protected:
-  // ReadBarrieredBase is not directly instantiable.
-  explicit ReadBarrieredBase(const T& v) : BarrieredBase<T>(v) {}
+  // ReadBarriered is not directly instantiable.
+  explicit ReadBarriered(const T& v) : BarrieredBase<T>(v) {}
 
  protected:
   void read() const { InternalBarrierMethods<T>::readBarrier(this->value); }
   void post(const T& prev, const T& next) {
     InternalBarrierMethods<T>::postBarrier(&this->value, prev, next);
   }
 };
 
 // Incremental GC requires that weak pointers have read barriers. See the block
 // comment at the top of Barrier.h for a complete discussion of why.
 //
 // Note that this class also has post-barriers, so is safe to use with nursery
 // pointers. However, when used as a hashtable key, care must still be taken to
 // insert manual post-barriers on the table for rekeying if the key is based in
 // any way on the address of the object.
 template <typename T>
-class ReadBarriered : public ReadBarrieredBase<T>,
-                      public WrappedPtrOperations<T, ReadBarriered<T>> {
+class WeakHeapPtr : public ReadBarriered<T>,
+                    public WrappedPtrOperations<T, WeakHeapPtr<T>> {
  protected:
-  using ReadBarrieredBase<T>::value;
+  using ReadBarriered<T>::value;
 
  public:
-  ReadBarriered() : ReadBarrieredBase<T>(JS::SafelyInitialized<T>()) {}
+  WeakHeapPtr() : ReadBarriered<T>(JS::SafelyInitialized<T>()) {}
 
   // It is okay to add barriers implicitly.
-  MOZ_IMPLICIT ReadBarriered(const T& v) : ReadBarrieredBase<T>(v) {
+  MOZ_IMPLICIT WeakHeapPtr(const T& v) : ReadBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), v);
   }
 
   // The copy constructor creates a new weak edge but the wrapped pointer does
   // not escape, so no read barrier is necessary.
-  explicit ReadBarriered(const ReadBarriered& v) : ReadBarrieredBase<T>(v) {
+  explicit WeakHeapPtr(const WeakHeapPtr& v) : ReadBarriered<T>(v) {
     this->post(JS::SafelyInitialized<T>(), v.unbarrieredGet());
   }
 
   // Move retains the lifetime status of the source edge, so does not fire
   // the read barrier of the defunct edge.
-  ReadBarriered(ReadBarriered&& v) : ReadBarrieredBase<T>(std::move(v)) {
+  WeakHeapPtr(WeakHeapPtr&& v) : ReadBarriered<T>(std::move(v)) {
     this->post(JS::SafelyInitialized<T>(), v.value);
   }
 
-  ~ReadBarriered() { this->post(this->value, JS::SafelyInitialized<T>()); }
+  ~WeakHeapPtr() { this->post(this->value, JS::SafelyInitialized<T>()); }
 
-  ReadBarriered& operator=(const ReadBarriered& v) {
+  WeakHeapPtr& operator=(const WeakHeapPtr& v) {
     AssertTargetIsNotGray(v.value);
     T prior = this->value;
     this->value = v.value;
     this->post(prior, v.value);
     return *this;
   }
 
   const T& get() const {
@@ -720,22 +720,22 @@ class ReadBarriered : public ReadBarrier
     this->value = v;
     this->post(tmp, v);
   }
 };
 
 // A WeakRef pointer does not hold its target live and is automatically nulled
 // out when the GC discovers that it is not reachable from any other path.
 template <typename T>
-using WeakRef = ReadBarriered<T>;
+using WeakRef = WeakHeapPtr<T>;
 
 // A pre- and post-barriered Value that is specialized to be aware that it
 // resides in a slots or elements vector. This allows it to be relocated in
 // memory, but with substantially less overhead than a HeapPtr.
-class HeapSlot : public WriteBarrieredBase<Value> {
+class HeapSlot : public WriteBarriered<Value> {
  public:
   enum Kind { Slot = 0, Element = 1 };
 
   void init(NativeObject* owner, Kind kind, uint32_t slot, const Value& v) {
     value = v;
     post(owner, kind, slot, v);
   }
 
@@ -900,18 +900,18 @@ struct MovableCellHasher<HeapPtr<T>> {
   }
   static bool match(const Key& k, const Lookup& l) {
     return MovableCellHasher<T>::match(k, l);
   }
   static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 template <typename T>
-struct MovableCellHasher<ReadBarriered<T>> {
-  using Key = ReadBarriered<T>;
+struct MovableCellHasher<WeakHeapPtr<T>> {
+  using Key = WeakHeapPtr<T>;
   using Lookup = T;
 
   static bool hasHash(const Lookup& l) {
     return MovableCellHasher<T>::hasHash(l);
   }
   static bool ensureHash(const Lookup& l) {
     return MovableCellHasher<T>::ensureHash(l);
   }
@@ -940,20 +940,20 @@ struct PreBarrieredHasher {
   typedef PreBarriered<T> Key;
   typedef T Lookup;
 
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.get() == l; }
   static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
-/* Useful for hashtables with a ReadBarriered as key. */
+/* Useful for hashtables with a WeakHeapPtr as key. */
 template <class T>
-struct ReadBarrieredHasher {
-  typedef ReadBarriered<T> Key;
+struct WeakHeapPtrHasher {
+  typedef WeakHeapPtr<T> Key;
   typedef T Lookup;
 
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.unbarrieredGet() == l; }
   static void rekey(Key& k, const Key& newKey) {
     k.set(newKey.unbarrieredGet());
   }
 };
@@ -964,19 +964,19 @@ namespace mozilla {
 
 /* Specialized hashing policy for GCPtrs. */
 template <class T>
 struct DefaultHasher<js::GCPtr<T>> : js::GCPtrHasher<T> {};
 
 template <class T>
 struct DefaultHasher<js::PreBarriered<T>> : js::PreBarrieredHasher<T> {};
 
-/* Specialized hashing policy for ReadBarriereds. */
+/* Specialized hashing policy for WeakHeapPtrs. */
 template <class T>
-struct DefaultHasher<js::ReadBarriered<T>> : js::ReadBarrieredHasher<T> {};
+struct DefaultHasher<js::WeakHeapPtr<T>> : js::WeakHeapPtrHasher<T> {};
 
 }  // namespace mozilla
 
 namespace js {
 
 class ArrayObject;
 class DebugEnvironmentProxy;
 class GlobalObject;
@@ -1011,28 +1011,27 @@ using GCPtrUnownedBaseShape = GCPtr<Unow
 using GCPtrObjectGroup = GCPtr<ObjectGroup*>;
 using GCPtrScope = GCPtr<Scope*>;
 using GCPtrValue = GCPtr<Value>;
 using GCPtrId = GCPtr<jsid>;
 
 using ImmutablePropertyNamePtr = ImmutableTenuredPtr<PropertyName*>;
 using ImmutableSymbolPtr = ImmutableTenuredPtr<JS::Symbol*>;
 
-using ReadBarrieredDebugEnvironmentProxy =
-    ReadBarriered<DebugEnvironmentProxy*>;
-using ReadBarrieredGlobalObject = ReadBarriered<GlobalObject*>;
-using ReadBarrieredObject = ReadBarriered<JSObject*>;
-using ReadBarrieredScript = ReadBarriered<JSScript*>;
-using ReadBarrieredScriptSourceObject = ReadBarriered<ScriptSourceObject*>;
-using ReadBarrieredShape = ReadBarriered<Shape*>;
-using ReadBarrieredJitCode = ReadBarriered<jit::JitCode*>;
-using ReadBarrieredObjectGroup = ReadBarriered<ObjectGroup*>;
-using ReadBarrieredSymbol = ReadBarriered<JS::Symbol*>;
-using ReadBarrieredWasmInstanceObject = ReadBarriered<WasmInstanceObject*>;
-using ReadBarrieredWasmTableObject = ReadBarriered<WasmTableObject*>;
+using WeakHeapPtrDebugEnvironmentProxy = WeakHeapPtr<DebugEnvironmentProxy*>;
+using WeakHeapPtrGlobalObject = WeakHeapPtr<GlobalObject*>;
+using WeakHeapPtrObject = WeakHeapPtr<JSObject*>;
+using WeakHeapPtrScript = WeakHeapPtr<JSScript*>;
+using WeakHeapPtrScriptSourceObject = WeakHeapPtr<ScriptSourceObject*>;
+using WeakHeapPtrShape = WeakHeapPtr<Shape*>;
+using WeakHeapPtrJitCode = WeakHeapPtr<jit::JitCode*>;
+using WeakHeapPtrObjectGroup = WeakHeapPtr<ObjectGroup*>;
+using WeakHeapPtrSymbol = WeakHeapPtr<JS::Symbol*>;
+using WeakHeapPtrWasmInstanceObject = WeakHeapPtr<WasmInstanceObject*>;
+using WeakHeapPtrWasmTableObject = WeakHeapPtr<WasmTableObject*>;
 
 using HeapPtrJitCode = HeapPtr<jit::JitCode*>;
 using HeapPtrRegExpShared = HeapPtr<RegExpShared*>;
 using HeapPtrValue = HeapPtr<Value>;
 
 namespace detail {
 
 template <typename T>
@@ -1046,18 +1045,18 @@ struct DefineComparisonOps<GCPtr<T>> : m
 };
 
 template <typename T>
 struct DefineComparisonOps<HeapPtr<T>> : mozilla::TrueType {
   static const T& get(const HeapPtr<T>& v) { return v.get(); }
 };
 
 template <typename T>
-struct DefineComparisonOps<ReadBarriered<T>> : mozilla::TrueType {
-  static const T& get(const ReadBarriered<T>& v) { return v.unbarrieredGet(); }
+struct DefineComparisonOps<WeakHeapPtr<T>> : mozilla::TrueType {
+  static const T& get(const WeakHeapPtr<T>& v) { return v.unbarrieredGet(); }
 };
 
 template <>
 struct DefineComparisonOps<HeapSlot> : mozilla::TrueType {
   static const Value& get(const HeapSlot& v) { return v.get(); }
 };
 
 } /* namespace detail */
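Editorial note: because mozilla::DefaultHasher is specialized for WeakHeapPtr above (forwarding to WeakHeapPtrHasher), weakly-held keys can be declared without naming a hash policy. A hedged sketch; the WeakScriptSet alias is hypothetical:

  #include "gc/Barrier.h"    // js::WeakHeapPtr and the DefaultHasher specialization
  #include "js/HashTable.h"  // js::HashSet

  // Probes hash the bare JSScript* and match via unbarrieredGet(), so looking
  // up an entry does not fire read barriers; rekey() keeps that property.
  using WeakScriptSet =
      js::HashSet<js::WeakHeapPtr<JSScript*>,
                  mozilla::DefaultHasher<js::WeakHeapPtr<JSScript*>>,
                  js::SystemAllocPolicy>;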
--- a/js/src/gc/Marking-inl.h
+++ b/js/src/gc/Marking-inl.h
@@ -126,17 +126,17 @@ inline bool IsGCThingValidAfterMovingGC(
 template <typename T>
 inline void CheckGCThingAfterMovingGC(T* t) {
   if (t) {
     MOZ_RELEASE_ASSERT(IsGCThingValidAfterMovingGC(t));
   }
 }
 
 template <typename T>
-inline void CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t) {
+inline void CheckGCThingAfterMovingGC(const WeakHeapPtr<T*>& t) {
   CheckGCThingAfterMovingGC(t.unbarrieredGet());
 }
 
 inline void CheckValueAfterMovingGC(const JS::Value& value) {
   ApplyGCThingTyped(value, [](auto t) { CheckGCThingAfterMovingGC(t); });
 }
 
 #endif  // JSGC_HASH_TABLE_CHECKS
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -474,17 +474,17 @@ void js::TraceManuallyBarrieredCrossComp
 template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
     JSTracer*, JSObject*, JSObject**, const char*);
 template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSScript*>(
     JSTracer*, JSObject*, JSScript**, const char*);
 template void js::TraceManuallyBarrieredCrossCompartmentEdge<LazyScript*>(
     JSTracer*, JSObject*, LazyScript**, const char*);
 
 void js::TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src,
-                                   WriteBarrieredBase<Value>* dst,
+                                   WriteBarriered<Value>* dst,
                                    const char* name) {
   if (ShouldTraceCrossCompartment(trc, src, dst->get())) {
     TraceEdgeInternal(trc, dst->unsafeUnbarrieredForTracing(), name);
   }
 }
 
 template <typename T>
 void js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name) {
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -84,47 +84,47 @@ template <typename T>
 inline bool IsMarkedUnbarriered(JSRuntime* rt, T* thingp) {
   return IsMarkedInternal(rt, ConvertToBase(thingp));
 }
 
 // Report whether a GC thing has been marked with any color. Things which are in
 // zones that are not currently being collected or are owned by another runtime
 // are always reported as being marked.
 template <typename T>
-inline bool IsMarked(JSRuntime* rt, WriteBarrieredBase<T>* thingp) {
+inline bool IsMarked(JSRuntime* rt, WriteBarriered<T>* thingp) {
   return IsMarkedInternal(rt,
                           ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 // Report whether a GC thing has been marked black.
 template <typename T>
 inline bool IsMarkedBlackUnbarriered(JSRuntime* rt, T* thingp) {
   return IsMarkedBlackInternal(rt, ConvertToBase(thingp));
 }
 
 // Report whether a GC thing has been marked black.
 template <typename T>
-inline bool IsMarkedBlack(JSRuntime* rt, WriteBarrieredBase<T>* thingp) {
+inline bool IsMarkedBlack(JSRuntime* rt, WriteBarriered<T>* thingp) {
   return IsMarkedBlackInternal(
       rt, ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 template <typename T>
 inline bool IsAboutToBeFinalizedUnbarriered(T* thingp) {
   return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
 }
 
 template <typename T>
-inline bool IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp) {
+inline bool IsAboutToBeFinalized(WriteBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
       ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 template <typename T>
-inline bool IsAboutToBeFinalized(ReadBarrieredBase<T>* thingp) {
+inline bool IsAboutToBeFinalized(ReadBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
       ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
 }
 
 bool IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured);
 
 inline Cell* ToMarkable(const Value& v) {
   if (v.isGCThing()) {
@@ -178,17 +178,17 @@ inline T MaybeForwarded(T t);
 
 template <typename T>
 inline bool IsGCThingValidAfterMovingGC(T* t);
 
 template <typename T>
 inline void CheckGCThingAfterMovingGC(T* t);
 
 template <typename T>
-inline void CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t);
+inline void CheckGCThingAfterMovingGC(const WeakHeapPtr<T*>& t);
 
 inline void CheckValueAfterMovingGC(const JS::Value& value);
 
 #endif  // JSGC_HASH_TABLE_CHECKS
 
 } /* namespace gc */
 } /* namespace js */
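Editorial note: the overload set above dispatches on the renamed base classes, so a GCPtr or HeapPtr field selects the WriteBarriered<T> overload and a WeakHeapPtr field selects the ReadBarriered<T> overload. A minimal sketch of the usual weak-table sweep check, with a hypothetical entry type:

  struct Entry {
    js::GCPtr<js::ObjectGroup*> strong;  // would bind the WriteBarriered overload
    js::WeakHeapPtr<JSScript*> weak;     // binds the ReadBarriered overload
  };

  static bool EntryNeedsSweep(Entry* e) {
    // A dead weak edge invalidates the whole entry, strong edge included.
    return js::gc::IsAboutToBeFinalized(&e->weak);
  }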
 
--- a/js/src/gc/NurseryAwareHashMap.h
+++ b/js/src/gc/NurseryAwareHashMap.h
@@ -15,32 +15,32 @@
 
 namespace js {
 
 namespace detail {
 // This class only handles the incremental case and does not deal with nursery
 // pointers. The only users should be for NurseryAwareHashMap; it is defined
 // externally because we need a GCPolicy for its use in the contained map.
 template <typename T>
-class UnsafeBareReadBarriered : public ReadBarrieredBase<T> {
+class UnsafeBareWeakHeapPtr : public ReadBarriered<T> {
  public:
-  UnsafeBareReadBarriered()
-      : ReadBarrieredBase<T>(JS::SafelyInitialized<T>()) {}
-  MOZ_IMPLICIT UnsafeBareReadBarriered(const T& v) : ReadBarrieredBase<T>(v) {}
-  explicit UnsafeBareReadBarriered(const UnsafeBareReadBarriered& v)
-      : ReadBarrieredBase<T>(v) {}
-  UnsafeBareReadBarriered(UnsafeBareReadBarriered&& v)
-      : ReadBarrieredBase<T>(std::move(v)) {}
+  UnsafeBareWeakHeapPtr()
+      : ReadBarriered<T>(JS::SafelyInitialized<T>()) {}
+  MOZ_IMPLICIT UnsafeBareWeakHeapPtr(const T& v) : ReadBarriered<T>(v) {}
+  explicit UnsafeBareWeakHeapPtr(const UnsafeBareWeakHeapPtr& v)
+      : ReadBarriered<T>(v) {}
+  UnsafeBareWeakHeapPtr(UnsafeBareWeakHeapPtr&& v)
+      : ReadBarriered<T>(std::move(v)) {}
 
-  UnsafeBareReadBarriered& operator=(const UnsafeBareReadBarriered& v) {
+  UnsafeBareWeakHeapPtr& operator=(const UnsafeBareWeakHeapPtr& v) {
     this->value = v.value;
     return *this;
   }
 
-  UnsafeBareReadBarriered& operator=(const T& v) {
+  UnsafeBareWeakHeapPtr& operator=(const T& v) {
     this->value = v;
     return *this;
   }
 
   const T get() const {
     if (!InternalBarrierMethods<T>::isMarkable(this->value)) {
       return JS::SafelyInitialized<T>();
     }
@@ -66,17 +66,17 @@ class UnsafeBareReadBarriered : public R
 // all values contain a strong reference to the key. It also requires the
 // policy to contain |isTenured| and |needsSweep| members, which is fairly
 // non-standard. This limits its usefulness to the CrossCompartmentMap at the
 // moment, but might serve as a useful base for other tables in future.
 template <typename Key, typename Value,
           typename HashPolicy = DefaultHasher<Key>,
           typename AllocPolicy = TempAllocPolicy>
 class NurseryAwareHashMap {
-  using BarrieredValue = detail::UnsafeBareReadBarriered<Value>;
+  using BarrieredValue = detail::UnsafeBareWeakHeapPtr<Value>;
   using MapType =
       GCRekeyableHashMap<Key, BarrieredValue, HashPolicy, AllocPolicy>;
   MapType map;
 
   // Keep a list of all keys for which JS::GCPolicy<Key>::isTenured is false.
   // This lets us avoid a full traversal of the map on each minor GC, keeping
   // the minor GC times proportional to the nursery heap size.
   Vector<Key, 0, AllocPolicy> nurseryEntries;
@@ -175,21 +175,21 @@ class NurseryAwareHashMap {
 
   bool hasNurseryEntries() const { return !nurseryEntries.empty(); }
 };
 
 }  // namespace js
 
 namespace JS {
 template <typename T>
-struct GCPolicy<js::detail::UnsafeBareReadBarriered<T>> {
+struct GCPolicy<js::detail::UnsafeBareWeakHeapPtr<T>> {
   static void trace(JSTracer* trc,
-                    js::detail::UnsafeBareReadBarriered<T>* thingp,
+                    js::detail::UnsafeBareWeakHeapPtr<T>* thingp,
                     const char* name) {
     js::TraceEdge(trc, thingp, name);
   }
-  static bool needsSweep(js::detail::UnsafeBareReadBarriered<T>* thingp) {
+  static bool needsSweep(js::detail::UnsafeBareWeakHeapPtr<T>* thingp) {
     return js::gc::IsAboutToBeFinalized(thingp);
   }
 };
 }  // namespace JS
 
 #endif  // gc_NurseryAwareHashMap_h
--- a/js/src/gc/Policy.h
+++ b/js/src/gc/Policy.h
@@ -67,21 +67,21 @@ struct GCPolicy<js::HeapPtr<T>> {
     js::TraceNullableEdge(trc, thingp, name);
   }
   static bool needsSweep(js::HeapPtr<T>* thingp) {
     return js::gc::IsAboutToBeFinalized(thingp);
   }
 };
 
 template <typename T>
-struct GCPolicy<js::ReadBarriered<T>> {
-  static void trace(JSTracer* trc, js::ReadBarriered<T>* thingp,
+struct GCPolicy<js::WeakHeapPtr<T>> {
+  static void trace(JSTracer* trc, js::WeakHeapPtr<T>* thingp,
                     const char* name) {
     js::TraceEdge(trc, thingp, name);
   }
-  static bool needsSweep(js::ReadBarriered<T>* thingp) {
+  static bool needsSweep(js::WeakHeapPtr<T>* thingp) {
     return js::gc::IsAboutToBeFinalized(thingp);
   }
 };
 
 }  // namespace JS
 
 #endif  // gc_Policy_h
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -106,41 +106,41 @@ inline void AssertRootMarkingPhase(JSTra
 
 }  // namespace gc
 
 // Trace through an edge in the live object graph on behalf of tracing. The
 // effect of tracing the edge depends on the JSTracer being used. For pointer
 // types, |*thingp| must not be null.
 
 template <typename T>
-inline void TraceEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp,
+inline void TraceEdge(JSTracer* trc, WriteBarriered<T>* thingp,
                       const char* name) {
   gc::TraceEdgeInternal(
       trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
 }
 
 template <typename T>
-inline void TraceEdge(JSTracer* trc, ReadBarriered<T>* thingp,
+inline void TraceEdge(JSTracer* trc, WeakHeapPtr<T>* thingp,
                       const char* name) {
   gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()), name);
 }
 
 // Trace through a possibly-null edge in the live object graph on behalf of
 // tracing.
 
 template <typename T>
-inline void TraceNullableEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp,
+inline void TraceNullableEdge(JSTracer* trc, WriteBarriered<T>* thingp,
                               const char* name) {
   if (InternalBarrierMethods<T>::isMarkable(thingp->get())) {
     TraceEdge(trc, thingp, name);
   }
 }
 
 template <typename T>
-inline void TraceNullableEdge(JSTracer* trc, ReadBarriered<T>* thingp,
+inline void TraceNullableEdge(JSTracer* trc, WeakHeapPtr<T>* thingp,
                               const char* name) {
   if (InternalBarrierMethods<T>::isMarkable(thingp->unbarrieredGet())) {
     TraceEdge(trc, thingp, name);
   }
 }
 
 // Trace through a "root" edge. These edges are the initial edges in the object
 // graph traversal. Root edges are asserted to only be traversed in the initial
@@ -148,34 +148,34 @@ inline void TraceNullableEdge(JSTracer* 
 
 template <typename T>
 inline void TraceRoot(JSTracer* trc, T* thingp, const char* name) {
   gc::AssertRootMarkingPhase(trc);
   gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp), name);
 }
 
 template <typename T>
-inline void TraceRoot(JSTracer* trc, ReadBarriered<T>* thingp,
+inline void TraceRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
                       const char* name) {
   TraceRoot(trc, thingp->unsafeGet(), name);
 }
 
 // Identical to TraceRoot, except that this variant will not crash if |*thingp|
 // is null.
 
 template <typename T>
 inline void TraceNullableRoot(JSTracer* trc, T* thingp, const char* name) {
   gc::AssertRootMarkingPhase(trc);
   if (InternalBarrierMethods<T>::isMarkable(*thingp)) {
     gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp), name);
   }
 }
 
 template <typename T>
-inline void TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp,
+inline void TraceNullableRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
                               const char* name) {
   TraceNullableRoot(trc, thingp->unsafeGet(), name);
 }
 
 // Like TraceEdge, but for edges that do not use one of the automatic barrier
 // classes and, thus, must be treated specially for moving GC. This method is
 // separate from TraceEdge to make accidental use of such edges more obvious.
 
@@ -185,42 +185,42 @@ inline void TraceManuallyBarrieredEdge(J
   gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp), name);
 }
 
 // Visits a WeakRef, but does not trace its referents. If *thingp is not marked
 // at the end of marking, it is replaced by nullptr. This method records
 // thingp, so the edge location must not change after this function is called.
 
 template <typename T>
-inline void TraceWeakEdge(JSTracer* trc, WeakRef<T>* thingp, const char* name) {
+inline void TraceWeakEdge(JSTracer* trc, WeakHeapPtr<T>* thingp,
+                          const char* name) {
   gc::TraceWeakEdgeInternal(
       trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
 }
 
 // Trace all edges contained in the given array.
 
 template <typename T>
-void TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec,
+void TraceRange(JSTracer* trc, size_t len, WriteBarriered<T>* vec,
                 const char* name) {
   gc::TraceRangeInternal(
       trc, len, gc::ConvertToBase(vec[0].unsafeUnbarrieredForTracing()), name);
 }
 
 // Trace all root edges in the given array.
 
 template <typename T>
 void TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name) {
   gc::AssertRootMarkingPhase(trc);
   gc::TraceRangeInternal(trc, len, gc::ConvertToBase(vec), name);
 }
 
 // Trace an edge that crosses compartment boundaries. If the compartment of the
 // destination thing is not being GC'd, then the edge will not be traced.
 void TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src,
-                               WriteBarrieredBase<Value>* dst,
+                               WriteBarriered<Value>* dst,
                                const char* name);
 
 // As above but with manual barriers.
 template <typename T>
 void TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src,
                                                 T* dst, const char* name);
 
 // Permanent atoms and well-known symbols are shared between runtimes and must
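Editorial note: taken together, the WriteBarriered and WeakHeapPtr overloads above let a single trace() hook mix strong and weak members. A hedged sketch with a hypothetical class:

  class StubCache {
    js::HeapPtr<js::jit::JitCode*> strongCode_;
    js::WeakHeapPtr<js::jit::JitCode*> weakCode_;

   public:
    void trace(JSTracer* trc) {
      // Strong edge: marked (kept alive); skipped when null.
      js::TraceNullableEdge(trc, &strongCode_, "strong code");
      // Weak edge: recorded and nulled out if the target is not otherwise marked.
      js::TraceWeakEdge(trc, &weakCode_, "weak code");
    }
  };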
--- a/js/src/gc/WeakMap-inl.h
+++ b/js/src/gc/WeakMap-inl.h
@@ -10,17 +10,17 @@
 #include "gc/WeakMap.h"
 
 #include "js/TraceKind.h"
 #include "vm/JSContext.h"
 
 namespace js {
 
 template <typename T>
-static T extractUnbarriered(const WriteBarrieredBase<T>& v) {
+static T extractUnbarriered(const WriteBarriered<T>& v) {
   return v.get();
 }
 
 template <typename T>
 static T* extractUnbarriered(T* v) {
   return v;
 }
 
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -394,17 +394,17 @@ static T PopNextBitmaskValue(uint32_t* b
 
   MOZ_ASSERT(index < uint32_t(T::Count));
   return T(index);
 }
 
 void JitRealm::performStubReadBarriers(uint32_t stubsToBarrier) const {
   while (stubsToBarrier) {
     auto stub = PopNextBitmaskValue<StubIndex>(&stubsToBarrier);
-    const ReadBarrieredJitCode& jitCode = stubs_[stub];
+    const WeakHeapPtrJitCode& jitCode = stubs_[stub];
     MOZ_ASSERT(jitCode);
     jitCode.get();
   }
 }
 
 void jit::FreeIonBuilder(IonBuilder* builder) {
   // The builder is allocated into its LifoAlloc, so destroying that will
   // destroy the builder and all other data accumulated during compilation,
@@ -568,17 +568,17 @@ void JitRuntime::SweepJitcodeGlobalTable
 }
 
 void JitRealm::sweep(JS::Realm* realm) {
   // Any outstanding compilations should have been cancelled by the GC.
   MOZ_ASSERT(!HasOffThreadIonCompile(realm));
 
   stubCodes_->sweep();
 
-  for (ReadBarrieredJitCode& stub : stubs_) {
+  for (WeakHeapPtrJitCode& stub : stubs_) {
     if (stub && IsAboutToBeFinalized(&stub)) {
       stub.set(nullptr);
     }
   }
 }
 
 void JitZone::sweep() { baselineCacheIRStubCodes_.sweep(); }
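Editorial note: one subtlety in performStubReadBarriers above is that the result of jitCode.get() is deliberately discarded. Reading through a WeakHeapPtr fires its read barrier, which during incremental marking keeps the target alive even though the edge stays weak; that barrier is the entire point of the call. The same idiom as a free helper, a hedged sketch with a hypothetical name:

  // Fire the read barrier on a weak stub reference before its raw pointer is
  // baked into JIT code, so an in-progress incremental GC will not collect it.
  static void BarrierStubCode(const js::WeakHeapPtrJitCode& stub) {
    if (stub) {
      stub.get();  // value intentionally unused; only the barrier matters
    }
  }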
 
--- a/js/src/jit/JitRealm.h
+++ b/js/src/jit/JitRealm.h
@@ -467,17 +467,17 @@ struct CacheIRStubKey : public DefaultHa
 
   void operator=(CacheIRStubKey&& other) {
     stubInfo = std::move(other.stubInfo);
   }
 };
 
 template <typename Key>
 struct IcStubCodeMapGCPolicy {
-  static bool needsSweep(Key*, ReadBarrieredJitCode* value) {
+  static bool needsSweep(Key*, WeakHeapPtrJitCode* value) {
     return IsAboutToBeFinalized(value);
   }
 };
 
 class JitZone {
   // Allocated space for optimized baseline stubs.
   OptimizedICStubSpace optimizedStubSpace_;
   // Allocated space for cached cfg.
@@ -485,17 +485,17 @@ class JitZone {
 
   // Set of CacheIRStubInfo instances used by Ion stubs in this Zone.
   using IonCacheIRStubInfoSet =
       HashSet<CacheIRStubKey, CacheIRStubKey, SystemAllocPolicy>;
   IonCacheIRStubInfoSet ionCacheIRStubInfoSet_;
 
   // Map CacheIRStubKey to shared JitCode objects.
   using BaselineCacheIRStubCodeMap =
-      GCHashMap<CacheIRStubKey, ReadBarrieredJitCode, CacheIRStubKey,
+      GCHashMap<CacheIRStubKey, WeakHeapPtrJitCode, CacheIRStubKey,
                 SystemAllocPolicy, IcStubCodeMapGCPolicy<CacheIRStubKey>>;
   BaselineCacheIRStubCodeMap baselineCacheIRStubCodes_;
 
  public:
   void sweep();
 
   void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                               size_t* jitZone, size_t* baselineStubsOptimized,
@@ -536,17 +536,17 @@ class JitZone {
   void purgeIonCacheIRStubInfo() { ionCacheIRStubInfoSet_.clearAndCompact(); }
 };
 
 class JitRealm {
   friend class JitActivation;
 
   // Map ICStub keys to ICStub shared code objects.
   using ICStubCodeMap =
-      GCHashMap<uint32_t, ReadBarrieredJitCode, DefaultHasher<uint32_t>,
+      GCHashMap<uint32_t, WeakHeapPtrJitCode, DefaultHasher<uint32_t>,
                 ZoneAllocPolicy, IcStubCodeMapGCPolicy<uint32_t>>;
   ICStubCodeMap* stubCodes_;
 
   // The JitRealm stores stubs to concatenate strings inline and perform RegExp
   // calls inline. These bake in zone and realm specific pointers and can't be
   // stored in JitRuntime. They also are dependent on the value of
   // 'stringsCanBeInNursery' and must be flushed when its value changes.
   //
@@ -558,17 +558,17 @@ class JitRealm {
   enum StubIndex : uint32_t {
     StringConcat = 0,
     RegExpMatcher,
     RegExpSearcher,
     RegExpTester,
     Count
   };
 
-  mozilla::EnumeratedArray<StubIndex, StubIndex::Count, ReadBarrieredJitCode>
+  mozilla::EnumeratedArray<StubIndex, StubIndex::Count, WeakHeapPtrJitCode>
       stubs_;
 
   bool stringsCanBeInNursery;
 
   JitCode* generateStringConcatStub(JSContext* cx);
   JitCode* generateRegExpMatcherStub(JSContext* cx);
   JitCode* generateRegExpSearcherStub(JSContext* cx);
   JitCode* generateRegExpTesterStub(JSContext* cx);
@@ -610,23 +610,23 @@ class JitRealm {
     }
     stubs_[StringConcat] = generateStringConcatStub(cx);
     return stubs_[StringConcat];
   }
 
   void sweep(JS::Realm* realm);
 
   void discardStubs() {
-    for (ReadBarrieredJitCode& stubRef : stubs_) {
+    for (WeakHeapPtrJitCode& stubRef : stubs_) {
       stubRef = nullptr;
     }
   }
 
   bool hasStubs() const {
-    for (const ReadBarrieredJitCode& stubRef : stubs_) {
+    for (const WeakHeapPtrJitCode& stubRef : stubs_) {
       if (stubRef) {
         return true;
       }
     }
     return false;
   }
 
   void setStringsCanBeInNursery(bool allow) {
--- a/js/src/jsapi-tests/testGCWeakRef.cpp
+++ b/js/src/jsapi-tests/testGCWeakRef.cpp
@@ -7,17 +7,17 @@
 
 #include "gc/Barrier.h"
 #include "js/RootingAPI.h"
 
 #include "jsapi-tests/tests.h"
 
 struct MyHeap {
   explicit MyHeap(JSObject* obj) : weak(obj) {}
-  js::WeakRef<JSObject*> weak;
+  js::WeakHeapPtrObject weak;
 
   void trace(JSTracer* trc) { js::TraceWeakEdge(trc, &weak, "weak"); }
 };
 
 BEGIN_TEST(testGCWeakRef) {
   // Create an object and add a property to it so that we can read the
   // property back later to verify that object internals are not garbage.
   JS::RootedObject obj(cx, JS_NewPlainObject(cx));
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -246,17 +246,17 @@ ArgumentsObject* ArgumentsObject::create
 }
 
 ArgumentsObject* Realm::maybeArgumentsTemplateObject(bool mapped) const {
   return mapped ? mappedArgumentsTemplate_ : unmappedArgumentsTemplate_;
 }
 
 ArgumentsObject* Realm::getOrCreateArgumentsTemplateObject(JSContext* cx,
                                                            bool mapped) {
-  ReadBarriered<ArgumentsObject*>& obj =
+  WeakHeapPtr<ArgumentsObject*>& obj =
       mapped ? mappedArgumentsTemplate_ : unmappedArgumentsTemplate_;
 
   ArgumentsObject* templateObj = obj;
   if (templateObj) {
     return templateObj;
   }
 
   templateObj = ArgumentsObject::createTemplateObject(cx, mapped);
--- a/js/src/vm/ArrayBufferObject.h
+++ b/js/src/vm/ArrayBufferObject.h
@@ -610,24 +610,24 @@ class InnerViewTable {
 
  private:
   struct MapGCPolicy {
     static bool needsSweep(JSObject** key, ViewVector* value) {
       return InnerViewTable::sweepEntry(key, *value);
     }
   };
 
-  // This key is a raw pointer and not a ReadBarriered because the post-
-  // barrier would hold nursery-allocated entries live unconditionally. It is
-  // a very common pattern in low-level and performance-oriented JavaScript
-  // to create hundreds or thousands of very short lived temporary views on a
-  // larger buffer; having to tenured all of these would be a catastrophic
-  // performance regression. Thus, it is vital that nursery pointers in this
-  // map not be held live. Special support is required in the minor GC,
-  // implemented in sweepAfterMinorGC.
+  // This key is a raw pointer and not a WeakHeapPtr because the post-barrier
+  // would hold nursery-allocated entries live unconditionally. It is a very
+  // common pattern in low-level and performance-oriented JavaScript to create
+  // hundreds or thousands of very short lived temporary views on a larger
+  // buffer; having to tenure all of these would be a catastrophic performance
+  // regression. Thus, it is vital that nursery pointers in this map not be held
+  // live. Special support is required in the minor GC, implemented in
+  // sweepAfterMinorGC.
   typedef GCHashMap<JSObject*, ViewVector, MovableCellHasher<JSObject*>,
                     SystemAllocPolicy, MapGCPolicy>
       Map;
 
   // For all objects sharing their storage with some other view, this maps
   // the object to the list of such views. All entries in this map are weak.
   Map map;
 
--- a/js/src/vm/AtomsTable.h
+++ b/js/src/vm/AtomsTable.h
@@ -37,21 +37,21 @@ class MOZ_RAII AutoLockAllAtoms {
  public:
   explicit AutoLockAllAtoms(JSRuntime* rt);
   ~AutoLockAllAtoms();
 };
 
 struct AtomHasher {
   struct Lookup;
   static inline HashNumber hash(const Lookup& l);
-  static MOZ_ALWAYS_INLINE bool match(const ReadBarriered<JSAtom*>& entry,
+  static MOZ_ALWAYS_INLINE bool match(const WeakHeapPtr<JSAtom*>& entry,
                                       const Lookup& lookup);
 };
 
-using AtomSet = JS::GCHashSet<ReadBarriered<JSAtom*>, AtomHasher,
+using AtomSet = JS::GCHashSet<WeakHeapPtr<JSAtom*>, AtomHasher,
                               SystemAllocPolicy>;
 
 // This class is a wrapper for AtomSet that is used to ensure the AtomSet is
 // not modified. It should only expose read-only methods from AtomSet.
 // Note however that the atoms within the table can be marked during GC.
 class FrozenAtomSet {
   AtomSet* mSet;
 
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2521,17 +2521,17 @@ void Debugger::slowPathOnNewGlobalObject
 }
 
 /* static */
 bool Debugger::slowPathOnLogAllocationSite(JSContext* cx, HandleObject obj,
                                            HandleSavedFrame frame,
                                            mozilla::TimeStamp when,
                                            GlobalObject::DebuggerVector& dbgs) {
   MOZ_ASSERT(!dbgs.empty());
-  mozilla::DebugOnly<ReadBarriered<Debugger*>*> begin = dbgs.begin();
+  mozilla::DebugOnly<WeakHeapPtr<Debugger*>*> begin = dbgs.begin();
 
   // Root all the Debuggers while we're iterating over them;
   // appendAllocationSite calls Compartment::wrap, and thus can GC.
   //
   // SpiderMonkey protocol is generally for the caller to prove that it has
   // rooted the stuff it's asking you to operate on (i.e. by passing a
   // Handle), but in this case, we're iterating over a global's list of
   // Debuggers, and globals only hold their Debuggers weakly.
@@ -4394,22 +4394,22 @@ static T* findDebuggerInVector(Debugger*
     if (*p == dbg) {
       break;
     }
   }
   MOZ_ASSERT(p != vec->end());
   return p;
 }
 
-// a ReadBarriered version for findDebuggerInVector
+// a WeakHeapPtr version for findDebuggerInVector
 // TODO: Bug 1515934 - findDebuggerInVector<T> triggers read barriers.
-static ReadBarriered<Debugger*>* findDebuggerInVector(
+static WeakHeapPtr<Debugger*>* findDebuggerInVector(
     Debugger* dbg,
-    Vector<ReadBarriered<Debugger*>, 0, js::SystemAllocPolicy>* vec) {
-  ReadBarriered<Debugger*>* p;
+    Vector<WeakHeapPtr<Debugger*>, 0, js::SystemAllocPolicy>* vec) {
+  WeakHeapPtr<Debugger*>* p;
   for (p = vec->begin(); p != vec->end(); p++) {
     if (p->unbarrieredGet() == dbg) {
       break;
     }
   }
   MOZ_ASSERT(p != vec->end());
   return p;
 }
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -82,18 +82,18 @@ enum class ResumeMode {
 class AbstractGeneratorObject;
 class Breakpoint;
 class DebuggerMemory;
 class PromiseObject;
 class ScriptedOnStepHandler;
 class ScriptedOnPopHandler;
 class WasmInstanceObject;
 
-typedef HashSet<ReadBarrieredGlobalObject,
-                MovableCellHasher<ReadBarrieredGlobalObject>, ZoneAllocPolicy>
+typedef HashSet<WeakHeapPtrGlobalObject,
+                MovableCellHasher<WeakHeapPtrGlobalObject>, ZoneAllocPolicy>
     WeakGlobalObjectSet;
 
 #ifdef DEBUG
 extern void CheckDebuggeeThing(JSScript* script, bool invisibleOk);
 
 extern void CheckDebuggeeThing(LazyScript* script, bool invisibleOk);
 
 extern void CheckDebuggeeThing(JSObject* obj, bool invisibleOk);
@@ -433,17 +433,17 @@ class Debugger : private mozilla::Linked
 
     void trace(JSTracer* trc) {
       TraceNullableEdge(trc, &frame, "Debugger::AllocationsLogEntry::frame");
       TraceNullableEdge(trc, &ctorName,
                         "Debugger::AllocationsLogEntry::ctorName");
     }
   };
 
-  // Barrier methods so we can have ReadBarriered<Debugger*>.
+  // Barrier methods so we can have WeakHeapPtr<Debugger*>.
   static void readBarrier(Debugger* dbg) {
     InternalBarrierMethods<JSObject*>::readBarrier(dbg->object);
   }
   static void writeBarrierPost(Debugger** vp, Debugger* prev, Debugger* next) {}
 #ifdef DEBUG
   static void assertThingIsNotGray(Debugger* dbg) { return; }
 #endif
 
@@ -2055,17 +2055,17 @@ bool Debugger::observesNewScript() const
   return enabled && getHook(OnNewScript);
 }
 
 bool Debugger::observesNewGlobalObject() const {
   return enabled && getHook(OnNewGlobalObject);
 }
 
 bool Debugger::observesGlobal(GlobalObject* global) const {
-  ReadBarriered<GlobalObject*> debuggee(global);
+  WeakHeapPtr<GlobalObject*> debuggee(global);
   return debuggees.has(debuggee);
 }
 
 /* static */ void Debugger::onNewScript(JSContext* cx, HandleScript script) {
   // We early return in slowPathOnNewScript for self-hosted scripts, so we can
   // ignore those in our assertion here.
   MOZ_ASSERT_IF(!script->realm()->creationOptions().invisibleToDebugger() &&
                     !script->selfHosted(),
--- a/js/src/vm/EnvironmentObject.cpp
+++ b/js/src/vm/EnvironmentObject.cpp
@@ -2630,17 +2630,17 @@ bool DebugEnvironments::addDebugEnvironm
   DebugEnvironments* envs = ensureRealmData(cx);
   if (!envs) {
     return false;
   }
 
   MissingEnvironmentKey key(ei);
   MOZ_ASSERT(!envs->missingEnvs.has(key));
   if (!envs->missingEnvs.put(key,
-                             ReadBarriered<DebugEnvironmentProxy*>(debugEnv))) {
+                             WeakHeapPtr<DebugEnvironmentProxy*>(debugEnv))) {
     ReportOutOfMemory(cx);
     return false;
   }
 
   // Only add to liveEnvs if we synthesized the debug env on a live
   // frame.
   if (ei.withinInitialFrame()) {
     MOZ_ASSERT(!envs->liveEnvs.has(&debugEnv->environment()));
--- a/js/src/vm/EnvironmentObject.h
+++ b/js/src/vm/EnvironmentObject.h
@@ -945,31 +945,31 @@ class DebugEnvironments {
 
   /* The map from (non-debug) environments to debug environments. */
   ObjectWeakMap proxiedEnvs;
 
   /*
    * The map from live frames which have optimized-away environments to the
    * corresponding debug environments.
    */
-  typedef HashMap<MissingEnvironmentKey, ReadBarrieredDebugEnvironmentProxy,
+  typedef HashMap<MissingEnvironmentKey, WeakHeapPtrDebugEnvironmentProxy,
                   MissingEnvironmentKey, ZoneAllocPolicy>
       MissingEnvironmentMap;
   MissingEnvironmentMap missingEnvs;
 
   /*
    * The map from environment objects of live frames to the live frame. This
   * map is updated lazily whenever the debugger needs the information. In
    * between two lazy updates, liveEnvs becomes incomplete (but not invalid,
    * onPop* removes environments as they are popped). Thus, two consecutive
    * debugger lazy updates of liveEnvs need only fill in the new
    * environments.
    */
-  typedef GCHashMap<ReadBarriered<JSObject*>, LiveEnvironmentVal,
-                    MovableCellHasher<ReadBarriered<JSObject*>>,
+  typedef GCHashMap<WeakHeapPtr<JSObject*>, LiveEnvironmentVal,
+                    MovableCellHasher<WeakHeapPtr<JSObject*>>,
                     ZoneAllocPolicy>
       LiveEnvironmentMap;
   LiveEnvironmentMap liveEnvs;
 
  public:
   DebugEnvironments(JSContext* cx, Zone* zone);
   ~DebugEnvironments();
 
--- a/js/src/vm/GlobalObject.h
+++ b/js/src/vm/GlobalObject.h
@@ -850,17 +850,17 @@ class GlobalObject : public NativeObject
   static bool initTypedObjectModule(JSContext* cx,
                                     Handle<GlobalObject*> global);
 
   static bool initStandardClasses(JSContext* cx, Handle<GlobalObject*> global);
   static bool initSelfHostingBuiltins(JSContext* cx,
                                       Handle<GlobalObject*> global,
                                       const JSFunctionSpec* builtins);
 
-  typedef js::Vector<js::ReadBarriered<js::Debugger*>, 0, js::SystemAllocPolicy>
+  typedef js::Vector<js::WeakHeapPtr<js::Debugger*>, 0, js::SystemAllocPolicy>
       DebuggerVector;
 
   /*
    * The collection of Debugger objects debugging this global. If this global
    * is not a debuggee, this returns either nullptr or an empty vector.
    */
   DebuggerVector* getDebuggers() const;
 
--- a/js/src/vm/JSAtom.cpp
+++ b/js/src/vm/JSAtom.cpp
@@ -115,17 +115,17 @@ struct js::AtomHasher::Lookup {
         type(LittleEndianTwoByte),
         length(length),
         atom(nullptr),
         hash(mozilla::HashStringKnownLength(chars, length)) {}
 };
 
 inline HashNumber js::AtomHasher::hash(const Lookup& l) { return l.hash; }
 
-MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const ReadBarriered<JSAtom*>& entry,
+MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const WeakHeapPtr<JSAtom*>& entry,
                                              const Lookup& lookup) {
   JSAtom* key = entry.unbarrieredGet();
   if (lookup.atom) {
     return lookup.atom == key;
   }
   if (key->length() != lookup.length || key->hash() != lookup.hash) {
     return false;
   }
--- a/js/src/vm/JSScript.h
+++ b/js/src/vm/JSScript.h
@@ -3016,17 +3016,17 @@ class alignas(uintptr_t) LazyScriptData 
 };
 
 // Information about a script which may be (or has been) lazily compiled to
 // bytecode from its source.
 class LazyScript : public gc::TenuredCell {
   // If non-nullptr, the script has been compiled and this is a forwarding
   // pointer to the result. This is a weak pointer: after relazification, we
   // can collect the script if there are no other pointers to it.
-  WeakRef<JSScript*> script_;
+  WeakHeapPtrScript script_;
 
   // Original function with which the lazy script is associated.
   GCPtrFunction function_;
 
   // This field holds one of:
   //   * LazyScript in which the script is nested.  This case happens if the
   //     enclosing script is lazily parsed and have never been compiled.
   //
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -354,17 +354,17 @@ bool JSObject::setNewGroupUnknown(JSCont
  * types to use for some prototype. An optional associated object is used which
  * allows multiple groups to be created with the same prototype. The
  * associated object may be a function (for types constructed with 'new') or a
  * type descriptor (for typed objects). These entries are also used for the set
  * of lazy groups in the realm, which use a null associated object
  * (though there are only a few of these per realm).
  */
 struct ObjectGroupRealm::NewEntry {
-  ReadBarrieredObjectGroup group;
+  WeakHeapPtrObjectGroup group;
 
   // Note: This pointer is only used for equality and does not need a read
   // barrier.
   JSObject* associated;
 
   NewEntry(ObjectGroup* group, JSObject* associated)
       : group(group), associated(associated) {}
 
@@ -1057,18 +1057,18 @@ struct ObjectGroupRealm::PlainObjectKey 
         return true;
       }
     }
     return false;
   }
 };
 
 struct ObjectGroupRealm::PlainObjectEntry {
-  ReadBarrieredObjectGroup group;
-  ReadBarrieredShape shape;
+  WeakHeapPtrObjectGroup group;
+  WeakHeapPtrShape shape;
   TypeSet::Type* types;
 
   bool needsSweep(unsigned nproperties) {
     if (IsAboutToBeFinalized(&group)) {
       return true;
     }
     if (IsAboutToBeFinalized(&shape)) {
       return true;
@@ -1307,22 +1307,22 @@ JSObject* ObjectGroup::newPlainObject(JS
 }
 
 /////////////////////////////////////////////////////////////////////
 // ObjectGroupRealm AllocationSiteTable
 /////////////////////////////////////////////////////////////////////
 
 struct ObjectGroupRealm::AllocationSiteKey
     : public DefaultHasher<AllocationSiteKey> {
-  ReadBarrieredScript script;
+  WeakHeapPtrScript script;
 
   uint32_t offset : 24;
   JSProtoKey kind : 8;
 
-  ReadBarrieredObject proto;
+  WeakHeapPtrObject proto;
 
   static const uint32_t OFFSET_LIMIT = (1 << 23);
 
   AllocationSiteKey(JSScript* script_, uint32_t offset_, JSProtoKey kind_,
                     JSObject* proto_)
       : script(script_), offset(offset_), kind(kind_), proto(proto_) {
     MOZ_ASSERT(offset_ < OFFSET_LIMIT);
   }
@@ -1376,19 +1376,19 @@ struct ObjectGroupRealm::AllocationSiteK
   bool operator==(const AllocationSiteKey& other) const {
     return script == other.script && offset == other.offset &&
            kind == other.kind && proto == other.proto;
   }
 };
 
 class ObjectGroupRealm::AllocationSiteTable
     : public JS::WeakCache<
-          js::GCHashMap<AllocationSiteKey, ReadBarrieredObjectGroup,
+          js::GCHashMap<AllocationSiteKey, WeakHeapPtrObjectGroup,
                         AllocationSiteKey, SystemAllocPolicy>> {
-  using Table = js::GCHashMap<AllocationSiteKey, ReadBarrieredObjectGroup,
+  using Table = js::GCHashMap<AllocationSiteKey, WeakHeapPtrObjectGroup,
                               AllocationSiteKey, SystemAllocPolicy>;
   using Base = JS::WeakCache<Table>;
 
  public:
   explicit AllocationSiteTable(Zone* zone) : Base(zone) {}
 };
 
 /* static */
@@ -1783,17 +1783,17 @@ void ObjectGroupRealm::sweep() {
 
   if (arrayObjectTable) {
     arrayObjectTable->sweep();
   }
   if (plainObjectTable) {
     plainObjectTable->sweep();
   }
   if (stringSplitStringGroup) {
-    if (JS::GCPolicy<ReadBarrieredObjectGroup>::needsSweep(
+    if (JS::GCPolicy<WeakHeapPtrObjectGroup>::needsSweep(
             &stringSplitStringGroup)) {
       stringSplitStringGroup = nullptr;
     }
   }
 }
 
 void ObjectGroupRealm::fixupNewTableAfterMovingGC(NewTable* table) {
   /*
--- a/js/src/vm/ObjectGroup.h
+++ b/js/src/vm/ObjectGroup.h
@@ -534,17 +534,17 @@ class ObjectGroup : public gc::TenuredCe
 
 // Structure used to manage the groups in a realm.
 class ObjectGroupRealm {
  private:
   class NewTable;
 
   struct ArrayObjectKey;
   using ArrayObjectTable =
-      js::GCRekeyableHashMap<ArrayObjectKey, ReadBarrieredObjectGroup,
+      js::GCRekeyableHashMap<ArrayObjectKey, WeakHeapPtrObjectGroup,
                              ArrayObjectKey, SystemAllocPolicy>;
 
   struct PlainObjectKey;
   struct PlainObjectEntry;
   struct PlainObjectTableSweepPolicy {
     static bool needsSweep(PlainObjectKey* key, PlainObjectEntry* entry);
   };
   using PlainObjectTable =
@@ -592,17 +592,17 @@ class ObjectGroupRealm {
   AllocationSiteTable* allocationSiteTable = nullptr;
 
   // A single per-realm ObjectGroup for all calls to StringSplitString.
   // StringSplitString is always called from self-hosted code, and conceptually
   // the return object for a string.split(string) operation should have a
   // unified type.  Having a global group for this also allows us to remove
   // the hash-table lookup that would be required if we allocated this group
   // on the basis of call-site pc.
-  ReadBarrieredObjectGroup stringSplitStringGroup = {};
+  WeakHeapPtrObjectGroup stringSplitStringGroup = {};
 
   // END OF PROPERTIES
 
  private:
   friend class ObjectGroup;
 
   struct AllocationSiteKey;
 
--- a/js/src/vm/Realm.h
+++ b/js/src/vm/Realm.h
@@ -301,21 +301,21 @@ struct GCPolicy<js::DelayMetadata> : pub
 class JS::Realm : public JS::shadow::Realm {
   JS::Zone* zone_;
   JSRuntime* runtime_;
 
   const JS::RealmCreationOptions creationOptions_;
   JS::RealmBehaviors behaviors_;
 
   friend struct ::JSContext;
-  js::ReadBarrieredGlobalObject global_;
+  js::WeakHeapPtrGlobalObject global_;
 
   // The global lexical environment. This is stored here instead of in
   // GlobalObject for easier/faster JIT access.
-  js::ReadBarriered<js::LexicalEnvironmentObject*> lexicalEnv_;
+  js::WeakHeapPtr<js::LexicalEnvironmentObject*> lexicalEnv_;
 
   // Note: this is private to enforce use of ObjectRealm::get(obj).
   js::ObjectRealm objects_;
   friend js::ObjectRealm& js::ObjectRealm::get(const JSObject*);
 
   // Object group tables and other state in the realm. This is private to
   // enforce use of ObjectGroupRealm::get(group)/getForNewObject(cx).
   js::ObjectGroupRealm objectGroups_;
@@ -358,20 +358,20 @@ class JS::Realm : public JS::shadow::Rea
   const js::AllocationMetadataBuilder* allocationMetadataBuilder_ = nullptr;
   void* realmPrivate_ = nullptr;
 
   // This pointer is controlled by the embedder. If it is non-null, and if
   // cx->enableAccessValidation is true, then we assert that *validAccessPtr
   // is true before running any code in this realm.
   bool* validAccessPtr_ = nullptr;
 
-  js::ReadBarriered<js::ArgumentsObject*> mappedArgumentsTemplate_{nullptr};
-  js::ReadBarriered<js::ArgumentsObject*> unmappedArgumentsTemplate_{nullptr};
-  js::ReadBarriered<js::NativeObject*> iterResultTemplate_{nullptr};
-  js::ReadBarriered<js::NativeObject*> iterResultWithoutPrototypeTemplate_{
+  js::WeakHeapPtr<js::ArgumentsObject*> mappedArgumentsTemplate_{nullptr};
+  js::WeakHeapPtr<js::ArgumentsObject*> unmappedArgumentsTemplate_{nullptr};
+  js::WeakHeapPtr<js::NativeObject*> iterResultTemplate_{nullptr};
+  js::WeakHeapPtr<js::NativeObject*> iterResultWithoutPrototypeTemplate_{
       nullptr};
 
   // There are two ways to enter a realm:
   //
   // (1) AutoRealm (and JSAutoRealm, JS::EnterRealm)
   // (2) When calling a cross-realm (but same-compartment) function in JIT
   //     code.
   //
@@ -422,17 +422,17 @@ class JS::Realm : public JS::shadow::Rea
 #ifdef MOZ_VTUNE
   js::UniquePtr<js::ScriptVTuneIdMap> scriptVTuneIdMap;
 #endif
 
   /*
    * Lazily initialized script source object to use for scripts cloned
    * from the self-hosting global.
    */
-  js::ReadBarrieredScriptSourceObject selfHostingScriptSource{nullptr};
+  js::WeakHeapPtrScriptSourceObject selfHostingScriptSource{nullptr};
 
   // Last time at which an animation was played for this realm.
   js::MainThreadData<mozilla::TimeStamp> lastAnimationTime;
 
   /*
    * For generational GC, record whether a write barrier has added this
    * realm's global to the store buffer since the last minor GC.
    *
--- a/js/src/vm/RegExpShared.h
+++ b/js/src/vm/RegExpShared.h
@@ -71,17 +71,17 @@ class RegExpShared : public gc::TenuredC
   using JitCodeTable = UniquePtr<uint8_t[], JS::FreePolicy>;
   using JitCodeTables = Vector<JitCodeTable, 0, SystemAllocPolicy>;
 
  private:
   friend class RegExpStatics;
   friend class RegExpZone;
 
   struct RegExpCompilation {
-    ReadBarriered<jit::JitCode*> jitCode;
+    WeakHeapPtr<jit::JitCode*> jitCode;
     uint8_t* byteCode;
 
     RegExpCompilation() : byteCode(nullptr) {}
 
     bool compiled(ForceByteCodeEnum force = DontForceByteCode) const {
       return byteCode || (force == DontForceByteCode && jitCode);
     }
   };
@@ -204,17 +204,17 @@ class RegExpShared : public gc::TenuredC
 
 class RegExpZone {
   struct Key {
     JSAtom* atom = nullptr;
     JS::RegExpFlags flags = JS::RegExpFlag::NoFlags;
 
     Key() = default;
     Key(JSAtom* atom, JS::RegExpFlags flags) : atom(atom), flags(flags) {}
-    MOZ_IMPLICIT Key(const ReadBarriered<RegExpShared*>& shared)
+    MOZ_IMPLICIT Key(const WeakHeapPtr<RegExpShared*>& shared)
         : atom(shared.unbarrieredGet()->getSource()),
           flags(shared.unbarrieredGet()->getFlags()) {}
 
     typedef Key Lookup;
     static HashNumber hash(const Lookup& l) {
       HashNumber hash = DefaultHasher<JSAtom*>::hash(l.atom);
       return mozilla::AddToHash(hash, l.flags.value());
     }
@@ -223,17 +223,17 @@ class RegExpZone {
     }
   };
 
   /*
    * The set of all RegExpShareds in the zone. On every GC, every RegExpShared
    * that was not marked is deleted and removed from the set.
    */
   using Set = JS::WeakCache<
-      JS::GCHashSet<ReadBarriered<RegExpShared*>, Key, ZoneAllocPolicy>>;
+      JS::GCHashSet<WeakHeapPtr<RegExpShared*>, Key, ZoneAllocPolicy>>;
   Set set_;
 
  public:
   explicit RegExpZone(Zone* zone);
 
   ~RegExpZone() { MOZ_ASSERT(set_.empty()); }
 
   bool empty() const { return set_.empty(); }
@@ -256,38 +256,38 @@ class RegExpZone {
 };
 
 class RegExpRealm {
   /*
   * This is the template object on which the result of re.exec() is based,
   * if there is a result. It is used in CreateRegExpMatchResult to set the
   * input/index properties faster.
    */
-  ReadBarriered<ArrayObject*> matchResultTemplateObject_;
+  WeakHeapPtr<ArrayObject*> matchResultTemplateObject_;
 
   /*
   * The shape of the RegExp.prototype object that satisfies the following:
    *   * RegExp.prototype.flags getter is not modified
    *   * RegExp.prototype.global getter is not modified
    *   * RegExp.prototype.ignoreCase getter is not modified
    *   * RegExp.prototype.multiline getter is not modified
    *   * RegExp.prototype.sticky getter is not modified
    *   * RegExp.prototype.unicode getter is not modified
    *   * RegExp.prototype.exec is an own data property
    *   * RegExp.prototype[@@match] is an own data property
    *   * RegExp.prototype[@@search] is an own data property
    */
-  ReadBarriered<Shape*> optimizableRegExpPrototypeShape_;
+  WeakHeapPtr<Shape*> optimizableRegExpPrototypeShape_;
 
   /*
   * The shape of a RegExp instance that satisfies the following:
    *   * lastProperty is lastIndex
    *   * prototype is RegExp.prototype
    */
-  ReadBarriered<Shape*> optimizableRegExpInstanceShape_;
+  WeakHeapPtr<Shape*> optimizableRegExpInstanceShape_;
 
   ArrayObject* createMatchResultTemplateObject(JSContext* cx);
 
  public:
   explicit RegExpRealm();
 
   void sweep();
 
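(Note on RegExpZone above: the JS::WeakCache wrapper is what makes the
"every RegExpShared that was not marked is deleted" comment hold without an
explicit sweep call here. A minimal sketch of the idiom, assuming WeakCache's
zone-registering constructor; ThingSet is a hypothetical alias:

  // Constructing the cache with a Zone* registers it with that zone, so
  // the GC sweeps it automatically, dropping every entry whose
  // WeakHeapPtr referent was not marked.
  using ThingSet = JS::WeakCache<JS::GCHashSet<
      js::WeakHeapPtr<js::RegExpShared*>, Key, js::ZoneAllocPolicy>>;
  // e.g.  RegExpZone::RegExpZone(Zone* zone) : set_(zone) {}

That automatic sweeping is also what the MOZ_ASSERT(set_.empty()) in the
destructor above relies on.)
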
--- a/js/src/vm/SavedFrame.h
+++ b/js/src/vm/SavedFrame.h
@@ -101,17 +101,17 @@ class SavedFrame : public NativeObject {
     RootedRange(JSContext* cx, HandleSavedFrame frame) : frame_(cx, frame) {}
     RootedIterator begin() { return RootedIterator(*this); }
     RootedIterator end() { return RootedIterator(); }
   };
 
   struct Lookup;
   struct HashPolicy;
 
-  typedef JS::GCHashSet<ReadBarriered<SavedFrame*>, HashPolicy,
+  typedef JS::GCHashSet<WeakHeapPtr<SavedFrame*>, HashPolicy,
                         SystemAllocPolicy>
       Set;
 
  private:
   static SavedFrame* create(JSContext* cx);
   static MOZ_MUST_USE bool finishSavedFrameInit(JSContext* cx,
                                                 HandleObject ctor,
                                                 HandleObject proto);
@@ -147,17 +147,17 @@ struct SavedFrame::HashPolicy {
   typedef MovableCellHasher<SavedFrame*> SavedFramePtrHasher;
   typedef PointerHasher<JSPrincipals*> JSPrincipalsPtrHasher;
 
   static bool hasHash(const Lookup& l);
   static bool ensureHash(const Lookup& l);
   static HashNumber hash(const Lookup& lookup);
   static bool match(SavedFrame* existing, const Lookup& lookup);
 
-  typedef ReadBarriered<SavedFrame*> Key;
+  typedef WeakHeapPtr<SavedFrame*> Key;
   static void rekey(Key& key, const Key& newKey);
 };
 
 }  // namespace js
 
 namespace mozilla {
 
 template <>
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -1746,17 +1746,17 @@ void SavedStacks::chooseSamplingProbabil
     return;
   }
 
   GlobalObject::DebuggerVector* dbgs = global->getDebuggers();
   if (!dbgs || dbgs->empty()) {
     return;
   }
 
-  mozilla::DebugOnly<ReadBarriered<Debugger*>*> begin = dbgs->begin();
+  mozilla::DebugOnly<WeakHeapPtr<Debugger*>*> begin = dbgs->begin();
   mozilla::DebugOnly<bool> foundAnyDebuggers = false;
 
   double probability = 0;
   for (auto p = dbgs->begin(); p < dbgs->end(); p++) {
    // The set of debuggers had better not change while we're iterating
    // in a way that reallocates the vector.
     MOZ_ASSERT(dbgs->begin() == begin);
     // Use unbarrieredGet() to prevent triggering read barrier while collecting,
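
(Note on the loop above: reading a WeakHeapPtr through get() fires the read
barrier, which marks the referent and keeps it alive for the current
collection; unbarrieredGet() skips the barrier. A minimal sketch of the
distinction; inspect() is a hypothetical function:

  js::Debugger* inspect(const js::WeakHeapPtr<js::Debugger*>& dbg,
                        bool collecting) {
    if (collecting) {
      // No read barrier: the referent is not marked, so the GC may still
      // find it dead this cycle. This is the only safe read mid-GC.
      return dbg.unbarrieredGet();
    }
    // Barriered read: conservatively marks the referent as live.
    return dbg.get();
  }
)
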
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -2222,17 +2222,17 @@ void EmptyShape::insertInitialShape(JSCo
 #ifdef DEBUG
   Shape* nshape = shape;
   while (!nshape->isEmptyShape()) {
     nshape = nshape->previous();
   }
   MOZ_ASSERT(nshape == entry.shape);
 #endif
 
-  entry.shape = ReadBarrieredShape(shape);
+  entry.shape = WeakHeapPtrShape(shape);
 
   /*
    * This affects the shape that will be produced by the various NewObject
    * methods, so clear any cache entry referring to the old shape. This is
    * not required for correctness: the NewObject must always check for a
    * nativeEmpty() result and generate the appropriate properties if found.
    * Clearing the cache entry avoids this duplicate regeneration.
    *
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -824,17 +824,17 @@ UnownedBaseShape* BaseShape::toUnowned()
 }
 
 UnownedBaseShape* BaseShape::baseUnowned() {
   MOZ_ASSERT(isOwned() && unowned_);
   return unowned_;
 }
 
 /* Entries for the per-zone baseShapes set of unowned base shapes. */
-struct StackBaseShape : public DefaultHasher<ReadBarriered<UnownedBaseShape*>> {
+struct StackBaseShape : public DefaultHasher<WeakHeapPtr<UnownedBaseShape*>> {
   uint32_t flags;
   const Class* clasp;
 
   explicit StackBaseShape(BaseShape* base)
       : flags(base->flags & BaseShape::OBJECT_FLAG_MASK), clasp(base->clasp_) {}
 
   inline StackBaseShape(const Class* clasp, uint32_t objectFlags);
   explicit inline StackBaseShape(Shape* shape);
@@ -846,27 +846,27 @@ struct StackBaseShape : public DefaultHa
     MOZ_IMPLICIT Lookup(const StackBaseShape& base)
         : flags(base.flags), clasp(base.clasp) {}
 
     MOZ_IMPLICIT Lookup(UnownedBaseShape* base)
         : flags(base->getObjectFlags()), clasp(base->clasp()) {
       MOZ_ASSERT(!base->isOwned());
     }
 
-    explicit Lookup(const ReadBarriered<UnownedBaseShape*>& base)
+    explicit Lookup(const WeakHeapPtr<UnownedBaseShape*>& base)
         : flags(base.unbarrieredGet()->getObjectFlags()),
           clasp(base.unbarrieredGet()->clasp()) {
       MOZ_ASSERT(!base.unbarrieredGet()->isOwned());
     }
   };
 
   static HashNumber hash(const Lookup& lookup) {
     return mozilla::HashGeneric(lookup.flags, lookup.clasp);
   }
-  static inline bool match(const ReadBarriered<UnownedBaseShape*>& key,
+  static inline bool match(const WeakHeapPtr<UnownedBaseShape*>& key,
                            const Lookup& lookup) {
     return key.unbarrieredGet()->flags == lookup.flags &&
            key.unbarrieredGet()->clasp_ == lookup.clasp;
   }
 };
 
 static MOZ_ALWAYS_INLINE js::HashNumber HashId(jsid id) {
   // HashGeneric alone would work, but bits of atom and symbol addresses
@@ -891,17 +891,17 @@ struct DefaultHasher<jsid> {
   static bool match(jsid id1, jsid id2) { return id1 == id2; }
 };
 
 }  // namespace mozilla
 
 namespace js {
 
 using BaseShapeSet =
-    JS::WeakCache<JS::GCHashSet<ReadBarriered<UnownedBaseShape*>,
+    JS::WeakCache<JS::GCHashSet<WeakHeapPtr<UnownedBaseShape*>,
                                 StackBaseShape, SystemAllocPolicy>>;
 
 class Shape : public gc::TenuredCell {
   friend class ::JSObject;
   friend class ::JSFunction;
   friend class NativeObject;
   friend class PropertyTree;
   friend class TenuringTracer;
@@ -1507,23 +1507,23 @@ struct EmptyShape : public js::Shape {
  * objects in the zone and the associated types.
  */
 struct InitialShapeEntry {
   /*
    * Initial shape to give to the object. This is an empty shape, except for
    * certain classes (e.g. String, RegExp) which may add certain baked-in
    * properties.
    */
-  ReadBarriered<Shape*> shape;
+  WeakHeapPtr<Shape*> shape;
 
   /*
   * Matching prototype for the entry. An object's shape corresponds to its
   * prototype, but the prototype cannot be recovered from the shape itself,
   * so it is stored separately here.
    */
-  ReadBarriered<TaggedProto> proto;
+  WeakHeapPtr<TaggedProto> proto;
 
   /* State used to determine a match on an initial shape. */
   struct Lookup {
     const Class* clasp;
     TaggedProto proto;
     uint32_t nfixed;
     uint32_t baseFlags;
 
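(Note on the Shape.h hunks above: they show the recurring hash-policy shape
for weak keys. The stored key is a WeakHeapPtr, but Lookup construction and
match() read it with unbarrieredGet(), because these tables are consulted
while sweeping, when firing a read barrier would be wrong. Distilled into a
minimal sketch; ThingHasher is a hypothetical policy:

  struct ThingHasher {
    using Key = js::WeakHeapPtr<js::UnownedBaseShape*>;

    struct Lookup {
      js::UnownedBaseShape* raw;
      // Read the stored key without firing its read barrier.
      explicit Lookup(const Key& key) : raw(key.unbarrieredGet()) {}
    };

    static js::HashNumber hash(const Lookup& l) {
      return mozilla::HashGeneric(l.raw);
    }
    static bool match(const Key& key, const Lookup& l) {
      // match() can run mid-GC, so compare unbarriered as well.
      return key.unbarrieredGet() == l.raw;
    }
  };
)
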
--- a/js/src/vm/SymbolType.h
+++ b/js/src/vm/SymbolType.h
@@ -133,17 +133,17 @@ struct HashSymbolsByDescription {
  *
  * The memory management strategy here is modeled after js::AtomSet. It's like
  * a WeakSet. The registry itself does not keep any symbols alive; when a
  * symbol in the registry is collected, the registry entry is removed. No GC
  * nondeterminism is exposed to scripts, because there is no API for
  * enumerating the symbol registry, querying its size, etc.
  */
 class SymbolRegistry
-    : public GCHashSet<ReadBarrieredSymbol, HashSymbolsByDescription,
+    : public GCHashSet<WeakHeapPtrSymbol, HashSymbolsByDescription,
                        SystemAllocPolicy> {
  public:
   SymbolRegistry() {}
 };
 
 // ES6 rev 27 (2014 Aug 24) 19.4.3.3
 bool SymbolDescriptiveString(JSContext* cx, JS::Symbol* sym,
                              JS::MutableHandleValue result);
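
(Note on SymbolRegistry above: "the registry entry is removed" happens in a
sweep pass rather than through tracing. A minimal sketch, assuming the
standard sweep() support on GCHashSet; sweepRegistry is a hypothetical
helper:

  // During zone sweeping, dead symbols fall out of the registry:
  // GCHashSet::sweep() drops every entry for which
  // JS::GCPolicy<js::WeakHeapPtrSymbol>::needsSweep() returns true,
  // i.e. every entry whose symbol was not marked this cycle.
  void sweepRegistry(js::SymbolRegistry& registry) {
    registry.sweep();
  }
)
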
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -38,17 +38,17 @@ namespace wasm {
 // while it still has live Instances.
 //
 // The instance's code may be shared among multiple instances provided none of
 // those instances are being debugged. Instances that are being debugged own
 // their code.
 
 class Instance {
   JS::Realm* const realm_;
-  ReadBarrieredWasmInstanceObject object_;
+  WeakHeapPtrWasmInstanceObject object_;
   void* jsJitArgsRectifier_;
   void* jsJitExceptionHandler_;
   void* preBarrierCode_;
   const SharedCode code_;
   const UniqueTlsData tlsData_;
   GCPtrWasmMemoryObject memory_;
   const SharedTableVector tables_;
   DataSegmentVector passiveDataSegments_;
--- a/js/src/wasm/WasmJS.h
+++ b/js/src/wasm/WasmJS.h
@@ -227,17 +227,17 @@ class WasmInstanceObject : public Native
   using ExportMap = GCHashMap<uint32_t, HeapPtr<JSFunction*>,
                               DefaultHasher<uint32_t>, SystemAllocPolicy>;
   ExportMap& exports() const;
 
  // ScopeMap maps from function index to js::Scope. This map is weak
   // to avoid holding scope objects alive. The scopes are normally created
   // during debugging.
   using ScopeMap =
-      JS::WeakCache<GCHashMap<uint32_t, ReadBarriered<WasmFunctionScope*>,
+      JS::WeakCache<GCHashMap<uint32_t, WeakHeapPtr<WasmFunctionScope*>,
                               DefaultHasher<uint32_t>, SystemAllocPolicy>>;
   ScopeMap& scopes() const;
 
  public:
   static const unsigned RESERVED_SLOTS = 6;
   static const Class class_;
   static const JSPropertySpec properties[];
   static const JSFunctionSpec methods[];
@@ -288,18 +288,18 @@ class WasmMemoryObject : public NativeOb
   static void finalize(FreeOp* fop, JSObject* obj);
   static bool bufferGetterImpl(JSContext* cx, const CallArgs& args);
   static bool bufferGetter(JSContext* cx, unsigned argc, Value* vp);
   static bool growImpl(JSContext* cx, const CallArgs& args);
   static bool grow(JSContext* cx, unsigned argc, Value* vp);
   static uint32_t growShared(HandleWasmMemoryObject memory, uint32_t delta);
 
   using InstanceSet = JS::WeakCache<GCHashSet<
-      ReadBarrieredWasmInstanceObject,
-      MovableCellHasher<ReadBarrieredWasmInstanceObject>, SystemAllocPolicy>>;
+      WeakHeapPtrWasmInstanceObject,
+      MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>;
   bool hasObservers() const;
   InstanceSet& observers() const;
   InstanceSet* getOrCreateObservers(JSContext* cx);
 
  public:
   static const unsigned RESERVED_SLOTS = 2;
   static const Class class_;
   static const JSPropertySpec properties[];
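
(Note on InstanceSet above: this is the weak-observer idiom: growth of a
shared memory must reach every instance using it, without the set keeping
those instances alive; MovableCellHasher is needed because instance objects
can move. A minimal sketch of walking such a cache, assuming the
WeakCache<GCHashSet> specialization's all() range; notifyObservers is a
hypothetical helper:

  using Observers = JS::WeakCache<JS::GCHashSet<
      js::WeakHeapPtrWasmInstanceObject,
      js::MovableCellHasher<js::WeakHeapPtrWasmInstanceObject>,
      js::SystemAllocPolicy>>;

  void notifyObservers(Observers& observers) {
    for (auto r = observers.all(); !r.empty(); r.popFront()) {
      // Entries still present survived the last sweep; reading them here,
      // outside of GC, may fire the read barrier, which is harmless.
      js::WasmInstanceObject* instance = r.front().get();
      // ... tell the instance its memory grew ...
      (void)instance;
    }
  }
)
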
--- a/js/src/wasm/WasmTable.h
+++ b/js/src/wasm/WasmTable.h
@@ -37,21 +37,21 @@ namespace wasm {
 // TODO/AnyRef-boxing: With boxed immediates and strings, JSObject* is no longer
 // the most appropriate representation for Cell::anyref.
 STATIC_ASSERT_ANYREF_IS_JSOBJECT;
 
 typedef GCVector<HeapPtr<JSObject*>, 0, SystemAllocPolicy> TableAnyRefVector;
 
 class Table : public ShareableBase<Table> {
   using InstanceSet = JS::WeakCache<GCHashSet<
-      ReadBarrieredWasmInstanceObject,
-      MovableCellHasher<ReadBarrieredWasmInstanceObject>, SystemAllocPolicy>>;
+      WeakHeapPtrWasmInstanceObject,
+      MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>;
   using UniqueAnyFuncArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
 
-  ReadBarrieredWasmTableObject maybeObject_;
+  WeakHeapPtrWasmTableObject maybeObject_;
   InstanceSet observers_;
   UniqueAnyFuncArray functions_;  // either functions_ has data
   TableAnyRefVector objects_;     //   or objects_, but not both
   const TableKind kind_;
   uint32_t length_;
   const Maybe<uint32_t> maximum_;
 
   template <class>