Bug 1273276 - Rename HeapPtr to GCPtr; r=jonco
author: Terrence Cole <terrence@mozilla.com>
date: Wed, 18 May 2016 12:03:23 -0700
changeset 337840 f95d305dc0f804dea500ea8910683668f8729c7e
parent 337839 8fe22dd4fc8a753f9b4c479b773af8ce03f4dae7
child 337841 f18e46bce0a40e6b73435efb633c50a38fdb18e8
push id: 6249
push user: jlund@mozilla.com
push date: Mon, 01 Aug 2016 13:59:36 +0000
treeherder: mozilla-beta@bad9d4f5bf7e
reviewers: jonco
bugs: 1273276
milestone: 49.0a1
Bug 1273276 - Rename HeapPtr to GCPtr; r=jonco
js/src/asmjs/WasmModule.h
js/src/builtin/TypedObject.cpp
js/src/builtin/TypedObject.h
js/src/frontend/BytecodeEmitter.cpp
js/src/frontend/ParseMaps.cpp
js/src/frontend/ParseMaps.h
js/src/frontend/Parser.cpp
js/src/gc/Barrier.h
js/src/gc/Marking.cpp
js/src/gc/Zone.cpp
js/src/gdb/tests/test-Root.cpp
js/src/jit/BaselineCacheIR.cpp
js/src/jit/BaselineCacheIR.h
js/src/jit/BaselineIC.h
js/src/jit/IonCaches.h
js/src/jit/MacroAssembler.cpp
js/src/jit/SharedIC.h
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsiter.cpp
js/src/jsiter.h
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsweakmap.h
js/src/vm/ArgumentsObject-inl.h
js/src/vm/ArgumentsObject.cpp
js/src/vm/ArgumentsObject.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/NativeObject.cpp
js/src/vm/NativeObject.h
js/src/vm/ObjectGroup.h
js/src/vm/PIC.h
js/src/vm/ProxyObject.cpp
js/src/vm/ProxyObject.h
js/src/vm/ReceiverGuard.h
js/src/vm/SelfHosting.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/Stack-inl.h
js/src/vm/TaggedProto.h
js/src/vm/UnboxedObject.cpp
js/src/vm/UnboxedObject.h
--- a/js/src/asmjs/WasmModule.h
+++ b/js/src/asmjs/WasmModule.h
@@ -462,18 +462,18 @@ typedef UniquePtr<ModuleData> UniqueModu
 // callExport().
 
 class Module : public mozilla::LinkedListElement<Module>
 {
     typedef UniquePtr<const ModuleData> UniqueConstModuleData;
     struct ImportExit {
         void* code;
         jit::BaselineScript* baselineScript;
-        HeapPtrFunction fun;
-        static_assert(sizeof(HeapPtrFunction) == sizeof(void*), "for JIT access");
+        GCPtrFunction fun;
+        static_assert(sizeof(GCPtrFunction) == sizeof(void*), "for JIT access");
     };
     struct EntryArg {
         uint64_t lo;
         uint64_t hi;
     };
     typedef int32_t (*EntryFuncPtr)(EntryArg* args, uint8_t* global);
     struct FuncPtrTable {
         uint32_t globalDataOffset;
@@ -481,17 +481,17 @@ class Module : public mozilla::LinkedLis
         explicit FuncPtrTable(const StaticLinkData::FuncPtrTable& table)
           : globalDataOffset(table.globalDataOffset),
             numElems(table.elemOffsets.length())
         {}
     };
     typedef Vector<FuncPtrTable, 0, SystemAllocPolicy> FuncPtrTableVector;
     typedef Vector<CacheableChars, 0, SystemAllocPolicy> FuncLabelVector;
     typedef RelocatablePtrArrayBufferObjectMaybeShared BufferPtr;
-    typedef HeapPtr<WasmModuleObject*> ModuleObjectPtr;
+    typedef GCPtr<WasmModuleObject*> ModuleObjectPtr;
 
     // Initialized when constructed:
     const UniqueConstModuleData  module_;
 
     // Initialized during staticallyLink:
     bool                         staticallyLinked_;
     uint8_t*                     interrupt_;
     uint8_t*                     outOfBounds_;
@@ -533,17 +533,17 @@ class Module : public mozilla::LinkedLis
 
     explicit Module(UniqueModuleData module);
     virtual ~Module();
     virtual void trace(JSTracer* trc);
     virtual void readBarrier();
     virtual void addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
 
     void setOwner(WasmModuleObject* owner) { MOZ_ASSERT(!ownerObject_); ownerObject_ = owner; }
-    inline const HeapPtr<WasmModuleObject*>& owner() const;
+    inline const GCPtr<WasmModuleObject*>& owner() const;
 
     void setSource(Bytes&& source) { source_ = Move(source); }
 
     uint8_t* code() const { return module_->code.get(); }
     uint32_t codeBytes() const { return module_->codeBytes; }
     uint8_t* globalData() const { return code() + module_->codeBytes; }
     uint32_t globalBytes() const { return module_->globalBytes; }
     HeapUsage heapUsage() const { return module_->heapUsage; }
@@ -683,17 +683,17 @@ class WasmModuleObject : public NativeOb
     static const unsigned RESERVED_SLOTS = 1;
     static WasmModuleObject* create(ExclusiveContext* cx);
     MOZ_MUST_USE bool init(wasm::Module* module);
     wasm::Module& module() const;
     void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
     static const Class class_;
 };
 
-inline const HeapPtr<WasmModuleObject*>&
+inline const GCPtr<WasmModuleObject*>&
 wasm::Module::owner() const {
     MOZ_ASSERT(&ownerObject_->module() == this);
     return ownerObject_;
 }
 
 using WasmModuleObjectVector = GCVector<WasmModuleObject*>;
 
 } // namespace js
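
The static_assert kept by this hunk is the constraint that makes the rename purely mechanical for the JIT: GCPtr<T> must carry no storage beyond the wrapped pointer, because jitted import-exit code reads ImportExit::fun as a bare machine word. A minimal standalone sketch of that layout property, using hypothetical Toy* names rather than the real SpiderMonkey classes:

// Hypothetical sketch, not SpiderMonkey code: a barrier wrapper whose only
// data member is the wrapped pointer, so a struct field keeps raw-pointer
// layout and JIT code can treat it as one machine word.
template <typename T>
class ToyGCPtr
{
    T value_;  // the only member: no vtable, no bookkeeping fields

  public:
    explicit ToyGCPtr(T v) : value_(v) {}
    T get() const { return value_; }
};

struct ToyImportExit
{
    void* code;
    ToyGCPtr<int*> fun;  // stands in for GCPtrFunction
};

static_assert(sizeof(ToyGCPtr<int*>) == sizeof(void*),
              "barrier wrapper must stay pointer-sized for JIT access");

int main()
{
    int f = 0;
    ToyImportExit ie{ nullptr, ToyGCPtr<int*>(&f) };
    return ie.fun.get() == &f ? 0 : 1;
}
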
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -2609,17 +2609,17 @@ js::StoreScalar##T::Func(JSContext*, uns
     double d = args[2].toNumber();                                              \
     *target = ConvertScalar<T>(d);                                              \
     args.rval().setUndefined();                                                 \
     return true;                                                                \
 }
 
 #define JS_STORE_REFERENCE_CLASS_IMPL(_constant, T, _name)                      \
 bool                                                                            \
-js::StoreReference##T::Func(JSContext* cx, unsigned argc, Value* vp)    \
+js::StoreReference##_name::Func(JSContext* cx, unsigned argc, Value* vp)        \
 {                                                                               \
     CallArgs args = CallArgsFromVp(argc, vp);                                   \
     MOZ_ASSERT(args.length() == 4);                                             \
     MOZ_ASSERT(args[0].isObject() && args[0].toObject().is<TypedObject>());     \
     MOZ_ASSERT(args[1].isInt32());                                              \
     MOZ_ASSERT(args[2].isString() || args[2].isNull());                         \
                                                                                 \
     TypedObject& typedObj = args[0].toObject().as<TypedObject>();               \
@@ -2636,17 +2636,17 @@ js::StoreReference##T::Func(JSContext* c
     if (!store(cx, target, args[3], &typedObj, id))                             \
         return false;                                                           \
     args.rval().setUndefined();                                                 \
     return true;                                                                \
 }
 
 #define JS_LOAD_SCALAR_CLASS_IMPL(_constant, T, _name)                                  \
 bool                                                                                    \
-js::LoadScalar##T::Func(JSContext*, unsigned argc, Value* vp)                  \
+js::LoadScalar##T::Func(JSContext*, unsigned argc, Value* vp)                           \
 {                                                                                       \
     CallArgs args = CallArgsFromVp(argc, vp);                                           \
     MOZ_ASSERT(args.length() == 2);                                                     \
     MOZ_ASSERT(args[0].isObject() && args[0].toObject().is<TypedObject>());             \
     MOZ_ASSERT(args[1].isInt32());                                                      \
                                                                                         \
     TypedObject& typedObj = args[0].toObject().as<TypedObject>();                       \
     int32_t offset = args[1].toInt32();                                                 \
@@ -2656,17 +2656,17 @@ js::LoadScalar##T::Func(JSContext*, unsi
                                                                                         \
     T* target = reinterpret_cast<T*>(typedObj.typedMem(offset));                        \
     args.rval().setNumber((double) *target);                                            \
     return true;                                                                        \
 }
 
 #define JS_LOAD_REFERENCE_CLASS_IMPL(_constant, T, _name)                       \
 bool                                                                            \
-js::LoadReference##T::Func(JSContext*, unsigned argc, Value* vp)       \
+js::LoadReference##_name::Func(JSContext*, unsigned argc, Value* vp)            \
 {                                                                               \
     CallArgs args = CallArgsFromVp(argc, vp);                                   \
     MOZ_ASSERT(args.length() == 2);                                             \
     MOZ_ASSERT(args[0].isObject() && args[0].toObject().is<TypedObject>());     \
     MOZ_ASSERT(args[1].isInt32());                                              \
                                                                                 \
     TypedObject& typedObj = args[0].toObject().as<TypedObject>();               \
     int32_t offset = args[1].toInt32();                                         \
@@ -2679,36 +2679,36 @@ js::LoadReference##T::Func(JSContext*, u
     return true;                                                                \
 }
 
 // Because the precise syntax for storing values/objects/strings
 // differs, we abstract it away using specialized variants of the
 // private methods `store()` and `load()`.
 
 bool
-StoreReferenceHeapValue::store(JSContext* cx, HeapValue* heap, const Value& v,
-                               TypedObject* obj, jsid id)
+StoreReferenceAny::store(JSContext* cx, GCPtrValue* heap, const Value& v,
+                         TypedObject* obj, jsid id)
 {
     // Undefined values are not included in type inference information for
     // value properties of typed objects, as these properties are always
     // considered to contain undefined.
     if (!v.isUndefined()) {
         if (cx->isJSContext())
             AddTypePropertyId(cx->asJSContext(), obj, id, v);
         else if (!HasTypePropertyId(obj, id, v))
             return false;
     }
 
     *heap = v;
     return true;
 }
 
 bool
-StoreReferenceHeapPtrObject::store(JSContext* cx, HeapPtrObject* heap, const Value& v,
-                                   TypedObject* obj, jsid id)
+StoreReferenceObject::store(JSContext* cx, GCPtrObject* heap, const Value& v,
+                            TypedObject* obj, jsid id)
 {
     MOZ_ASSERT(v.isObjectOrNull()); // or else Store_object is being misused
 
     // Null pointers are not included in type inference information for
     // object properties of typed objects, as these properties are always
     // considered to contain null.
     if (v.isObject()) {
         if (cx->isJSContext())
@@ -2717,47 +2717,44 @@ StoreReferenceHeapPtrObject::store(JSCon
             return false;
     }
 
     *heap = v.toObjectOrNull();
     return true;
 }
 
 bool
-StoreReferenceHeapPtrString::store(JSContext* cx, HeapPtrString* heap, const Value& v,
-                                   TypedObject* obj, jsid id)
+StoreReferencestring::store(JSContext* cx, GCPtrString* heap, const Value& v,
+                            TypedObject* obj, jsid id)
 {
     MOZ_ASSERT(v.isString()); // or else Store_string is being misused
 
     // Note: string references are not reflected in type information for the object.
     *heap = v.toString();
 
     return true;
 }
 
 void
-LoadReferenceHeapValue::load(HeapValue* heap,
-                             MutableHandleValue v)
+LoadReferenceAny::load(GCPtrValue* heap, MutableHandleValue v)
 {
     v.set(*heap);
 }
 
 void
-LoadReferenceHeapPtrObject::load(HeapPtrObject* heap,
-                                 MutableHandleValue v)
+LoadReferenceObject::load(GCPtrObject* heap, MutableHandleValue v)
 {
     if (*heap)
         v.setObject(**heap);
     else
         v.setNull();
 }
 
 void
-LoadReferenceHeapPtrString::load(HeapPtrString* heap,
-                                 MutableHandleValue v)
+LoadReferencestring::load(GCPtrString* heap, MutableHandleValue v)
 {
     v.setString(*heap);
 }
 
 // I was using templates for this stuff instead of macros, but ran
 // into problems with the Unagi compiler.
 JS_FOR_EACH_UNIQUE_SCALAR_TYPE_REPR_CTYPE(JS_STORE_SCALAR_CLASS_IMPL)
 JS_FOR_EACH_UNIQUE_SCALAR_TYPE_REPR_CTYPE(JS_LOAD_SCALAR_CLASS_IMPL)
@@ -2830,33 +2827,33 @@ class MemoryInitVisitor {
 } // namespace
 
 void
 MemoryInitVisitor::visitReference(ReferenceTypeDescr& descr, uint8_t* mem)
 {
     switch (descr.type()) {
       case ReferenceTypeDescr::TYPE_ANY:
       {
-        js::HeapValue* heapValue = reinterpret_cast<js::HeapValue*>(mem);
+        js::GCPtrValue* heapValue = reinterpret_cast<js::GCPtrValue*>(mem);
         heapValue->init(UndefinedValue());
         return;
       }
 
       case ReferenceTypeDescr::TYPE_OBJECT:
       {
-        js::HeapPtrObject* objectPtr =
-            reinterpret_cast<js::HeapPtrObject*>(mem);
+        js::GCPtrObject* objectPtr =
+            reinterpret_cast<js::GCPtrObject*>(mem);
         objectPtr->init(nullptr);
         return;
       }
 
       case ReferenceTypeDescr::TYPE_STRING:
       {
-        js::HeapPtrString* stringPtr =
-            reinterpret_cast<js::HeapPtrString*>(mem);
+        js::GCPtrString* stringPtr =
+            reinterpret_cast<js::GCPtrString*>(mem);
         stringPtr->init(rt_->emptyString);
         return;
       }
     }
 
     MOZ_CRASH("Invalid kind");
 }
 
@@ -2900,31 +2897,31 @@ class MemoryTracingVisitor {
 } // namespace
 
 void
 MemoryTracingVisitor::visitReference(ReferenceTypeDescr& descr, uint8_t* mem)
 {
     switch (descr.type()) {
       case ReferenceTypeDescr::TYPE_ANY:
       {
-        HeapValue* heapValue = reinterpret_cast<js::HeapValue*>(mem);
+        GCPtrValue* heapValue = reinterpret_cast<js::GCPtrValue*>(mem);
         TraceEdge(trace_, heapValue, "reference-val");
         return;
       }
 
       case ReferenceTypeDescr::TYPE_OBJECT:
       {
-        HeapPtrObject* objectPtr = reinterpret_cast<js::HeapPtrObject*>(mem);
+        GCPtrObject* objectPtr = reinterpret_cast<js::GCPtrObject*>(mem);
         TraceNullableEdge(trace_, objectPtr, "reference-obj");
         return;
       }
 
       case ReferenceTypeDescr::TYPE_STRING:
       {
-        HeapPtrString* stringPtr = reinterpret_cast<js::HeapPtrString*>(mem);
+        GCPtrString* stringPtr = reinterpret_cast<js::GCPtrString*>(mem);
         TraceNullableEdge(trace_, stringPtr, "reference-str");
         return;
       }
     }
 
     MOZ_CRASH("Invalid kind");
 }
 
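These hunks do more than substitute GCPtr for HeapPtr: the DEFN/IMPL macros previously pasted the barrier type into the generated class names (StoreReference##T, giving StoreReferenceHeapValue and friends), so the rename would have leaked into every intrinsic class name. Switching both macros to paste the short name (##_name, giving StoreReferenceAny, StoreReferenceObject, StoreReferencestring) decouples the names from the barrier class. A self-contained sketch of the token-pasting pattern, with hypothetical Toy*/Store* names:

#include <cstdio>

// DEFN-style macro: declares a class named from the short name.
#define TOY_STORE_CLASS_DEFN(_constant, T, _name) \
    struct Store##_name {                         \
        static void store(T* slot, T v);          \
    };

// IMPL-style macro: must paste the same token (_name, not T) so the
// definition matches the declaration above.
#define TOY_STORE_CLASS_IMPL(_constant, T, _name) \
    void Store##_name::store(T* slot, T v) { *slot = v; }

// X-macro list pairing each representation type with its short name,
// mirroring JS_FOR_EACH_REFERENCE_TYPE_REPR.
#define TOY_FOR_EACH_TYPE(macro_) \
    macro_(0, int, Int)           \
    macro_(1, double, Double)

TOY_FOR_EACH_TYPE(TOY_STORE_CLASS_DEFN)
TOY_FOR_EACH_TYPE(TOY_STORE_CLASS_IMPL)

int main()
{
    int i = 0;
    StoreInt::store(&i, 7);   // class name comes from the _name token
    std::printf("%d\n", i);   // prints 7
    return 0;
}
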
--- a/js/src/builtin/TypedObject.h
+++ b/js/src/builtin/TypedObject.h
@@ -305,20 +305,20 @@ class ReferenceTypeDescr : public Simple
 
     const char* typeName() const {
         return typeName(type());
     }
 
     static MOZ_MUST_USE bool call(JSContext* cx, unsigned argc, Value* vp);
 };
 
-#define JS_FOR_EACH_REFERENCE_TYPE_REPR(macro_)                    \
-    macro_(ReferenceTypeDescr::TYPE_ANY,    HeapValue, Any)        \
-    macro_(ReferenceTypeDescr::TYPE_OBJECT, HeapPtrObject, Object) \
-    macro_(ReferenceTypeDescr::TYPE_STRING, HeapPtrString, string)
+#define JS_FOR_EACH_REFERENCE_TYPE_REPR(macro_) \
+    macro_(ReferenceTypeDescr::TYPE_ANY, GCPtrValue, Any) \
+    macro_(ReferenceTypeDescr::TYPE_OBJECT, GCPtrObject, Object) \
+    macro_(ReferenceTypeDescr::TYPE_STRING, GCPtrString, string)
 
 // Type descriptors whose instances are objects and hence which have
 // an associated `prototype` property.
 class ComplexTypeDescr : public TypeDescr
 {
   public:
     // Returns the prototype that instances of this type descriptor
     // will have.
@@ -493,17 +493,17 @@ class TypedObject : public JSObject
                                                  Handle<TypedObject*> typedObj,
                                                  Handle<TypeDescr*> typeDescr,
                                                  uint32_t index,
                                                  MutableHandleValue vp);
 
   protected:
     static const ObjectOps objectOps_;
 
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     static MOZ_MUST_USE bool obj_lookupProperty(JSContext* cx, HandleObject obj,
                                                 HandleId id, MutableHandleObject objp,
                                                 MutableHandleShape propp);
 
     static MOZ_MUST_USE bool obj_defineProperty(JSContext* cx, HandleObject obj, HandleId id,
                                                 Handle<PropertyDescriptor> desc,
                                                 ObjectOpResult& result);
@@ -580,17 +580,17 @@ class TypedObject : public JSObject
     Shape** addressOfShapeFromGC() { return shape_.unsafeUnbarrieredForTracing(); }
 };
 
 typedef Handle<TypedObject*> HandleTypedObject;
 
 class OutlineTypedObject : public TypedObject
 {
     // The object which owns the data this object points to. Because this
-    // pointer is managed in tandem with |data|, this is not a HeapPtr and
+    // pointer is managed in tandem with |data|, this is not a GCPtr and
     // barriers are managed directly.
     JSObject* owner_;
 
     // Data pointer to some offset in the owner's contents.
     uint8_t* data_;
 
     void setOwnerAndData(JSObject* owner, uint8_t* data);
 
@@ -860,17 +860,17 @@ class StoreScalar##T {                  
  * `targetDatum` at the offset `targetOffset`.
  *
  * Assumes (and asserts) that:
  * - `targetDatum` is attached
  * - `targetOffset` is a valid offset within the bounds of `targetDatum`
  * - `value` is an object or null (`Store_Object`) or string (`Store_string`).
  */
 #define JS_STORE_REFERENCE_CLASS_DEFN(_constant, T, _name)                    \
-class StoreReference##T {                                                     \
+class StoreReference##_name {                                                 \
   private:                                                                    \
     static MOZ_MUST_USE bool store(JSContext* cx, T* heap, const Value& v,    \
                                    TypedObject* obj, jsid id);                \
                                                                               \
   public:                                                                     \
     static MOZ_MUST_USE bool Func(JSContext* cx, unsigned argc, Value* vp);   \
     static const JSJitInfo JitInfo;                                           \
 };
@@ -894,17 +894,17 @@ class LoadScalar##T {                   
  * Usage: LoadReference(targetDatum, targetOffset, value)
  *
  * Intrinsic function. Stores value (which must be an int32 or uint32)
  * by `scalarTypeRepr` (which must be a type repr obj) and stores the
  * value at the memory for `targetDatum` at offset `targetOffset`.
  * `targetDatum` must be attached.
  */
 #define JS_LOAD_REFERENCE_CLASS_DEFN(_constant, T, _name)                     \
-class LoadReference##T {                                                      \
+class LoadReference##_name {                                                  \
   private:                                                                    \
     static void load(T* heap, MutableHandleValue v);                          \
                                                                               \
   public:                                                                     \
     static MOZ_MUST_USE bool Func(JSContext* cx, unsigned argc, Value* vp);   \
     static const JSJitInfo JitInfo;                                           \
 };
 
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -9375,17 +9375,17 @@ CGObjectList::indexOf(JSObject* obj)
 }
 
 void
 CGObjectList::finish(ObjectArray* array)
 {
     MOZ_ASSERT(length <= INDEX_LIMIT);
     MOZ_ASSERT(length == array->length);
 
-    js::HeapPtrObject* cursor = array->vector + array->length;
+    js::GCPtrObject* cursor = array->vector + array->length;
     ObjectBox* objbox = lastbox;
     do {
         --cursor;
         MOZ_ASSERT(!*cursor);
         MOZ_ASSERT(objbox->object->isTenured());
         *cursor = objbox->object;
     } while ((objbox = objbox->emitLink) != nullptr);
     MOZ_ASSERT(cursor == array->vector);
--- a/js/src/frontend/ParseMaps.cpp
+++ b/js/src/frontend/ParseMaps.cpp
@@ -117,17 +117,17 @@ AtomDecls<ParseHandler>::addShadowedForA
     AtomDefnListAddPtr p = map->lookupForAdd(atom);
     if (!p)
         return map->add(p, atom, DefinitionList(ParseHandler::definitionToBits(defn)));
 
     return p.value().appendBack<ParseHandler>(cx, alloc, defn);
 }
 
 void
-frontend::InitAtomMap(frontend::AtomIndexMap* indices, HeapPtrAtom* atoms)
+frontend::InitAtomMap(frontend::AtomIndexMap* indices, GCPtrAtom* atoms)
 {
     if (indices->isMap()) {
         typedef AtomIndexMap::WordMap WordMap;
         const WordMap& wm = indices->asMap();
         for (WordMap::Range r = wm.all(); !r.empty(); r.popFront()) {
             JSAtom* atom = r.front().key();
             jsatomid index = r.front().value();
             MOZ_ASSERT(index < indices->count());
--- a/js/src/frontend/ParseMaps.h
+++ b/js/src/frontend/ParseMaps.h
@@ -32,17 +32,17 @@ typedef InlineMap<JSAtom*, DefinitionSin
 typedef InlineMap<JSAtom*, DefinitionList, 24> AtomDefnListMap;
 
 /*
  * For all unmapped atoms recorded in al, add a mapping from the atom's index
  * to its address. map->length must already be set to the number of atoms in
  * the list and map->vector must point to pre-allocated memory.
  */
 void
-InitAtomMap(AtomIndexMap* indices, HeapPtrAtom* atoms);
+InitAtomMap(AtomIndexMap* indices, GCPtrAtom* atoms);
 
 /*
  * A pool that permits the reuse of the backing storage for the defn, index, or
  * defn-or-header (multi) maps.
  *
  * The pool owns all the maps that are given out, and is responsible for
  * relinquishing all resources when |purgeAll| is triggered.
  */
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -2903,17 +2903,17 @@ Parser<SyntaxParseHandler>::finishFuncti
         return false;
 
     LazyScript::FreeVariable* freeVariables = lazy->freeVariables();
     size_t i = 0;
     for (AtomDefnRange r = pc->lexdeps->all(); !r.empty(); r.popFront())
         freeVariables[i++] = LazyScript::FreeVariable(r.front().key());
     MOZ_ASSERT(i == numFreeVariables);
 
-    HeapPtrFunction* innerFunctions = lazy->innerFunctions();
+    GCPtrFunction* innerFunctions = lazy->innerFunctions();
     for (size_t i = 0; i < numInnerFunctions; i++)
         innerFunctions[i].init(pc->innerFunctions[i]);
 
     if (pc->sc->strict())
         lazy->setStrict();
     lazy->setGeneratorKind(funbox->generatorKind());
     if (funbox->isLikelyConstructorWrapper())
         lazy->setLikelyConstructorWrapper();
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -118,39 +118,37 @@
  *                            IMPLEMENTATION DETAILS
  *
  * Since it would be awkward to change every write to memory into a function
  * call, this file contains a bunch of C++ classes and templates that use
  * operator overloading to take care of barriers automatically. In many cases,
  * all that's necessary to make some field be barriered is to replace
  *     Type* field;
  * with
- *     HeapPtr<Type> field;
- * There are also special classes HeapValue and HeapId, which barrier js::Value
- * and jsid, respectively.
+ *     GCPtr<Type> field;
  *
  * One additional note: not all object writes need to be pre-barriered. Writes
  * to newly allocated objects do not need a pre-barrier. In these cases, we use
  * the "obj->field.init(value)" method instead of "obj->field = value". We use
  * the init naming idiom in many places to signify that a field is being
  * assigned for the first time.
  *
  * This file implements four classes, illustrated here:
  *
  * BarrieredBase             base class of all barriers
  *  |  |
  *  | WriteBarrieredBase     base class which provides common write operations
  *  |  |  |  |  |
  *  |  |  |  | PreBarriered  provides pre-barriers only
  *  |  |  |  |
- *  |  |  | HeapPtr          provides pre- and post-barriers
+ *  |  |  | GCPtr            provides pre- and post-barriers
  *  |  |  |
  *  |  | RelocatablePtr      provides pre- and post-barriers and is relocatable
  *  |  |
- *  | HeapSlot               similar to HeapPtr, but tailored to slots storage
+ *  | HeapSlot               similar to GCPtr, but tailored to slots storage
  *  |
  * ReadBarrieredBase         base class which provides common read operations
  *  |
  * ReadBarriered             provides read barriers only
  *
  *
  * The implementation of the barrier logic is implemented on T::writeBarrier.*,
  * via:
@@ -158,17 +156,17 @@
  * WriteBarrieredBase<T>::pre
  *  -> InternalBarrierMethods<T*>::preBarrier
  *      -> T::writeBarrierPre
  *  -> InternalBarrierMethods<Value>::preBarrier
  *  -> InternalBarrierMethods<jsid>::preBarrier
  *      -> InternalBarrierMethods<T*>::preBarrier
  *          -> T::writeBarrierPre
  *
- * HeapPtr<T>::post and RelocatablePtr<T>::post
+ * GCPtr<T>::post and RelocatablePtr<T>::post
  *  -> InternalBarrierMethods<T*>::postBarrier
  *      -> T::writeBarrierPost
  *  -> InternalBarrierMethods<Value>::postBarrier
  *      -> StoreBuffer::put
  *
  * These classes are designed to be used by the internals of the JS engine.
  * Barriers designed to be used externally are provided in js/RootingAPI.h.
  * These external barriers call into the same post-barrier implementations at
@@ -197,16 +195,17 @@ class ArgumentsObject;
 class ArrayBufferObjectMaybeShared;
 class ArrayBufferObject;
 class ArrayBufferViewObject;
 class SharedArrayBufferObject;
 class BaseShape;
 class DebugScopeObject;
 class GlobalObject;
 class LazyScript;
+class ModuleObject;
 class ModuleEnvironmentObject;
 class ModuleNamespaceObject;
 class NativeObject;
 class NestedScopeObject;
 class PlainObject;
 class PropertyName;
 class SavedFrame;
 class ScopeObject;
@@ -366,17 +365,17 @@ class WriteBarrieredBase : public Barrie
 
   protected:
     void pre() { InternalBarrierMethods<T>::preBarrier(this->value); }
     void post(T prev, T next) { InternalBarrierMethods<T>::postBarrier(&this->value, prev, next); }
 };
 
 /*
  * PreBarriered only automatically handles pre-barriers. Post-barriers must
- * be manually implemented when using this class. HeapPtr and RelocatablePtr
+ * be manually implemented when using this class. GCPtr and RelocatablePtr
  * should be used in all cases that do not require explicit low-level control
  * of moving behavior, e.g. for HashMap keys.
  */
 template <class T>
 class PreBarriered : public WriteBarrieredBase<T>
 {
   public:
     PreBarriered() : WriteBarrieredBase<T>(JS::GCPolicy<T>::initial()) {}
@@ -404,74 +403,71 @@ class PreBarriered : public WriteBarrier
         this->pre();
         this->value = v;
     }
 };
 
 /*
  * A pre- and post-barriered heap pointer, for use inside the JS engine.
  *
- * It must only be stored in memory that has GC lifetime. HeapPtr must not be
+ * It must only be stored in memory that has GC lifetime. GCPtr must not be
  * used in contexts where it may be implicitly moved or deleted, e.g. most
  * containers.
  *
- * Not to be confused with JS::Heap<T>. This is a different class from the
- * external interface and implements substantially different semantics.
- *
  * The post-barriers implemented by this class are faster than those
  * implemented by RelocatablePtr<T> or JS::Heap<T> at the cost of not
  * automatically handling deletion or movement.
  */
 template <class T>
-class HeapPtr : public WriteBarrieredBase<T>
+class GCPtr : public WriteBarrieredBase<T>
 {
   public:
-    HeapPtr() : WriteBarrieredBase<T>(JS::GCPolicy<T>::initial()) {}
-    explicit HeapPtr(T v) : WriteBarrieredBase<T>(v) {
+    GCPtr() : WriteBarrieredBase<T>(JS::GCPolicy<T>::initial()) {}
+    explicit GCPtr(T v) : WriteBarrieredBase<T>(v) {
         this->post(JS::GCPolicy<T>::initial(), v);
     }
-    explicit HeapPtr(const HeapPtr<T>& v) : WriteBarrieredBase<T>(v) {
+    explicit GCPtr(const GCPtr<T>& v) : WriteBarrieredBase<T>(v) {
         this->post(JS::GCPolicy<T>::initial(), v);
     }
 #ifdef DEBUG
-    ~HeapPtr() {
+    ~GCPtr() {
         // No prebarrier necessary as this only happens when we are sweeping or
         // before the containing object becomes part of the GC graph.
         MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
     }
 #endif
 
     void init(T v) {
         this->value = v;
         this->post(JS::GCPolicy<T>::initial(), v);
     }
 
-    DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
+    DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);
 
     T unbarrieredGet() const {
         return this->value;
     }
 
   private:
     void set(const T& v) {
         this->pre();
         T tmp = this->value;
         this->value = v;
         this->post(tmp, this->value);
     }
 
     /*
-     * Unlike RelocatablePtr<T>, HeapPtr<T> must be managed with GC lifetimes.
+     * Unlike RelocatablePtr<T>, GCPtr<T> must be managed with GC lifetimes.
      * Specifically, the memory used by the pointer itself must be live until
      * at least the next minor GC. For that reason, move semantics are invalid
      * and are deleted here. Please note that not all containers support move
      * semantics, so this does not completely prevent invalid uses.
      */
-    HeapPtr(HeapPtr<T>&&) = delete;
-    HeapPtr<T>& operator=(HeapPtr<T>&&) = delete;
+    GCPtr(GCPtr<T>&&) = delete;
+    GCPtr<T>& operator=(GCPtr<T>&&) = delete;
 };
 
 /*
  * A pre- and post-barriered heap pointer, for use inside the JS engine. These
  * heap pointers can be stored in C++ containers like GCVector and GCHashMap.
  *
  * The GC sometimes keeps pointers to pointers to GC things --- for example, to
  * track references into the nursery. However, C++ containers like GCVector and
@@ -721,17 +717,17 @@ class HeapSlotArray
     explicit HeapSlotArray(HeapSlot* array, bool allowWrite)
       : array(array)
 #ifdef DEBUG
       , allowWrite_(allowWrite)
 #endif
     {}
 
     operator const Value*() const {
-        JS_STATIC_ASSERT(sizeof(HeapPtr<Value>) == sizeof(Value));
+        JS_STATIC_ASSERT(sizeof(GCPtr<Value>) == sizeof(Value));
         JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
         return reinterpret_cast<const Value*>(array);
     }
     operator HeapSlot*() const { MOZ_ASSERT(allowWrite()); return array; }
 
     HeapSlotArray operator +(int offset) const { return HeapSlotArray(array + offset, allowWrite()); }
     HeapSlotArray operator +(uint32_t offset) const { return HeapSlotArray(array + offset, allowWrite()); }
 
@@ -827,31 +823,31 @@ struct MovableCellHasher<ReadBarriered<T
 
     static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
     static bool match(const Key& k, const Lookup& l) {
         return MovableCellHasher<T>::match(k.unbarrieredGet(), l);
     }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
-/* Useful for hashtables with a HeapPtr as key. */
+/* Useful for hashtables with a GCPtr as key. */
 template <class T>
-struct HeapPtrHasher
+struct GCPtrHasher
 {
-    typedef HeapPtr<T> Key;
+    typedef GCPtr<T> Key;
     typedef T Lookup;
 
     static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
     static bool match(const Key& k, Lookup l) { return k.get() == l; }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
-/* Specialized hashing policy for HeapPtrs. */
+/* Specialized hashing policy for GCPtrs. */
 template <class T>
-struct DefaultHasher<HeapPtr<T>> : HeapPtrHasher<T> { };
+struct DefaultHasher<GCPtr<T>> : GCPtrHasher<T> {};
 
 template <class T>
 struct PreBarrieredHasher
 {
     typedef PreBarriered<T> Key;
     typedef T Lookup;
 
     static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
@@ -906,44 +902,45 @@ typedef RelocatablePtr<NestedScopeObject
 typedef RelocatablePtr<Shape*> RelocatablePtrShape;
 typedef RelocatablePtr<ObjectGroup*> RelocatablePtrObjectGroup;
 typedef RelocatablePtr<jit::JitCode*> RelocatablePtrJitCode;
 typedef RelocatablePtr<JSLinearString*> RelocatablePtrLinearString;
 typedef RelocatablePtr<JSString*> RelocatablePtrString;
 typedef RelocatablePtr<JSAtom*> RelocatablePtrAtom;
 typedef RelocatablePtr<ArrayBufferObjectMaybeShared*> RelocatablePtrArrayBufferObjectMaybeShared;
 
-typedef HeapPtr<NativeObject*> HeapPtrNativeObject;
-typedef HeapPtr<ArrayObject*> HeapPtrArrayObject;
-typedef HeapPtr<ArrayBufferObjectMaybeShared*> HeapPtrArrayBufferObjectMaybeShared;
-typedef HeapPtr<ArrayBufferObject*> HeapPtrArrayBufferObject;
-typedef HeapPtr<BaseShape*> HeapPtrBaseShape;
-typedef HeapPtr<JSAtom*> HeapPtrAtom;
-typedef HeapPtr<JSFlatString*> HeapPtrFlatString;
-typedef HeapPtr<JSFunction*> HeapPtrFunction;
-typedef HeapPtr<JSLinearString*> HeapPtrLinearString;
-typedef HeapPtr<JSObject*> HeapPtrObject;
-typedef HeapPtr<JSScript*> HeapPtrScript;
-typedef HeapPtr<JSString*> HeapPtrString;
-typedef HeapPtr<ModuleEnvironmentObject*> HeapPtrModuleEnvironmentObject;
-typedef HeapPtr<ModuleNamespaceObject*> HeapPtrModuleNamespaceObject;
-typedef HeapPtr<PlainObject*> HeapPtrPlainObject;
-typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
-typedef HeapPtr<Shape*> HeapPtrShape;
-typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
-typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;
-typedef HeapPtr<ObjectGroup*> HeapPtrObjectGroup;
+typedef GCPtr<NativeObject*> GCPtrNativeObject;
+typedef GCPtr<ArrayObject*> GCPtrArrayObject;
+typedef GCPtr<ArrayBufferObjectMaybeShared*> GCPtrArrayBufferObjectMaybeShared;
+typedef GCPtr<ArrayBufferObject*> GCPtrArrayBufferObject;
+typedef GCPtr<BaseShape*> GCPtrBaseShape;
+typedef GCPtr<JSAtom*> GCPtrAtom;
+typedef GCPtr<JSFlatString*> GCPtrFlatString;
+typedef GCPtr<JSFunction*> GCPtrFunction;
+typedef GCPtr<JSLinearString*> GCPtrLinearString;
+typedef GCPtr<JSObject*> GCPtrObject;
+typedef GCPtr<JSScript*> GCPtrScript;
+typedef GCPtr<JSString*> GCPtrString;
+typedef GCPtr<ModuleObject*> GCPtrModuleObject;
+typedef GCPtr<ModuleEnvironmentObject*> GCPtrModuleEnvironmentObject;
+typedef GCPtr<ModuleNamespaceObject*> GCPtrModuleNamespaceObject;
+typedef GCPtr<PlainObject*> GCPtrPlainObject;
+typedef GCPtr<PropertyName*> GCPtrPropertyName;
+typedef GCPtr<Shape*> GCPtrShape;
+typedef GCPtr<UnownedBaseShape*> GCPtrUnownedBaseShape;
+typedef GCPtr<jit::JitCode*> GCPtrJitCode;
+typedef GCPtr<ObjectGroup*> GCPtrObjectGroup;
 
 typedef PreBarriered<Value> PreBarrieredValue;
 typedef RelocatablePtr<Value> RelocatableValue;
-typedef HeapPtr<Value> HeapValue;
+typedef GCPtr<Value> GCPtrValue;
 
 typedef PreBarriered<jsid> PreBarrieredId;
 typedef RelocatablePtr<jsid> RelocatableId;
-typedef HeapPtr<jsid> HeapId;
+typedef GCPtr<jsid> GCPtrId;
 
 typedef ImmutableTenuredPtr<PropertyName*> ImmutablePropertyNamePtr;
 typedef ImmutableTenuredPtr<JS::Symbol*> ImmutableSymbolPtr;
 
 typedef ReadBarriered<DebugScopeObject*> ReadBarrieredDebugScopeObject;
 typedef ReadBarriered<GlobalObject*> ReadBarrieredGlobalObject;
 typedef ReadBarriered<JSObject*> ReadBarrieredObject;
 typedef ReadBarriered<JSScript*> ReadBarrieredScript;
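
The reworked header comment above is the core of the patch: GCPtr (formerly HeapPtr) is the pre- and post-barriered wrapper for fields in memory with GC lifetime, init() is the first-store idiom for freshly allocated cells (no pre-barrier needed, since there is no prior value the marker could care about), and move semantics are deleted because the pointer's own address must stay valid until at least the next minor GC. A compilable toy model of those semantics follows; the barrier bodies are printf stand-ins for the real marking/StoreBuffer hooks, and all Toy* names are hypothetical:

#include <cassert>
#include <cstdio>

template <typename T>
static void preBarrier(T* prev)
{
    // Stand-in for the incremental-marking pre-barrier on the old value.
    if (prev)
        std::printf("pre-barrier: old referent may need marking\n");
}

template <typename T>
static void postBarrier(T** addr)
{
    // Stand-in for recording a possible tenured->nursery edge.
    std::printf("post-barrier: record edge at %p\n", static_cast<void*>(addr));
}

// Minimal model of GCPtr<T*>: pre-barrier on overwrite, post-barrier on
// every store; init() skips the pre-barrier for freshly allocated memory.
template <typename T>
class ToyGCPtr
{
    T* value_ = nullptr;

  public:
    ToyGCPtr() = default;

    void init(T* v) {            // first store into new memory
        value_ = v;
        postBarrier(&value_);
    }

    ToyGCPtr& operator=(T* v) {  // barriered overwrite
        preBarrier(value_);
        value_ = v;
        postBarrier(&value_);
        return *this;
    }

    T* get() const { return value_; }

    // The slot's address must stay stable until the next minor GC,
    // so moving the wrapper is forbidden, as in the hunk above.
    ToyGCPtr(ToyGCPtr&&) = delete;
    ToyGCPtr& operator=(ToyGCPtr&&) = delete;
};

int main()
{
    int a = 1, b = 2;
    ToyGCPtr<int> field;
    field.init(&a);   // post-barrier only
    field = &b;       // pre- then post-barrier
    assert(field.get() == &b);
    return 0;
}

The GCPtrHasher/DefaultHasher specialization in the same hunk builds on this: a plain GCPtr can key a hash table directly, with rekey() bypassing the barriers via unsafeSet().
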
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -977,17 +977,17 @@ LazyScript::traceChildren(JSTracer* trc)
 
     // We rely on the fact that atoms are always tenured.
     FreeVariable* freeVariables = this->freeVariables();
     for (auto i : MakeRange(numFreeVariables())) {
         JSAtom* atom = freeVariables[i].atom();
         TraceManuallyBarrieredEdge(trc, &atom, "lazyScriptFreeVariable");
     }
 
-    HeapPtrFunction* innerFunctions = this->innerFunctions();
+    GCPtrFunction* innerFunctions = this->innerFunctions();
     for (auto i : MakeRange(numInnerFunctions()))
         TraceEdge(trc, &innerFunctions[i], "lazyScriptInnerFunction");
 }
 inline void
 js::GCMarker::eagerlyMarkChildren(LazyScript *thing)
 {
     if (thing->script_)
         noteWeakEdge(thing->script_.unsafeUnbarrieredForTracing());
@@ -1001,17 +1001,17 @@ js::GCMarker::eagerlyMarkChildren(LazySc
     if (thing->enclosingScope_)
         traverseEdge(thing, static_cast<JSObject*>(thing->enclosingScope_));
 
     // We rely on the fact that atoms are always tenured.
     LazyScript::FreeVariable* freeVariables = thing->freeVariables();
     for (auto i : MakeRange(thing->numFreeVariables()))
         traverseEdge(thing, static_cast<JSString*>(freeVariables[i].atom()));
 
-    HeapPtrFunction* innerFunctions = thing->innerFunctions();
+    GCPtrFunction* innerFunctions = thing->innerFunctions();
     for (auto i : MakeRange(thing->numInnerFunctions()))
         traverseEdge(thing, static_cast<JSObject*>(innerFunctions[i]));
 }
 
 void
 Shape::traceChildren(JSTracer* trc)
 {
     TraceEdge(trc, &base_, "base");
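
These hunks show the two tracing idioms side by side: TraceEdge takes the address of a barriered GCPtrFunction element and can update the slot in place, while the raw JSAtom* stack copy goes through TraceManuallyBarrieredEdge. A small sketch of that API split, with hypothetical toy* names:

#include <cstdio>

template <typename T>
class ToyGCPtr
{
    T value_ = nullptr;
  public:
    void init(T v) { value_ = v; }
    T get() const { return value_; }
};

struct ToyTracer {};  // stands in for JSTracer

// Barriered edge: the tracer receives the wrapper itself, so a moving
// collector could relocate the referent and rewrite the slot
// (mirrors TraceEdge(trc, &innerFunctions[i], ...)).
template <typename T>
static void toyTraceEdge(ToyTracer*, ToyGCPtr<T>* edge, const char* name)
{
    std::printf("edge %s -> %p\n", name, static_cast<void*>(edge->get()));
}

// Unbarriered edge: a raw location whose barriers the caller manages
// (mirrors TraceManuallyBarrieredEdge on the stack copy of the atom).
template <typename T>
static void toyTraceManuallyBarrieredEdge(ToyTracer*, T* edge, const char* name)
{
    std::printf("edge %s -> %p (manual)\n", name, static_cast<void*>(*edge));
}

int main()
{
    ToyTracer trc;
    int fun = 1;

    ToyGCPtr<int*> innerFunction;
    innerFunction.init(&fun);
    toyTraceEdge(&trc, &innerFunction, "lazyScriptInnerFunction");

    int* atom = &fun;  // raw stack copy, as in traceChildren above
    toyTraceManuallyBarrieredEdge(&trc, &atom, "lazyScriptFreeVariable");
    return 0;
}
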
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -163,17 +163,17 @@ Zone::sweepBreakpoints(FreeOp* fop)
         for (unsigned i = 0; i < script->length(); i++) {
             BreakpointSite* site = script->getBreakpointSite(script->offsetToPC(i));
             if (!site)
                 continue;
 
             Breakpoint* nextbp;
             for (Breakpoint* bp = site->firstBreakpoint(); bp; bp = nextbp) {
                 nextbp = bp->nextInSite();
-                HeapPtrNativeObject& dbgobj = bp->debugger->toJSObjectRef();
+                GCPtrNativeObject& dbgobj = bp->debugger->toJSObjectRef();
 
                 // If we are sweeping, then we expect the script and the
                 // debugger object to be swept in the same zone group, except if
                 // the breakpoint was added after we computed the zone
                 // groups. In this case both script and debugger object must be
                 // live.
                 MOZ_ASSERT_IF(isGCSweeping() && dbgobj->zone()->isCollecting(),
                               dbgobj->zone()->isGCSweeping() ||
--- a/js/src/gdb/tests/test-Root.cpp
+++ b/js/src/gdb/tests/test-Root.cpp
@@ -35,22 +35,22 @@ FRAGMENT(Root, HeapSlot) {
 
   (void) plinth;
   (void) array;
 }
 
 FRAGMENT(Root, barriers) {
   JSObject* obj = JS_NewPlainObject(cx);
   js::PreBarriered<JSObject*> prebarriered(obj);
-  js::HeapPtr<JSObject*> heapptr(obj);
+  js::GCPtr<JSObject*> heapptr(obj);
   js::RelocatablePtr<JSObject*> relocatable(obj);
 
   JS::Value val = JS::ObjectValue(*obj);
   js::PreBarrieredValue prebarrieredValue(JS::ObjectValue(*obj));
-  js::HeapValue heapValue(JS::ObjectValue(*obj));
+  js::GCPtrValue heapValue(JS::ObjectValue(*obj));
   js::RelocatableValue relocatableValue(JS::ObjectValue(*obj));
 
   breakpoint();
 
   (void) prebarriered;
   (void) heapptr;
   (void) relocatable;
   (void) val;
--- a/js/src/jit/BaselineCacheIR.cpp
+++ b/js/src/jit/BaselineCacheIR.cpp
@@ -1054,61 +1054,61 @@ BaselineCacheIRCompiler::init(CacheKind 
 
     MOZ_ASSERT(numInputs == 1);
     allocator.initInputLocation(0, R0);
 
     return true;
 }
 
 template <typename T>
-static HeapPtr<T>*
-AsHeapPtr(uintptr_t* ptr)
+static GCPtr<T>*
+AsGCPtr(uintptr_t* ptr)
 {
-    return reinterpret_cast<HeapPtr<T>*>(ptr);
+    return reinterpret_cast<GCPtr<T>*>(ptr);
 }
 
 template<class T>
-HeapPtr<T>&
+GCPtr<T>&
 CacheIRStubInfo::getStubField(ICStub* stub, uint32_t field) const
 {
     uint8_t* stubData = (uint8_t*)stub + stubDataOffset_;
     MOZ_ASSERT(uintptr_t(stubData) % sizeof(uintptr_t) == 0);
 
-    return *AsHeapPtr<T>((uintptr_t*)stubData + field);
+    return *AsGCPtr<T>((uintptr_t*)stubData + field);
 }
 
-template HeapPtr<Shape*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
-template HeapPtr<ObjectGroup*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
-template HeapPtr<JSObject*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
+template GCPtr<Shape*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
+template GCPtr<ObjectGroup*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
+template GCPtr<JSObject*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
 
 template <typename T>
 static void
-InitHeapPtr(uintptr_t* ptr, uintptr_t val)
+InitGCPtr(uintptr_t* ptr, uintptr_t val)
 {
-    AsHeapPtr<T*>(ptr)->init((T*)val);
+    AsGCPtr<T*>(ptr)->init((T*)val);
 }
 
 void
 CacheIRWriter::copyStubData(uint8_t* dest) const
 {
     uintptr_t* destWords = reinterpret_cast<uintptr_t*>(dest);
 
     for (size_t i = 0; i < stubFields_.length(); i++) {
         switch (stubFields_[i].gcType) {
           case StubField::GCType::NoGCThing:
             destWords[i] = stubFields_[i].word;
             continue;
           case StubField::GCType::Shape:
-            InitHeapPtr<Shape>(destWords + i, stubFields_[i].word);
+            InitGCPtr<Shape>(destWords + i, stubFields_[i].word);
             continue;
           case StubField::GCType::JSObject:
-            InitHeapPtr<JSObject>(destWords + i, stubFields_[i].word);
+            InitGCPtr<JSObject>(destWords + i, stubFields_[i].word);
             continue;
           case StubField::GCType::ObjectGroup:
-            InitHeapPtr<ObjectGroup>(destWords + i, stubFields_[i].word);
+            InitGCPtr<ObjectGroup>(destWords + i, stubFields_[i].word);
             continue;
           case StubField::GCType::Limit:
             break;
         }
         MOZ_CRASH();
     }
 }
 
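AsGCPtr and InitGCPtr rely on stub data being an array of raw, pointer-aligned words: viewing a word through GCPtr<T>* hands the tracer a barriered edge, and copyStubData may use init() (no pre-barrier) because the stub's memory is freshly allocated. A sketch of the word-overlay pattern under those assumptions, again with hypothetical Toy* names:

#include <cstdint>
#include <cstdio>

// Toy pointer-sized barrier wrapper (stand-in for js::GCPtr<T>).
template <typename T>
class ToyGCPtr
{
    T value_ = nullptr;
  public:
    void init(T v) { value_ = v; }  // the real GCPtr::init also post-barriers
    T get() const { return value_; }
};

static_assert(sizeof(ToyGCPtr<int*>) == sizeof(uintptr_t),
              "wrapper must overlay one raw stub-data word exactly");

// View one word of a raw stub-data buffer as a barriered pointer,
// mirroring AsGCPtr<T>/getStubField in the hunk above.
template <typename T>
static ToyGCPtr<T>* asToyGCPtr(uintptr_t* word)
{
    return reinterpret_cast<ToyGCPtr<T>*>(word);
}

int main()
{
    uintptr_t stubData[2] = { 0, 0 };  // freshly "allocated" stub words
    int shapeLike = 42;
    // InitGCPtr-style first store: init(), no pre-barrier needed.
    asToyGCPtr<int*>(&stubData[1])->init(&shapeLike);
    std::printf("%d\n", *asToyGCPtr<int*>(&stubData[1])->get());
    return 0;
}
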
--- a/js/src/jit/BaselineCacheIR.h
+++ b/js/src/jit/BaselineCacheIR.h
@@ -48,17 +48,17 @@ class CacheIRStubInfo
     uint32_t stubDataOffset() const { return stubDataOffset_; }
 
     StubField::GCType gcType(uint32_t i) const { return (StubField::GCType)gcTypes_[i]; }
 
     static CacheIRStubInfo* New(CacheKind kind, uint32_t stubDataOffset,
                                 const CacheIRWriter& writer);
 
     template <class T>
-    js::HeapPtr<T>& getStubField(ICStub* stub, uint32_t field) const;
+    js::GCPtr<T>& getStubField(ICStub* stub, uint32_t field) const;
 };
 
 void TraceBaselineCacheIRStub(JSTracer* trc, ICStub* stub, const CacheIRStubInfo* stubInfo);
 
 ICStub* AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer, CacheKind kind,
                                   ICFallbackStub* stub);
 
 } // namespace jit
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -10,16 +10,17 @@
 #include "mozilla/Assertions.h"
 
 #include "jscntxt.h"
 #include "jscompartment.h"
 #include "jsgc.h"
 #include "jsopcode.h"
 
 #include "builtin/TypedObject.h"
+#include "gc/Barrier.h"
 #include "jit/BaselineICList.h"
 #include "jit/BaselineJIT.h"
 #include "jit/SharedIC.h"
 #include "jit/SharedICRegisters.h"
 #include "js/GCVector.h"
 #include "vm/ArrayObject.h"
 #include "vm/UnboxedObject.h"
 
@@ -120,22 +121,22 @@ class ICTypeUpdate_PrimitiveSet : public
     };
 };
 
 // Type update stub to handle a singleton object.
 class ICTypeUpdate_SingleObject : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObject obj_;
+    GCPtrObject obj_;
 
     ICTypeUpdate_SingleObject(JitCode* stubCode, JSObject* obj);
 
   public:
-    HeapPtrObject& object() {
+    GCPtrObject& object() {
         return obj_;
     }
 
     static size_t offsetOfObject() {
         return offsetof(ICTypeUpdate_SingleObject, obj_);
     }
 
     class Compiler : public ICStubCompiler {
@@ -155,22 +156,22 @@ class ICTypeUpdate_SingleObject : public
     };
 };
 
 // Type update stub to handle a single ObjectGroup.
 class ICTypeUpdate_ObjectGroup : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObjectGroup group_;
+    GCPtrObjectGroup group_;
 
     ICTypeUpdate_ObjectGroup(JitCode* stubCode, ObjectGroup* group);
 
   public:
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
 
     static size_t offsetOfGroup() {
         return offsetof(ICTypeUpdate_ObjectGroup, group_);
     }
 
     class Compiler : public ICStubCompiler {
@@ -462,27 +463,27 @@ class ICGetElemNativeStub : public ICMon
         return (extra_ >> ISSYMBOL_SHIFT) & ISSYMBOL_MASK;
     }
 };
 
 template <class T>
 class ICGetElemNativeStubImpl : public ICGetElemNativeStub
 {
   protected:
-    HeapPtr<T> key_;
+    GCPtr<T> key_;
 
     ICGetElemNativeStubImpl(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
                             ReceiverGuard guard, const T* key, AccessType acctype, bool needsAtomize)
       : ICGetElemNativeStub(kind, stubCode, firstMonitorStub, guard, acctype, needsAtomize,
                             mozilla::IsSame<T, JS::Symbol*>::value),
         key_(*key)
     {}
 
   public:
-    HeapPtr<T>& key() {
+    GCPtr<T>& key() {
         return key_;
     }
     static size_t offsetOfKey() {
         return offsetof(ICGetElemNativeStubImpl, key_);
     }
 };
 
 typedef ICGetElemNativeStub::AccessType AccType;
@@ -518,25 +519,25 @@ class ICGetElemNativeSlotStub : public I
         return offsetof(ICGetElemNativeSlotStub, offset_);
     }
 };
 
 template <class T>
 class ICGetElemNativeGetterStub : public ICGetElemNativeStubImpl<T>
 {
   protected:
-    HeapPtrFunction getter_;
+    GCPtrFunction getter_;
     uint32_t pcOffset_;
 
     ICGetElemNativeGetterStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
                               ReceiverGuard guard, const T* key, AccType acctype, bool needsAtomize,
                               JSFunction* getter, uint32_t pcOffset);
 
   public:
-    HeapPtrFunction& getter() {
+    GCPtrFunction& getter() {
         return getter_;
     }
     static size_t offsetOfGetter() {
         return offsetof(ICGetElemNativeGetterStub, getter_);
     }
 
     static size_t offsetOfPCOffset() {
         return offsetof(ICGetElemNativeGetterStub, pcOffset_);
@@ -588,32 +589,32 @@ class ICGetElem_UnboxedProperty : public
 class ICGetElem_UnboxedPropertyName :
       public ICGetElem_UnboxedProperty<PropertyName*>
 {};
 
 template <class T>
 class ICGetElem_NativePrototypeSlot : public ICGetElemNativeSlotStub<T>
 {
     friend class ICStubSpace;
-    HeapPtrObject holder_;
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+    GCPtrShape holderShape_;
 
     ICGetElem_NativePrototypeSlot(JitCode* stubCode, ICStub* firstMonitorStub, ReceiverGuard guard,
                                   const T* key, AccType acctype, bool needsAtomize, uint32_t offset,
                                   JSObject* holder, Shape* holderShape);
 
   public:
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
     static size_t offsetOfHolder() {
         return offsetof(ICGetElem_NativePrototypeSlot, holder_);
     }
 
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
     static size_t offsetOfHolderShape() {
         return offsetof(ICGetElem_NativePrototypeSlot, holderShape_);
     }
 };
 
 class ICGetElem_NativePrototypeSlotName :
@@ -622,34 +623,34 @@ class ICGetElem_NativePrototypeSlotName 
 class ICGetElem_NativePrototypeSlotSymbol :
       public ICGetElem_NativePrototypeSlot<JS::Symbol*>
 {};
 
 template <class T>
 class ICGetElemNativePrototypeCallStub : public ICGetElemNativeGetterStub<T>
 {
     friend class ICStubSpace;
-    HeapPtrObject holder_;
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+    GCPtrShape holderShape_;
 
   protected:
     ICGetElemNativePrototypeCallStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
                                      ReceiverGuard guard, const T* key, AccType acctype,
                                      bool needsAtomize, JSFunction* getter, uint32_t pcOffset,
                                      JSObject* holder, Shape* holderShape);
 
   public:
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
     static size_t offsetOfHolder() {
         return offsetof(ICGetElemNativePrototypeCallStub, holder_);
     }
 
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
     static size_t offsetOfHolderShape() {
         return offsetof(ICGetElemNativePrototypeCallStub, holderShape_);
     }
 };
 
 template <class T>
@@ -845,29 +846,29 @@ class ICGetElem_String : public ICStub
         }
     };
 };
 
 class ICGetElem_Dense : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     ICGetElem_Dense(JitCode* stubCode, ICStub* firstMonitorStub, Shape* shape);
 
   public:
     static ICGetElem_Dense* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                   ICGetElem_Dense& other);
 
     static size_t offsetOfShape() {
         return offsetof(ICGetElem_Dense, shape_);
     }
 
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
 
     class Compiler : public ICStubCompiler {
       ICStub* firstMonitorStub_;
       RootedShape shape_;
 
       protected:
@@ -890,29 +891,29 @@ class ICGetElem_Dense : public ICMonitor
         }
     };
 };
 
 class ICGetElem_UnboxedArray : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObjectGroup group_;
+    GCPtrObjectGroup group_;
 
     ICGetElem_UnboxedArray(JitCode* stubCode, ICStub* firstMonitorStub, ObjectGroup* group);
 
   public:
     static ICGetElem_UnboxedArray* Clone(JSContext* cx, ICStubSpace* space,
                                          ICStub* firstMonitorStub, ICGetElem_UnboxedArray& other);
 
     static size_t offsetOfGroup() {
         return offsetof(ICGetElem_UnboxedArray, group_);
     }
 
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
 
     class Compiler : public ICStubCompiler {
       ICStub* firstMonitorStub_;
       RootedObjectGroup group_;
       JSValueType elementType_;
 
@@ -940,26 +941,26 @@ class ICGetElem_UnboxedArray : public IC
 };
 
 // Accesses scalar elements of a typed array or typed object.
 class ICGetElem_TypedArray : public ICStub
 {
     friend class ICStubSpace;
 
   protected: // Protected to silence Clang warning.
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     ICGetElem_TypedArray(JitCode* stubCode, Shape* shape, Scalar::Type type);
 
   public:
     static size_t offsetOfShape() {
         return offsetof(ICGetElem_TypedArray, shape_);
     }
 
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
 
     class Compiler : public ICStubCompiler {
       RootedShape shape_;
       Scalar::Type type_;
       TypedThingLayout layout_;
 
@@ -1071,33 +1072,33 @@ class ICSetElem_Fallback : public ICFall
         }
     };
 };
 
 class ICSetElem_DenseOrUnboxedArray : public ICUpdatedStub
 {
     friend class ICStubSpace;
 
-    HeapPtrShape shape_; // null for unboxed arrays
-    HeapPtrObjectGroup group_;
+    GCPtrShape shape_; // null for unboxed arrays
+    GCPtrObjectGroup group_;
 
     ICSetElem_DenseOrUnboxedArray(JitCode* stubCode, Shape* shape, ObjectGroup* group);
 
   public:
     static size_t offsetOfShape() {
         return offsetof(ICSetElem_DenseOrUnboxedArray, shape_);
     }
     static size_t offsetOfGroup() {
         return offsetof(ICSetElem_DenseOrUnboxedArray, group_);
     }
 
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
 
     class Compiler : public ICStubCompiler {
         RootedShape shape_;
         RootedObjectGroup group_;
         JSValueType unboxedType_;
 
@@ -1138,26 +1139,26 @@ template <size_t ProtoChainDepth> class 
 class ICSetElem_DenseOrUnboxedArrayAdd : public ICUpdatedStub
 {
     friend class ICStubSpace;
 
   public:
     static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
 
   protected:
-    HeapPtrObjectGroup group_;
+    GCPtrObjectGroup group_;
 
     ICSetElem_DenseOrUnboxedArrayAdd(JitCode* stubCode, ObjectGroup* group, size_t protoChainDepth);
 
   public:
     static size_t offsetOfGroup() {
         return offsetof(ICSetElem_DenseOrUnboxedArrayAdd, group_);
     }
 
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
     size_t protoChainDepth() const {
         MOZ_ASSERT(extra_ <= MAX_PROTO_CHAIN_DEPTH);
         return extra_;
     }
 
     template <size_t ProtoChainDepth>
@@ -1174,17 +1175,17 @@ class ICSetElem_DenseOrUnboxedArrayAdd :
 
 template <size_t ProtoChainDepth>
 class ICSetElem_DenseOrUnboxedArrayAddImpl : public ICSetElem_DenseOrUnboxedArrayAdd
 {
     friend class ICStubSpace;
 
     // Note: for unboxed arrays, the first shape is null.
     static const size_t NumShapes = ProtoChainDepth + 1;
-    mozilla::Array<HeapPtrShape, NumShapes> shapes_;
+    mozilla::Array<GCPtrShape, NumShapes> shapes_;
 
     ICSetElem_DenseOrUnboxedArrayAddImpl(JitCode* stubCode, ObjectGroup* group,
                                          Handle<ShapeVector> shapes)
       : ICSetElem_DenseOrUnboxedArrayAdd(stubCode, group, ProtoChainDepth)
     {
         MOZ_ASSERT(shapes.length() == NumShapes);
         for (size_t i = 0; i < NumShapes; i++)
             shapes_[i].init(shapes[i]);
@@ -1195,17 +1196,17 @@ class ICSetElem_DenseOrUnboxedArrayAddIm
         for (size_t i = 0; i < NumShapes; i++)
             TraceNullableEdge(trc, &shapes_[i], "baseline-setelem-denseadd-stub-shape");
     }
     Shape* shape(size_t i) const {
         MOZ_ASSERT(i < NumShapes);
         return shapes_[i];
     }
     static size_t offsetOfShape(size_t idx) {
-        return offsetof(ICSetElem_DenseOrUnboxedArrayAddImpl, shapes_) + idx * sizeof(HeapPtrShape);
+        return offsetof(ICSetElem_DenseOrUnboxedArrayAddImpl, shapes_) + idx * sizeof(GCPtrShape);
     }
 };
 
 class ICSetElemDenseOrUnboxedArrayAddCompiler : public ICStubCompiler {
     RootedObject obj_;
     size_t protoChainDepth_;
     JSValueType unboxedType_;
 
@@ -1240,17 +1241,17 @@ class ICSetElemDenseOrUnboxedArrayAddCom
 };
 
 // Accesses scalar elements of a typed array or typed object.
 class ICSetElem_TypedArray : public ICStub
 {
     friend class ICStubSpace;
 
   protected: // Protected to silence Clang warning.
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     ICSetElem_TypedArray(JitCode* stubCode, Shape* shape, Scalar::Type type,
                          bool expectOutOfBounds);
 
   public:
     Scalar::Type type() const {
         return (Scalar::Type) (extra_ & 0xff);
     }
@@ -1258,17 +1259,17 @@ class ICSetElem_TypedArray : public ICSt
     bool expectOutOfBounds() const {
         return (extra_ >> 8) & 1;
     }
 
     static size_t offsetOfShape() {
         return offsetof(ICSetElem_TypedArray, shape_);
     }
 
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
 
     class Compiler : public ICStubCompiler {
         RootedShape shape_;
         Scalar::Type type_;
         TypedThingLayout layout_;
         bool expectOutOfBounds_;
@@ -1326,32 +1327,32 @@ class ICIn_Fallback : public ICFallbackS
             return newStub<ICIn_Fallback>(space, getStubCode());
         }
     };
 };
 
 // Base class for In_Native and In_NativePrototype stubs.
 class ICInNativeStub : public ICStub
 {
-    HeapPtrShape shape_;
-    HeapPtrPropertyName name_;
+    GCPtrShape shape_;
+    GCPtrPropertyName name_;
 
   protected:
     ICInNativeStub(ICStub::Kind kind, JitCode* stubCode, HandleShape shape,
                    HandlePropertyName name);
 
   public:
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
     static size_t offsetOfShape() {
         return offsetof(ICInNativeStub, shape_);
     }
 
-    HeapPtrPropertyName& name() {
+    GCPtrPropertyName& name() {
         return name_;
     }
     static size_t offsetOfName() {
         return offsetof(ICInNativeStub, name_);
     }
 };
 
 // Stub for confirming an own property on a native object.
@@ -1366,27 +1367,27 @@ class ICIn_Native : public ICInNativeStu
 
 // Stub for confirming a property on a native object's prototype. Note that due to
 // the shape teleporting optimization, we only have to guard on the object's shape
 // and the holder's shape.
 class ICIn_NativePrototype : public ICInNativeStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObject holder_;
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+    GCPtrShape holderShape_;
 
     ICIn_NativePrototype(JitCode* stubCode, HandleShape shape, HandlePropertyName name,
                          HandleObject holder, HandleShape holderShape);
 
   public:
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
     static size_t offsetOfHolder() {
         return offsetof(ICIn_NativePrototype, holder_);
     }
     static size_t offsetOfHolderShape() {
         return offsetof(ICIn_NativePrototype, holderShape_);
     }
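
(A sketch of the two-shape guard described above; the helper below is
illustrative only, since the real checks are emitted as jitcode by the stub
compiler, and maybeShape() is assumed as the shape accessor:)

    bool matches(JSObject* obj, ICIn_NativePrototype& stub) {
        // The receiver's shape proves the property is absent on the receiver
        // and, via shape teleporting, that its prototype link is unchanged,
        // so no intermediate prototypes need guards.
        if (obj->maybeShape() != stub.shape())
            return false;
        // The holder's shape proves the property is still on the holder.
        return stub.holder()->maybeShape() == stub.holderShape();
    }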
@@ -1426,31 +1427,31 @@ class ICInNativeCompiler : public ICStub
 };
 
 template <size_t ProtoChainDepth> class ICIn_NativeDoesNotExistImpl;
 
 class ICIn_NativeDoesNotExist : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrPropertyName name_;
+    GCPtrPropertyName name_;
 
   public:
     static const size_t MAX_PROTO_CHAIN_DEPTH = 8;
 
   protected:
     ICIn_NativeDoesNotExist(JitCode* stubCode, size_t protoChainDepth,
                             HandlePropertyName name);
 
   public:
     size_t protoChainDepth() const {
         MOZ_ASSERT(extra_ <= MAX_PROTO_CHAIN_DEPTH);
         return extra_;
     }
-    HeapPtrPropertyName& name() {
+    GCPtrPropertyName& name() {
         return name_;
     }
 
     template <size_t ProtoChainDepth>
     ICIn_NativeDoesNotExistImpl<ProtoChainDepth>* toImpl() {
         MOZ_ASSERT(ProtoChainDepth == protoChainDepth());
         return static_cast<ICIn_NativeDoesNotExistImpl<ProtoChainDepth>*>(this);
     }
@@ -1466,29 +1467,29 @@ class ICIn_NativeDoesNotExistImpl : publ
 {
     friend class ICStubSpace;
 
   public:
     static const size_t MAX_PROTO_CHAIN_DEPTH = 8;
     static const size_t NumShapes = ProtoChainDepth + 1;
 
   private:
-    mozilla::Array<HeapPtrShape, NumShapes> shapes_;
+    mozilla::Array<GCPtrShape, NumShapes> shapes_;
 
     ICIn_NativeDoesNotExistImpl(JitCode* stubCode, Handle<ShapeVector> shapes,
                                 HandlePropertyName name);
 
   public:
     void traceShapes(JSTracer* trc) {
         for (size_t i = 0; i < NumShapes; i++)
             TraceEdge(trc, &shapes_[i], "baseline-innativedoesnotexist-stub-shape");
     }
 
     static size_t offsetOfShape(size_t idx) {
-        return offsetof(ICIn_NativeDoesNotExistImpl, shapes_) + (idx * sizeof(HeapPtrShape));
+        return offsetof(ICIn_NativeDoesNotExistImpl, shapes_) + (idx * sizeof(GCPtrShape));
     }
 };
 
 class ICInNativeDoesNotExistCompiler : public ICStubCompiler
 {
     RootedObject obj_;
     RootedPropertyName name_;
     size_t protoChainDepth_;
@@ -1513,22 +1514,22 @@ class ICInNativeDoesNotExistCompiler : p
 
     ICStub* getStub(ICStubSpace* space);
 };
 
 class ICIn_Dense : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     ICIn_Dense(JitCode* stubCode, HandleShape shape);
 
   public:
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
     static size_t offsetOfShape() {
         return offsetof(ICIn_Dense, shape_);
     }
 
     class Compiler : public ICStubCompiler {
       RootedShape shape_;
@@ -1629,17 +1630,17 @@ class ICGetName_GlobalLexical : public I
 // shape checks are required all along the scope chain.
 template <size_t NumHops>
 class ICGetName_Scope : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
     static const size_t MAX_HOPS = 6;
 
-    mozilla::Array<HeapPtrShape, NumHops + 1> shapes_;
+    mozilla::Array<GCPtrShape, NumHops + 1> shapes_;
     uint32_t offset_;
 
     ICGetName_Scope(JitCode* stubCode, ICStub* firstMonitorStub,
                     Handle<ShapeVector> shapes, uint32_t offset);
 
     static Kind GetStubKind() {
         return (Kind) (GetName_Scope0 + NumHops);
     }
@@ -1647,17 +1648,17 @@ class ICGetName_Scope : public ICMonitor
   public:
     void traceScopes(JSTracer* trc) {
         for (size_t i = 0; i < NumHops + 1; i++)
             TraceEdge(trc, &shapes_[i], "baseline-scope-stub-shape");
     }
 
     static size_t offsetOfShape(size_t index) {
         MOZ_ASSERT(index <= NumHops);
-        return offsetof(ICGetName_Scope, shapes_) + (index * sizeof(HeapPtrShape));
+        return offsetof(ICGetName_Scope, shapes_) + (index * sizeof(GCPtrShape));
     }
     static size_t offsetOfOffset() {
         return offsetof(ICGetName_Scope, offset_);
     }
 
     class Compiler : public ICStubCompiler {
         ICStub* firstMonitorStub_;
         Rooted<ShapeVector> shapes_;
@@ -1748,23 +1749,23 @@ class ICGetIntrinsic_Fallback : public I
     };
 };
 
 // Stub that loads the constant result of a GETINTRINSIC operation.
 class ICGetIntrinsic_Constant : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapValue value_;
+    GCPtrValue value_;
 
     ICGetIntrinsic_Constant(JitCode* stubCode, const Value& value);
     ~ICGetIntrinsic_Constant();
 
   public:
-    HeapValue& value() {
+    GCPtrValue& value() {
         return value_;
     }
     static size_t offsetOfValue() {
         return offsetof(ICGetIntrinsic_Constant, value_);
     }
 
     class Compiler : public ICStubCompiler {
         MOZ_MUST_USE bool generateStubCode(MacroAssembler& masm);
@@ -1831,27 +1832,27 @@ class ICSetProp_Fallback : public ICFall
 };
 
 // Optimized SETPROP/SETGNAME/SETNAME stub.
 class ICSetProp_Native : public ICUpdatedStub
 {
     friend class ICStubSpace;
 
   protected: // Protected to silence Clang warning.
-    HeapPtrObjectGroup group_;
-    HeapPtrShape shape_;
+    GCPtrObjectGroup group_;
+    GCPtrShape shape_;
     uint32_t offset_;
 
     ICSetProp_Native(JitCode* stubCode, ObjectGroup* group, Shape* shape, uint32_t offset);
 
   public:
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
     void notePreliminaryObject() {
         extra_ = 1;
     }
     bool hasPreliminaryObject() const {
         return extra_;
     }
@@ -1896,35 +1897,35 @@ class ICSetProp_Native : public ICUpdate
 template <size_t ProtoChainDepth> class ICSetProp_NativeAddImpl;
 
 class ICSetProp_NativeAdd : public ICUpdatedStub
 {
   public:
     static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
 
   protected: // Protected to silence Clang warning.
-    HeapPtrObjectGroup group_;
-    HeapPtrShape newShape_;
-    HeapPtrObjectGroup newGroup_;
+    GCPtrObjectGroup group_;
+    GCPtrShape newShape_;
+    GCPtrObjectGroup newGroup_;
     uint32_t offset_;
 
     ICSetProp_NativeAdd(JitCode* stubCode, ObjectGroup* group, size_t protoChainDepth,
                         Shape* newShape, ObjectGroup* newGroup, uint32_t offset);
 
   public:
     size_t protoChainDepth() const {
         return extra_;
     }
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
-    HeapPtrShape& newShape() {
+    GCPtrShape& newShape() {
         return newShape_;
     }
-    HeapPtrObjectGroup& newGroup() {
+    GCPtrObjectGroup& newGroup() {
         return newGroup_;
     }
 
     template <size_t ProtoChainDepth>
     ICSetProp_NativeAddImpl<ProtoChainDepth>* toImpl() {
         MOZ_ASSERT(ProtoChainDepth == protoChainDepth());
         return static_cast<ICSetProp_NativeAddImpl<ProtoChainDepth>*>(this);
     }
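
(Call sites recover the statically sized impl by dispatching on the runtime
depth; a hypothetical caller, not part of this patch:)

    switch (stub->protoChainDepth()) {
      case 0: stub->toImpl<0>()->traceShapes(trc); break;
      case 1: stub->toImpl<1>()->traceShapes(trc); break;
      case 2: stub->toImpl<2>()->traceShapes(trc); break;
      case 3: stub->toImpl<3>()->traceShapes(trc); break;
      case 4: stub->toImpl<4>()->traceShapes(trc); break;
      default: MOZ_CRASH("Bad proto chain depth");
    }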
@@ -1944,30 +1945,30 @@ class ICSetProp_NativeAdd : public ICUpd
 };
 
 template <size_t ProtoChainDepth>
 class ICSetProp_NativeAddImpl : public ICSetProp_NativeAdd
 {
     friend class ICStubSpace;
 
     static const size_t NumShapes = ProtoChainDepth + 1;
-    mozilla::Array<HeapPtrShape, NumShapes> shapes_;
+    mozilla::Array<GCPtrShape, NumShapes> shapes_;
 
     ICSetProp_NativeAddImpl(JitCode* stubCode, ObjectGroup* group,
                             Handle<ShapeVector> shapes,
                             Shape* newShape, ObjectGroup* newGroup, uint32_t offset);
 
   public:
     void traceShapes(JSTracer* trc) {
         for (size_t i = 0; i < NumShapes; i++)
             TraceEdge(trc, &shapes_[i], "baseline-setpropnativeadd-stub-shape");
     }
 
     static size_t offsetOfShape(size_t idx) {
-        return offsetof(ICSetProp_NativeAddImpl, shapes_) + (idx * sizeof(HeapPtrShape));
+        return offsetof(ICSetProp_NativeAddImpl, shapes_) + (idx * sizeof(GCPtrShape));
     }
 };
 
 class ICSetPropNativeAddCompiler : public ICStubCompiler
 {
     RootedObject obj_;
     RootedShape oldShape_;
     RootedObjectGroup oldGroup_;
@@ -2016,29 +2017,29 @@ class ICSetPropNativeAddCompiler : publi
 
     ICUpdatedStub* getStub(ICStubSpace* space);
 };
 
 class ICSetProp_Unboxed : public ICUpdatedStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObjectGroup group_;
+    GCPtrObjectGroup group_;
     uint32_t fieldOffset_;
 
     ICSetProp_Unboxed(JitCode* stubCode, ObjectGroup* group, uint32_t fieldOffset)
       : ICUpdatedStub(ICStub::SetProp_Unboxed, stubCode),
         group_(group),
         fieldOffset_(fieldOffset)
     {
         (void) fieldOffset_; // Silence clang warning
     }
 
   public:
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
 
     static size_t offsetOfGroup() {
         return offsetof(ICSetProp_Unboxed, group_);
     }
     static size_t offsetOfFieldOffset() {
         return offsetof(ICSetProp_Unboxed, fieldOffset_);
@@ -2080,37 +2081,37 @@ class ICSetProp_Unboxed : public ICUpdat
         }
     };
 };
 
 class ICSetProp_TypedObject : public ICUpdatedStub
 {
     friend class ICStubSpace;
 
-    HeapPtrShape shape_;
-    HeapPtrObjectGroup group_;
+    GCPtrShape shape_;
+    GCPtrObjectGroup group_;
     uint32_t fieldOffset_;
     bool isObjectReference_;
 
     ICSetProp_TypedObject(JitCode* stubCode, Shape* shape, ObjectGroup* group,
                           uint32_t fieldOffset, bool isObjectReference)
       : ICUpdatedStub(ICStub::SetProp_TypedObject, stubCode),
         shape_(shape),
         group_(group),
         fieldOffset_(fieldOffset),
         isObjectReference_(isObjectReference)
     {
         (void) fieldOffset_; // Silence clang warning
     }
 
   public:
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
     bool isObjectReference() {
         return isObjectReference_;
     }
 
     static size_t offsetOfShape() {
         return offsetof(ICSetProp_TypedObject, shape_);
@@ -2177,40 +2178,40 @@ class ICSetPropCallSetter : public ICStu
   protected:
     // Shape/group of receiver object. Used for both own and proto setters.
     HeapReceiverGuard receiverGuard_;
 
     // Holder and holder shape. For own setters, guarding on receiverGuard_ is
     // sufficient, although Ion may use holder_ and holderShape_ even for own
     // setters. In this case holderShape_ == receiverGuard_.shape_ (isOwnSetter
     // below relies on this).
-    HeapPtrObject holder_;
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+    GCPtrShape holderShape_;
 
     // Function to call.
-    HeapPtrFunction setter_;
+    GCPtrFunction setter_;
 
     // PC of call, for profiler
     uint32_t pcOffset_;
 
     ICSetPropCallSetter(Kind kind, JitCode* stubCode, ReceiverGuard receiverGuard,
                         JSObject* holder, Shape* holderShape, JSFunction* setter,
                         uint32_t pcOffset);
 
   public:
     HeapReceiverGuard& receiverGuard() {
         return receiverGuard_;
     }
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
-    HeapPtrFunction& setter() {
+    GCPtrFunction& setter() {
         return setter_;
     }
 
     bool isOwnSetter() const {
         MOZ_ASSERT(holder_->isNative());
         MOZ_ASSERT(holderShape_);
         return receiverGuard_.shape() == holderShape_;
     }
@@ -2454,32 +2455,32 @@ class ICCall_Scripted : public ICMonitor
     friend class ICStubSpace;
   public:
     // The maximum number of inlineable spread-call arguments. Keep this small
     // to avoid attacker-controllable stack overflows from passing large arrays
     // to a spread call. This value is shared with ICCall_Native.
     static const uint32_t MAX_ARGS_SPREAD_LENGTH = 16;
 
   protected:
-    HeapPtrFunction callee_;
-    HeapPtrObject templateObject_;
+    GCPtrFunction callee_;
+    GCPtrObject templateObject_;
     uint32_t pcOffset_;
 
     ICCall_Scripted(JitCode* stubCode, ICStub* firstMonitorStub,
                     JSFunction* callee, JSObject* templateObject,
                     uint32_t pcOffset);
 
   public:
     static ICCall_Scripted* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                   ICCall_Scripted& other);
 
-    HeapPtrFunction& callee() {
+    GCPtrFunction& callee() {
         return callee_;
     }
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     static size_t offsetOfCallee() {
         return offsetof(ICCall_Scripted, callee_);
     }
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_Scripted, pcOffset_);
@@ -2558,36 +2559,36 @@ class ICCallScriptedCompiler : public IC
     }
 };
 
 class ICCall_Native : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
   protected:
-    HeapPtrFunction callee_;
-    HeapPtrObject templateObject_;
+    GCPtrFunction callee_;
+    GCPtrObject templateObject_;
     uint32_t pcOffset_;
 
 #ifdef JS_SIMULATOR
     void* native_;
 #endif
 
     ICCall_Native(JitCode* stubCode, ICStub* firstMonitorStub,
                   JSFunction* callee, JSObject* templateObject,
                   uint32_t pcOffset);
 
   public:
     static ICCall_Native* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                 ICCall_Native& other);
 
-    HeapPtrFunction& callee() {
+    GCPtrFunction& callee() {
         return callee_;
     }
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     static size_t offsetOfCallee() {
         return offsetof(ICCall_Native, callee_);
     }
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_Native, pcOffset_);
@@ -2639,34 +2640,34 @@ class ICCall_Native : public ICMonitored
 
 class ICCall_ClassHook : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
   protected:
     const Class* clasp_;
     void* native_;
-    HeapPtrObject templateObject_;
+    GCPtrObject templateObject_;
     uint32_t pcOffset_;
 
     ICCall_ClassHook(JitCode* stubCode, ICStub* firstMonitorStub,
                      const Class* clasp, Native native, JSObject* templateObject,
                      uint32_t pcOffset);
 
   public:
     static ICCall_ClassHook* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                    ICCall_ClassHook& other);
 
     const Class* clasp() {
         return clasp_;
     }
     void* native() {
         return native_;
     }
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     static size_t offsetOfClass() {
         return offsetof(ICCall_ClassHook, clasp_);
     }
     static size_t offsetOfNative() {
         return offsetof(ICCall_ClassHook, native_);
@@ -2862,19 +2863,19 @@ class ICCall_ScriptedFunCall : public IC
 };
 
 class ICCall_StringSplit : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
   protected:
     uint32_t pcOffset_;
-    HeapPtrString expectedStr_;
-    HeapPtrString expectedSep_;
-    HeapPtrObject templateObject_;
+    GCPtrString expectedStr_;
+    GCPtrString expectedSep_;
+    GCPtrObject templateObject_;
 
     ICCall_StringSplit(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset, JSString* str,
                        JSString* sep, JSObject* templateObject)
       : ICMonitoredStub(ICStub::Call_StringSplit, stubCode, firstMonitorStub),
         pcOffset_(pcOffset), expectedStr_(str), expectedSep_(sep),
         templateObject_(templateObject)
     { }
 
@@ -2886,25 +2887,25 @@ class ICCall_StringSplit : public ICMoni
     static size_t offsetOfExpectedSep() {
         return offsetof(ICCall_StringSplit, expectedSep_);
     }
 
     static size_t offsetOfTemplateObject() {
         return offsetof(ICCall_StringSplit, templateObject_);
     }
 
-    HeapPtrString& expectedStr() {
+    GCPtrString& expectedStr() {
         return expectedStr_;
     }
 
-    HeapPtrString& expectedSep() {
+    GCPtrString& expectedSep() {
         return expectedSep_;
     }
 
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     class Compiler : public ICCallStubCompiler {
       protected:
         ICStub* firstMonitorStub_;
         uint32_t pcOffset_;
         RootedString expectedStr_;
@@ -3139,27 +3140,27 @@ class ICInstanceOf_Fallback : public ICF
         }
     };
 };
 
 class ICInstanceOf_Function : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrShape shape_;
-    HeapPtrObject prototypeObj_;
+    GCPtrShape shape_;
+    GCPtrObject prototypeObj_;
     uint32_t slot_;
 
     ICInstanceOf_Function(JitCode* stubCode, Shape* shape, JSObject* prototypeObj, uint32_t slot);
 
   public:
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
-    HeapPtrObject& prototypeObject() {
+    GCPtrObject& prototypeObject() {
         return prototypeObj_;
     }
     uint32_t slot() const {
         return slot_;
     }
     static size_t offsetOfShape() {
         return offsetof(ICInstanceOf_Function, shape_);
     }
@@ -3260,26 +3261,26 @@ class ICTypeOf_Typed : public ICFallback
         }
     };
 };
 
 class ICRest_Fallback : public ICFallbackStub
 {
     friend class ICStubSpace;
 
-    HeapPtrArrayObject templateObject_;
+    GCPtrArrayObject templateObject_;
 
     ICRest_Fallback(JitCode* stubCode, ArrayObject* templateObject)
       : ICFallbackStub(ICStub::Rest_Fallback, stubCode), templateObject_(templateObject)
     { }
 
   public:
     static const uint32_t MAX_OPTIMIZED_STUBS = 8;
 
-    HeapPtrArrayObject& templateObject() {
+    GCPtrArrayObject& templateObject() {
         return templateObject_;
     }
 
     class Compiler : public ICStubCompiler {
       protected:
         RootedArrayObject templateObject;
         MOZ_MUST_USE bool generateStubCode(MacroAssembler& masm);
 
--- a/js/src/jit/IonCaches.h
+++ b/js/src/jit/IonCaches.h
@@ -364,17 +364,17 @@ class IonCache
 // Helper for idempotent GetPropertyIC location tracking. Declared externally
 // to be forward declarable.
 //
 // Since all the scripts stored in CacheLocations are guaranteed to have been
 // Ion compiled, and are kept alive by function objects in jitcode, and since
 // the CacheLocations only have the lifespan of the jitcode, there is no need
 // to trace or mark any of the scripts. Since JSScripts are always allocated
 // tenured, and never moved, we can keep raw pointers, and there is no need
-// for HeapPtrScripts here.
+// for GCPtrScripts here.
 struct CacheLocation {
     jsbytecode* pc;
     JSScript* script;
 
     CacheLocation(jsbytecode* pcin, JSScript* scriptin)
         : pc(pcin), script(scriptin)
     { }
 };
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -860,17 +860,17 @@ MacroAssembler::allocateObject(Register 
     checkAllocatorState(fail);
 
     if (shouldNurseryAllocate(allocKind, initialHeap))
         return nurseryAllocate(result, temp, allocKind, nDynamicSlots, initialHeap, fail);
 
     if (!nDynamicSlots)
         return freeListAllocate(result, temp, allocKind, fail);
 
-    callMallocStub(nDynamicSlots * sizeof(HeapValue), temp, fail);
+    callMallocStub(nDynamicSlots * sizeof(GCPtrValue), temp, fail);
 
     Label failAlloc;
     Label success;
 
     push(temp);
     freeListAllocate(result, temp, allocKind, &failAlloc);
 
     pop(temp);
@@ -952,26 +952,26 @@ MacroAssembler::fillSlotsWithConstantVal
 
 #ifdef JS_NUNBOX32
     // We only have a single spare register, so do the initialization as two
     // strided writes of the tag and body.
     jsval_layout jv = JSVAL_TO_IMPL(v);
 
     Address addr = base;
     move32(Imm32(jv.s.payload.i32), temp);
-    for (unsigned i = start; i < end; ++i, addr.offset += sizeof(HeapValue))
+    for (unsigned i = start; i < end; ++i, addr.offset += sizeof(GCPtrValue))
         store32(temp, ToPayload(addr));
 
     addr = base;
     move32(Imm32(jv.s.tag), temp);
-    for (unsigned i = start; i < end; ++i, addr.offset += sizeof(HeapValue))
+    for (unsigned i = start; i < end; ++i, addr.offset += sizeof(GCPtrValue))
         store32(temp, ToType(addr));
 #else
     moveValue(v, temp);
-    for (uint32_t i = start; i < end; ++i, base.offset += sizeof(HeapValue))
+    for (uint32_t i = start; i < end; ++i, base.offset += sizeof(GCPtrValue))
         storePtr(temp, base);
 #endif
 }
 
 void
 MacroAssembler::fillSlotsWithUndefined(Address base, Register temp, uint32_t start, uint32_t end)
 {
     fillSlotsWithConstantValue(base, temp, start, end, UndefinedValue());
--- a/js/src/jit/SharedIC.h
+++ b/js/src/jit/SharedIC.h
@@ -1493,22 +1493,22 @@ class ICTypeMonitor_PrimitiveSet : publi
         }
     };
 };
 
 class ICTypeMonitor_SingleObject : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObject obj_;
+    GCPtrObject obj_;
 
     ICTypeMonitor_SingleObject(JitCode* stubCode, JSObject* obj);
 
   public:
-    HeapPtrObject& object() {
+    GCPtrObject& object() {
         return obj_;
     }
 
     static size_t offsetOfObject() {
         return offsetof(ICTypeMonitor_SingleObject, obj_);
     }
 
     class Compiler : public ICStubCompiler {
@@ -1527,22 +1527,22 @@ class ICTypeMonitor_SingleObject : publi
         }
     };
 };
 
 class ICTypeMonitor_ObjectGroup : public ICStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObjectGroup group_;
+    GCPtrObjectGroup group_;
 
     ICTypeMonitor_ObjectGroup(JitCode* stubCode, ObjectGroup* group);
 
   public:
-    HeapPtrObjectGroup& group() {
+    GCPtrObjectGroup& group() {
         return group_;
     }
 
     static size_t offsetOfGroup() {
         return offsetof(ICTypeMonitor_ObjectGroup, group_);
     }
 
     class Compiler : public ICStubCompiler {
@@ -2375,26 +2375,26 @@ class ICGetProp_Generic : public ICMonit
 
 // Stub for accessing a property on a primitive's prototype.
 class ICGetProp_Primitive : public ICMonitoredStub
 {
     friend class ICStubSpace;
 
   protected: // Protected to silence Clang warning.
     // Shape of String.prototype/Number.prototype to check for.
-    HeapPtrShape protoShape_;
+    GCPtrShape protoShape_;
 
     // Fixed or dynamic slot offset.
     uint32_t offset_;
 
     ICGetProp_Primitive(JitCode* stubCode, ICStub* firstMonitorStub, JSValueType primitiveType,
                         Shape* protoShape, uint32_t offset);
 
   public:
-    HeapPtrShape& protoShape() {
+    GCPtrShape& protoShape() {
         return protoShape_;
     }
     JSValueType primitiveType() const {
         return JSValueType(extra_);
     }
 
     static size_t offsetOfProtoShape() {
         return offsetof(ICGetProp_Primitive, protoShape_);
@@ -2499,29 +2499,29 @@ class ICGetPropNativeStub : public ICMon
     static size_t offsetOfOffset() {
         return offsetof(ICGetPropNativeStub, offset_);
     }
 };
 
 class ICGetPropNativePrototypeStub : public ICGetPropNativeStub
 {
     // Holder and its shape.
-    HeapPtrObject holder_;
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+    GCPtrShape holderShape_;
 
   protected:
     ICGetPropNativePrototypeStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
                                  ReceiverGuard guard, uint32_t offset, JSObject* holder,
                                  Shape* holderShape);
 
   public:
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
     static size_t offsetOfHolder() {
         return offsetof(ICGetPropNativePrototypeStub, holder_);
     }
     static size_t offsetOfHolderShape() {
         return offsetof(ICGetPropNativePrototypeStub, holderShape_);
     }
@@ -2533,26 +2533,26 @@ class ICGetPropNativePrototypeStub : pub
 // shape of the global object.
 //
 // The receiver object is the global lexical scope.
 class ICGetName_Global : public ICGetPropNativePrototypeStub
 {
     friend class ICStubSpace;
 
   protected:
-    HeapPtrShape globalShape_;
+    GCPtrShape globalShape_;
 
     ICGetName_Global(JitCode* stubCode, ICStub* firstMonitorStub, ReceiverGuard guard,
                      uint32_t slot, JSObject* holder, Shape* holderShape, Shape* globalShape);
 
   public:
     static ICGetName_Global* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                    ICGetName_Global& other);
 
-    HeapPtrShape& globalShape() {
+    GCPtrShape& globalShape() {
         return globalShape_;
     }
     static size_t offsetOfGlobalShape() {
         return offsetof(ICGetName_Global, globalShape_);
     }
 };
 
 // Compiler for native GetProp stubs.
@@ -2632,38 +2632,38 @@ class ICGetPropCallGetter : public ICMon
     // In the GetPropCallDOMProxyNative case, the receiver guard enforces
     // the proxy handler, because Shape implies Class.
     HeapReceiverGuard receiverGuard_;
 
     // Holder and holder shape. For own getters, guarding on receiverGuard_ is
     // sufficient, although Ion may use holder_ and holderShape_ even for own
     // getters. In this case holderShape_ == receiverGuard_.shape_ (isOwnGetter
     // below relies on this).
-    HeapPtrObject holder_;
-
-    HeapPtrShape holderShape_;
+    GCPtrObject holder_;
+
+    GCPtrShape holderShape_;
 
     // Function to call.
-    HeapPtrFunction getter_;
+    GCPtrFunction getter_;
 
     // PC offset of call
     uint32_t pcOffset_;
 
     ICGetPropCallGetter(Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
                         ReceiverGuard receiverGuard, JSObject* holder,
                         Shape* holderShape, JSFunction* getter, uint32_t pcOffset);
 
   public:
-    HeapPtrObject& holder() {
+    GCPtrObject& holder() {
         return holder_;
     }
-    HeapPtrShape& holderShape() {
+    GCPtrShape& holderShape() {
         return holderShape_;
     }
-    HeapPtrFunction& getter() {
+    GCPtrFunction& getter() {
         return getter_;
     }
     HeapReceiverGuard& receiverGuard() {
         return receiverGuard_;
     }
 
     bool isOwnGetter() const {
         MOZ_ASSERT(holder_->isNative());
@@ -2788,33 +2788,33 @@ class ICGetProp_CallNative : public ICGe
 };
 
 // Stub for calling a native getter on the GlobalObject.
 class ICGetProp_CallNativeGlobal : public ICGetPropCallGetter
 {
     friend class ICStubSpace;
 
   protected:
-    HeapPtrShape globalShape_;
+    GCPtrShape globalShape_;
 
     ICGetProp_CallNativeGlobal(JitCode* stubCode, ICStub* firstMonitorStub,
                                ReceiverGuard receiverGuard,
                                JSObject* holder, Shape* holderShape, Shape* globalShape,
                                JSFunction* getter, uint32_t pcOffset)
       : ICGetPropCallGetter(GetProp_CallNativeGlobal, stubCode, firstMonitorStub,
                             receiverGuard, holder, holderShape, getter, pcOffset),
         globalShape_(globalShape)
     { }
 
   public:
     static ICGetProp_CallNativeGlobal* Clone(JSContext* cx, ICStubSpace* space,
                                              ICStub* firstMonitorStub,
                                              ICGetProp_CallNativeGlobal& other);
 
-    HeapPtrShape& globalShape() {
+    GCPtrShape& globalShape() {
         return globalShape_;
     }
     static size_t offsetOfGlobalShape() {
         return offsetof(ICGetProp_CallNativeGlobal, globalShape_);
     }
 };
 
 class ICGetPropCallNativeCompiler : public ICGetPropCallGetter::Compiler
@@ -2842,26 +2842,26 @@ class ICGetPropCallNativeCompiler : publ
     ICStub* getStub(ICStubSpace* space);
 };
 
 class ICGetPropCallDOMProxyNativeStub : public ICGetPropCallGetter
 {
   friend class ICStubSpace;
   protected:
     // Shape of the expected expando object (nullptr if no expando object should be present).
-    HeapPtrShape expandoShape_;
+    GCPtrShape expandoShape_;
 
     ICGetPropCallDOMProxyNativeStub(ICStub::Kind kind, JitCode* stubCode,
                                     ICStub* firstMonitorStub, Shape* shape,
                                     Shape* expandoShape,
                                     JSObject* holder, Shape* holderShape,
                                     JSFunction* getter, uint32_t pcOffset);
 
   public:
-    HeapPtrShape& expandoShape() {
+    GCPtrShape& expandoShape() {
         return expandoShape_;
     }
     static size_t offsetOfExpandoShape() {
         return offsetof(ICGetPropCallDOMProxyNativeStub, expandoShape_);
     }
 };
 
 class ICGetProp_CallDOMProxyNative : public ICGetPropCallDOMProxyNativeStub
@@ -2947,34 +2947,34 @@ class ICGetPropCallDOMProxyNativeCompile
 
     ICStub* getStub(ICStubSpace* space);
 };
 
 class ICGetProp_DOMProxyShadowed : public ICMonitoredStub
 {
   friend class ICStubSpace;
   protected:
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
     const BaseProxyHandler* proxyHandler_;
-    HeapPtrPropertyName name_;
+    GCPtrPropertyName name_;
     uint32_t pcOffset_;
 
     ICGetProp_DOMProxyShadowed(JitCode* stubCode, ICStub* firstMonitorStub, Shape* shape,
                                const BaseProxyHandler* proxyHandler, PropertyName* name,
                                uint32_t pcOffset);
 
   public:
     static ICGetProp_DOMProxyShadowed* Clone(JSContext* cx, ICStubSpace* space,
                                              ICStub* firstMonitorStub,
                                              ICGetProp_DOMProxyShadowed& other);
 
-    HeapPtrShape& shape() {
+    GCPtrShape& shape() {
         return shape_;
     }
-    HeapPtrPropertyName& name() {
+    GCPtrPropertyName& name() {
         return name_;
     }
 
     static size_t offsetOfShape() {
         return offsetof(ICGetProp_DOMProxyShadowed, shape_);
     }
     static size_t offsetOfProxyHandler() {
         return offsetof(ICGetProp_DOMProxyShadowed, proxyHandler_);
@@ -3071,21 +3071,21 @@ class ICGetProp_ArgumentsCallee : public
 
 // JSOP_NEWARRAY
 // JSOP_NEWINIT
 
 class ICNewArray_Fallback : public ICFallbackStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObject templateObject_;
+    GCPtrObject templateObject_;
 
     // The group used for objects created here is always available, even if the
     // template object itself is not.
-    HeapPtrObjectGroup templateGroup_;
+    GCPtrObjectGroup templateGroup_;
 
     ICNewArray_Fallback(JitCode* stubCode, ObjectGroup* templateGroup)
       : ICFallbackStub(ICStub::NewArray_Fallback, stubCode),
         templateObject_(nullptr), templateGroup_(templateGroup)
     {}
 
   public:
     class Compiler : public ICStubCompiler {
@@ -3098,42 +3098,42 @@ class ICNewArray_Fallback : public ICFal
             templateGroup(cx, templateGroup)
         {}
 
         ICStub* getStub(ICStubSpace* space) {
             return newStub<ICNewArray_Fallback>(space, getStubCode(), templateGroup);
         }
     };
 
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     void setTemplateObject(JSObject* obj) {
         MOZ_ASSERT(obj->group() == templateGroup());
         templateObject_ = obj;
     }
 
-    HeapPtrObjectGroup& templateGroup() {
+    GCPtrObjectGroup& templateGroup() {
         return templateGroup_;
     }
 
     void setTemplateGroup(ObjectGroup* group) {
         templateObject_ = nullptr;
         templateGroup_ = group;
     }
 };
 
 // JSOP_NEWOBJECT
 
 class ICNewObject_Fallback : public ICFallbackStub
 {
     friend class ICStubSpace;
 
-    HeapPtrObject templateObject_;
+    GCPtrObject templateObject_;
 
     explicit ICNewObject_Fallback(JitCode* stubCode)
       : ICFallbackStub(ICStub::NewObject_Fallback, stubCode), templateObject_(nullptr)
     {}
 
   public:
     class Compiler : public ICStubCompiler {
         bool generateStubCode(MacroAssembler& masm);
@@ -3143,17 +3143,17 @@ class ICNewObject_Fallback : public ICFa
           : ICStubCompiler(cx, ICStub::NewObject_Fallback, engine)
         {}
 
         ICStub* getStub(ICStubSpace* space) {
             return newStub<ICNewObject_Fallback>(space, getStubCode());
         }
     };
 
-    HeapPtrObject& templateObject() {
+    GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     void setTemplateObject(JSObject* obj) {
         templateObject_ = obj;
     }
 };
 
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -706,17 +706,17 @@ fun_hasInstance(JSContext* cx, HandleObj
     return true;
 }
 
 inline void
 JSFunction::trace(JSTracer* trc)
 {
     if (isExtended()) {
         TraceRange(trc, ArrayLength(toExtended()->extendedSlots),
-                   (HeapValue*)toExtended()->extendedSlots, "nativeReserved");
+                   (GCPtrValue*)toExtended()->extendedSlots, "nativeReserved");
     }
 
     TraceNullableEdge(trc, &atom_, "atom");
 
     if (isInterpreted()) {
         // Functions can be marked as interpreted despite having no script
         // yet at some points during parsing, and can be lazy with no lazy
         // script in the case of self-hosted code.
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -139,17 +139,17 @@ class JSFunction : public js::NativeObje
             union {
                 JSObject*   env_;    /* environment for new activations;
                                         use the accessor! */
                 js::frontend::FunctionBox* funbox_; /* the function box when parsing */
             };
         } i;
         void*           nativeOrScript;
     } u;
-    js::HeapPtrAtom  atom_;       /* name for diagnostics and decompiling */
+    js::GCPtrAtom atom_;      /* name for diagnostics and decompiling */
 
   public:
 
     /* Call objects must be created for each invocation of this function. */
     bool needsCallObject() const {
         MOZ_ASSERT(!isInterpretedLazy());
         MOZ_ASSERT(!isBeingParsed());
 
@@ -347,22 +347,22 @@ class JSFunction : public js::NativeObje
      */
     JSObject* environment() const {
         MOZ_ASSERT(isInterpreted() && !isBeingParsed());
         return u.i.env_;
     }
 
     void setEnvironment(JSObject* obj) {
         MOZ_ASSERT(isInterpreted() && !isBeingParsed());
-        *reinterpret_cast<js::HeapPtrObject*>(&u.i.env_) = obj;
+        *reinterpret_cast<js::GCPtrObject*>(&u.i.env_) = obj;
     }
 
     void initEnvironment(JSObject* obj) {
         MOZ_ASSERT(isInterpreted() && !isBeingParsed());
-        reinterpret_cast<js::HeapPtrObject*>(&u.i.env_)->init(obj);
+        reinterpret_cast<js::GCPtrObject*>(&u.i.env_)->init(obj);
     }
 
     void unsetEnvironment() {
         setEnvironment(nullptr);
     }
 
   private:
     void setFunctionBox(js::frontend::FunctionBox* funbox) {
@@ -559,19 +559,19 @@ class JSFunction : public js::NativeObje
     /* Bound function accessors. */
 
     JSObject* getBoundFunctionTarget() const;
     const js::Value& getBoundFunctionThis() const;
     const js::Value& getBoundFunctionArgument(JSContext* cx, unsigned which) const;
     size_t getBoundFunctionArgumentCount() const;
 
   private:
-    js::HeapPtrScript& mutableScript() {
+    js::GCPtrScript& mutableScript() {
         MOZ_ASSERT(hasScript());
-        return *(js::HeapPtrScript*)&u.i.s.script_;
+        return *(js::GCPtrScript*)&u.i.s.script_;
     }
 
     inline js::FunctionExtended* toExtended();
     inline const js::FunctionExtended* toExtended() const;
 
   public:
     inline bool isExtended() const {
         bool extended = !!(flags() & EXTENDED);
@@ -701,27 +701,27 @@ class FunctionExtended : public JSFuncti
     /*
      * wasm/asm.js exported functions store the index of the export in the
      * module's export vector in the second slot.
      */
     static const unsigned WASM_EXPORT_INDEX_SLOT = 1;
 
     static inline size_t offsetOfExtendedSlot(unsigned which) {
         MOZ_ASSERT(which < NUM_EXTENDED_SLOTS);
-        return offsetof(FunctionExtended, extendedSlots) + which * sizeof(HeapValue);
+        return offsetof(FunctionExtended, extendedSlots) + which * sizeof(GCPtrValue);
     }
     static inline size_t offsetOfArrowNewTargetSlot() {
         return offsetOfExtendedSlot(ARROW_NEWTARGET_SLOT);
     }
 
   private:
     friend class JSFunction;
 
     /* Reserved slots available for storage by particular native functions. */
-    HeapValue extendedSlots[NUM_EXTENDED_SLOTS];
+    GCPtrValue extendedSlots[NUM_EXTENDED_SLOTS];
 };
 
 extern bool
 CanReuseScriptForClone(JSCompartment* compartment, HandleFunction fun, HandleObject newParent);
 
 extern JSFunction*
 CloneFunctionReuseScript(JSContext* cx, HandleFunction fun, HandleObject parent,
                          gc::AllocKind kind = gc::AllocKind::FUNCTION,
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1713,17 +1713,17 @@ GCRuntime::addRoot(Value* vp, const char
 {
     /*
      * Sometimes Firefox will hold weak references to objects and then convert
      * them to strong references by calling AddRoot (e.g., via PreserveWrapper,
      * or ModifyBusyCount in workers). We need a read barrier to cover these
      * cases.
      */
     if (isIncrementalGCInProgress())
-        HeapValue::writeBarrierPre(*vp);
+        GCPtrValue::writeBarrierPre(*vp);
 
     return rootsHash.put(vp, name);
 }
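
(In sketch form, the pre-barrier invoked above does roughly the following
during an incremental mark; the final marking call is an illustrative
stand-in for the real logic in gc/Barrier.h:)

    static void writeBarrierPreSketch(const Value& v) {
        if (!v.isMarkable())
            return;
        // Mark the referent now, so a reference that was weak when marking
        // began cannot be missed once it has been made strong.
        gc::Cell* cell = v.toGCThing();
        if (cell->isTenured() && cell->asTenured().zone()->needsIncrementalBarrier())
            MarkValueForBarrier(v); // illustrative marking hook
    }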
 
 void
 GCRuntime::removeRoot(Value* vp)
 {
     rootsHash.remove(vp);
@@ -2125,17 +2125,17 @@ RelocateCell(Zone* zone, TenuredCell* sr
 
             // Fixup the pointer to inline object elements if necessary.
             if (srcNative->hasFixedElements())
                 dstNative->setFixedElements();
 
             // For copy-on-write objects that own their elements, fix up the
             // owner pointer to point to the relocated object.
             if (srcNative->denseElementsAreCopyOnWrite()) {
-                HeapPtrNativeObject& owner = dstNative->getElementsHeader()->ownerObject();
+                GCPtrNativeObject& owner = dstNative->getElementsHeader()->ownerObject();
                 if (owner == srcNative)
                     owner = dstNative;
             }
         }
 
         // Call object moved hook if present.
         if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp())
             op(dstObj, srcObj);
@@ -7584,17 +7584,17 @@ JS::IncrementalReferenceBarrier(GCCellPt
         return;
 
     DispatchTyped(IncrementalReferenceBarrierFunctor(), thing);
 }
 
 JS_PUBLIC_API(void)
 JS::IncrementalValueBarrier(const Value& v)
 {
-    js::HeapValue::writeBarrierPre(v);
+    js::GCPtrValue::writeBarrierPre(v);
 }
 
 JS_PUBLIC_API(void)
 JS::IncrementalObjectBarrier(JSObject* obj)
 {
     if (!obj)
         return;
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -50,17 +50,17 @@ using mozilla::PodZero;
 
 typedef Rooted<PropertyIteratorObject*> RootedPropertyIteratorObject;
 
 static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::AllocKind::OBJECT2_BACKGROUND;
 
 void
 NativeIterator::trace(JSTracer* trc)
 {
-    for (HeapPtrFlatString* str = begin(); str < end(); str++)
+    for (GCPtrFlatString* str = begin(); str < end(); str++)
         TraceNullableEdge(trc, str, "prop");
     TraceNullableEdge(trc, &obj, "obj");
 
     for (size_t i = 0; i < guard_length; i++)
         guard_array[i].trace(trc);
 
     // The SuppressDeletedPropertyHelper loop can GC, so make sure that if the
     // GC removes any elements from the list, it won't remove this one.
@@ -603,17 +603,17 @@ NativeIterator::allocateIterator(JSConte
     if (!ni) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
 
     void** extra = reinterpret_cast<void**>(ni + 1);
     PodZero(ni);
     PodZero(extra, extraLength);
-    ni->props_array = ni->props_cursor = reinterpret_cast<HeapPtrFlatString*>(extra);
+    ni->props_array = ni->props_cursor = reinterpret_cast<GCPtrFlatString*>(extra);
     ni->props_end = ni->props_array + plength;
     return ni;
 }
 
 NativeIterator*
 NativeIterator::allocateSentinel(JSContext* maybecx)
 {
     NativeIterator* ni = js_pod_malloc<NativeIterator>();
@@ -1288,19 +1288,19 @@ SuppressDeletedPropertyHelper(JSContext*
     NativeIterator* enumeratorList = cx->compartment()->enumerators;
     NativeIterator* ni = enumeratorList->next();
 
     while (ni != enumeratorList) {
       again:
         /* This only works for identified suppressed keys, not values. */
         if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
             /* Check whether id is still to come. */
-            HeapPtrFlatString* props_cursor = ni->current();
-            HeapPtrFlatString* props_end = ni->end();
-            for (HeapPtrFlatString* idp = props_cursor; idp < props_end; ++idp) {
+            GCPtrFlatString* props_cursor = ni->current();
+            GCPtrFlatString* props_end = ni->end();
+            for (GCPtrFlatString* idp = props_cursor; idp < props_end; ++idp) {
                 if (predicate(*idp)) {
                     /*
                      * Check whether another property along the prototype chain
                      * became visible as a result of this deletion.
                      */
                     RootedObject proto(cx);
                     if (!GetPrototype(cx, obj, &proto))
                         return false;
@@ -1330,17 +1330,17 @@ SuppressDeletedPropertyHelper(JSContext*
                     /*
                      * No property along the prototype chain stepped in to take the
                      * property's place, so go ahead and delete id from the list.
                      * If it is the next property to be enumerated, just skip it.
                      */
                     if (idp == props_cursor) {
                         ni->incCursor();
                     } else {
-                        for (HeapPtrFlatString* p = idp; p + 1 != props_end; p++)
+                        for (GCPtrFlatString* p = idp; p + 1 != props_end; p++)
                             *p = *(p + 1);
                         ni->props_end = ni->end() - 1;
 
                         /*
                          * This invokes the pre barrier on this element, since
                          * it's no longer going to be marked, and ensures that
                          * any existing remembered set entry will be dropped.
                          */
--- a/js/src/jsiter.h
+++ b/js/src/jsiter.h
@@ -27,52 +27,52 @@
 #define JSITER_UNREUSABLE   0x2000
 
 namespace js {
 
 class PropertyIteratorObject;
 
 struct NativeIterator
 {
-    HeapPtrObject obj;                  // Object being iterated.
-    JSObject* iterObj_;                 // Internal iterator object.
-    HeapPtrFlatString* props_array;
-    HeapPtrFlatString* props_cursor;
-    HeapPtrFlatString* props_end;
+    GCPtrObject obj;    // Object being iterated.
+    JSObject* iterObj_; // Internal iterator object.
+    GCPtrFlatString* props_array;
+    GCPtrFlatString* props_cursor;
+    GCPtrFlatString* props_end;
     HeapReceiverGuard* guard_array;
     uint32_t guard_length;
     uint32_t guard_key;
     uint32_t flags;
 
   private:
     /* While in compartment->enumerators, these form a doubly linked list. */
     NativeIterator* next_;
     NativeIterator* prev_;
 
   public:
     bool isKeyIter() const {
         return (flags & JSITER_FOREACH) == 0;
     }
 
-    inline HeapPtrFlatString* begin() const {
+    inline GCPtrFlatString* begin() const {
         return props_array;
     }
 
-    inline HeapPtrFlatString* end() const {
+    inline GCPtrFlatString* end() const {
         return props_end;
     }
 
     size_t numKeys() const {
         return end() - begin();
     }
 
     JSObject* iterObj() const {
         return iterObj_;
     }
-    HeapPtrFlatString* current() const {
+    GCPtrFlatString* current() const {
         MOZ_ASSERT(props_cursor < props_end);
         return props_cursor;
     }
 
     NativeIterator* next() {
         return next_;
     }
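
(begin()/end() give the property-name storage the usual half-open-range
shape; a consumer sketch, mirroring NativeIterator::trace in jsiter.cpp
above:)

    for (GCPtrFlatString* p = ni->begin(); p < ni->end(); ++p)
        TraceNullableEdge(trc, p, "prop");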
 
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -3857,17 +3857,17 @@ JSObject::traceChildren(JSTracer* trc)
                                            "object slot");
                 ++index;
             }
             MOZ_ASSERT(nslots == nobj->slotSpan());
         }
 
         do {
             if (nobj->denseElementsAreCopyOnWrite()) {
-                HeapPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
+                GCPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
                 if (owner != nobj) {
                     TraceEdge(trc, &owner, "objectElementsOwner");
                     break;
                 }
             }
 
             TraceRange(trc,
                        nobj->getDenseInitializedLength(),
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -93,17 +93,17 @@ bool SetImmutablePrototype(js::Exclusive
  * The second word of a JSObject generally stores its shape; if the second word
  * stores anything else, the value stored cannot be a valid Shape* pointer, so
  * that shape guards can be performed on objects without regard to the specific
  * layout in use.
  */
 class JSObject : public js::gc::Cell
 {
   protected:
-    js::HeapPtrObjectGroup group_;
+    js::GCPtrObjectGroup group_;
 
   private:
     friend class js::Shape;
     friend class js::GCMarker;
     friend class js::NewObjectCache;
     friend class js::Nursery;
     friend class js::gc::RelocationOverlay;
     friend bool js::PreventExtensions(JSContext* cx, JS::HandleObject obj, JS::ObjectOpResult& result);
@@ -347,17 +347,17 @@ class JSObject : public js::gc::Cell
     // called for an object that was just created.
     static inline bool setSingleton(js::ExclusiveContext* cx, js::HandleObject obj);
 
     // Change an existing object to have a singleton group.
     static bool changeToSingleton(JSContext* cx, js::HandleObject obj);
 
     inline js::ObjectGroup* getGroup(JSContext* cx);
 
-    const js::HeapPtrObjectGroup& groupFromGC() const {
+    const js::GCPtrObjectGroup& groupFromGC() const {
         /* Direct field access for use by GC. */
         return group_;
     }
 
     /*
      * We permit proxies to dynamically compute their prototype if desired.
      * (Not all proxies will so desire: in particular, most DOM proxies can
      * track their prototype with a single, nullable JSObject*.)  If a proxy
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -571,17 +571,17 @@ XDRRelazificationInfo(XDRState<mode>* xd
 
     return true;
 }
 
 static inline uint32_t
 FindScopeObjectIndex(JSScript* script, NestedStaticScope& scope)
 {
     ObjectArray* objects = script->objects();
-    HeapPtrObject* vector = objects->vector;
+    GCPtrObject* vector = objects->vector;
     unsigned length = objects->length;
     for (unsigned i = 0; i < length; ++i) {
         if (vector[i] == &scope)
             return i;
     }
 
     MOZ_CRASH("Scope not found");
 }
@@ -987,17 +987,17 @@ js::XDRScript(XDRState<mode>* xdr, Handl
     }
 
     if (mode == XDR_DECODE) {
         if (!SaveSharedScriptData(cx, script, ssd, nsrcnotes))
             return false;
     }
 
     if (nconsts) {
-        HeapValue* vector = script->consts()->vector;
+        GCPtrValue* vector = script->consts()->vector;
         RootedValue val(cx);
         for (i = 0; i != nconsts; ++i) {
             if (mode == XDR_ENCODE)
                 val = vector[i];
             if (!XDRScriptConst(xdr, &val))
                 return false;
             if (mode == XDR_DECODE)
                 vector[i].init(val);
@@ -1005,17 +1005,17 @@ js::XDRScript(XDRState<mode>* xdr, Handl
     }
 
     /*
      * Looping over the objects from 0 to length here is essential: it ensures
      * that all references to enclosing blocks (via FindScopeObjectIndex below)
      * are handled after the enclosing block itself has been XDR'd.
      */
     for (i = 0; i != nobjects; ++i) {
-        HeapPtrObject* objp = &script->objects()->vector[i];
+        GCPtrObject* objp = &script->objects()->vector[i];
         XDRClassKind classk;
 
         if (mode == XDR_ENCODE) {
             JSObject* obj = *objp;
             if (obj->is<StaticBlockScope>())
                 classk = CK_BlockObject;
             else if (obj->is<StaticWithScope>())
                 classk = CK_WithObject;
@@ -1284,17 +1284,17 @@ js::XDRLazyScript(XDRState<mode>* xdr, H
 
     // Code free variables.
     if (!XDRLazyFreeVariables(xdr, lazy))
         return false;
 
     // Code inner functions.
     {
         RootedFunction func(cx);
-        HeapPtrFunction* innerFunctions = lazy->innerFunctions();
+        GCPtrFunction* innerFunctions = lazy->innerFunctions();
         size_t numInnerFunctions = lazy->numInnerFunctions();
         for (size_t i = 0; i < numInnerFunctions; i++) {
             if (mode == XDR_ENCODE)
                 func = innerFunctions[i];
 
             if (!XDRInterpretedFunction(xdr, fun, enclosingScript, &func))
                 return false;
 
@@ -2442,22 +2442,22 @@ js::SharedScriptData::new_(ExclusiveCont
 
     entry->length = length;
     entry->natoms = natoms;
     entry->marked = false;
     memset(entry->data + baseLength, 0, padding);
 
     /*
      * Call constructors to initialize the storage that will be accessed as a
-     * HeapPtrAtom array via atoms().
+     * GCPtrAtom array via atoms().
      */
-    HeapPtrAtom* atoms = entry->atoms();
+    GCPtrAtom* atoms = entry->atoms();
     MOZ_ASSERT(reinterpret_cast<uintptr_t>(atoms) % sizeof(JSAtom*) == 0);
     for (unsigned i = 0; i < natoms; ++i)
-        new (&atoms[i]) HeapPtrAtom();
+        new (&atoms[i]) GCPtrAtom();
 
     return entry;
 }
 
 /*
  * Takes ownership of its *ssd parameter and either adds it into the runtime's
  * ScriptDataTable or frees it if a matching entry already exists.
  *
@@ -2636,27 +2636,27 @@ js::FreeScriptData(JSRuntime* rt, AutoLo
  * we check this below).
  */
 JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(ConstArray));
 JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(ObjectArray));       /* there are two of these */
 JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(TryNoteArray));
 JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(BlockScopeArray));
 
 /* These assertions ensure there is no padding required between array elements. */
-JS_STATIC_ASSERT(HAS_JSVAL_ALIGNMENT(HeapValue));
-JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapValue, HeapPtrObject));
-JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, HeapPtrObject));
-JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, JSTryNote));
+JS_STATIC_ASSERT(HAS_JSVAL_ALIGNMENT(GCPtrValue));
+JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrValue, GCPtrObject));
+JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, GCPtrObject));
+JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, JSTryNote));
 JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(JSTryNote, uint32_t));
 JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(uint32_t, uint32_t));
 
-JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapValue, BlockScopeNote));
+JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrValue, BlockScopeNote));
 JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(BlockScopeNote, BlockScopeNote));
 JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(JSTryNote, BlockScopeNote));
-JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, BlockScopeNote));
+JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, BlockScopeNote));
 JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(BlockScopeNote, uint32_t));
 
 static inline size_t
 ScriptDataSize(uint32_t nbindings, uint32_t nconsts, uint32_t nobjects,
                uint32_t ntrynotes, uint32_t nblockscopes, uint32_t nyieldoffsets)
 {
     size_t size = 0;
 
@@ -2775,23 +2775,23 @@ JSScript::partiallyInit(ExclusiveContext
     if (nyieldoffsets != 0) {
         yieldOffsets = reinterpret_cast<YieldOffsetArray*>(cursor);
         cursor += sizeof(YieldOffsetArray);
     }
 
     if (nconsts != 0) {
         MOZ_ASSERT(reinterpret_cast<uintptr_t>(cursor) % sizeof(JS::Value) == 0);
         script->consts()->length = nconsts;
-        script->consts()->vector = (HeapValue*)cursor;
+        script->consts()->vector = (GCPtrValue*)cursor;
         cursor += nconsts * sizeof(script->consts()->vector[0]);
     }
 
     if (nobjects != 0) {
         script->objects()->length = nobjects;
-        script->objects()->vector = (HeapPtrObject*)cursor;
+        script->objects()->vector = (GCPtrObject*)cursor;
         cursor += nobjects * sizeof(script->objects()->vector[0]);
     }
 
     if (ntrynotes != 0) {
         script->trynotes()->length = ntrynotes;
         script->trynotes()->vector = reinterpret_cast<JSTryNote*>(cursor);
         size_t vectorSize = ntrynotes * sizeof(script->trynotes()->vector[0]);
 #ifdef DEBUG
@@ -3479,17 +3479,17 @@ js::detail::CopyScript(JSContext* cx, Ha
     Rooted<Bindings> bindings(cx);
     if (!Bindings::clone(cx, &bindings, data, src))
         return false;
 
     /* Objects */
 
     AutoObjectVector objects(cx);
     if (nobjects != 0) {
-        HeapPtrObject* vector = src->objects()->vector;
+        GCPtrObject* vector = src->objects()->vector;
         for (unsigned i = 0; i < nobjects; i++) {
             RootedObject obj(cx, vector[i]);
             RootedObject clone(cx);
             if (obj->is<NestedStaticScope>()) {
                 Rooted<NestedStaticScope*> innerBlock(cx, &obj->as<NestedStaticScope>());
 
                 RootedObject enclosingScope(cx);
                 if (NestedStaticScope* enclosingBlock = innerBlock->enclosingNestedScope()) {
@@ -3595,23 +3595,23 @@ js::detail::CopyScript(JSContext* cx, Ha
     dst->hasInnerFunctions_ = src->hasInnerFunctions();
     dst->isGeneratorExp_ = src->isGeneratorExp();
     dst->setGeneratorKind(src->generatorKind());
     dst->isDerivedClassConstructor_ = src->isDerivedClassConstructor();
     dst->needsHomeObject_ = src->needsHomeObject();
     dst->isDefaultClassConstructor_ = src->isDefaultClassConstructor();
 
     if (nconsts != 0) {
-        HeapValue* vector = Rebase<HeapValue>(dst, src, src->consts()->vector);
+        GCPtrValue* vector = Rebase<GCPtrValue>(dst, src, src->consts()->vector);
         dst->consts()->vector = vector;
         for (unsigned i = 0; i < nconsts; ++i)
             MOZ_ASSERT_IF(vector[i].isMarkable(), vector[i].toString()->isAtom());
     }
     if (nobjects != 0) {
-        HeapPtrObject* vector = Rebase<HeapPtrObject>(dst, src, src->objects()->vector);
+        GCPtrObject* vector = Rebase<GCPtrObject>(dst, src, src->objects()->vector);
         dst->objects()->vector = vector;
         for (unsigned i = 0; i < nobjects; ++i)
             vector[i].init(&objects[i]->as<NativeObject>());
     }
     if (ntrynotes != 0)
         dst->trynotes()->vector = Rebase<JSTryNote>(dst, src, src->trynotes()->vector);
     if (nblockscopes != 0)
         dst->blockScopes()->vector = Rebase<BlockScopeNote>(dst, src, src->blockScopes()->vector);
@@ -4278,17 +4278,17 @@ LazyScript::CreateRaw(ExclusiveContext* 
 
     packed = packedFields;
 
     // Reset runtime flags to obtain a fresh LazyScript.
     p.hasBeenCloned = false;
     p.treatAsRunOnce = false;
 
     size_t bytes = (p.numFreeVariables * sizeof(FreeVariable))
-                 + (p.numInnerFunctions * sizeof(HeapPtrFunction));
+                 + (p.numInnerFunctions * sizeof(GCPtrFunction));
 
     ScopedJSFreePtr<uint8_t> table(bytes ? fun->zone()->pod_malloc<uint8_t>(bytes) : nullptr);
     if (bytes && !table) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
 
     LazyScript* res = Allocate<LazyScript>(cx);
@@ -4347,17 +4347,17 @@ LazyScript::Create(ExclusiveContext* cx,
 
     // Fill with dummies so the result is GC-safe before the free variables
     // and inner functions are properly initialized.
     size_t i, num;
     FreeVariable* variables = res->freeVariables();
     for (i = 0, num = res->numFreeVariables(); i < num; i++)
         variables[i] = FreeVariable(dummyAtom);
 
-    HeapPtrFunction* functions = res->innerFunctions();
+    GCPtrFunction* functions = res->innerFunctions();
     for (i = 0, num = res->numInnerFunctions(); i < num; i++)
         functions[i].init(dummyFun);
 
     // Set the enclosing scope of the lazy function; this is later used to
     // define the environment when the function is used.
     MOZ_ASSERT(!res->sourceObject());
     res->setEnclosingScopeAndSource(enclosingScope, &sourceObjectScript->scriptSourceUnwrap());
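// A standalone sketch of why the code above fills fresh GCPtrFunction slots
// with init() and dummies: init() is a raw write for memory that has never
// held a valid pointer, while operator= runs a pre-write barrier that reads
// the old value. The names below (CellSketch, GCPtrSketch, PreBarrier) are
// hypothetical, not the real js::GCPtr API.
#include <cstdio>

struct CellSketch { const char* name; };

static void PreBarrier(CellSketch* old) {
    if (old)
        std::printf("pre-barrier on %s\n", old->name);
}

class GCPtrSketch {
    CellSketch* ptr_;  // deliberately uninitialized, like fresh GC memory
  public:
    void init(CellSketch* v) { ptr_ = v; }   // raw write; fresh memory only
    GCPtrSketch& operator=(CellSketch* v) {  // barriered write; reads old value
        PreBarrier(ptr_);
        ptr_ = v;
        return *this;
    }
    CellSketch* get() const { return ptr_; }
};

int main() {
    CellSketch dummy{"dummyFun"}, real{"realFun"};
    GCPtrSketch fun;
    fun.init(&dummy);  // like functions[i].init(dummyFun): GC-safe placeholder
    fun = &real;       // a later overwrite goes through the pre-barrier
    std::printf("now %s\n", fun.get()->name);
    return 0;
}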
 
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -122,41 +122,41 @@ struct BlockScopeNote {
                                 // block scope in this range.
     uint32_t        start;      // Bytecode offset at which this scope starts,
                                 // from script->main().
     uint32_t        length;     // Bytecode length of scope.
     uint32_t        parent;     // Index of parent block scope in notes, or UINT32_MAX.
 };
 
 struct ConstArray {
-    js::HeapValue*  vector;    /* array of indexed constant values */
-    uint32_t        length;
+    js::GCPtrValue* vector;     // array of indexed constant values
+    uint32_t length;
 };
 
 struct ObjectArray {
-    js::HeapPtrObject* vector;  // Array of indexed objects.
-    uint32_t        length;     // Count of indexed objects.
+    js::GCPtrObject* vector;    // Array of indexed objects.
+    uint32_t length;            // Count of indexed objects.
 };
 
 struct TryNoteArray {
-    JSTryNote*      vector;    // Array of indexed try notes.
+    JSTryNote*      vector;     // Array of indexed try notes.
     uint32_t        length;     // Count of indexed try notes.
 };
 
 struct BlockScopeArray {
     BlockScopeNote* vector;     // Array of indexed BlockScopeNote records.
     uint32_t        length;     // Count of indexed block scope notes.
 };
 
 class YieldOffsetArray {
     friend bool
     detail::CopyScript(JSContext* cx, HandleObject scriptStaticScope, HandleScript src,
                        HandleScript dst);
 
-    uint32_t*       vector_;   // Array of bytecode offsets.
+    uint32_t*       vector_;    // Array of bytecode offsets.
     uint32_t        length_;    // Count of bytecode offsets.
 
   public:
     void init(uint32_t* vector, uint32_t length) {
         vector_ = vector;
         length_ = length;
     }
     uint32_t& operator[](uint32_t index) {
@@ -981,33 +981,33 @@ class JSScript : public js::gc::TenuredC
     // Word-sized fields.
 
   private:
     jsbytecode*     code_;     /* bytecodes and their immediate operands */
   public:
     uint8_t*        data;      /* pointer to variable-length data array (see
                                    comment above Create() for details) */
 
-    js::HeapPtrAtom* atoms;     /* maps immediate index to literal struct */
+    js::GCPtrAtom* atoms;      /* maps immediate index to literal struct */
 
     JSCompartment*  compartment_;
 
   private:
     /* Persistent type information retained across GCs. */
     js::TypeScript* types_;
 
     // This script's ScriptSourceObject, or a CCW thereof.
     //
     // (When we clone a JSScript into a new compartment, we don't clone its
     // source object. Instead, the clone refers to a wrapper.)
-    js::HeapPtrObject sourceObject_;
-
-    js::HeapPtrFunction function_;
-    js::HeapPtr<js::ModuleObject*> module_;
-    js::HeapPtrObject   enclosingStaticScope_;
+    js::GCPtrObject sourceObject_;
+
+    js::GCPtrFunction function_;
+    js::GCPtrModuleObject module_;
+    js::GCPtrObject enclosingStaticScope_;
 
     /*
      * Information attached by Ion. Next to a valid IonScript this could be
      * ION_DISABLED_SCRIPT, ION_COMPILING_SCRIPT or ION_PENDING_SCRIPT.
      * The latter is an Ion compilation that is ready but hasn't been linked
      * yet.
      */
     js::jit::IonScript* ion;
@@ -1805,22 +1805,22 @@ class JSScript : public js::gc::TenuredC
         MOZ_ASSERT(hasYieldOffsets());
         return *reinterpret_cast<js::YieldOffsetArray*>(data + yieldOffsetsOffset());
     }
 
     bool hasLoops();
 
     size_t natoms() const { return natoms_; }
 
-    js::HeapPtrAtom& getAtom(size_t index) const {
+    js::GCPtrAtom& getAtom(size_t index) const {
         MOZ_ASSERT(index < natoms());
         return atoms[index];
     }
 
-    js::HeapPtrAtom& getAtom(jsbytecode* pc) const {
+    js::GCPtrAtom& getAtom(jsbytecode* pc) const {
         MOZ_ASSERT(containsPC(pc) && containsPC(pc + sizeof(uint32_t)));
         return getAtom(GET_UINT32_INDEX(pc));
     }
 
     js::PropertyName* getName(size_t index) {
         return getAtom(index)->asPropertyName();
     }
 
@@ -2144,25 +2144,25 @@ class LazyScript : public gc::TenuredCel
 
   private:
     // If non-nullptr, the script has been compiled and this is a forwarding
     // pointer to the result. This is a weak pointer: after relazification, we
     // can collect the script if there are no other pointers to it.
     WeakRef<JSScript*> script_;
 
     // Original function with which the lazy script is associated.
-    HeapPtrFunction function_;
+    GCPtrFunction function_;
 
     // Function or block chain in which the script is nested, or nullptr.
-    HeapPtrObject enclosingScope_;
+    GCPtrObject enclosingScope_;
 
     // ScriptSourceObject. We leave this set to nullptr until we generate
     // bytecode for our immediate parent. This is never a CCW; we don't clone
     // LazyScripts into other compartments.
-    HeapPtrObject sourceObject_;
+    GCPtrObject sourceObject_;
 
     // Heap allocated table with any free variables or inner functions.
     void* table_;
 
     // Add padding so LazyScript is gc::Cell aligned. Make padding protected
     // instead of private to suppress -Wunused-private-field compiler warnings.
   protected:
 #if JS_BITS_PER_WORD == 32
@@ -2286,18 +2286,18 @@ class LazyScript : public gc::TenuredCel
     }
     FreeVariable* freeVariables() {
         return (FreeVariable*)table_;
     }
 
     uint32_t numInnerFunctions() const {
         return p_.numInnerFunctions;
     }
-    HeapPtrFunction* innerFunctions() {
-        return (HeapPtrFunction*)&freeVariables()[numFreeVariables()];
+    GCPtrFunction* innerFunctions() {
+        return (GCPtrFunction*)&freeVariables()[numFreeVariables()];
     }
 
     GeneratorKind generatorKind() const { return GeneratorKindFromBits(p_.generatorKindBits); }
 
     bool isGenerator() const { return generatorKind() != NotGenerator; }
 
     bool isLegacyGenerator() const { return generatorKind() == LegacyGenerator; }
 
@@ -2417,20 +2417,20 @@ struct SharedScriptData
     uint32_t length;
     uint32_t natoms;
     mozilla::Atomic<bool, mozilla::ReleaseAcquire> marked;
     jsbytecode data[1];
 
     static SharedScriptData* new_(ExclusiveContext* cx, uint32_t codeLength,
                                   uint32_t srcnotesLength, uint32_t natoms);
 
-    HeapPtrAtom* atoms() {
+    GCPtrAtom* atoms() {
         if (!natoms)
             return nullptr;
-        return reinterpret_cast<HeapPtrAtom*>(data + length - sizeof(JSAtom*) * natoms);
+        return reinterpret_cast<GCPtrAtom*>(data + length - sizeof(JSAtom*) * natoms);
     }
 
     static SharedScriptData* fromBytecode(const jsbytecode* bytecode) {
         return (SharedScriptData*)(bytecode - offsetof(SharedScriptData, data));
     }
 
   private:
     SharedScriptData() = delete;
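// A standalone sketch of the layout atoms() above decodes: bytecode, source
// notes, and atom pointers share a single allocation, with the atom pointers
// packed at the tail, so their address is data + length - natoms * sizeof(ptr).
// All names here are hypothetical simplifications of SharedScriptData.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>

struct ScriptDataSketch {
    uint32_t length;   // total bytes in data[]
    uint32_t natoms;
    uint8_t data[1];   // flexible-array-style tail, as in the real struct

    const char** atoms() {
        if (!natoms)
            return nullptr;
        return reinterpret_cast<const char**>(
            data + length - sizeof(const char*) * natoms);
    }
};

int main() {
    uint32_t natoms = 2;
    uint32_t codeBytes = 16;  // keeps the atom array pointer-aligned
    uint32_t length = codeBytes + natoms * sizeof(const char*);
    auto* sd = static_cast<ScriptDataSketch*>(
        std::malloc(offsetof(ScriptDataSketch, data) + length));
    sd->length = length;
    sd->natoms = natoms;
    std::memset(sd->data, 0, codeBytes);  // "bytecode"
    sd->atoms()[0] = "foo";               // atoms live at the tail
    sd->atoms()[1] = "bar";
    std::printf("%s %s\n", sd->atoms()[0], sd->atoms()[1]);
    std::free(sd);
    return 0;
}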
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -90,17 +90,17 @@ class WeakMapBase : public mozilla::Link
     // Any weakmap key types that want to participate in the non-iterative
     // ephemeron marking must override this method.
     virtual void traceEntry(JSTracer* trc, gc::Cell* markedCell, JS::GCCellPtr l) = 0;
 
     virtual bool traceEntries(JSTracer* trc) = 0;
 
   protected:
     // Object that this weak map is part of, if any.
-    HeapPtrObject memberOf;
+    GCPtrObject memberOf;
 
     // Zone containing this weak map.
     JS::Zone* zone;
 
     // Whether this object has been traced during garbage collection.
     bool marked;
 };
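// A standalone sketch of ephemeron (weak map) marking in its classic
// iterative form; the comment above refers to SpiderMonkey's non-iterative
// variant, but the invariant is the same: a map value is marked only once its
// key is marked, and newly marked values can make further keys reachable, so
// marking runs to a fixpoint. Types here are hypothetical stand-ins for GC cells.
#include <cstdio>
#include <map>
#include <set>

using CellId = int;

static void MarkEphemerons(const std::map<CellId, CellId>& weakMap,
                           std::set<CellId>& marked) {
    bool progress = true;
    while (progress) {  // iterate until no entry changes
        progress = false;
        for (const auto& entry : weakMap) {
            // The map itself never keeps a key alive; only marked keys
            // propagate liveness to their values.
            if (marked.count(entry.first) && !marked.count(entry.second)) {
                marked.insert(entry.second);
                progress = true;
            }
        }
    }
}

int main() {
    std::map<CellId, CellId> weakMap{{1, 2}, {2, 3}, {5, 6}};
    std::set<CellId> marked{1};  // only cell 1 is otherwise reachable
    MarkEphemerons(weakMap, marked);
    std::printf("marked %zu cells\n", marked.size());  // 3: cells 1, 2, 3
    return 0;
}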
 
--- a/js/src/vm/ArgumentsObject-inl.h
+++ b/js/src/vm/ArgumentsObject-inl.h
@@ -28,17 +28,17 @@ ArgumentsObject::element(uint32_t i) con
     }
     return v;
 }
 
 inline void
 ArgumentsObject::setElement(JSContext* cx, uint32_t i, const Value& v)
 {
     MOZ_ASSERT(!isElementDeleted(i));
-    HeapValue& lhs = data()->args[i];
+    GCPtrValue& lhs = data()->args[i];
     if (IsMagicScopeSlotValue(lhs)) {
         uint32_t slot = SlotFromMagicScopeSlotValue(lhs);
         CallObject& callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().as<CallObject>();
         for (Shape::Range<NoGC> r(callobj.lastProperty()); !r.empty(); r.popFront()) {
             if (r.front().slot() == slot) {
                 callobj.setAliasedVarFromArguments(cx, lhs, r.front().propid(), v);
                 return;
             }
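// A standalone sketch (hypothetical SlotValue/CallObjSketch, not the real
// ArgumentsObject types) of the forwarding scheme setElement() implements
// above: an argument slot holds either a plain value or a magic marker that
// redirects reads and writes to the canonical slot in the call object.
#include <cstdint>
#include <cstdio>
#include <vector>

struct SlotValue {
    bool forwarded = false;  // models IsMagicScopeSlotValue(lhs)
    uint32_t slot = 0;       // models SlotFromMagicScopeSlotValue(lhs)
    double value = 0.0;
};

struct CallObjSketch {
    std::vector<double> aliasedVars;
};

static void SetElement(std::vector<SlotValue>& args, CallObjSketch& callobj,
                       uint32_t i, double v) {
    SlotValue& lhs = args[i];
    if (lhs.forwarded)
        callobj.aliasedVars[lhs.slot] = v;  // the call object is canonical
    else
        lhs.value = v;
}

int main() {
    CallObjSketch callobj{{0.0}};
    std::vector<SlotValue> args(2);
    args[1].forwarded = true;  // formal #1 is aliased by the call object
    args[1].slot = 0;
    SetElement(args, callobj, 0, 1.5);
    SetElement(args, callobj, 1, 2.5);
    std::printf("args[0]=%g callobj[0]=%g\n", args[0].value,
                callobj.aliasedVars[0]);
    return 0;
}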
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -16,17 +16,17 @@
 
 #include "gc/Nursery-inl.h"
 #include "vm/Stack-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 static void
-CopyStackFrameArguments(const AbstractFramePtr frame, HeapValue* dst, unsigned totalArgs)
+CopyStackFrameArguments(const AbstractFramePtr frame, GCPtrValue* dst, unsigned totalArgs)
 {
     MOZ_ASSERT_IF(frame.isInterpreterFrame(), !frame.asInterpreterFrame()->runningInJit());
 
     MOZ_ASSERT(Max(frame.numActualArgs(), frame.numFormalArgs()) == totalArgs);
 
     /* Copy arguments. */
     Value* src = frame.argv();
     Value* end = src + totalArgs;
@@ -63,17 +63,17 @@ ArgumentsObject::MaybeForwardToCallObjec
 struct CopyFrameArgs
 {
     AbstractFramePtr frame_;
 
     explicit CopyFrameArgs(AbstractFramePtr frame)
       : frame_(frame)
     { }
 
-    void copyArgs(JSContext*, HeapValue* dst, unsigned totalArgs) const {
+    void copyArgs(JSContext*, GCPtrValue* dst, unsigned totalArgs) const {
         CopyStackFrameArguments(frame_, dst, totalArgs);
     }
 
     /*
      * If a call object exists and the arguments object aliases formals, the
      * call object is the canonical location for formals.
      */
     void maybeForwardToCallObject(ArgumentsObject* obj, ArgumentsData* data) {
@@ -85,32 +85,32 @@ struct CopyJitFrameArgs
 {
     jit::JitFrameLayout* frame_;
     HandleObject callObj_;
 
     CopyJitFrameArgs(jit::JitFrameLayout* frame, HandleObject callObj)
       : frame_(frame), callObj_(callObj)
     { }
 
-    void copyArgs(JSContext*, HeapValue* dstBase, unsigned totalArgs) const {
+    void copyArgs(JSContext*, GCPtrValue* dstBase, unsigned totalArgs) const {
         unsigned numActuals = frame_->numActualArgs();
         unsigned numFormals = jit::CalleeTokenToFunction(frame_->calleeToken())->nargs();
         MOZ_ASSERT(numActuals <= totalArgs);
         MOZ_ASSERT(numFormals <= totalArgs);
         MOZ_ASSERT(Max(numActuals, numFormals) == totalArgs);
 
         /* Copy all arguments. */
         Value* src = frame_->argv() + 1;  /* +1 to skip this. */
         Value* end = src + numActuals;
-        HeapValue* dst = dstBase;
+        GCPtrValue* dst = dstBase;
         while (src != end)
             (dst++)->init(*src++);
 
         if (numActuals < numFormals) {
-            HeapValue* dstEnd = dstBase + totalArgs;
+            GCPtrValue* dstEnd = dstBase + totalArgs;
             while (dst != dstEnd)
                 (dst++)->init(UndefinedValue());
         }
     }
 
     /*
      * If a call object exists and the arguments object aliases formals, the
      * call object is the canonical location for formals.
@@ -123,30 +123,30 @@ struct CopyJitFrameArgs
 struct CopyScriptFrameIterArgs
 {
     ScriptFrameIter& iter_;
 
     explicit CopyScriptFrameIterArgs(ScriptFrameIter& iter)
       : iter_(iter)
     { }
 
-    void copyArgs(JSContext* cx, HeapValue* dstBase, unsigned totalArgs) const {
+    void copyArgs(JSContext* cx, GCPtrValue* dstBase, unsigned totalArgs) const {
         /* Copy actual arguments. */
         iter_.unaliasedForEachActual(cx, CopyToHeap(dstBase));
 
         /* Define formals which are not part of the actuals. */
         unsigned numActuals = iter_.numActualArgs();
         unsigned numFormals = iter_.calleeTemplate()->nargs();
         MOZ_ASSERT(numActuals <= totalArgs);
         MOZ_ASSERT(numFormals <= totalArgs);
         MOZ_ASSERT(Max(numActuals, numFormals) == totalArgs);
 
         if (numActuals < numFormals) {
-            HeapValue* dst = dstBase + numActuals;
-            HeapValue* dstEnd = dstBase + totalArgs;
+            GCPtrValue* dst = dstBase + numActuals;
+            GCPtrValue* dstEnd = dstBase + totalArgs;
             while (dst != dstEnd)
                 (dst++)->init(UndefinedValue());
         }
     }
 
     /*
      * Ion frames copy every argument onto the stack; other locations are
      * invalid.
--- a/js/src/vm/ArgumentsObject.h
+++ b/js/src/vm/ArgumentsObject.h
@@ -37,17 +37,17 @@ struct ArgumentsData
 
     /* Size of ArgumentsData and data allocated after it. */
     uint32_t    dataBytes;
 
     /*
      * arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
      * arguments.callee has been modified.
      */
-    HeapValue   callee;
+    GCPtrValue callee;
 
     /* The script for the function containing this arguments object. */
     JSScript*   script;
 
     /*
      * Pointer to an array of bits indicating, for every argument in 'slots',
      * whether the element has been deleted. See isElementDeleted comment.
      */
@@ -56,26 +56,26 @@ struct ArgumentsData
     /*
      * This array holds either the current argument value or the magic
      * forwarding value. The latter means that the function has both a
      * CallObject and an ArgumentsObject AND the particular formal variable is
      * aliased by the CallObject. In such cases, the CallObject holds the
      * canonical value so any element access to the arguments object should load
      * the value out of the CallObject (which is pointed to by MAYBE_CALL_SLOT).
      */
-    HeapValue   args[1];
+    GCPtrValue args[1];
 
     /* For jit use: */
     static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
 
     /* Iterate args. */
-    HeapValue* begin() { return args; }
-    const HeapValue* begin() const { return args; }
-    HeapValue* end() { return args + numArgs; }
-    const HeapValue* end() const { return args + numArgs; }
+    GCPtrValue* begin() { return args; }
+    const GCPtrValue* begin() const { return args; }
+    GCPtrValue* end() { return args + numArgs; }
+    const GCPtrValue* end() const { return args + numArgs; }
 };
 
 // Maximum supported value of arguments.length. This bounds the maximum
 // number of arguments that can be supplied to Function.prototype.apply.
 // This value also bounds the number of elements parsed in an array
 // initializer.
 static const unsigned ARGS_LENGTH_MAX = 500 * 1000;
 
@@ -267,17 +267,17 @@ class ArgumentsObject : public NativeObj
         MOZ_ASSERT(i < data()->numArgs);
         const Value& v = data()->args[i];
         MOZ_ASSERT(!v.isMagic());
         return v;
     }
 
     void setArg(unsigned i, const Value& v) {
         MOZ_ASSERT(i < data()->numArgs);
-        HeapValue& lhs = data()->args[i];
+        GCPtrValue& lhs = data()->args[i];
         MOZ_ASSERT(!lhs.isMagic());
         lhs = v;
     }
 
     /*
      * Attempt to speedily and efficiently access the i-th element of this
      * arguments object.  Return true if the element was speedily returned.
      * Return false if the element must be looked up more slowly using
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2742,17 +2742,17 @@ Debugger::markAllIteratively(GCMarker* t
                 Debugger* dbg = *p;
 
                 /*
                  * dbg is a Debugger with at least one debuggee. Check three things:
                  *   - dbg is actually in a compartment that is being marked
                  *   - it isn't already marked
                  *   - it actually has hooks that might be called
                  */
-                HeapPtrNativeObject& dbgobj = dbg->toJSObjectRef();
+                GCPtrNativeObject& dbgobj = dbg->toJSObjectRef();
                 if (!dbgobj->zone()->isGCMarking())
                     continue;
 
                 bool dbgMarked = IsMarked(&dbgobj);
                 if (!dbgMarked && dbg->hasAnyLiveHooks()) {
                     /*
                      * obj could be reachable only via its live, enabled
                      * debugger hooks, which may yet be called.
@@ -2791,17 +2791,17 @@ Debugger::markAllIteratively(GCMarker* t
 /* static */ void
 Debugger::markAll(JSTracer* trc)
 {
     JSRuntime* rt = trc->runtime();
     for (Debugger* dbg : rt->debuggerList) {
         for (WeakGlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
             TraceManuallyBarrieredEdge(trc, e.mutableFront().unsafeGet(), "Global Object");
 
-        HeapPtrNativeObject& dbgobj = dbg->toJSObjectRef();
+        GCPtrNativeObject& dbgobj = dbg->toJSObjectRef();
         TraceEdge(trc, &dbgobj, "Debugger Object");
 
         dbg->scripts.trace(trc);
         dbg->sources.trace(trc);
         dbg->objects.trace(trc);
         dbg->environments.trace(trc);
         dbg->wasmModuleScripts.trace(trc);
         dbg->wasmModuleSources.trace(trc);
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -345,20 +345,20 @@ class Debugger : private mozilla::Linked
 
     // Barrier methods so we can have ReadBarriered<Debugger*>.
     static void readBarrier(Debugger* dbg) {
         InternalBarrierMethods<JSObject*>::readBarrier(dbg->object);
     }
     static void writeBarrierPost(Debugger** vp, Debugger* prev, Debugger* next) {}
 
   private:
-    HeapPtrNativeObject object;         /* The Debugger object. Strong reference. */
-    WeakGlobalObjectSet debuggees;      /* Debuggee globals. Cross-compartment weak references. */
+    GCPtrNativeObject object; /* The Debugger object. Strong reference. */
+    WeakGlobalObjectSet debuggees; /* Debuggee globals. Cross-compartment weak references. */
     JS::ZoneSet debuggeeZones; /* Set of zones that we have debuggees in. */
-    js::HeapPtrObject uncaughtExceptionHook; /* Strong reference. */
+    js::GCPtrObject uncaughtExceptionHook; /* Strong reference. */
     bool enabled;
     bool allowUnobservedAsmJS;
 
     // Whether to enable code coverage on the Debuggee.
     bool collectCoverageInfo;
 
     JSCList breakpoints;                /* Circular list of all js::Breakpoints in this debugger */
 
@@ -750,18 +750,18 @@ class Debugger : private mozilla::Linked
     static bool replaceFrameGuts(JSContext* cx, AbstractFramePtr from, AbstractFramePtr to,
                                  ScriptFrameIter& iter);
 
   public:
     Debugger(JSContext* cx, NativeObject* dbg);
     ~Debugger();
 
     bool init(JSContext* cx);
-    inline const js::HeapPtrNativeObject& toJSObject() const;
-    inline js::HeapPtrNativeObject& toJSObjectRef();
+    inline const js::GCPtrNativeObject& toJSObject() const;
+    inline js::GCPtrNativeObject& toJSObjectRef();
     static inline Debugger* fromJSObject(const JSObject* obj);
     static Debugger* fromChildJSObject(JSObject* obj);
 
     bool hasMemory() const;
     DebuggerMemory& memory() const;
 
     WeakGlobalObjectSet::Range allDebuggees() const { return debuggees.all(); }
 
@@ -1158,24 +1158,24 @@ Debugger::firstBreakpoint() const
 }
 
 /* static */ Debugger*
 Debugger::fromOnNewGlobalObjectWatchersLink(JSCList* link) {
     char* p = reinterpret_cast<char*>(link);
     return reinterpret_cast<Debugger*>(p - offsetof(Debugger, onNewGlobalObjectWatchersLink));
 }
 
-const js::HeapPtrNativeObject&
+const js::GCPtrNativeObject&
 Debugger::toJSObject() const
 {
     MOZ_ASSERT(object);
     return object;
 }
 
-js::HeapPtrNativeObject&
+js::GCPtrNativeObject&
 Debugger::toJSObjectRef()
 {
     MOZ_ASSERT(object);
     return object;
 }
 
 bool
 Debugger::observesEnterFrame() const
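// A standalone sketch (hypothetical DbgSketch, not the real js::Debugger) of
// the fixpoint that markAllIteratively participates in: a debugger whose zone
// is being collected may be kept alive solely by its live hooks, and marking
// one debugger can make others reachable, so the GC repeats the scan until a
// pass marks nothing new.
#include <cstdio>
#include <vector>

struct DbgSketch {
    bool objectMarked = false;  // models IsMarked(&dbgobj)
    bool hasLiveHooks = false;  // models dbg->hasAnyLiveHooks()
};

static bool MarkAllIteratively(std::vector<DbgSketch>& dbgs) {
    bool markedAny = false;
    for (DbgSketch& dbg : dbgs) {
        if (!dbg.objectMarked && dbg.hasLiveHooks) {
            dbg.objectMarked = true;  // models TraceEdge on the Debugger object
            markedAny = true;
        }
    }
    return markedAny;  // the collector loops while this reports progress
}

int main() {
    std::vector<DbgSketch> dbgs(2);
    dbgs[1].hasLiveHooks = true;
    while (MarkAllIteratively(dbgs)) {
        // keep marking until a fixpoint is reached
    }
    std::printf("dbg0=%d dbg1=%d\n", dbgs[0].objectMarked ? 1 : 0,
                dbgs[1].objectMarked ? 1 : 0);
    return 0;
}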
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -79,17 +79,17 @@ ObjectElements::ConvertElementsToDoubles
 
     header->setShouldConvertDoubleElements();
     return true;
 }
 
 /* static */ bool
 ObjectElements::MakeElementsCopyOnWrite(ExclusiveContext* cx, NativeObject* obj)
 {
-    static_assert(sizeof(HeapSlot) >= sizeof(HeapPtrObject),
+    static_assert(sizeof(HeapSlot) >= sizeof(GCPtrObject),
                   "there must be enough room for the owner object pointer at "
                   "the end of the elements");
     if (!obj->ensureElements(cx, obj->getDenseInitializedLength() + 1))
         return false;
 
     ObjectElements* header = obj->getElementsHeader();
 
     // As soon as we have (or may soon have) multiple objects referencing a
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -48,17 +48,17 @@ static MOZ_ALWAYS_INLINE void
 Debug_SetValueRangeToCrashOnTouch(Value* vec, size_t len)
 {
 #ifdef DEBUG
     Debug_SetValueRangeToCrashOnTouch(vec, vec + len);
 #endif
 }
 
 static MOZ_ALWAYS_INLINE void
-Debug_SetValueRangeToCrashOnTouch(HeapValue* vec, size_t len)
+Debug_SetValueRangeToCrashOnTouch(GCPtrValue* vec, size_t len)
 {
 #ifdef DEBUG
     Debug_SetValueRangeToCrashOnTouch((Value*) vec, len);
 #endif
 }
 
 static MOZ_ALWAYS_INLINE void
 Debug_SetSlotRangeToCrashOnTouch(HeapSlot* vec, uint32_t len)
@@ -281,19 +281,19 @@ class ObjectElements
     static ObjectElements * fromElements(HeapSlot* elems) {
         return reinterpret_cast<ObjectElements*>(uintptr_t(elems) - sizeof(ObjectElements));
     }
 
     bool isSharedMemory() const {
         return flags & SHARED_MEMORY;
     }
 
-    HeapPtrNativeObject& ownerObject() const {
+    GCPtrNativeObject& ownerObject() const {
         MOZ_ASSERT(isCopyOnWrite());
-        return *(HeapPtrNativeObject*)(&elements()[initializedLength]);
+        return *(GCPtrNativeObject*)(&elements()[initializedLength]);
     }
 
     static int offsetOfFlags() {
         return int(offsetof(ObjectElements, flags)) - int(sizeof(ObjectElements));
     }
     static int offsetOfInitializedLength() {
         return int(offsetof(ObjectElements, initializedLength)) - int(sizeof(ObjectElements));
     }
@@ -374,17 +374,17 @@ enum class DenseElementResult {
  *
  * Slots and elements may both be non-empty. The slots may be either names or
  * indexes; no indexed property will be in both the slots and elements.
  */
 class NativeObject : public JSObject
 {
   protected:
     // Property layout description and other state.
-    HeapPtrShape shape_;
+    GCPtrShape shape_;
 
     /* Slots for object properties. */
     js::HeapSlot* slots_;
 
     /* Slots for object dense elements. */
     js::HeapSlot* elements_;
 
     friend class ::JSObject;
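// A standalone sketch of the copy-on-write layout ownerObject() above
// decodes: the owner pointer is stashed in the slot immediately past the
// initialized elements, which is why MakeElementsCopyOnWrite() grows the
// element storage by one slot and asserts that a slot is big enough to hold a
// GCPtrObject. All names below are hypothetical simplifications.
#include <cstdio>

struct OwnerSketch { const char* name; };

struct ElementsSketch {
    unsigned initializedLength;
    double slots[8];  // element storage; one spare slot holds the owner

    OwnerSketch*& ownerObject() {
        // Reinterpret the slot just past the elements as the owner pointer.
        static_assert(sizeof(double) >= sizeof(OwnerSketch*),
                      "a slot must be able to hold the owner pointer");
        return *reinterpret_cast<OwnerSketch**>(&slots[initializedLength]);
    }
};

int main() {
    OwnerSketch owner{"obj"};
    ElementsSketch elems;
    elems.initializedLength = 3;   // three real elements...
    elems.ownerObject() = &owner;  // ...and the owner pointer in slot 3
    std::printf("owner: %s\n", elems.ownerObject()->name);
    return 0;
}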
--- a/js/src/vm/ObjectGroup.h
+++ b/js/src/vm/ObjectGroup.h
@@ -79,17 +79,17 @@ enum NewObjectKind {
 class ObjectGroup : public gc::TenuredCell
 {
     friend void gc::MergeCompartments(JSCompartment* source, JSCompartment* target);
 
     /* Class shared by objects in this group. */
     const Class* clasp_;
 
     /* Prototype shared by objects in this group. */
-    HeapPtr<TaggedProto> proto_;
+    GCPtr<TaggedProto> proto_;
 
     /* Compartment shared by objects in this group. */
     JSCompartment* compartment_;
 
   public:
 
     const Class* clasp() const {
         return clasp_;
@@ -98,21 +98,21 @@ class ObjectGroup : public gc::TenuredCe
     void setClasp(const Class* clasp) {
         clasp_ = clasp;
     }
 
     bool hasDynamicPrototype() const {
         return proto_.isDynamic();
     }
 
-    const HeapPtr<TaggedProto>& proto() const {
+    const GCPtr<TaggedProto>& proto() const {
         return proto_;
     }
 
-    HeapPtr<TaggedProto>& proto() {
+    GCPtr<TaggedProto>& proto() {
         return proto_;
     }
 
     void setProto(TaggedProto proto);
     void setProtoUnchecked(TaggedProto proto);
 
     bool singleton() const {
         return flagsDontCheckGeneration() & OBJECT_FLAG_SINGLETON;
@@ -275,17 +275,17 @@ class ObjectGroup : public gc::TenuredCe
     }
 
     class Property
     {
       public:
         // Identifier for this property, JSID_VOID for the aggregate integer
         // index property, or JSID_EMPTY for properties holding constraints
         // listening to changes in the group's state.
-        HeapId id;
+        GCPtrId id;
 
         // Possible own types for this property.
         HeapTypeSet types;
 
         explicit Property(jsid id)
           : id(id)
         {}
 
--- a/js/src/vm/PIC.h
+++ b/js/src/vm/PIC.h
@@ -173,30 +173,30 @@ struct ForOfPIC
      *  ArrayIterator.prototype's canonical value for 'next' (canonicalNextFunc_)
      *      To quickly retrieve and ensure that the 'next' method for ArrayIterator
      *      objects has not changed.
      */
     class Chain : public BaseChain
     {
       private:
         // Pointer to canonical Array.prototype and ArrayIterator.prototype
-        HeapPtrNativeObject arrayProto_;
-        HeapPtrNativeObject arrayIteratorProto_;
+        GCPtrNativeObject arrayProto_;
+        GCPtrNativeObject arrayIteratorProto_;
 
         // Shape of matching Array.prototype object, and slot containing
         // the @@iterator for it, and the canonical value.
-        HeapPtrShape arrayProtoShape_;
+        GCPtrShape arrayProtoShape_;
         uint32_t arrayProtoIteratorSlot_;
-        HeapValue canonicalIteratorFunc_;
+        GCPtrValue canonicalIteratorFunc_;
 
         // Shape of matching ArrayIteratorProto, and slot containing
         // the 'next' property, and the canonical value.
-        HeapPtrShape arrayIteratorProtoShape_;
+        GCPtrShape arrayIteratorProtoShape_;
         uint32_t arrayIteratorProtoNextSlot_;
-        HeapValue canonicalNextFunc_;
+        GCPtrValue canonicalNextFunc_;
 
         // Initialization flag marking lazy initialization of above fields.
         bool initialized_;
 
         // Disabled flag is set when we don't want to try optimizing anymore
         // because core objects were changed.
         bool disabled_;
 
--- a/js/src/vm/ProxyObject.cpp
+++ b/js/src/vm/ProxyObject.cpp
@@ -84,12 +84,12 @@ ProxyObject::nuke(const BaseProxyHandler
 
     /* Restore the handler as requested after nuking. */
     setHandler(handler);
 }
 
 JS_FRIEND_API(void)
 js::SetValueInProxy(Value* slot, const Value& value)
 {
-    // Slots in proxies are not HeapValues, so do a cast whenever assigning
+    // Slots in proxies are not GCPtrValues, so do a cast whenever assigning
     // values to them which might trigger a barrier.
-    *reinterpret_cast<HeapValue*>(slot) = value;
+    *reinterpret_cast<GCPtrValue*>(slot) = value;
 }
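// A standalone sketch (hypothetical BarrieredValue, not the real GCPtrValue)
// of the cast SetValueInProxy() relies on: proxy slots are stored as plain
// values, but assigning through a barriered wrapper with identical layout
// makes the write run the GC barriers.
#include <cstdio>

static void PreBarrier(int old) {
    std::printf("pre-barrier saw %d\n", old);
}

struct BarrieredValue {
    int bits;  // same representation as the raw slot
    BarrieredValue& operator=(int v) {
        PreBarrier(bits);  // the barrier observes the slot's old contents
        bits = v;
        return *this;
    }
};
static_assert(sizeof(BarrieredValue) == sizeof(int),
              "the cast is only valid because the layouts match");

static void SetValueInSlot(int* slot, int value) {
    // Mirrors: *reinterpret_cast<GCPtrValue*>(slot) = value;
    *reinterpret_cast<BarrieredValue*>(slot) = value;
}

int main() {
    int slot = 7;
    SetValueInSlot(&slot, 42);
    std::printf("slot=%d\n", slot);
    return 0;
}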
--- a/js/src/vm/ProxyObject.h
+++ b/js/src/vm/ProxyObject.h
@@ -11,17 +11,17 @@
 #include "vm/NativeObject.h"
 
 namespace js {
 
 // This is the base class for the various kinds of proxy objects.  It's never
 // instantiated.
 class ProxyObject : public JSObject
 {
-    HeapPtrShape shape;
+    GCPtrShape shape;
 
     // GetProxyDataLayout computes the address of this field.
     detail::ProxyDataLayout data;
 
     void static_asserts() {
         static_assert(sizeof(ProxyObject) == sizeof(JSObject_Slots0),
                       "proxy object size must match GC thing size");
         static_assert(offsetof(ProxyObject, data) == detail::ProxyDataOffset,
@@ -34,18 +34,18 @@ class ProxyObject : public JSObject
 
     const Value& private_() {
         return GetProxyPrivate(this);
     }
 
     void setCrossCompartmentPrivate(const Value& priv);
     void setSameCompartmentPrivate(const Value& priv);
 
-    HeapValue* slotOfPrivate() {
-        return reinterpret_cast<HeapValue*>(&detail::GetProxyDataLayout(this)->values->privateSlot);
+    GCPtrValue* slotOfPrivate() {
+        return reinterpret_cast<GCPtrValue*>(&detail::GetProxyDataLayout(this)->values->privateSlot);
     }
 
     JSObject* target() const {
         return const_cast<ProxyObject*>(this)->private_().toObjectOrNull();
     }
 
     const BaseProxyHandler* handler() const {
         return GetProxyHandler(const_cast<ProxyObject*>(this));
@@ -70,19 +70,19 @@ class ProxyObject : public JSObject
         return GetProxyExtra(const_cast<ProxyObject*>(this), n);
     }
 
     void setExtra(size_t n, const Value& extra) {
         SetProxyExtra(this, n, extra);
     }
 
   private:
-    HeapValue* slotOfExtra(size_t n) {
+    GCPtrValue* slotOfExtra(size_t n) {
         MOZ_ASSERT(n < detail::PROXY_EXTRA_SLOTS);
-        return reinterpret_cast<HeapValue*>(&detail::GetProxyDataLayout(this)->values->extraSlots[n]);
+        return reinterpret_cast<GCPtrValue*>(&detail::GetProxyDataLayout(this)->values->extraSlots[n]);
     }
 
     static bool isValidProxyClass(const Class* clasp) {
         // Since we can take classes from the outside, make sure that they
         // are "sane". They have to quack enough like proxies for us to belive
         // they should be treated as such.
 
         // proxy_Trace is just a trivial wrapper around ProxyObject::trace for
--- a/js/src/vm/ReceiverGuard.h
+++ b/js/src/vm/ReceiverGuard.h
@@ -63,18 +63,18 @@ class ReceiverGuard
 
     uintptr_t hash() const {
         return (uintptr_t(group) >> 3) ^ (uintptr_t(shape) >> 3);
     }
 };
 
 class HeapReceiverGuard
 {
-    HeapPtrObjectGroup group_;
-    HeapPtrShape shape_;
+    GCPtrObjectGroup group_;
+    GCPtrShape shape_;
 
   public:
     explicit HeapReceiverGuard(const ReceiverGuard& guard)
       : group_(guard.group), shape_(guard.shape)
     {}
 
     bool matches(const ReceiverGuard& guard) {
         return group_ == guard.group && shape_ == guard.shape;
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -2482,18 +2482,18 @@ static const JSFunctionSpec intrinsic_fu
 
 #define LOAD_AND_STORE_SCALAR_FN_DECLS(_constant, _type, _name)         \
     JS_FN("Store_" #_name, js::StoreScalar##_type::Func, 3, 0),         \
     JS_FN("Load_" #_name,  js::LoadScalar##_type::Func, 3, 0),
     JS_FOR_EACH_UNIQUE_SCALAR_TYPE_REPR_CTYPE(LOAD_AND_STORE_SCALAR_FN_DECLS)
 #undef LOAD_AND_STORE_SCALAR_FN_DECLS
 
 #define LOAD_AND_STORE_REFERENCE_FN_DECLS(_constant, _type, _name)      \
-    JS_FN("Store_" #_name, js::StoreReference##_type::Func, 3, 0),      \
-    JS_FN("Load_" #_name,  js::LoadReference##_type::Func, 3, 0),
+    JS_FN("Store_" #_name, js::StoreReference##_name::Func, 3, 0),      \
+    JS_FN("Load_" #_name,  js::LoadReference##_name::Func, 3, 0),
     JS_FOR_EACH_REFERENCE_TYPE_REPR(LOAD_AND_STORE_REFERENCE_FN_DECLS)
 #undef LOAD_AND_STORE_REFERENCE_FN_DECLS
 
     // See builtin/Intl.h for descriptions of the intl_* functions.
     JS_FN("intl_availableCalendars", intl_availableCalendars, 1,0),
     JS_FN("intl_availableCollations", intl_availableCollations, 1,0),
     JS_FN("intl_Collator", intl_Collator, 2,0),
     JS_FN("intl_Collator_availableLocales", intl_Collator_availableLocales, 0,0),
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -85,31 +85,31 @@ Shape::removeFromDictionary(NativeObject
 
     if (parent)
         parent->listp = listp;
     *listp = parent;
     listp = nullptr;
 }
 
 void
-Shape::insertIntoDictionary(HeapPtrShape* dictp)
+Shape::insertIntoDictionary(GCPtrShape* dictp)
 {
     // Don't assert inDictionaryMode() here because we may be called from
     // JSObject::toDictionaryMode via JSObject::newDictionaryShape.
     MOZ_ASSERT(inDictionary());
     MOZ_ASSERT(!listp);
 
     MOZ_ASSERT_IF(*dictp, (*dictp)->inDictionary());
     MOZ_ASSERT_IF(*dictp, (*dictp)->listp == dictp);
     MOZ_ASSERT_IF(*dictp, compartment() == (*dictp)->compartment());
 
     setParent(dictp->get());
     if (parent)
         parent->listp = &parent;
-    listp = (HeapPtrShape*) dictp;
+    listp = (GCPtrShape*) dictp;
     *dictp = this;
 }
 
 bool
 Shape::makeOwnBaseShape(ExclusiveContext* cx)
 {
     MOZ_ASSERT(!base()->isOwned());
     assertSameCompartmentDebugOnly(cx, compartment());
@@ -478,17 +478,17 @@ js::NativeObject::toDictionaryMode(Exclu
         MOZ_ASSERT(!shape->inDictionary());
 
         Shape* dprop = shape->isAccessorShape() ? Allocate<AccessorShape>(cx) : Allocate<Shape>(cx);
         if (!dprop) {
             ReportOutOfMemory(cx);
             return false;
         }
 
-        HeapPtrShape* listp = dictionaryShape ? &dictionaryShape->parent : nullptr;
+        GCPtrShape* listp = dictionaryShape ? &dictionaryShape->parent : nullptr;
         StackShape child(shape);
         dprop->initDictionaryShape(child, self->numFixedSlots(), listp);
 
         if (!dictionaryShape)
             root = dprop;
 
         MOZ_ASSERT(!dprop->hasTable());
         dictionaryShape = dprop;
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -374,17 +374,17 @@ class BaseShape : public gc::TenuredCell
   private:
     const Class*        clasp_;        /* Class of referring object. */
     JSCompartment*      compartment_;  /* Compartment shape belongs to. */
     uint32_t            flags;          /* Vector of above flags. */
     uint32_t            slotSpan_;      /* Object slot span for BaseShapes at
                                          * dictionary last properties. */
 
     /* For owned BaseShapes, the canonical unowned BaseShape. */
-    HeapPtrUnownedBaseShape unowned_;
+    GCPtrUnownedBaseShape unowned_;
 
     /* For owned BaseShapes, the shape's shape table. */
     ShapeTable*      table_;
 
     BaseShape(const BaseShape& base) = delete;
     BaseShape& operator=(const BaseShape& other) = delete;
 
   public:
@@ -537,18 +537,18 @@ class Shape : public gc::TenuredCell
     friend class StaticBlockScope;
     friend class TenuringTracer;
     friend struct StackBaseShape;
     friend struct StackShape;
     friend struct JS::ubi::Concrete<Shape>;
     friend class js::gc::RelocationOverlay;
 
   protected:
-    HeapPtrBaseShape    base_;
-    PreBarrieredId      propid_;
+    GCPtrBaseShape base_;
+    PreBarrieredId propid_;
 
     enum SlotInfo : uint32_t
     {
         /* Number of fixed slots in objects with this shape. */
         // FIXED_SLOTS_MAX is the biggest count of fixed slots a Shape can store
         FIXED_SLOTS_MAX        = 0x1f,
         FIXED_SLOTS_SHIFT      = 27,
         FIXED_SLOTS_MASK       = uint32_t(FIXED_SLOTS_MAX << FIXED_SLOTS_SHIFT),
@@ -571,36 +571,37 @@ class Shape : public gc::TenuredCell
          */
         SLOT_MASK              = JS_BIT(24) - 1
     };
 
     uint32_t            slotInfo;       /* mask of above info */
     uint8_t             attrs;          /* attributes, see jsapi.h JSPROP_* */
     uint8_t             flags;          /* flags, see below for defines */
 
-    HeapPtrShape        parent;        /* parent node, reverse for..in order */
+    GCPtrShape   parent;          /* parent node, reverse for..in order */
     /* kids is valid when !inDictionary(), listp is valid when inDictionary(). */
     union {
-        KidsPointer kids;       /* null, single child, or a tagged ptr
-                                   to many-kids data structure */
-        HeapPtrShape* listp;    /* dictionary list starting at shape_
-                                   has a double-indirect back pointer,
-                                   either to the next shape's parent if not
-                                   last, else to obj->shape_ */
+        KidsPointer kids;         /* null, single child, or a tagged ptr
+                                     to many-kids data structure */
+        GCPtrShape* listp;        /* dictionary list starting at shape_
+                                     has a double-indirect back pointer,
+                                     either to the next shape's parent if not
+                                     last, else to obj->shape_ */
     };
 
     template<MaybeAdding Adding = MaybeAdding::NotAdding>
     static inline Shape* search(ExclusiveContext* cx, Shape* start, jsid id,
                                 ShapeTable::Entry** pentry);
     static inline Shape* searchNoHashify(Shape* start, jsid id);
 
     void removeFromDictionary(NativeObject* obj);
-    void insertIntoDictionary(HeapPtrShape* dictp);
+    void insertIntoDictionary(GCPtrShape* dictp);
 
-    inline void initDictionaryShape(const StackShape& child, uint32_t nfixed, HeapPtrShape* dictp);
+    inline void initDictionaryShape(const StackShape& child, uint32_t nfixed,
+                                    GCPtrShape* dictp);
 
     /* Replace the base shape of the last shape in a non-dictionary lineage with base. */
     static Shape* replaceLastProperty(ExclusiveContext* cx, StackBaseShape& base,
                                       TaggedProto proto, HandleShape shape);
 
     /*
      * This function is thread safe if every shape in the lineage of |shape|
      * is thread local, which is the case when we clone the entire shape
@@ -647,17 +648,17 @@ class Shape : public gc::TenuredCell
         MOZ_ASSERT_IF(flags & ACCESSOR_SHAPE, getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE);
         return flags & ACCESSOR_SHAPE;
     }
     AccessorShape& asAccessorShape() const {
         MOZ_ASSERT(isAccessorShape());
         return *(AccessorShape*)this;
     }
 
-    const HeapPtrShape& previous() const { return parent; }
+    const GCPtrShape& previous() const { return parent; }
     JSCompartment* compartment() const { return base()->compartment(); }
     JSCompartment* maybeCompartment() const { return compartment(); }
 
     template <AllowGC allowGC>
     class Range {
       protected:
         friend class Shape;
 
@@ -1345,17 +1346,17 @@ Shape::getterObject() const
 inline JSObject*
 Shape::setterObject() const
 {
     MOZ_ASSERT(hasSetterValue());
     return asAccessorShape().setterObj;
 }
 
 inline void
-Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, HeapPtrShape* dictp)
+Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, GCPtrShape* dictp)
 {
     if (child.isAccessorShape())
         new (this) AccessorShape(child, nfixed);
     else
         new (this) Shape(child, nfixed);
     this->flags |= IN_DICTIONARY;
 
     this->listp = nullptr;
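// A standalone sketch (hypothetical ShapeSketch) of the double-indirect listp
// scheme used by insertIntoDictionary()/removeFromDictionary() above: each
// dictionary shape records the address of whatever pointer currently points
// at it (the previous shape's parent field, or the object's shape_ field), so
// unlinking never has to walk the list.
#include <cstdio>

struct ShapeSketch {
    ShapeSketch* parent = nullptr;  // next shape toward the root
    ShapeSketch** listp = nullptr;  // address of the pointer pointing at us

    void insertIntoDictionary(ShapeSketch** dictp) {
        parent = *dictp;
        if (parent)
            parent->listp = &parent;  // the old head is now pointed at by us
        listp = dictp;
        *dictp = this;
    }

    void removeFromDictionary() {
        if (parent)
            parent->listp = listp;
        *listp = parent;
        listp = nullptr;
    }
};

int main() {
    ShapeSketch* head = nullptr;  // models obj->shape_
    ShapeSketch a, b;
    a.insertIntoDictionary(&head);
    b.insertIntoDictionary(&head);  // list: head -> b -> a
    b.removeFromDictionary();       // O(1) unlink: head -> a
    std::printf("head is a: %d\n", head == &a ? 1 : 0);
    return 0;
}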
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -153,18 +153,18 @@ struct CopyTo
 {
     Value* dst;
     explicit CopyTo(Value* dst) : dst(dst) {}
     void operator()(const Value& src) { *dst++ = src; }
 };
 
 struct CopyToHeap
 {
-    HeapValue* dst;
-    explicit CopyToHeap(HeapValue* dst) : dst(dst) {}
+    GCPtrValue* dst;
+    explicit CopyToHeap(GCPtrValue* dst) : dst(dst) {}
     void operator()(const Value& src) { dst->init(src); ++dst; }
 };
 
 inline ArgumentsObject&
 InterpreterFrame::argsObj() const
 {
     MOZ_ASSERT(script()->needsArgsObj());
     MOZ_ASSERT(flags_ & HAS_ARGS_OBJ);
--- a/js/src/vm/TaggedProto.h
+++ b/js/src/vm/TaggedProto.h
@@ -96,17 +96,17 @@ template <>
 class HandleBase<TaggedProto> : public TaggedProtoOperations<Handle<TaggedProto>>
 {};
 
 template <>
 class RootedBase<TaggedProto> : public TaggedProtoOperations<Rooted<TaggedProto>>
 {};
 
 template <>
-class BarrieredBaseMixins<TaggedProto> : public TaggedProtoOperations<HeapPtr<TaggedProto>>
+class BarrieredBaseMixins<TaggedProto> : public TaggedProtoOperations<GCPtr<TaggedProto>>
 {};
 
 // If the TaggedProto is a JSObject pointer, convert to that type and call |f|
 // with the pointer. If the TaggedProto is lazy, calls F::defaultValue.
 template <typename F, typename... Args>
 auto
 DispatchTyped(F f, TaggedProto& proto, Args&&... args)
   -> decltype(f(static_cast<JSObject*>(nullptr), mozilla::Forward<Args>(args)...))
--- a/js/src/vm/UnboxedObject.cpp
+++ b/js/src/vm/UnboxedObject.cpp
@@ -306,23 +306,23 @@ UnboxedPlainObject::trace(JSTracer* trc,
 
     const UnboxedLayout& layout = obj->as<UnboxedPlainObject>().layoutDontCheckGeneration();
     const int32_t* list = layout.traceList();
     if (!list)
         return;
 
     uint8_t* data = obj->as<UnboxedPlainObject>().data();
     while (*list != -1) {
-        HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(data + *list);
+        GCPtrString* heap = reinterpret_cast<GCPtrString*>(data + *list);
         TraceEdge(trc, heap, "unboxed_string");
         list++;
     }
     list++;
     while (*list != -1) {
-        HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(data + *list);
+        GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(data + *list);
         TraceNullableEdge(trc, heap, "unboxed_object");
         list++;
     }
 
     // Unboxed objects don't have Values to trace.
     MOZ_ASSERT(*(list + 1) == -1);
 }
 
@@ -344,19 +344,19 @@ UnboxedPlainObject::ensureExpando(JSCont
 
     // If the expando is tenured then the original object must also be tenured.
     // Otherwise barriers triggered on the original object for writes to the
     // expando (as can happen in the JIT) won't see the tenured->nursery edge.
     // See WholeCellEdges::mark.
     MOZ_ASSERT_IF(!IsInsideNursery(expando), !IsInsideNursery(obj));
 
     // As with setValue(), we need to manually trigger post barriers on the
-    // whole object. If we treat the field as a HeapPtrObject and later convert
-    // the object to its native representation, we will end up with a corrupted
-    // store buffer entry.
+    // whole object. If we treat the field as a GCPtrObject and later
+    // convert the object to its native representation, we will end up with a
+    // corrupted store buffer entry.
     if (IsInsideNursery(expando) && !IsInsideNursery(obj))
         cx->runtime()->gc.storeBuffer.putWholeCell(obj);
 
     obj->expando_ = expando;
     return expando;
 }
 
 bool
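// A standalone sketch (hypothetical types) of the post-barrier described in
// the comment above: when a tenured object starts referencing a nursery
// object, the tenured cell is recorded in a store buffer so a minor GC can
// find the tenured->nursery edge without scanning the tenured heap.
#include <cstdio>
#include <unordered_set>

struct ObjSketch {
    bool inNursery;
    ObjSketch* expando = nullptr;
};

static std::unordered_set<ObjSketch*> wholeCellBuffer;  // models gc.storeBuffer

static void SetExpando(ObjSketch* obj, ObjSketch* expando) {
    if (expando->inNursery && !obj->inNursery)
        wholeCellBuffer.insert(obj);  // models storeBuffer.putWholeCell(obj)
    obj->expando = expando;
}

int main() {
    ObjSketch tenured{false};
    ObjSketch young{true};
    SetExpando(&tenured, &young);
    std::printf("remembered %zu whole cell(s)\n", wholeCellBuffer.size());
    return 0;
}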
@@ -644,23 +644,23 @@ UnboxedPlainObject::create(ExclusiveCont
     res->initExpando();
 
     // Initialize reference fields of the object. All fields in the object will
     // be overwritten shortly, but references need to be safe for the GC.
     const int32_t* list = res->layout().traceList();
     if (list) {
         uint8_t* data = res->data();
         while (*list != -1) {
-            HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(data + *list);
+            GCPtrString* heap = reinterpret_cast<GCPtrString*>(data + *list);
             heap->init(cx->names().empty);
             list++;
         }
         list++;
         while (*list != -1) {
-            HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(data + *list);
+            GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(data + *list);
             heap->init(nullptr);
             list++;
         }
         // Unboxed objects don't have Values to initialize.
         MOZ_ASSERT(*(list + 1) == -1);
     }
 
     return res;
@@ -1145,24 +1145,24 @@ UnboxedArrayObject::trace(JSTracer* trc,
 
     MOZ_ASSERT(obj->as<UnboxedArrayObject>().elementSize() == sizeof(uintptr_t));
     size_t initlen = obj->as<UnboxedArrayObject>().initializedLength();
     void** elements = reinterpret_cast<void**>(obj->as<UnboxedArrayObject>().elements());
 
     switch (type) {
       case JSVAL_TYPE_OBJECT:
         for (size_t i = 0; i < initlen; i++) {
-            HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(elements + i);
+            GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(elements + i);
             TraceNullableEdge(trc, heap, "unboxed_object");
         }
         break;
 
       case JSVAL_TYPE_STRING:
         for (size_t i = 0; i < initlen; i++) {
-            HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(elements + i);
+            GCPtrString* heap = reinterpret_cast<GCPtrString*>(elements + i);
             TraceEdge(trc, heap, "unboxed_string");
         }
         break;
 
       default:
         MOZ_CRASH();
     }
 }
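// A standalone sketch of the -1-terminated trace list walked by the trace
// hooks above: the list is consecutive groups of byte offsets into the
// unboxed data, one group per reference kind (strings, then objects, then
// values), each group ending in -1. Names are hypothetical simplifications.
#include <cstdint>
#include <cstdio>

static void TraceUnboxedData(const int32_t* list, uint8_t* data) {
    while (*list != -1) {  // group 1: string fields
        std::printf("trace string field at offset %d\n", *list);
        list++;
    }
    list++;                // step over the group terminator
    while (*list != -1) {  // group 2: object fields (may be null)
        void** field = reinterpret_cast<void**>(data + *list);
        std::printf("trace object field at offset %d (%p)\n", *list, *field);
        list++;
    }
    // A final -1 follows: unboxed objects store no Values to trace.
}

int main() {
    // One string field at offset 0 and one object field at offset 8.
    const int32_t list[] = {0, -1, 8, -1, -1};
    uint8_t data[16] = {};
    TraceUnboxedData(list, data);
    return 0;
}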
--- a/js/src/vm/UnboxedObject.h
+++ b/js/src/vm/UnboxedObject.h
@@ -56,29 +56,29 @@ class UnboxedLayout : public mozilla::Li
     };
 
     typedef Vector<Property, 0, SystemAllocPolicy> PropertyVector;
 
   private:
     // If objects in this group have ever been converted to native objects,
     // these store the corresponding native group and initial shape for such
     // objects. Type information for this object is reflected in nativeGroup.
-    HeapPtrObjectGroup nativeGroup_;
-    HeapPtrShape nativeShape_;
+    GCPtrObjectGroup nativeGroup_;
+    GCPtrShape nativeShape_;
 
     // Any script/pc which the associated group is created for.
-    HeapPtrScript allocationScript_;
+    GCPtrScript allocationScript_;
     jsbytecode* allocationPc_;
 
     // If nativeGroup is set and this object originally had a TypeNewScript or
     // was keyed to an allocation site, this points to the group which replaced
     // this one. This link is only needed to keep the replacement group from
     // being GC'ed. If it were GC'ed and a new one regenerated later, that new
     // group might have a different allocation kind from this group.
-    HeapPtrObjectGroup replacementGroup_;
+    GCPtrObjectGroup replacementGroup_;
 
     // The following members are only used for unboxed plain objects.
 
     // All properties on objects with this layout, in enumeration order.
     PropertyVector properties_;
 
     // Byte size of the data for objects with this layout.
     size_t size_;
@@ -88,17 +88,17 @@ class UnboxedLayout : public mozilla::Li
 
     // List for use in tracing objects with this layout. This has the same
     // structure as the trace list on a TypeDescr.
     int32_t* traceList_;
 
     // If this layout has been used to construct script or JSON constant
     // objects, this code may be filled in to initialize such objects more
     // quickly from an array of values.
-    HeapPtrJitCode constructorCode_;
+    GCPtrJitCode constructorCode_;
 
     // The following members are only used for unboxed arrays.
 
     // The type of array elements.
     JSValueType elementType_;
 
   public:
     UnboxedLayout()