Bug 1572782 - Replace internal use of js::FreeOp with JSFreeOp r=tcampbell?
☠☠ backed out by 9f551fcee77e ☠ ☠
authorJon Coppeard <jcoppeard@mozilla.com>
Mon, 12 Aug 2019 10:16:02 +0000
changeset 487434 8239e4baa0f462d82fcaa6c5546d8cd369be2c7a
parent 487433 9fd7bea2b512cce296dd2d93f024143d40b9d2af
child 487435 ec9d15c69bc85ebb91ea18faf39e2391be97d403
push id113876
push useraiakab@mozilla.com
push dateMon, 12 Aug 2019 16:26:30 +0000
treeherdermozilla-inbound@505047c3a1bd [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerstcampbell
bugs1572782
milestone70.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1572782 - Replace internal use of js::FreeOp with JSFreeOp r=tcampbell? Sorry for the huge patch. This is pretty much a search and replace of all uses of js::FreeOp. Differential Revision: https://phabricator.services.mozilla.com/D41412
js/public/Class.h
js/public/Utility.h
js/src/builtin/MapObject.cpp
js/src/builtin/MapObject.h
js/src/builtin/ModuleObject.cpp
js/src/builtin/ModuleObject.h
js/src/builtin/Stream.cpp
js/src/builtin/TestingFunctions.cpp
js/src/builtin/TypedObject.cpp
js/src/builtin/TypedObject.h
js/src/builtin/WeakMapObject.cpp
js/src/builtin/intl/Collator.cpp
js/src/builtin/intl/Collator.h
js/src/builtin/intl/DateTimeFormat.cpp
js/src/builtin/intl/DateTimeFormat.h
js/src/builtin/intl/NumberFormat.cpp
js/src/builtin/intl/NumberFormat.h
js/src/builtin/intl/PluralRules.cpp
js/src/builtin/intl/PluralRules.h
js/src/builtin/intl/RelativeTimeFormat.cpp
js/src/builtin/intl/RelativeTimeFormat.h
js/src/debugger/DebugAPI-inl.h
js/src/debugger/DebugAPI.h
js/src/debugger/DebugScript.cpp
js/src/debugger/DebugScript.h
js/src/debugger/Debugger.cpp
js/src/debugger/Debugger.h
js/src/debugger/Frame.cpp
js/src/debugger/Frame.h
js/src/gc/ArenaList.h
js/src/gc/FreeOp.h
js/src/gc/GC.cpp
js/src/gc/GC.h
js/src/gc/GCRuntime.h
js/src/gc/Heap.h
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/Ion.cpp
js/src/jit/Ion.h
js/src/jit/IonCode.h
js/src/jit/JitRealm.h
js/src/jit/JitScript.cpp
js/src/jsexn.cpp
js/src/jsfriendapi.h
js/src/proxy/Proxy.cpp
js/src/shell/OSObject.cpp
js/src/shell/js.cpp
js/src/vm/ArgumentsObject.cpp
js/src/vm/ArgumentsObject.h
js/src/vm/ArrayBufferObject.cpp
js/src/vm/ArrayBufferObject.h
js/src/vm/BigIntType.cpp
js/src/vm/BigIntType.h
js/src/vm/Compartment.h
js/src/vm/Instrumentation.cpp
js/src/vm/Instrumentation.h
js/src/vm/Iteration.cpp
js/src/vm/Iteration.h
js/src/vm/JSContext.h
js/src/vm/JSObject-inl.h
js/src/vm/JSObject.h
js/src/vm/JSScript-inl.h
js/src/vm/JSScript.cpp
js/src/vm/JSScript.h
js/src/vm/ObjectGroup.cpp
js/src/vm/ObjectGroup.h
js/src/vm/PIC.cpp
js/src/vm/PIC.h
js/src/vm/Realm.h
js/src/vm/RegExpObject.cpp
js/src/vm/RegExpShared.h
js/src/vm/RegExpStatics.cpp
js/src/vm/Runtime.h
js/src/vm/SavedFrame.h
js/src/vm/SavedStacks.cpp
js/src/vm/Scope.cpp
js/src/vm/Scope.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/SharedArrayObject.cpp
js/src/vm/SharedArrayObject.h
js/src/vm/StringType-inl.h
js/src/vm/StringType.h
js/src/vm/SymbolType.h
js/src/vm/TypeInference-inl.h
js/src/vm/TypeInference.cpp
js/src/vm/TypeInference.h
js/src/vm/TypedArrayObject.cpp
js/src/vm/TypedArrayObject.h
js/src/wasm/WasmDebug.cpp
js/src/wasm/WasmDebug.h
js/src/wasm/WasmInstance.cpp
js/src/wasm/WasmInstance.h
js/src/wasm/WasmJS.cpp
js/src/wasm/WasmJS.h
js/xpconnect/src/xpcprivate.h
--- a/js/public/Class.h
+++ b/js/public/Class.h
@@ -568,17 +568,17 @@ class JS_FRIEND_API ElementAdder {
   bool append(JSContext* cx, JS::HandleValue v);
   void appendHole();
 };
 
 typedef bool (*GetElementsOp)(JSContext* cx, JS::HandleObject obj,
                               uint32_t begin, uint32_t end,
                               ElementAdder* adder);
 
-typedef void (*FinalizeOp)(FreeOp* fop, JSObject* obj);
+typedef void (*FinalizeOp)(JSFreeOp* fop, JSObject* obj);
 
 // The special treatment of |finalize| and |trace| is necessary because if we
 // assign either of those hooks to a local variable and then call it -- as is
 // done with the other hooks -- the GC hazard analysis gets confused.
 #define JS_CLASS_MEMBERS(ClassOpsType, FreeOpType)                             \
   const char* name;                                                            \
   uint32_t flags;                                                              \
   const ClassOpsType* cOps;                                                    \
@@ -862,17 +862,17 @@ static const uint32_t JSCLASS_CACHED_PRO
 // Initializer for unused members of statically initialized JSClass structs.
 #define JSCLASS_NO_INTERNAL_MEMBERS \
   { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }
 #define JSCLASS_NO_OPTIONAL_MEMBERS 0, 0, 0, 0, 0, JSCLASS_NO_INTERNAL_MEMBERS
 
 namespace js {
 
 struct MOZ_STATIC_CLASS Class {
-  JS_CLASS_MEMBERS(js::ClassOps, FreeOp);
+  JS_CLASS_MEMBERS(js::ClassOps, JSFreeOp);
   const ClassSpec* spec;
   const ClassExtension* ext;
   const ObjectOps* oOps;
 
   /*
    * Objects of this class aren't native objects. They don't have Shapes that
    * describe their properties and layout. Classes using this flag must
    * provide their own property behavior, either by being proxy classes (do
--- a/js/public/Utility.h
+++ b/js/public/Utility.h
@@ -483,17 +483,17 @@ static inline void js_free(void* p) {
  *
  * - Otherwise, use js_malloc/js_realloc/js_calloc/js_new
  *
  * Deallocation:
  *
  * - Ordinarily, use js_free/js_delete.
  *
  * - For deallocations during GC finalization, use one of the following
- *   operations on the FreeOp provided to the finalizer:
+ *   operations on the JSFreeOp provided to the finalizer:
  *
  *     FreeOp::{free_,delete_}
  */
 
 /*
  * Given a class which should provide a 'new' method, add
  * JS_DECLARE_NEW_METHODS (see js::MallocProvider for an example).
  *
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -246,17 +246,17 @@ MapIteratorObject* MapIteratorObject::cr
   }
 
   auto range = data->createRange(buffer, insideNursery);
   iterobj->setSlot(RangeSlot, PrivateValue(range));
 
   return iterobj;
 }
 
-void MapIteratorObject::finalize(FreeOp* fop, JSObject* obj) {
+void MapIteratorObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   MOZ_ASSERT(!IsInsideNursery(obj));
 
   auto range = MapIteratorObjectRange(&obj->as<NativeObject>());
   MOZ_ASSERT(!fop->runtime()->gc.nursery().isInside(range));
 
   // Bug 1560019: Malloc memory associated with MapIteratorObjects is not
   // currently tracked.
@@ -615,25 +615,25 @@ MapObject* MapObject::create(JSContext* 
 
   InitObjectPrivate(mapObj, map.release(), MemoryUse::MapObjectTable);
   mapObj->initReservedSlot(NurseryKeysSlot, PrivateValue(nullptr));
   mapObj->initReservedSlot(HasNurseryMemorySlot,
                            JS::BooleanValue(insideNursery));
   return mapObj;
 }
 
-void MapObject::finalize(FreeOp* fop, JSObject* obj) {
+void MapObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   if (ValueMap* map = obj->as<MapObject>().getData()) {
     fop->delete_(obj, map, MemoryUse::MapObjectTable);
   }
 }
 
 /* static */
-void MapObject::sweepAfterMinorGC(FreeOp* fop, MapObject* mapobj) {
+void MapObject::sweepAfterMinorGC(JSFreeOp* fop, MapObject* mapobj) {
   bool wasInsideNursery = IsInsideNursery(mapobj);
   if (wasInsideNursery && !IsForwarded(mapobj)) {
     finalize(fop, mapobj);
     return;
   }
 
   mapobj = MaybeForwarded(mapobj);
   mapobj->getData()->destroyNurseryRanges();
@@ -1018,17 +1018,17 @@ SetIteratorObject* SetIteratorObject::cr
   }
 
   auto range = data->createRange(buffer, insideNursery);
   iterobj->setSlot(RangeSlot, PrivateValue(range));
 
   return iterobj;
 }
 
-void SetIteratorObject::finalize(FreeOp* fop, JSObject* obj) {
+void SetIteratorObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   MOZ_ASSERT(!IsInsideNursery(obj));
 
   auto range = SetIteratorObjectRange(&obj->as<NativeObject>());
   MOZ_ASSERT(!fop->runtime()->gc.nursery().isInside(range));
 
   // Bug 1560019: Malloc memory associated with SetIteratorObjects is not
   // currently tracked.
@@ -1239,26 +1239,26 @@ void SetObject::trace(JSTracer* trc, JSO
   SetObject* setobj = static_cast<SetObject*>(obj);
   if (ValueSet* set = setobj->getData()) {
     for (ValueSet::Range r = set->all(); !r.empty(); r.popFront()) {
       TraceKey(r, r.front(), trc);
     }
   }
 }
 
-void SetObject::finalize(FreeOp* fop, JSObject* obj) {
+void SetObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   SetObject* setobj = static_cast<SetObject*>(obj);
   if (ValueSet* set = setobj->getData()) {
     fop->delete_(obj, set, MemoryUse::MapObjectTable);
   }
 }
 
 /* static */
-void SetObject::sweepAfterMinorGC(FreeOp* fop, SetObject* setobj) {
+void SetObject::sweepAfterMinorGC(JSFreeOp* fop, SetObject* setobj) {
   bool wasInsideNursery = IsInsideNursery(setobj);
   if (wasInsideNursery && !IsForwarded(setobj)) {
     finalize(fop, setobj);
     return;
   }
 
   setobj = MaybeForwarded(setobj);
   setobj->getData()->destroyNurseryRanges();
--- a/js/src/builtin/MapObject.h
+++ b/js/src/builtin/MapObject.h
@@ -134,30 +134,30 @@ class MapObject : public NativeObject {
   static MOZ_MUST_USE bool clear(JSContext* cx, HandleObject obj);
   static MOZ_MUST_USE bool iterator(JSContext* cx, IteratorKind kind,
                                     HandleObject obj, MutableHandleValue iter);
 
   using UnbarrieredTable =
       OrderedHashMap<Value, Value, UnbarrieredHashPolicy, ZoneAllocPolicy>;
   friend class OrderedHashTableRef<MapObject>;
 
-  static void sweepAfterMinorGC(FreeOp* fop, MapObject* mapobj);
+  static void sweepAfterMinorGC(JSFreeOp* fop, MapObject* mapobj);
 
  private:
   static const ClassSpec classSpec_;
   static const ClassOps classOps_;
 
   static const JSPropertySpec properties[];
   static const JSFunctionSpec methods[];
   static const JSPropertySpec staticProperties[];
   ValueMap* getData() { return static_cast<ValueMap*>(getPrivate()); }
   static ValueMap& extract(HandleObject o);
   static ValueMap& extract(const CallArgs& args);
   static void trace(JSTracer* trc, JSObject* obj);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static MOZ_MUST_USE bool construct(JSContext* cx, unsigned argc, Value* vp);
 
   static bool is(HandleValue v);
   static bool is(HandleObject o);
 
   static MOZ_MUST_USE bool iterator_impl(JSContext* cx, const CallArgs& args,
                                          IteratorKind kind);
 
@@ -193,17 +193,17 @@ class MapIteratorObject : public NativeO
       "RangeSlot must match self-hosting define for range or index slot.");
   static_assert(KindSlot == ITERATOR_SLOT_ITEM_KIND,
                 "KindSlot must match self-hosting define for item kind slot.");
 
   static const JSFunctionSpec methods[];
   static MapIteratorObject* create(JSContext* cx, HandleObject mapobj,
                                    ValueMap* data,
                                    MapObject::IteratorKind kind);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static size_t objectMoved(JSObject* obj, JSObject* old);
 
   static MOZ_MUST_USE bool next(Handle<MapIteratorObject*> mapIterator,
                                 HandleArrayObject resultPairObj, JSContext* cx);
 
   static JSObject* createResultPair(JSContext* cx);
 
  private:
@@ -248,31 +248,31 @@ class SetObject : public NativeObject {
                                     HandleObject obj, MutableHandleValue iter);
   static MOZ_MUST_USE bool delete_(JSContext* cx, HandleObject obj,
                                    HandleValue key, bool* rval);
 
   using UnbarrieredTable =
       OrderedHashSet<Value, UnbarrieredHashPolicy, ZoneAllocPolicy>;
   friend class OrderedHashTableRef<SetObject>;
 
-  static void sweepAfterMinorGC(FreeOp* fop, SetObject* setobj);
+  static void sweepAfterMinorGC(JSFreeOp* fop, SetObject* setobj);
 
  private:
   static const ClassSpec classSpec_;
   static const ClassOps classOps_;
 
   static const JSPropertySpec properties[];
   static const JSFunctionSpec methods[];
   static const JSPropertySpec staticProperties[];
 
   ValueSet* getData() { return static_cast<ValueSet*>(getPrivate()); }
   static ValueSet& extract(HandleObject o);
   static ValueSet& extract(const CallArgs& args);
   static void trace(JSTracer* trc, JSObject* obj);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static bool construct(JSContext* cx, unsigned argc, Value* vp);
 
   static bool is(HandleValue v);
   static bool is(HandleObject o);
 
   static bool isBuiltinAdd(HandleValue add);
 
   static MOZ_MUST_USE bool iterator_impl(JSContext* cx, const CallArgs& args,
@@ -306,17 +306,17 @@ class SetIteratorObject : public NativeO
       "RangeSlot must match self-hosting define for range or index slot.");
   static_assert(KindSlot == ITERATOR_SLOT_ITEM_KIND,
                 "KindSlot must match self-hosting define for item kind slot.");
 
   static const JSFunctionSpec methods[];
   static SetIteratorObject* create(JSContext* cx, HandleObject setobj,
                                    ValueSet* data,
                                    SetObject::IteratorKind kind);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static size_t objectMoved(JSObject* obj, JSObject* old);
 
   static MOZ_MUST_USE bool next(Handle<SetIteratorObject*> setIterator,
                                 HandleArrayObject resultObj, JSContext* cx);
 
   static JSObject* createResult(JSContext* cx);
 
  private:
--- a/js/src/builtin/ModuleObject.cpp
+++ b/js/src/builtin/ModuleObject.cpp
@@ -766,17 +766,17 @@ ModuleObject* ModuleObject::create(JSCon
     return nullptr;
   }
 
   self->initReservedSlot(FunctionDeclarationsSlot, PrivateValue(funDecls));
   return self;
 }
 
 /* static */
-void ModuleObject::finalize(js::FreeOp* fop, JSObject* obj) {
+void ModuleObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
   ModuleObject* self = &obj->as<ModuleObject>();
   if (self->hasImportBindings()) {
     fop->delete_(obj, &self->importBindings(), MemoryUse::ModuleBindingMap);
   }
   if (FunctionDeclarationVector* funDecls = self->functionDeclarations()) {
     // Not tracked as these may move between zones on merge.
     fop->deleteUntracked(funDecls);
--- a/js/src/builtin/ModuleObject.h
+++ b/js/src/builtin/ModuleObject.h
@@ -317,17 +317,17 @@ class ModuleObject : public NativeObject
   static ModuleNamespaceObject* createNamespace(JSContext* cx,
                                                 HandleModuleObject self,
                                                 HandleObject exports);
 
  private:
   static const ClassOps classOps_;
 
   static void trace(JSTracer* trc, JSObject* obj);
-  static void finalize(js::FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 
   bool hasImportBindings() const;
   FunctionDeclarationVector* functionDeclarations();
 };
 
 JSObject* GetOrCreateModuleMetaObject(JSContext* cx, HandleObject module);
 
 JSObject* CallModuleResolveHook(JSContext* cx, HandleValue referencingPrivate,
--- a/js/src/builtin/Stream.cpp
+++ b/js/src/builtin/Stream.cpp
@@ -3639,17 +3639,17 @@ static MOZ_MUST_USE bool SetUpExternalRe
 }
 
 static const JSPropertySpec ReadableByteStreamController_properties[] = {
     JS_PS_END};
 
 static const JSFunctionSpec ReadableByteStreamController_methods[] = {
     JS_FS_END};
 
-static void ReadableByteStreamControllerFinalize(FreeOp* fop, JSObject* obj) {
+static void ReadableByteStreamControllerFinalize(JSFreeOp* fop, JSObject* obj) {
   ReadableByteStreamController& controller =
       obj->as<ReadableByteStreamController>();
 
   if (controller.getFixedSlot(ReadableStreamController::Slot_Flags)
           .isUndefined()) {
     return;
   }
 
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -3143,17 +3143,17 @@ class CloneBufferObject : public NativeO
   }
 
   static bool getCloneBufferAsArrayBuffer(JSContext* cx, unsigned int argc,
                                           JS::Value* vp) {
     CallArgs args = CallArgsFromVp(argc, vp);
     return CallNonGenericMethod<is, getCloneBufferAsArrayBuffer_impl>(cx, args);
   }
 
-  static void Finalize(FreeOp* fop, JSObject* obj) {
+  static void Finalize(JSFreeOp* fop, JSObject* obj) {
     obj->as<CloneBufferObject>().discard();
   }
 };
 
 static const ClassOps CloneBufferObjectClassOps = {nullptr, /* addProperty */
                                                    nullptr, /* delProperty */
                                                    nullptr, /* enumerate */
                                                    nullptr, /* newEnumerate */
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -3022,16 +3022,16 @@ static bool CreateTraceList(JSContext* c
 
   size_t size = entries.length() * sizeof(uint32_t);
   InitReservedSlot(descr, JS_DESCR_SLOT_TRACE_LIST, list, size,
                    MemoryUse::TypeDescrTraceList);
   return true;
 }
 
 /* static */
-void TypeDescr::finalize(FreeOp* fop, JSObject* obj) {
+void TypeDescr::finalize(JSFreeOp* fop, JSObject* obj) {
   TypeDescr& descr = obj->as<TypeDescr>();
   if (descr.hasTraceList()) {
     auto list = const_cast<uint32_t*>(descr.traceList());
     size_t size = (3 + list[0] + list[1] + list[2]) * sizeof(uint32_t);
     fop->free_(obj, list, size, MemoryUse::TypeDescrTraceList);
   }
 }
--- a/js/src/builtin/TypedObject.h
+++ b/js/src/builtin/TypedObject.h
@@ -197,17 +197,17 @@ class TypeDescr : public NativeObject {
     MOZ_ASSERT(hasTraceList());
     return reinterpret_cast<uint32_t*>(
         getFixedSlot(JS_DESCR_SLOT_TRACE_LIST).toPrivate());
   }
 
   void initInstances(const JSRuntime* rt, uint8_t* mem, size_t length);
   void traceInstances(JSTracer* trace, uint8_t* mem, size_t length);
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 typedef Handle<TypeDescr*> HandleTypeDescr;
 
 class SimpleTypeDescr : public TypeDescr {};
 
 // Type for scalar type constructors like `uint8`. All such type
 // constructors share a common js::Class and JSFunctionSpec. Scalar
--- a/js/src/builtin/WeakMapObject.cpp
+++ b/js/src/builtin/WeakMapObject.cpp
@@ -173,17 +173,17 @@ JS_FRIEND_API bool JS_NondeterministicGe
 }
 
 static void WeakCollection_trace(JSTracer* trc, JSObject* obj) {
   if (ObjectValueMap* map = obj->as<WeakCollectionObject>().getMap()) {
     map->trace(trc);
   }
 }
 
-static void WeakCollection_finalize(FreeOp* fop, JSObject* obj) {
+static void WeakCollection_finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
   if (ObjectValueMap* map = obj->as<WeakCollectionObject>().getMap()) {
     fop->delete_(obj, map, MemoryUse::WeakMapObject);
   }
 }
 
 JS_PUBLIC_API JSObject* JS::NewWeakMapObject(JSContext* cx) {
   return NewBuiltinClassInstance<WeakMapObject>(cx);
--- a/js/src/builtin/intl/Collator.cpp
+++ b/js/src/builtin/intl/Collator.cpp
@@ -123,17 +123,17 @@ static bool Collator(JSContext* cx, unsi
 bool js::intl_Collator(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   MOZ_ASSERT(args.length() == 2);
   MOZ_ASSERT(!args.isConstructing());
 
   return Collator(cx, args);
 }
 
-void js::CollatorObject::finalize(FreeOp* fop, JSObject* obj) {
+void js::CollatorObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
 
   const Value& slot =
       obj->as<CollatorObject>().getReservedSlot(CollatorObject::UCOLLATOR_SLOT);
   if (UCollator* coll = static_cast<UCollator*>(slot.toPrivate())) {
     ucol_close(coll);
   }
 }
--- a/js/src/builtin/intl/Collator.h
+++ b/js/src/builtin/intl/Collator.h
@@ -31,17 +31,17 @@ class CollatorObject : public NativeObje
 
   static_assert(INTERNALS_SLOT == INTL_INTERNALS_OBJECT_SLOT,
                 "INTERNALS_SLOT must match self-hosting define for internals "
                 "object slot");
 
  private:
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 extern JSObject* CreateCollatorPrototype(JSContext* cx,
                                          JS::Handle<JSObject*> Intl,
                                          JS::Handle<GlobalObject*> global);
 
 /**
  * Returns a new instance of the standard built-in Collator constructor.
--- a/js/src/builtin/intl/DateTimeFormat.cpp
+++ b/js/src/builtin/intl/DateTimeFormat.cpp
@@ -151,17 +151,17 @@ bool js::intl_DateTimeFormat(JSContext* 
   MOZ_ASSERT(args.length() == 2);
   MOZ_ASSERT(!args.isConstructing());
   // intl_DateTimeFormat is an intrinsic for self-hosted JavaScript, so it
   // cannot be used with "new", but it still has to be treated as a
   // constructor.
   return DateTimeFormat(cx, args, true, DateTimeFormatOptions::Standard);
 }
 
-void js::DateTimeFormatObject::finalize(FreeOp* fop, JSObject* obj) {
+void js::DateTimeFormatObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
 
   const Value& slot = obj->as<DateTimeFormatObject>().getReservedSlot(
       DateTimeFormatObject::UDATE_FORMAT_SLOT);
   if (UDateFormat* df = static_cast<UDateFormat*>(slot.toPrivate())) {
     udat_close(df);
   }
 }
--- a/js/src/builtin/intl/DateTimeFormat.h
+++ b/js/src/builtin/intl/DateTimeFormat.h
@@ -29,17 +29,17 @@ class DateTimeFormatObject : public Nati
 
   static_assert(INTERNALS_SLOT == INTL_INTERNALS_OBJECT_SLOT,
                 "INTERNALS_SLOT must match self-hosting define for internals "
                 "object slot");
 
  private:
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 extern JSObject* CreateDateTimeFormatPrototype(
     JSContext* cx, JS::Handle<JSObject*> Intl, JS::Handle<GlobalObject*> global,
     JS::MutableHandle<JSObject*> constructor,
     intl::DateTimeFormatOptions dtfOptions);
 
 /**
--- a/js/src/builtin/intl/NumberFormat.cpp
+++ b/js/src/builtin/intl/NumberFormat.cpp
@@ -144,17 +144,17 @@ bool js::intl_NumberFormat(JSContext* cx
   MOZ_ASSERT(args.length() == 2);
   MOZ_ASSERT(!args.isConstructing());
   // intl_NumberFormat is an intrinsic for self-hosted JavaScript, so it
   // cannot be used with "new", but it still has to be treated as a
   // constructor.
   return NumberFormat(cx, args, true);
 }
 
-void js::NumberFormatObject::finalize(FreeOp* fop, JSObject* obj) {
+void js::NumberFormatObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
 
   auto* numberFormat = &obj->as<NumberFormatObject>();
   UNumberFormatter* nf = numberFormat->getNumberFormatter();
   UFormattedNumber* formatted = numberFormat->getFormattedNumber();
 
   if (nf) {
     unumf_close(nf);
--- a/js/src/builtin/intl/NumberFormat.h
+++ b/js/src/builtin/intl/NumberFormat.h
@@ -60,17 +60,17 @@ class NumberFormatObject : public Native
 
   void setFormattedNumber(UFormattedNumber* formatted) {
     setFixedSlot(UFORMATTED_NUMBER_SLOT, PrivateValue(formatted));
   }
 
  private:
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 extern JSObject* CreateNumberFormatPrototype(JSContext* cx, HandleObject Intl,
                                              Handle<GlobalObject*> global,
                                              MutableHandleObject constructor);
 
 /**
  * Returns a new instance of the standard built-in NumberFormat constructor.
--- a/js/src/builtin/intl/PluralRules.cpp
+++ b/js/src/builtin/intl/PluralRules.cpp
@@ -109,17 +109,17 @@ static bool PluralRules(JSContext* cx, u
                               options)) {
     return false;
   }
 
   args.rval().setObject(*pluralRules);
   return true;
 }
 
-void js::PluralRulesObject::finalize(FreeOp* fop, JSObject* obj) {
+void js::PluralRulesObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
 
   auto* pluralRules = &obj->as<PluralRulesObject>();
   UPluralRules* pr = pluralRules->getPluralRules();
   UNumberFormatter* nf = pluralRules->getNumberFormatter();
   UFormattedNumber* formatted = pluralRules->getFormattedNumber();
 
   if (pr) {
--- a/js/src/builtin/intl/PluralRules.h
+++ b/js/src/builtin/intl/PluralRules.h
@@ -68,17 +68,17 @@ class PluralRulesObject : public NativeO
 
   void setFormattedNumber(UFormattedNumber* formatted) {
     setFixedSlot(UFORMATTED_NUMBER_SLOT, PrivateValue(formatted));
   }
 
  private:
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 extern JSObject* CreatePluralRulesPrototype(JSContext* cx,
                                             JS::Handle<JSObject*> Intl,
                                             JS::Handle<GlobalObject*> global);
 
 /**
  * Returns an object indicating the supported locales for plural rules
--- a/js/src/builtin/intl/RelativeTimeFormat.cpp
+++ b/js/src/builtin/intl/RelativeTimeFormat.cpp
@@ -121,17 +121,17 @@ static bool RelativeTimeFormat(JSContext
                               options)) {
     return false;
   }
 
   args.rval().setObject(*relativeTimeFormat);
   return true;
 }
 
-void js::RelativeTimeFormatObject::finalize(FreeOp* fop, JSObject* obj) {
+void js::RelativeTimeFormatObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
 
   constexpr auto RT_FORMAT_SLOT =
       RelativeTimeFormatObject::URELATIVE_TIME_FORMAT_SLOT;
   const Value& slot =
       obj->as<RelativeTimeFormatObject>().getReservedSlot(RT_FORMAT_SLOT);
   if (URelativeDateTimeFormatter* rtf =
           static_cast<URelativeDateTimeFormatter*>(slot.toPrivate())) {
--- a/js/src/builtin/intl/RelativeTimeFormat.h
+++ b/js/src/builtin/intl/RelativeTimeFormat.h
@@ -27,17 +27,17 @@ class RelativeTimeFormatObject : public 
 
   static_assert(INTERNALS_SLOT == INTL_INTERNALS_OBJECT_SLOT,
                 "INTERNALS_SLOT must match self-hosting define for internals "
                 "object slot");
 
  private:
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 extern JSObject* CreateRelativeTimeFormatPrototype(
     JSContext* cx, JS::Handle<JSObject*> Intl,
     JS::Handle<GlobalObject*> global);
 
 /**
  * Returns an object indicating the supported locales for relative time format
--- a/js/src/debugger/DebugAPI-inl.h
+++ b/js/src/debugger/DebugAPI-inl.h
@@ -168,17 +168,17 @@ void DebugAPI::onNewPromise(JSContext* c
 /* static */
 void DebugAPI::onPromiseSettled(JSContext* cx, Handle<PromiseObject*> promise) {
   if (MOZ_UNLIKELY(promise->realm()->isDebuggee())) {
     slowPathOnPromiseSettled(cx, promise);
   }
 }
 
 /* static */
-void DebugAPI::sweepBreakpoints(FreeOp* fop, JSScript* script) {
+void DebugAPI::sweepBreakpoints(JSFreeOp* fop, JSScript* script) {
   if (script->hasDebugScript()) {
     sweepBreakpointsSlow(fop, script);
   }
 }
 
 }  // namespace js
 
 #endif /* debugger_DebugAPI_inl_h */
--- a/js/src/debugger/DebugAPI.h
+++ b/js/src/debugger/DebugAPI.h
@@ -93,26 +93,26 @@ class DebugAPI {
 
   // Trace cross compartment edges in all debuggers relevant to the current GC.
   static void traceCrossCompartmentEdges(JSTracer* tracer);
 
   // Trace all debugger-owned GC things unconditionally, during a moving GC.
   static void traceAllForMovingGC(JSTracer* trc);
 
   // Sweep dying debuggers, and detach edges to dying debuggees.
-  static void sweepAll(FreeOp* fop);
+  static void sweepAll(JSFreeOp* fop);
 
   // Add sweep group edges due to the presence of any debuggers.
   static MOZ_MUST_USE bool findSweepGroupEdges(JSRuntime* rt);
 
   // Sweep breakpoints in a script associated with any debugger.
-  static inline void sweepBreakpoints(FreeOp* fop, JSScript* script);
+  static inline void sweepBreakpoints(JSFreeOp* fop, JSScript* script);
 
   // Destroy the debugging information associated with a script.
-  static void destroyDebugScript(FreeOp* fop, JSScript* script);
+  static void destroyDebugScript(JSFreeOp* fop, JSScript* script);
 
   // Validate the debugging information in a script after a moving GC>
 #ifdef JSGC_HASH_TABLE_CHECKS
   static void checkDebugScriptAfterMovingGC(DebugScript* ds);
 #endif
 
 #ifdef DEBUG
   static bool edgeIsInDebuggerWeakmap(JSRuntime* rt, JSObject* src,
@@ -343,17 +343,17 @@ class DebugAPI {
    */
   static bool getScriptInstrumentationId(JSContext* cx, HandleObject dbgObject,
                                          HandleScript script,
                                          MutableHandleValue rval);
 
  private:
   static bool stepModeEnabledSlow(JSScript* script);
   static bool hasBreakpointsAtSlow(JSScript* script, jsbytecode* pc);
-  static void sweepBreakpointsSlow(FreeOp* fop, JSScript* script);
+  static void sweepBreakpointsSlow(JSFreeOp* fop, JSScript* script);
   static void slowPathOnNewScript(JSContext* cx, HandleScript script);
   static void slowPathOnNewGlobalObject(JSContext* cx,
                                         Handle<GlobalObject*> global);
   static void slowPathNotifyParticipatesInGC(
       uint64_t majorGCNumber, GlobalObject::DebuggerVector& dbgs);
   static MOZ_MUST_USE bool slowPathOnLogAllocationSite(
       JSContext* cx, HandleObject obj, HandleSavedFrame frame,
       mozilla::TimeStamp when, GlobalObject::DebuggerVector& dbgs);
--- a/js/src/debugger/DebugScript.cpp
+++ b/js/src/debugger/DebugScript.cpp
@@ -123,17 +123,17 @@ BreakpointSite* DebugScript::getOrCreate
     debug->numSites++;
     AddCellMemory(script, sizeof(JSBreakpointSite), MemoryUse::BreakpointSite);
   }
 
   return site;
 }
 
 /* static */
-void DebugScript::destroyBreakpointSite(FreeOp* fop, JSScript* script,
+void DebugScript::destroyBreakpointSite(JSFreeOp* fop, JSScript* script,
                                         jsbytecode* pc) {
   DebugScript* debug = get(script);
   BreakpointSite*& site = debug->breakpoints[script->pcToOffset(pc)];
   MOZ_ASSERT(site);
 
   size_t size = site->type() == BreakpointSite::Type::JS
                     ? sizeof(JSBreakpointSite)
                     : sizeof(WasmBreakpointSite);
@@ -142,28 +142,28 @@ void DebugScript::destroyBreakpointSite(
 
   debug->numSites--;
   if (!debug->needed()) {
     DebugAPI::destroyDebugScript(fop, script);
   }
 }
 
 /* static */
-void DebugScript::clearBreakpointsIn(FreeOp* fop, Realm* realm, Debugger* dbg,
+void DebugScript::clearBreakpointsIn(JSFreeOp* fop, Realm* realm, Debugger* dbg,
                                      JSObject* handler) {
   for (auto script = realm->zone()->cellIter<JSScript>(); !script.done();
        script.next()) {
     if (script->realm() == realm && script->hasDebugScript()) {
       clearBreakpointsIn(fop, script, dbg, handler);
     }
   }
 }
 
 /* static */
-void DebugScript::clearBreakpointsIn(FreeOp* fop, JSScript* script,
+void DebugScript::clearBreakpointsIn(JSFreeOp* fop, JSScript* script,
                                      Debugger* dbg, JSObject* handler) {
   if (!script->hasDebugScript()) {
     return;
   }
 
   for (jsbytecode* pc = script->code(); pc < script->codeEnd(); pc++) {
     BreakpointSite* site = getBreakpointSite(script, pc);
     if (site) {
@@ -205,17 +205,17 @@ bool DebugScript::incrementStepperCount(
       script->baselineScript()->toggleDebugTraps(script, nullptr);
     }
   }
 
   return true;
 }
 
 /* static */
-void DebugScript::decrementStepperCount(FreeOp* fop, JSScript* script) {
+void DebugScript::decrementStepperCount(JSFreeOp* fop, JSScript* script) {
   DebugScript* debug = get(script);
   MOZ_ASSERT(debug);
   MOZ_ASSERT(debug->stepperCount > 0);
 
   debug->stepperCount--;
 
   if (debug->stepperCount == 0) {
     if (script->hasBaselineScript()) {
@@ -249,31 +249,31 @@ bool DebugScript::incrementGeneratorObse
   // {ensure,update}ExecutionObservabilityOfScript.
   MOZ_ASSERT_IF(script->hasBaselineScript(),
                 script->baselineScript()->hasDebugInstrumentation());
 
   return true;
 }
 
 /* static */
-void DebugScript::decrementGeneratorObserverCount(FreeOp* fop,
+void DebugScript::decrementGeneratorObserverCount(JSFreeOp* fop,
                                                   JSScript* script) {
   DebugScript* debug = get(script);
   MOZ_ASSERT(debug);
   MOZ_ASSERT(debug->generatorObserverCount > 0);
 
   debug->generatorObserverCount--;
 
   if (!debug->needed()) {
     DebugAPI::destroyDebugScript(fop, script);
   }
 }
 
 /* static */
-void DebugAPI::destroyDebugScript(FreeOp* fop, JSScript* script) {
+void DebugAPI::destroyDebugScript(JSFreeOp* fop, JSScript* script) {
   if (script->hasDebugScript()) {
     DebugScriptMap* map = script->realm()->debugScriptMap.get();
     MOZ_ASSERT(map);
     DebugScriptMap::Ptr p = map->lookup(script);
     MOZ_ASSERT(p);
     DebugScript* debug = p->value().release();
     map->remove(p);
     script->setHasDebugScript(false);
@@ -291,17 +291,17 @@ void DebugAPI::checkDebugScriptAfterMovi
     if (site && site->type() == BreakpointSite::Type::JS) {
       CheckGCThingAfterMovingGC(site->asJS()->script);
     }
   }
 }
 #endif  // JSGC_HASH_TABLE_CHECKS
 
 /* static */
-void DebugAPI::sweepBreakpointsSlow(FreeOp* fop, JSScript* script) {
+void DebugAPI::sweepBreakpointsSlow(JSFreeOp* fop, JSScript* script) {
   bool scriptGone = IsAboutToBeFinalizedUnbarriered(&script);
   for (unsigned i = 0; i < script->length(); i++) {
     BreakpointSite* site =
         DebugScript::getBreakpointSite(script, script->offsetToPC(i));
     if (!site) {
       continue;
     }
 
--- a/js/src/debugger/DebugScript.h
+++ b/js/src/debugger/DebugScript.h
@@ -78,42 +78,42 @@ class DebugScript {
   static DebugScript* get(JSScript* script);
   static DebugScript* getOrCreate(JSContext* cx, JSScript* script);
 
  public:
   static BreakpointSite* getBreakpointSite(JSScript* script, jsbytecode* pc);
   static BreakpointSite* getOrCreateBreakpointSite(JSContext* cx,
                                                    JSScript* script,
                                                    jsbytecode* pc);
-  static void destroyBreakpointSite(FreeOp* fop, JSScript* script,
+  static void destroyBreakpointSite(JSFreeOp* fop, JSScript* script,
                                     jsbytecode* pc);
 
-  static void clearBreakpointsIn(FreeOp* fop, JS::Realm* realm, Debugger* dbg,
+  static void clearBreakpointsIn(JSFreeOp* fop, JS::Realm* realm, Debugger* dbg,
                                  JSObject* handler);
-  static void clearBreakpointsIn(FreeOp* fop, JSScript* script, Debugger* dbg,
+  static void clearBreakpointsIn(JSFreeOp* fop, JSScript* script, Debugger* dbg,
                                  JSObject* handler);
 
 #ifdef DEBUG
   static uint32_t getStepperCount(JSScript* script);
 #endif
 
   /*
    * Increment or decrement the single-step count. If the count is non-zero
    * then the script is in single-step mode.
    *
    * Only incrementing is fallible, as it could allocate a DebugScript.
    */
   static bool incrementStepperCount(JSContext* cx, JSScript* script);
-  static void decrementStepperCount(FreeOp* fop, JSScript* script);
+  static void decrementStepperCount(JSFreeOp* fop, JSScript* script);
 
   /*
    * Increment or decrement the generator observer count. If the count is
    * non-zero then the script reports resumptions to the debugger.
    *
    * Only incrementing is fallible, as it could allocate a DebugScript.
    */
   static bool incrementGeneratorObserverCount(JSContext* cx, JSScript* script);
-  static void decrementGeneratorObserverCount(FreeOp* fop, JSScript* script);
+  static void decrementGeneratorObserverCount(JSFreeOp* fop, JSScript* script);
 };
 
 } /* namespace js */
 
 #endif /* dbg_DebugScript_h */
--- a/js/src/debugger/Debugger.cpp
+++ b/js/src/debugger/Debugger.cpp
@@ -303,24 +303,24 @@ bool js::ParseEvalOptions(JSContext* cx,
 
   return true;
 }
 
 /*** Breakpoints ************************************************************/
 
 BreakpointSite::BreakpointSite(Type type) : type_(type), enabledCount(0) {}
 
-void BreakpointSite::inc(FreeOp* fop) {
+void BreakpointSite::inc(JSFreeOp* fop) {
   enabledCount++;
   if (enabledCount == 1) {
     recompile(fop);
   }
 }
 
-void BreakpointSite::dec(FreeOp* fop) {
+void BreakpointSite::dec(JSFreeOp* fop) {
   MOZ_ASSERT(enabledCount > 0);
   enabledCount--;
   if (enabledCount == 0) {
     recompile(fop);
   }
 }
 
 bool BreakpointSite::isEmpty() const { return breakpoints.isEmpty(); }
@@ -361,17 +361,17 @@ inline size_t BreakpointSite::allocSize(
 Breakpoint::Breakpoint(Debugger* debugger, BreakpointSite* site,
                        JSObject* handler)
     : debugger(debugger), site(site), handler(handler) {
   MOZ_ASSERT(handler->compartment() == debugger->object->compartment());
   debugger->breakpoints.pushBack(this);
   site->breakpoints.pushBack(this);
 }
 
-void Breakpoint::destroy(FreeOp* fop,
+void Breakpoint::destroy(JSFreeOp* fop,
                          MayDestroySite mayDestroySite /* true */) {
   if (debugger->enabled) {
     site->dec(fop);
   }
   debugger->breakpoints.remove(this);
   site->breakpoints.remove(this);
   gc::Cell* cell = site->owningCellUnbarriered();
   size_t size = site->allocSize();
@@ -385,40 +385,40 @@ Breakpoint* Breakpoint::nextInDebugger()
 
 Breakpoint* Breakpoint::nextInSite() { return siteLink.mNext; }
 
 JSBreakpointSite::JSBreakpointSite(JSScript* script, jsbytecode* pc)
     : BreakpointSite(Type::JS), script(script), pc(pc) {
   MOZ_ASSERT(!DebugAPI::hasBreakpointsAt(script, pc));
 }
 
-void JSBreakpointSite::recompile(FreeOp* fop) {
+void JSBreakpointSite::recompile(JSFreeOp* fop) {
   if (script->hasBaselineScript()) {
     script->baselineScript()->toggleDebugTraps(script, pc);
   }
 }
 
-void JSBreakpointSite::destroyIfEmpty(FreeOp* fop) {
+void JSBreakpointSite::destroyIfEmpty(JSFreeOp* fop) {
   if (isEmpty()) {
     DebugScript::destroyBreakpointSite(fop, script, pc);
   }
 }
 
 WasmBreakpointSite::WasmBreakpointSite(wasm::Instance* instance_,
                                        uint32_t offset_)
     : BreakpointSite(Type::Wasm), instance(instance_), offset(offset_) {
   MOZ_ASSERT(instance);
   MOZ_ASSERT(instance->debugEnabled());
 }
 
-void WasmBreakpointSite::recompile(FreeOp* fop) {
+void WasmBreakpointSite::recompile(JSFreeOp* fop) {
   instance->debug().toggleBreakpointTrap(fop->runtime(), offset, isEnabled());
 }
 
-void WasmBreakpointSite::destroyIfEmpty(FreeOp* fop) {
+void WasmBreakpointSite::destroyIfEmpty(JSFreeOp* fop) {
   if (isEmpty()) {
     instance->destroyBreakpointSite(fop, offset);
   }
 }
 
 /*** Debugger hook dispatch *************************************************/
 
 Debugger::Debugger(JSContext* cx, NativeObject* dbg)
@@ -518,17 +518,17 @@ DebuggerMemory& Debugger::memory() const
   MOZ_ASSERT(hasMemory());
   return object->getReservedSlot(JSSLOT_DEBUG_MEMORY_INSTANCE)
       .toObject()
       .as<DebuggerMemory>();
 }
 
 /*** DebuggerVectorHolder *****************************************************/
 
-static void GlobalDebuggerVectorHolder_finalize(FreeOp* fop, JSObject* obj) {
+static void GlobalDebuggerVectorHolder_finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
   void* ptr = obj->as<NativeObject>().getPrivate();
   auto debuggers = static_cast<GlobalObject::DebuggerVector*>(ptr);
   fop->delete_(obj, debuggers, MemoryUse::GlobalDebuggerVector);
 }
 
 static const ClassOps GlobalDebuggerVectorHolder_classOps = {
     nullptr,
@@ -3058,17 +3058,17 @@ static bool AppendAndInvalidateScript(JS
 
 static bool UpdateExecutionObservabilityOfScriptsInZone(
     JSContext* cx, Zone* zone, const DebugAPI::ExecutionObservableSet& obs,
     Debugger::IsObserving observing) {
   using namespace js::jit;
 
   AutoSuppressProfilerSampling suppressProfilerSampling(cx);
 
-  FreeOp* fop = cx->runtime()->defaultFreeOp();
+  JSFreeOp* fop = cx->runtime()->defaultFreeOp();
 
   Vector<JSScript*> scripts(cx);
 
   // Iterate through observable scripts, invalidating their Ion scripts and
   // appending them to a vector for discarding their baseline scripts later.
   {
     AutoEnterAnalysis enter(fop, zone);
     if (JSScript* script = obs.singleScriptForZoneInvalidation()) {
@@ -3751,17 +3751,17 @@ void Debugger::trace(JSTracer* trc) {
   sources.trace(trc);
   objects.trace(trc);
   environments.trace(trc);
   wasmInstanceScripts.trace(trc);
   wasmInstanceSources.trace(trc);
 }
 
 /* static */
-void DebugAPI::sweepAll(FreeOp* fop) {
+void DebugAPI::sweepAll(JSFreeOp* fop) {
   JSRuntime* rt = fop->runtime();
 
   Debugger* dbg = rt->debuggerList().getFirst();
   while (dbg) {
     Debugger* next = dbg->getNext();
 
     // Detach dying debuggers and debuggees from each other. Since this
     // requires access to both objects it must be done before either
@@ -3780,17 +3780,18 @@ void DebugAPI::sweepAll(FreeOp* fop) {
       fop->delete_(dbg->object, dbg, MemoryUse::Debugger);
     }
 
     dbg = next;
   }
 }
 
 /* static */
-void Debugger::detachAllDebuggersFromGlobal(FreeOp* fop, GlobalObject* global) {
+void Debugger::detachAllDebuggersFromGlobal(JSFreeOp* fop,
+                                            GlobalObject* global) {
   const GlobalObject::DebuggerVector* debuggers = global->getDebuggers();
   MOZ_ASSERT(!debuggers->empty());
   while (!debuggers->empty()) {
     debuggers->back()->removeDebuggeeGlobal(fop, global, nullptr,
                                             Debugger::FromSweep::No);
   }
 }
 
@@ -4688,17 +4689,17 @@ static WeakHeapPtr<Debugger*>* findDebug
     if (p->unbarrieredGet() == dbg) {
       break;
     }
   }
   MOZ_ASSERT(p != vec->end());
   return p;
 }
 
-void Debugger::removeDebuggeeGlobal(FreeOp* fop, GlobalObject* global,
+void Debugger::removeDebuggeeGlobal(JSFreeOp* fop, GlobalObject* global,
                                     WeakGlobalObjectSet::Enum* debugEnum,
                                     FromSweep fromSweep) {
   // The caller might have found global by enumerating this->debuggees; if
   // so, use HashSet::Enum::removeFront rather than HashSet::remove below,
   // to avoid invalidating the live enumerator.
   MOZ_ASSERT(debuggees.has(global));
   MOZ_ASSERT(debuggeeZones.has(global->zone()));
   MOZ_ASSERT_IF(debugEnum, debugEnum->front().unbarrieredGet() == global);
@@ -6348,17 +6349,17 @@ bool Debugger::replaceFrameGuts(JSContex
     if (!dbg->frames.putNew(to, frameobj)) {
       // This OOM is subtle. At this point, both
       // removeFromDebuggerFramesOnExit and removeToDebuggerFramesOnExit
       // must both run for the same reason given above.
       //
       // The difference is that the current frameobj is no longer in its
       // Debugger's frame map, so it will not be cleaned up by neither
       // lambda. Manually clean it up here.
-      FreeOp* fop = cx->runtime()->defaultFreeOp();
+      JSFreeOp* fop = cx->runtime()->defaultFreeOp();
       frameobj->freeFrameIterData(fop);
       frameobj->maybeDecrementFrameScriptStepperCount(fop, to);
 
       ReportOutOfMemory(cx);
       return false;
     }
   }
 
@@ -6376,17 +6377,17 @@ bool DebugAPI::inFrameMaps(AbstractFrame
   return foundAny;
 }
 
 /* static */
 void Debugger::removeFromFrameMapsAndClearBreakpointsIn(JSContext* cx,
                                                         AbstractFramePtr frame,
                                                         bool suspending) {
   forEachDebuggerFrame(frame, [&](DebuggerFrame* frameobj) {
-    FreeOp* fop = cx->runtime()->defaultFreeOp();
+    JSFreeOp* fop = cx->runtime()->defaultFreeOp();
     frameobj->freeFrameIterData(fop);
 
     Debugger* dbg = Debugger::fromChildJSObject(frameobj);
     dbg->frames.remove(frame);
 
     if (frameobj->hasGenerator()) {
       // If this is a generator's final pop, remove its entry from
       // generatorFrames. Such an entry exists if and only if the
--- a/js/src/debugger/Debugger.h
+++ b/js/src/debugger/Debugger.h
@@ -729,17 +729,17 @@ class Debugger : private mozilla::Linked
   class QueryBase;
   class ScriptQuery;
   class SourceQuery;
   class ObjectQuery;
 
   enum class FromSweep { No, Yes };
 
   MOZ_MUST_USE bool addDebuggeeGlobal(JSContext* cx, Handle<GlobalObject*> obj);
-  void removeDebuggeeGlobal(FreeOp* fop, GlobalObject* global,
+  void removeDebuggeeGlobal(JSFreeOp* fop, GlobalObject* global,
                             WeakGlobalObjectSet::Enum* debugEnum,
                             FromSweep fromSweep);
 
   enum class CallUncaughtExceptionHook { No, Yes };
 
   /*
    * Apply the resumption information in (resumeMode, vp) to `frame` in
    * anticipation of returning to the debuggee.
@@ -1054,17 +1054,17 @@ class Debugger : private mozilla::Linked
 
   Zone* zone() const { return toJSObject()->zone(); }
 
   bool hasMemory() const;
   DebuggerMemory& memory() const;
 
   WeakGlobalObjectSet::Range allDebuggees() const { return debuggees.all(); }
 
-  static void detachAllDebuggersFromGlobal(FreeOp* fop, GlobalObject* global);
+  static void detachAllDebuggersFromGlobal(JSFreeOp* fop, GlobalObject* global);
 #ifdef DEBUG
   static bool isDebuggerCrossCompartmentEdge(JSObject* obj,
                                              const js::gc::Cell* cell);
 #endif
 
   static bool hasLiveHook(GlobalObject* global, Hook which);
 
   /*** Functions for use by Debugger.cpp. *********************************/
@@ -1232,17 +1232,17 @@ struct Handler {
    * JavaScript.
    */
   virtual JSObject* object() const = 0;
 
   /* Report that this Handler is now held by owner. See comment above. */
   virtual void hold(JSObject* owner) = 0;
 
   /* Report that this Handler is no longer held by owner. See comment above. */
-  virtual void drop(js::FreeOp* fop, JSObject* owner) = 0;
+  virtual void drop(JSFreeOp* fop, JSObject* owner) = 0;
 
   /*
    * Trace the reference to the handler. This method will be called by the
    * reflection object holding this Handler whenever the former is traced.
    */
   virtual void trace(JSTracer* tracer) = 0;
 
   /* Allocation size in bytes for memory accounting purposes. */
@@ -1276,30 +1276,30 @@ class BreakpointSite {
       mozilla::DoublyLinkedList<js::Breakpoint, SiteLinkAccess<js::Breakpoint>>;
   BreakpointList breakpoints;
   size_t enabledCount; /* number of breakpoints in the list that are enabled */
 
   gc::Cell* owningCellUnbarriered();
   size_t allocSize();
 
  protected:
-  virtual void recompile(FreeOp* fop) = 0;
+  virtual void recompile(JSFreeOp* fop) = 0;
   bool isEnabled() const { return enabledCount > 0; }
 
  public:
   BreakpointSite(Type type);
   Breakpoint* firstBreakpoint() const;
   virtual ~BreakpointSite() {}
   bool hasBreakpoint(Breakpoint* bp);
   Type type() const { return type_; }
 
-  void inc(FreeOp* fop);
-  void dec(FreeOp* fop);
+  void inc(JSFreeOp* fop);
+  void dec(JSFreeOp* fop);
   bool isEmpty() const;
-  virtual void destroyIfEmpty(FreeOp* fop) = 0;
+  virtual void destroyIfEmpty(JSFreeOp* fop) = 0;
 
   inline JSBreakpointSite* asJS();
   inline WasmBreakpointSite* asWasm();
 };
 
 /*
  * Each Breakpoint is a member of two linked lists: its debugger's list and its
  * site's list.
@@ -1339,58 +1339,58 @@ class Breakpoint {
    */
   mozilla::DoublyLinkedListElement<Breakpoint> debuggerLink;
   mozilla::DoublyLinkedListElement<Breakpoint> siteLink;
 
  public:
   Breakpoint(Debugger* debugger, BreakpointSite* site, JSObject* handler);
 
   enum MayDestroySite { False, True };
-  void destroy(FreeOp* fop,
+  void destroy(JSFreeOp* fop,
                MayDestroySite mayDestroySite = MayDestroySite::True);
 
   Breakpoint* nextInDebugger();
   Breakpoint* nextInSite();
   JSObject* getHandler() const { return handler; }
   PreBarrieredObject& getHandlerRef() { return handler; }
 
   inline WasmBreakpoint* asWasm();
 };
 
 class JSBreakpointSite : public BreakpointSite {
  public:
   JSScript* script;
   jsbytecode* const pc;
 
  protected:
-  void recompile(FreeOp* fop) override;
+  void recompile(JSFreeOp* fop) override;
 
  public:
   JSBreakpointSite(JSScript* script, jsbytecode* pc);
 
-  void destroyIfEmpty(FreeOp* fop) override;
+  void destroyIfEmpty(JSFreeOp* fop) override;
 };
 
 inline JSBreakpointSite* BreakpointSite::asJS() {
   MOZ_ASSERT(type() == Type::JS);
   return static_cast<JSBreakpointSite*>(this);
 }
 
 class WasmBreakpointSite : public BreakpointSite {
  public:
   wasm::Instance* instance;
   uint32_t offset;
 
  private:
-  void recompile(FreeOp* fop) override;
+  void recompile(JSFreeOp* fop) override;
 
  public:
   WasmBreakpointSite(wasm::Instance* instance, uint32_t offset);
 
-  void destroyIfEmpty(FreeOp* fop) override;
+  void destroyIfEmpty(JSFreeOp* fop) override;
 };
 
 inline WasmBreakpointSite* BreakpointSite::asWasm() {
   MOZ_ASSERT(type() == Type::Wasm);
   return static_cast<WasmBreakpointSite*>(this);
 }
 
 class WasmBreakpoint : public Breakpoint {
--- a/js/src/debugger/Frame.cpp
+++ b/js/src/debugger/Frame.cpp
@@ -100,17 +100,17 @@ ScriptedOnStepHandler::ScriptedOnStepHan
 }
 
 JSObject* ScriptedOnStepHandler::object() const { return object_; }
 
 void ScriptedOnStepHandler::hold(JSObject* owner) {
   AddCellMemory(owner, allocSize(), MemoryUse::DebuggerOnStepHandler);
 }
 
-void ScriptedOnStepHandler::drop(FreeOp* fop, JSObject* owner) {
+void ScriptedOnStepHandler::drop(JSFreeOp* fop, JSObject* owner) {
   fop->delete_(owner, this, allocSize(), MemoryUse::DebuggerOnStepHandler);
 }
 
 void ScriptedOnStepHandler::trace(JSTracer* tracer) {
   TraceEdge(tracer, &object_, "OnStepHandlerFunction.object");
 }
 
 bool ScriptedOnStepHandler::onStep(JSContext* cx, HandleDebuggerFrame frame,
@@ -132,17 +132,17 @@ ScriptedOnPopHandler::ScriptedOnPopHandl
 }
 
 JSObject* ScriptedOnPopHandler::object() const { return object_; }
 
 void ScriptedOnPopHandler::hold(JSObject* owner) {
   AddCellMemory(owner, allocSize(), MemoryUse::DebuggerOnPopHandler);
 }
 
-void ScriptedOnPopHandler::drop(FreeOp* fop, JSObject* owner) {
+void ScriptedOnPopHandler::drop(JSFreeOp* fop, JSObject* owner) {
   fop->delete_(owner, this, allocSize(), MemoryUse::DebuggerOnPopHandler);
 }
 
 void ScriptedOnPopHandler::trace(JSTracer* tracer) {
   TraceEdge(tracer, &object_, "OnStepHandlerFunction.object");
 }
 
 bool ScriptedOnPopHandler::onPop(JSContext* cx, HandleDebuggerFrame frame,
@@ -363,17 +363,17 @@ bool DebuggerFrame::setGenerator(JSConte
                    MemoryUse::DebuggerFrameGeneratorInfo);
 
   generatorFramesGuard.release();
   infoGuard.release();
 
   return true;
 }
 
-void DebuggerFrame::clearGenerator(FreeOp* fop) {
+void DebuggerFrame::clearGenerator(JSFreeOp* fop) {
   if (!hasGenerator()) {
     return;
   }
 
   GeneratorInfo* info = generatorInfo();
 
   // 4) The generator's script's observer count must be dropped.
   //
@@ -395,17 +395,17 @@ void DebuggerFrame::clearGenerator(FreeO
   }
 
   // 1) The DebuggerFrame must no longer point to the AbstractGeneratorObject.
   setReservedSlot(GENERATOR_INFO_SLOT, UndefinedValue());
   fop->delete_(this, info, MemoryUse::DebuggerFrameGeneratorInfo);
 }
 
 void DebuggerFrame::clearGenerator(
-    FreeOp* fop, Debugger* owner,
+    JSFreeOp* fop, Debugger* owner,
     Debugger::GeneratorWeakMap::Enum* maybeGeneratorFramesEnum) {
   if (!hasGenerator()) {
     return;
   }
 
   // 2) generatorFrames must no longer map the AbstractGeneratorObject to the
   // DebuggerFrame.
   GeneratorInfo* info = generatorInfo();
@@ -670,33 +670,33 @@ bool DebuggerFrame::setOnStepHandler(JSC
                                      OnStepHandler* handler) {
   MOZ_ASSERT(frame->isLive());
 
   OnStepHandler* prior = frame->onStepHandler();
   if (handler == prior) {
     return true;
   }
 
-  FreeOp* fop = cx->defaultFreeOp();
+  JSFreeOp* fop = cx->defaultFreeOp();
   AbstractFramePtr referent = DebuggerFrame::getReferent(frame);
 
   // Adjust execution observability and step counts on whatever code (JS or
   // Wasm) this frame is running.
   if (referent.isWasmDebugFrame()) {
     wasm::Instance* instance = referent.asWasmDebugFrame()->instance();
     wasm::DebugFrame* wasmFrame = referent.asWasmDebugFrame();
     if (handler && !prior) {
       // Single stepping toggled off->on.
       if (!instance->debug().incrementStepperCount(cx,
                                                    wasmFrame->funcIndex())) {
         return false;
       }
     } else if (!handler && prior) {
       // Single stepping toggled on->off.
-      FreeOp* fop = cx->runtime()->defaultFreeOp();
+      JSFreeOp* fop = cx->runtime()->defaultFreeOp();
       if (!instance->debug().decrementStepperCount(fop,
                                                    wasmFrame->funcIndex())) {
         return false;
       }
     }
   } else {
     if (handler && !prior) {
       // Single stepping toggled off->on.
@@ -965,17 +965,17 @@ OnPopHandler* DebuggerFrame::onPopHandle
 void DebuggerFrame::setOnPopHandler(JSContext* cx, OnPopHandler* handler) {
   MOZ_ASSERT(isLive());
 
   OnPopHandler* prior = onPopHandler();
   if (handler == prior) {
     return;
   }
 
-  FreeOp* fop = cx->defaultFreeOp();
+  JSFreeOp* fop = cx->defaultFreeOp();
 
   if (prior) {
     prior->drop(fop, this);
   }
 
   if (handler) {
     setReservedSlot(ONPOP_HANDLER_SLOT, PrivateValue(handler));
     handler->hold(this);
@@ -1025,25 +1025,25 @@ bool DebuggerFrame::requireScriptReferen
 }
 
 void DebuggerFrame::setFrameIterData(FrameIter::Data* data) {
   MOZ_ASSERT(data);
   MOZ_ASSERT(!frameIterData());
   InitObjectPrivate(this, data, MemoryUse::DebuggerFrameIterData);
 }
 
-void DebuggerFrame::freeFrameIterData(FreeOp* fop) {
+void DebuggerFrame::freeFrameIterData(JSFreeOp* fop) {
   if (FrameIter::Data* data = frameIterData()) {
     fop->delete_(this, data, MemoryUse::DebuggerFrameIterData);
     setPrivate(nullptr);
   }
 }
 
 void DebuggerFrame::maybeDecrementFrameScriptStepperCount(
-    FreeOp* fop, AbstractFramePtr frame) {
+    JSFreeOp* fop, AbstractFramePtr frame) {
   // If this frame has an onStep handler, decrement the script's count.
   OnStepHandler* handler = onStepHandler();
   if (!handler) {
     return;
   }
 
   if (frame.isWasmDebugFrame()) {
     wasm::Instance* instance = frame.wasmInstance();
@@ -1055,17 +1055,17 @@ void DebuggerFrame::maybeDecrementFrameS
 
   // In the case of generator frames, we may end up trying to clean up the step
   // count in more than one place, so make this method idempotent.
   handler->drop(fop, this);
   setReservedSlot(ONSTEP_HANDLER_SLOT, UndefinedValue());
 }
 
 /* static */
-void DebuggerFrame::finalize(FreeOp* fop, JSObject* obj) {
+void DebuggerFrame::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   DebuggerFrame& frameobj = obj->as<DebuggerFrame>();
   frameobj.freeFrameIterData(fop);
   frameobj.clearGenerator(fop);
   OnStepHandler* onStepHandler = frameobj.onStepHandler();
   if (onStepHandler) {
     onStepHandler->drop(fop, &frameobj);
   }
--- a/js/src/debugger/Frame.h
+++ b/js/src/debugger/Frame.h
@@ -46,17 +46,17 @@ struct OnStepHandler : Handler {
                       ResumeMode& resumeMode, MutableHandleValue vp) = 0;
 };
 
 class ScriptedOnStepHandler final : public OnStepHandler {
  public:
   explicit ScriptedOnStepHandler(JSObject* object);
   virtual JSObject* object() const override;
   virtual void hold(JSObject* owner) override;
-  virtual void drop(js::FreeOp* fop, JSObject* owner) override;
+  virtual void drop(JSFreeOp* fop, JSObject* owner) override;
   virtual void trace(JSTracer* tracer) override;
   virtual size_t allocSize() const override;
   virtual bool onStep(JSContext* cx, HandleDebuggerFrame frame,
                       ResumeMode& resumeMode, MutableHandleValue vp) override;
 
  private:
   HeapPtr<JSObject*> object_;
 };
@@ -79,17 +79,17 @@ struct OnPopHandler : Handler {
                      MutableHandleValue vp) = 0;
 };
 
 class ScriptedOnPopHandler final : public OnPopHandler {
  public:
   explicit ScriptedOnPopHandler(JSObject* object);
   virtual JSObject* object() const override;
   virtual void hold(JSObject* owner) override;
-  virtual void drop(js::FreeOp* fop, JSObject* owner) override;
+  virtual void drop(JSFreeOp* fop, JSObject* owner) override;
   virtual void trace(JSTracer* tracer) override;
   virtual size_t allocSize() const override;
   virtual bool onPop(JSContext* cx, HandleDebuggerFrame frame,
                      const Completion& completion, ResumeMode& resumeMode,
                      MutableHandleValue vp) override;
 
  private:
   HeapPtr<JSObject*> object_;
@@ -232,36 +232,36 @@ class DebuggerFrame : public NativeObjec
    * that case, the owner is not reliably available, and is not actually
    * necessary.)
    *
    * If maybeGeneratorFramesEnum is non-null, use it to remove this frame's
    * entry from the Debugger's generatorFrames weak map. In this case, this
    * function will not otherwise disturb generatorFrames. Passing the enum
    * allows this function to be used while iterating over generatorFrames.
    */
-  void clearGenerator(FreeOp* fop);
+  void clearGenerator(JSFreeOp* fop);
   void clearGenerator(
-      FreeOp* fop, Debugger* owner,
+      JSFreeOp* fop, Debugger* owner,
       Debugger::GeneratorWeakMap::Enum* maybeGeneratorFramesEnum = nullptr);
 
   /*
    * Called after a generator/async frame is resumed, before exposing this
    * Debugger.Frame object to any hooks.
    */
   bool resume(const FrameIter& iter);
 
   bool hasAnyLiveHooks() const;
 
  private:
   static const ClassOps classOps_;
 
   static const JSPropertySpec properties_[];
   static const JSFunctionSpec methods_[];
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 
   static AbstractFramePtr getReferent(HandleDebuggerFrame frame);
   static MOZ_MUST_USE bool getFrameIter(JSContext* cx,
                                         HandleDebuggerFrame frame,
                                         mozilla::Maybe<FrameIter>& result);
   static MOZ_MUST_USE bool requireScriptReferent(JSContext* cx,
                                                  HandleDebuggerFrame frame);
 
@@ -296,18 +296,18 @@ class DebuggerFrame : public NativeObjec
   static MOZ_MUST_USE bool evalWithBindingsMethod(JSContext* cx, unsigned argc,
                                                   Value* vp);
 
   Debugger* owner() const;
 
  public:
   FrameIter::Data* frameIterData() const;
   void setFrameIterData(FrameIter::Data*);
-  void freeFrameIterData(FreeOp* fop);
-  void maybeDecrementFrameScriptStepperCount(FreeOp* fop,
+  void freeFrameIterData(JSFreeOp* fop);
+  void maybeDecrementFrameScriptStepperCount(JSFreeOp* fop,
                                              AbstractFramePtr frame);
 
   class GeneratorInfo;
   inline GeneratorInfo* generatorInfo() const;
 };
 
 } /* namespace js */
 
--- a/js/src/gc/ArenaList.h
+++ b/js/src/gc/ArenaList.h
@@ -332,34 +332,36 @@ class ArenaLists {
   inline bool checkEmptyArenaLists();
   inline void checkEmptyFreeList(AllocKind kind);
 
   bool checkEmptyArenaList(AllocKind kind);
 
   bool relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                       js::SliceBudget& sliceBudget, gcstats::Statistics& stats);
 
-  void queueForegroundObjectsForSweep(FreeOp* fop);
+  void queueForegroundObjectsForSweep(JSFreeOp* fop);
   void queueForegroundThingsForSweep();
 
   void releaseForegroundSweptEmptyArenas();
 
-  bool foregroundFinalize(FreeOp* fop, AllocKind thingKind,
+  bool foregroundFinalize(JSFreeOp* fop, AllocKind thingKind,
                           js::SliceBudget& sliceBudget,
                           SortedArenaList& sweepList);
-  static void backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty);
+  static void backgroundFinalize(JSFreeOp* fop, Arena* listHead, Arena** empty);
 
   void setParallelAllocEnabled(bool enabled);
 
  private:
   inline JSRuntime* runtime();
   inline JSRuntime* runtimeFromAnyThread();
 
-  inline void queueForForegroundSweep(FreeOp* fop, const FinalizePhase& phase);
-  inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase);
+  inline void queueForForegroundSweep(JSFreeOp* fop,
+                                      const FinalizePhase& phase);
+  inline void queueForBackgroundSweep(JSFreeOp* fop,
+                                      const FinalizePhase& phase);
   inline void queueForForegroundSweep(AllocKind thingKind);
   inline void queueForBackgroundSweep(AllocKind thingKind);
 
   TenuredCell* refillFreeListAndAllocate(FreeLists& freeLists,
                                          AllocKind thingKind,
                                          ShouldCheckThresholds checkThresholds);
 
   friend class GCRuntime;
--- a/js/src/gc/FreeOp.h
+++ b/js/src/gc/FreeOp.h
@@ -20,21 +20,21 @@ struct JSRuntime;
 
 namespace js {
 namespace gc {
 class AutoSetThreadIsPerformingGC;
 }  // namespace gc
 }  // namespace js
 
 /*
- * A FreeOp can do one thing: free memory. For convenience, it has delete_
+ * A JSFreeOp can do one thing: free memory. For convenience, it has delete_
  * convenience methods that also call destructors.
  *
- * FreeOp is passed to finalizers and other sweep-phase hooks so that we do not
- * need to pass a JSContext to those hooks.
+ * JSFreeOp is passed to finalizers and other sweep-phase hooks so that we do
+ * not need to pass a JSContext to those hooks.
  */
 class JSFreeOp {
   using Cell = js::gc::Cell;
   using MemoryUse = js::MemoryUse;
 
   JSRuntime* runtime_;
 
   // We may accumulate a set of deferred free operations to be performed when
@@ -81,17 +81,17 @@ class JSFreeOp {
   void free_(Cell* cell, void* p, size_t nbytes, MemoryUse use);
 
   // Deprecated. Where possible, memory should be tracked against the owning GC
   // thing by calling js::AddCellMemory and the memory freed with freeLater()
   // below.
   void freeUntrackedLater(void* p) { queueForFreeLater(p); }
 
   // Queue memory that was associated with a GC thing using js::AddCellMemory to
-  // be freed when the FreeOp is destroyed.
+  // be freed when the JSFreeOp is destroyed.
   //
  // This should not be called on the default JSFreeOps returned by
   // JSRuntime/JSContext::defaultFreeOp() since these are not destroyed until
   // the runtime itself is destroyed.
   //
   // This is used to ensure that copy-on-write object elements are not freed
   // until all objects that refer to them have been finalized.
   void freeLater(Cell* cell, void* p, size_t nbytes, MemoryUse use);
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -549,17 +549,17 @@ void Arena::staticAsserts() {
       mozilla::ArrayLength(FirstThingOffsets) == size_t(AllocKind::LIMIT),
       "We haven't defined all offsets.");
   static_assert(
       mozilla::ArrayLength(ThingsPerArena) == size_t(AllocKind::LIMIT),
       "We haven't defined all counts.");
 }
 
 template <typename T>
-inline size_t Arena::finalize(FreeOp* fop, AllocKind thingKind,
+inline size_t Arena::finalize(JSFreeOp* fop, AllocKind thingKind,
                               size_t thingSize) {
   /* Enforce requirements on size of T. */
   MOZ_ASSERT(thingSize % CellAlignBytes == 0);
   MOZ_ASSERT(thingSize >= MinCellSize);
   MOZ_ASSERT(thingSize <= 255);
 
   MOZ_ASSERT(allocated());
   MOZ_ASSERT(thingKind == getAllocKind());
@@ -624,17 +624,17 @@ inline size_t Arena::finalize(FreeOp* fo
 #endif
   return nmarked;
 }
 
 // Finalize arenas from src list, releasing empty arenas if keepArenas wasn't
 // specified and inserting the others into the appropriate destination size
 // bins.
 template <typename T>
-static inline bool FinalizeTypedArenas(FreeOp* fop, Arena** src,
+static inline bool FinalizeTypedArenas(JSFreeOp* fop, Arena** src,
                                        SortedArenaList& dest,
                                        AllocKind thingKind,
                                        SliceBudget& budget) {
   // When operating in the foreground, take the lock at the top.
   Maybe<AutoLockGC> maybeLock;
   if (fop->onMainThread()) {
     maybeLock.emplace(fop->runtime());
   }
@@ -660,17 +660,17 @@ static inline bool FinalizeTypedArenas(F
   }
 
   return true;
 }
 
 /*
  * Finalize the list of areans.
  */
-static bool FinalizeArenas(FreeOp* fop, Arena** src, SortedArenaList& dest,
+static bool FinalizeArenas(JSFreeOp* fop, Arena** src, SortedArenaList& dest,
                            AllocKind thingKind, SliceBudget& budget) {
   switch (thingKind) {
 #define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                     compact)                                                 \
   case AllocKind::allocKind:                                                 \
     return FinalizeTypedArenas<type>(fop, src, dest, thingKind, budget);
     FOR_EACH_ALLOCKIND(EXPAND_CASE)
 #undef EXPAND_CASE
@@ -1945,17 +1945,17 @@ void GCRuntime::removeFinalizeCallback(J
        p < finalizeCallbacks.ref().end(); p++) {
     if (p->op == callback) {
       finalizeCallbacks.ref().erase(p);
       break;
     }
   }
 }
 
-void GCRuntime::callFinalizeCallbacks(FreeOp* fop,
+void GCRuntime::callFinalizeCallbacks(JSFreeOp* fop,
                                       JSFinalizeStatus status) const {
   for (auto& p : finalizeCallbacks.ref()) {
     p.op(fop, status, p.data);
   }
 }
 
 bool GCRuntime::addWeakPointerZonesCallback(JSWeakPointerZonesCallback callback,
                                             void* data) {
@@ -2569,17 +2569,17 @@ bool MovingTracer::onBaseShapeEdge(BaseS
 }
 bool MovingTracer::onScopeEdge(Scope** scopep) { return updateEdge(scopep); }
 bool MovingTracer::onRegExpSharedEdge(RegExpShared** sharedp) {
   return updateEdge(sharedp);
 }
 bool MovingTracer::onBigIntEdge(BigInt** bip) { return updateEdge(bip); }
 
 void Zone::prepareForCompacting() {
-  FreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
+  JSFreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
   discardJitCode(fop);
 }
 
 void GCRuntime::sweepTypesAfterCompacting(Zone* zone) {
   zone->beginSweepTypes();
 
   AutoClearTypeInferenceStateOnOOM oom(zone);
 
@@ -2592,17 +2592,17 @@ void GCRuntime::sweepTypesAfterCompactin
     AutoSweepObjectGroup sweep(group);
   }
 
   zone->types.endSweep(rt);
 }
 
 void GCRuntime::sweepZoneAfterCompacting(Zone* zone) {
   MOZ_ASSERT(zone->isCollecting());
-  FreeOp* fop = rt->defaultFreeOp();
+  JSFreeOp* fop = rt->defaultFreeOp();
   sweepTypesAfterCompacting(zone);
   zone->sweepBreakpoints(fop);
   zone->sweepWeakMaps();
   for (auto* cache : zone->weakCaches()) {
     cache->sweep();
   }
 
   if (jit::JitZone* jitZone = zone->jitZone()) {
@@ -3162,34 +3162,34 @@ ArenaLists::~ArenaLists() {
     MOZ_ASSERT(concurrentUse(i) == ConcurrentUse::None);
     ReleaseArenaList(runtime(), arenaLists(i).head(), lock);
   }
   ReleaseArenaList(runtime(), incrementalSweptArenas.ref().head(), lock);
 
   ReleaseArenaList(runtime(), savedEmptyArenas, lock);
 }
 
-void ArenaLists::queueForForegroundSweep(FreeOp* fop,
+void ArenaLists::queueForForegroundSweep(JSFreeOp* fop,
                                          const FinalizePhase& phase) {
   gcstats::AutoPhase ap(fop->runtime()->gc.stats(), phase.statsPhase);
   for (auto kind : phase.kinds) {
     queueForForegroundSweep(kind);
   }
 }
 
 void ArenaLists::queueForForegroundSweep(AllocKind thingKind) {
   MOZ_ASSERT(!IsBackgroundFinalized(thingKind));
   MOZ_ASSERT(concurrentUse(thingKind) == ConcurrentUse::None);
   MOZ_ASSERT(!arenaListsToSweep(thingKind));
 
   arenaListsToSweep(thingKind) = arenaLists(thingKind).head();
   arenaLists(thingKind).clear();
 }
 
-void ArenaLists::queueForBackgroundSweep(FreeOp* fop,
+void ArenaLists::queueForBackgroundSweep(JSFreeOp* fop,
                                          const FinalizePhase& phase) {
   gcstats::AutoPhase ap(fop->runtime()->gc.stats(), phase.statsPhase);
   for (auto kind : phase.kinds) {
     queueForBackgroundSweep(kind);
   }
 }
 
 inline void ArenaLists::queueForBackgroundSweep(AllocKind thingKind) {
@@ -3204,17 +3204,17 @@ inline void ArenaLists::queueForBackgrou
   MOZ_ASSERT(concurrentUse(thingKind) == ConcurrentUse::None);
 
   arenaListsToSweep(thingKind) = al->head();
   al->clear();
   concurrentUse(thingKind) = ConcurrentUse::BackgroundFinalize;
 }
 
 /*static*/
-void ArenaLists::backgroundFinalize(FreeOp* fop, Arena* listHead,
+void ArenaLists::backgroundFinalize(JSFreeOp* fop, Arena* listHead,
                                     Arena** empty) {
   MOZ_ASSERT(listHead);
   MOZ_ASSERT(empty);
 
   AllocKind thingKind = listHead->getAllocKind();
   Zone* zone = listHead->zone;
 
   size_t thingsPerArena = Arena::thingsPerArena(thingKind);
@@ -3688,17 +3688,17 @@ void js::gc::BackgroundDecommitTask::run
 
 void GCRuntime::sweepBackgroundThings(ZoneList& zones, LifoAlloc& freeBlocks) {
   freeBlocks.freeAll();
 
   if (zones.isEmpty()) {
     return;
   }
 
-  FreeOp fop(nullptr);
+  JSFreeOp fop(nullptr);
 
   // Sweep zones in order. The atoms zone must be finalized last as other
   // zones may have direct pointers into it.
   while (!zones.isEmpty()) {
     Zone* zone = zones.removeFront();
     Arena* emptyArenas = nullptr;
 
     // We must finalize thing kinds in the order specified by
@@ -3864,17 +3864,17 @@ void GCRuntime::freeFromBackgroundThread
 
     Nursery::BufferSet buffers;
     mozilla::Swap(buffers, buffersToFreeAfterMinorGC.ref());
 
     AutoUnlockHelperThreadState unlock(lock);
 
     lifoBlocks.freeAll();
 
-    FreeOp* fop = TlsContext.get()->defaultFreeOp();
+    JSFreeOp* fop = TlsContext.get()->defaultFreeOp();
     for (Nursery::BufferSet::Range r = buffers.all(); !r.empty();
          r.popFront()) {
       // Malloc memory associated with nursery objects is not tracked as these
       // are assumed to be short lived.
       fop->freeUntracked(r.front());
     }
   } while (!lifoBlocksToFree.ref().isEmpty() ||
            !buffersToFreeAfterMinorGC.ref().empty());
@@ -3892,58 +3892,58 @@ bool UniqueIdGCPolicy::needsSweep(Cell**
     // handles updating the UID table manually.
     MOZ_ASSERT(t == prior);
     return result;
   });
 }
 
 void JS::Zone::sweepUniqueIds() { uniqueIds().sweep(); }
 
-void Realm::destroy(FreeOp* fop) {
+void Realm::destroy(JSFreeOp* fop) {
   JSRuntime* rt = fop->runtime();
   if (auto callback = rt->destroyRealmCallback) {
     callback(fop, this);
   }
   if (principals()) {
     JS_DropPrincipals(rt->mainContextFromOwnThread(), principals());
   }
   // Bug 1560019: Malloc memory associated with a zone but not with a specific
   // GC thing is not currently tracked.
   fop->deleteUntracked(this);
 }
 
-void Compartment::destroy(FreeOp* fop) {
+void Compartment::destroy(JSFreeOp* fop) {
   JSRuntime* rt = fop->runtime();
   if (auto callback = rt->destroyCompartmentCallback) {
     callback(fop, this);
   }
   // Bug 1560019: Malloc memory associated with a zone but not with a specific
   // GC thing is not currently tracked.
   fop->deleteUntracked(this);
   rt->gc.stats().sweptCompartment();
 }
 
-void Zone::destroy(FreeOp* fop) {
+void Zone::destroy(JSFreeOp* fop) {
   MOZ_ASSERT(compartments().empty());
   // Bug 1560019: Malloc memory associated with a zone but not with a specific
   // GC thing is not currently tracked.
   fop->deleteUntracked(this);
   fop->runtime()->gc.stats().sweptZone();
 }
 
 /*
  * It's simpler if we preserve the invariant that every zone (except the atoms
  * zone) has at least one compartment, and every compartment has at least one
  * realm. If we know we're deleting the entire zone, then sweepCompartments is
  * allowed to delete all compartments. In this case, |keepAtleastOne| is false.
  * If any cells remain alive in the zone, set |keepAtleastOne| true to prohibit
  * sweepCompartments from deleting every compartment. Instead, it preserves an
  * arbitrary compartment in the zone.
  */
-void Zone::sweepCompartments(FreeOp* fop, bool keepAtleastOne,
+void Zone::sweepCompartments(JSFreeOp* fop, bool keepAtleastOne,
                              bool destroyingRuntime) {
   MOZ_ASSERT(!compartments().empty());
   MOZ_ASSERT_IF(destroyingRuntime, !keepAtleastOne);
 
   Compartment** read = compartments().begin();
   Compartment** end = compartments().end();
   Compartment** write = read;
   while (read < end) {
@@ -3963,17 +3963,17 @@ void Zone::sweepCompartments(FreeOp* fop
       comp->destroy(fop);
     }
   }
   compartments().shrinkTo(write - compartments().begin());
   MOZ_ASSERT_IF(keepAtleastOne, !compartments().empty());
   MOZ_ASSERT_IF(destroyingRuntime, compartments().empty());
 }
 
-void Compartment::sweepRealms(FreeOp* fop, bool keepAtleastOne,
+void Compartment::sweepRealms(JSFreeOp* fop, bool keepAtleastOne,
                               bool destroyingRuntime) {
   MOZ_ASSERT(!realms().empty());
   MOZ_ASSERT_IF(destroyingRuntime, !keepAtleastOne);
 
   Realm** read = realms().begin();
   Realm** end = realms().end();
   Realm** write = read;
   while (read < end) {
@@ -4004,17 +4004,17 @@ void GCRuntime::deleteEmptyZone(Zone* zo
       zones().erase(&i);
       zone->destroy(rt->defaultFreeOp());
       return;
     }
   }
   MOZ_CRASH("Zone not found");
 }
 
-void GCRuntime::sweepZones(FreeOp* fop, bool destroyingRuntime) {
+void GCRuntime::sweepZones(JSFreeOp* fop, bool destroyingRuntime) {
   MOZ_ASSERT_IF(destroyingRuntime, numActiveZoneIters == 0);
   MOZ_ASSERT_IF(destroyingRuntime, arenasEmptyAtShutdown);
 
   if (numActiveZoneIters) {
     return;
   }
 
   assertBackgroundSweepingFinished();
@@ -5469,17 +5469,17 @@ static inline void MaybeCheckWeakMapMark
       MOZ_RELEASE_ASSERT(WeakMapBase::checkMarkingForZone(zone));
     }
   }
 
 #endif
 }
 
 IncrementalProgress GCRuntime::markGrayReferencesInCurrentGroup(
-    FreeOp* fop, SliceBudget& budget) {
+    JSFreeOp* fop, SliceBudget& budget) {
   MOZ_ASSERT(marker.markColor() == MarkColor::Black);
 
   if (hasMarkedGrayRoots) {
     return Finished;
   }
 
   MOZ_ASSERT(cellsToAssertNotGray.ref().empty());
 
@@ -5512,17 +5512,17 @@ IncrementalProgress GCRuntime::markGrayR
   if (shouldYieldForZeal(ZealMode::YieldWhileGrayMarking)) {
     return NotFinished;
   }
 #endif
 
   return markUntilBudgetExhausted(budget, gcstats::PhaseKind::SWEEP_MARK_GRAY);
 }
 
-IncrementalProgress GCRuntime::endMarkingSweepGroup(FreeOp* fop,
+IncrementalProgress GCRuntime::endMarkingSweepGroup(JSFreeOp* fop,
                                                     SliceBudget& budget) {
   MOZ_ASSERT(marker.markColor() == MarkColor::Black);
   MOZ_ASSERT(!HasIncomingCrossCompartmentPointers(rt));
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
 
   markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK);
 
@@ -5673,17 +5673,17 @@ void GCRuntime::joinTask(GCParallelTask&
                          AutoLockHelperThreadState& locked) {
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::JOIN_PARALLEL_TASKS);
     task.joinWithLockHeld(locked);
   }
   stats().recordParallelPhase(phase, task.duration());
 }
 
-void GCRuntime::sweepDebuggerOnMainThread(FreeOp* fop) {
+void GCRuntime::sweepDebuggerOnMainThread(JSFreeOp* fop) {
   // Detach unreachable debuggers and global objects from each other.
   // This can modify weakmaps and so must happen before weakmap sweeping.
   DebugAPI::sweepAll(fop);
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
 
   // Sweep debug environment information. This performs lookups in the Zone's
   // unique IDs table and so must not happen in parallel with sweeping that
@@ -5700,17 +5700,17 @@ void GCRuntime::sweepDebuggerOnMainThrea
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_BREAKPOINT);
     for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
       zone->sweepBreakpoints(fop);
     }
   }
 }
 
-void GCRuntime::sweepJitDataOnMainThread(FreeOp* fop) {
+void GCRuntime::sweepJitDataOnMainThread(JSFreeOp* fop) {
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);
 
     if (initialState != State::NotActive) {
       // Cancel any active or pending off thread compilations. We also did
       // this before marking (in DiscardJITCodeForGC) so this is a no-op
       // for non-incremental GCs.
       js::CancelOffThreadIonCompile(rt, JS::Zone::Sweep);
@@ -5818,17 +5818,17 @@ static void SweepWeakCachesOnMainThread(
         if (cache->needsIncrementalBarrier()) {
           cache->setNeedsIncrementalBarrier(false);
         }
         cache->sweep();
         return true;
       });
 }
 
-IncrementalProgress GCRuntime::beginSweepingSweepGroup(FreeOp* fop,
+IncrementalProgress GCRuntime::beginSweepingSweepGroup(JSFreeOp* fop,
                                                        SliceBudget& budget) {
   /*
    * Begin sweeping the group of zones in currentSweepGroup, performing
    * actions that must be done before yielding to caller.
    */
 
   using namespace gcstats;
 
@@ -5955,21 +5955,21 @@ bool GCRuntime::shouldYieldForZeal(ZealM
   if (mode == ZealMode::IncrementalMultipleSlices && !firstSweepSlice) {
     yield = false;
   }
 
   return yield;
 }
 #endif
 
-IncrementalProgress GCRuntime::endSweepingSweepGroup(FreeOp* fop,
+IncrementalProgress GCRuntime::endSweepingSweepGroup(JSFreeOp* fop,
                                                      SliceBudget& budget) {
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
-    FreeOp fop(rt);
+    JSFreeOp fop(rt);
     callFinalizeCallbacks(&fop, JSFINALIZE_GROUP_END);
   }
 
   /* Free LIFO blocks on a background thread if possible. */
   startBackgroundFree();
 
   /* Update the GC state for zones we have swept. */
   for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
@@ -6025,17 +6025,17 @@ void GCRuntime::beginSweepPhase(JS::GCRe
   AssertNoWrappersInGrayList(rt);
   DropStringWrappers(rt);
 
   groupZonesForSweeping(reason);
 
   sweepActions->assertFinished();
 }
 
-bool ArenaLists::foregroundFinalize(FreeOp* fop, AllocKind thingKind,
+bool ArenaLists::foregroundFinalize(JSFreeOp* fop, AllocKind thingKind,
                                     SliceBudget& sliceBudget,
                                     SortedArenaList& sweepList) {
   if (!arenaListsToSweep(thingKind) && incrementalSweptArenas.ref().isEmpty()) {
     return true;
   }
 
   // Empty arenas are not released until all foreground finalized GC things in
   // the current sweep group have been finalized.  This allows finalizers for
@@ -6076,32 +6076,32 @@ IncrementalProgress GCRuntime::markUntil
   return marker.markUntilBudgetExhausted(sliceBudget) ? Finished : NotFinished;
 }
 
 void GCRuntime::drainMarkStack() {
   auto unlimited = SliceBudget::unlimited();
   MOZ_RELEASE_ASSERT(marker.markUntilBudgetExhausted(unlimited));
 }
 
-static void SweepThing(FreeOp* fop, Shape* shape) {
+static void SweepThing(JSFreeOp* fop, Shape* shape) {
   if (!shape->isMarkedAny()) {
     shape->sweep(fop);
   }
 }
 
-static void SweepThing(FreeOp* fop, JSScript* script) {
+static void SweepThing(JSFreeOp* fop, JSScript* script) {
   AutoSweepJitScript sweep(script);
 }
 
-static void SweepThing(FreeOp* fop, ObjectGroup* group) {
+static void SweepThing(JSFreeOp* fop, ObjectGroup* group) {
   AutoSweepObjectGroup sweep(group);
 }
 
 template <typename T>
-static bool SweepArenaList(FreeOp* fop, Arena** arenasToSweep,
+static bool SweepArenaList(JSFreeOp* fop, Arena** arenasToSweep,
                            SliceBudget& sliceBudget) {
   while (Arena* arena = *arenasToSweep) {
     for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
       SweepThing(fop, i.get<T>());
     }
 
     *arenasToSweep = (*arenasToSweep)->next;
     AllocKind kind = MapTypeToFinalizeKind<T>::kind;
@@ -6109,17 +6109,17 @@ static bool SweepArenaList(FreeOp* fop, 
     if (sliceBudget.isOverBudget()) {
       return false;
     }
   }
 
   return true;
 }
 
-IncrementalProgress GCRuntime::sweepTypeInformation(FreeOp* fop,
+IncrementalProgress GCRuntime::sweepTypeInformation(JSFreeOp* fop,
                                                     SliceBudget& budget) {
   // Sweep dead type information stored in scripts and object groups, but
   // don't finalize them yet. We have to sweep dead information from both live
   // and dead scripts and object groups, so that no dead references remain in
   // them. Type inference can end up crawling these zones again, such as for
   // TypeCompartment::markSetsUnknown, and if this happens after sweeping for
   // the sweep group finishes we won't be able to determine which things in
   // the zone are live.
@@ -6145,17 +6145,17 @@ IncrementalProgress GCRuntime::sweepType
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_TYPES_END);
     sweepZone->types.endSweep(rt);
   }
 
   return Finished;
 }
 
-IncrementalProgress GCRuntime::releaseSweptEmptyArenas(FreeOp* fop,
+IncrementalProgress GCRuntime::releaseSweptEmptyArenas(JSFreeOp* fop,
                                                        SliceBudget& budget) {
   // Foreground finalized GC things have already been finalized, and now their
   // arenas can be reclaimed by freeing empty ones and making non-empty ones
   // available for allocation.
 
   for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
     zone->arenas.releaseForegroundSweptEmptyArenas();
   }
@@ -6178,17 +6178,17 @@ void GCRuntime::startSweepingAtomsTable(
     atomsTable->traceWeak(&trc);
     return;
   }
 
   // Initialize remaining atoms to sweep.
   maybeAtoms.emplace(*atomsTable);
 }
 
-IncrementalProgress GCRuntime::sweepAtomsTable(FreeOp* fop,
+IncrementalProgress GCRuntime::sweepAtomsTable(JSFreeOp* fop,
                                                SliceBudget& budget) {
   if (!atomsZone->isGCSweeping()) {
     return Finished;
   }
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_ATOMS_TABLE);
 
   auto& maybeAtoms = maybeAtomsToSweep.ref();
@@ -6308,17 +6308,17 @@ class IncrementalSweepWeakCacheTask
 
 static const size_t MaxWeakCacheSweepTasks = 8;
 
 static size_t WeakCacheSweepTaskCount() {
   size_t targetTaskCount = HelperThreadState().cpuCount;
   return Min(targetTaskCount, MaxWeakCacheSweepTasks);
 }
 
-IncrementalProgress GCRuntime::sweepWeakCaches(FreeOp* fop,
+IncrementalProgress GCRuntime::sweepWeakCaches(JSFreeOp* fop,
                                                SliceBudget& budget) {
   WeakCacheSweepIterator work(this);
 
   {
     AutoLockHelperThreadState lock;
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
 
     Maybe<IncrementalSweepWeakCacheTask> tasks[MaxWeakCacheSweepTasks];
@@ -6329,17 +6329,17 @@ IncrementalProgress GCRuntime::sweepWeak
 
     // Tasks run until budget or work is exhausted.
   }
 
   AutoLockHelperThreadState lock;
   return work.empty(lock) ? Finished : NotFinished;
 }
 
-IncrementalProgress GCRuntime::finalizeAllocKind(FreeOp* fop,
+IncrementalProgress GCRuntime::finalizeAllocKind(JSFreeOp* fop,
                                                  SliceBudget& budget) {
   // Set the number of things per arena for this AllocKind.
   size_t thingsPerArena = Arena::thingsPerArena(sweepAllocKind);
   auto& sweepList = incrementalSweepList.ref();
   sweepList.setThingsPerArena(thingsPerArena);
 
   if (!sweepZone->arenas.foregroundFinalize(fop, sweepAllocKind, budget,
                                             sweepList)) {
@@ -6347,17 +6347,17 @@ IncrementalProgress GCRuntime::finalizeA
   }
 
   // Reset the slots of the sweep list that we used.
   sweepList.reset(thingsPerArena);
 
   return Finished;
 }
 
-IncrementalProgress GCRuntime::sweepShapeTree(FreeOp* fop,
+IncrementalProgress GCRuntime::sweepShapeTree(JSFreeOp* fop,
                                               SliceBudget& budget) {
   // Remove dead shapes from the shape tree, but don't finalize them yet.
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_SHAPE);
 
   ArenaLists& al = sweepZone->arenas;
 
   if (!SweepArenaList<Shape>(fop, &al.gcShapeArenasToUpdate.ref(), budget)) {
@@ -6449,17 +6449,17 @@ class js::gc::SweepGroupsIter {
     gc->getNextSweepGroup();
   }
 };
 
 namespace sweepaction {
 
 // Implementation of the SweepAction interface that calls a method on GCRuntime.
 class SweepActionCall final : public SweepAction {
-  using Method = IncrementalProgress (GCRuntime::*)(FreeOp* fop,
+  using Method = IncrementalProgress (GCRuntime::*)(JSFreeOp* fop,
                                                     SliceBudget& budget);
 
   Method method;
 
  public:
   explicit SweepActionCall(Method m) : method(m) {}
   IncrementalProgress run(Args& args) override {
     return (args.gc->*method)(args.fop, args.budget);
@@ -6584,17 +6584,17 @@ class SweepActionForEach final : public 
   void setElem(const Elem& value) {
     if (elemOut) {
       *elemOut = value;
     }
   }
 };
 
 static UniquePtr<SweepAction> Call(IncrementalProgress (GCRuntime::*method)(
-    FreeOp* fop, SliceBudget& budget)) {
+    JSFreeOp* fop, SliceBudget& budget)) {
   return MakeUnique<SweepActionCall>(method);
 }
 
 static UniquePtr<SweepAction> MaybeYield(ZealMode zealMode) {
   return MakeUnique<SweepActionMaybeYield>(zealMode);
 }
 
 template <typename... Rest>
@@ -6680,17 +6680,17 @@ bool GCRuntime::initSweepActions() {
 IncrementalProgress GCRuntime::performSweepActions(SliceBudget& budget) {
   // Marked GC things may vary between recording and replaying, so sweep
   // actions should not perform any recorded events.
   mozilla::recordreplay::AutoDisallowThreadEvents disallow;
 
   AutoSetThreadIsSweeping threadIsSweeping;
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
-  FreeOp fop(rt);
+  JSFreeOp fop(rt);
 
   // Drain the mark stack, except in the first sweep slice where we must not
   // yield to the mutator until we've starting sweeping a sweep group.
   MOZ_ASSERT(initialState <= State::Sweep);
   if (initialState != State::Sweep) {
     MOZ_ASSERT(marker.isDrained());
   } else {
     if (markUntilBudgetExhausted(budget, gcstats::PhaseKind::SWEEP_MARK) ==
@@ -6727,17 +6727,17 @@ bool GCRuntime::allCCVisibleZonesWereCol
 }
 
 void GCRuntime::endSweepPhase(bool destroyingRuntime) {
   sweepActions->assertFinished();
 
   AutoSetThreadIsSweeping threadIsSweeping;
 
   gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
-  FreeOp fop(rt);
+  JSFreeOp fop(rt);
 
   MOZ_ASSERT_IF(destroyingRuntime, !sweepOnBackgroundThread);
 
   {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::DESTROY);
 
     /*
      * Sweep script filenames after sweeping functions in the generic loop
@@ -7253,17 +7253,17 @@ void GCRuntime::incrementalSlice(SliceBu
     }
 
       {
         // Re-sweep the zones list, now that background finalization is
         // finished to actually remove and free dead zones.
         gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP);
         gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::DESTROY);
         AutoSetThreadIsSweeping threadIsSweeping;
-        FreeOp fop(rt);
+        JSFreeOp fop(rt);
         sweepZones(&fop, destroyingRuntime);
       }
 
       MOZ_ASSERT(!startedCompacting);
       incrementalState = State::Compact;
 
       // Always yield before compacting since it is not incremental.
       if (isCompacting && !budget.isUnlimited()) {
@@ -8427,17 +8427,17 @@ void GCRuntime::setDeterministic(bool en
 
 #ifdef DEBUG
 
 /* Should only be called manually under gdb */
 void PreventGCDuringInteractiveDebug() { TlsContext.get()->suppressGC++; }
 
 #endif
 
-void js::ReleaseAllJITCode(FreeOp* fop) {
+void js::ReleaseAllJITCode(JSFreeOp* fop) {
   js::CancelOffThreadIonCompile(fop->runtime());
 
   for (ZonesIter zone(fop->runtime(), SkipAtoms); !zone.done(); zone.next()) {
     zone->setPreservingCode(false);
     zone->discardJitCode(fop);
   }
 
   for (RealmsIter realm(fop->runtime()); !realm.done(); realm.next()) {
--- a/js/src/gc/GC.h
+++ b/js/src/gc/GC.h
@@ -51,17 +51,17 @@ struct MapTypeToFinalizeKind {};
   };
 FOR_EACH_NONOBJECT_ALLOCKIND(EXPAND_MAPTYPETOFINALIZEKIND)
 #undef EXPAND_MAPTYPETOFINALIZEKIND
 
 } /* namespace gc */
 
 extern void TraceRuntime(JSTracer* trc);
 
-extern void ReleaseAllJITCode(FreeOp* op);
+extern void ReleaseAllJITCode(JSFreeOp* op);
 
 extern void PrepareForDebugGC(JSRuntime* rt);
 
 /* Functions for managing cross compartment gray pointers. */
 
 extern void NotifyGCNukeWrapper(JSObject* o);
 
 extern unsigned NotifyGCPreSwap(JSObject* a, JSObject* b);
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -50,17 +50,17 @@ class WeakCacheSweepIterator;
 
 enum IncrementalProgress { NotFinished = 0, Finished };
 
 // Interface to a sweep action.
 struct SweepAction {
   // The arguments passed to each action.
   struct Args {
     GCRuntime* gc;
-    FreeOp* fop;
+    JSFreeOp* fop;
     SliceBudget& budget;
   };
 
   virtual ~SweepAction() {}
   virtual IncrementalProgress run(Args& state) = 0;
   virtual void assertFinished() const = 0;
   virtual bool shouldSkip() { return false; }
 };
@@ -625,35 +625,37 @@ class GCRuntime {
   void markBufferedGrayRoots(JS::Zone* zone);
   void markAllWeakReferences(gcstats::PhaseKind phase);
   void markAllGrayReferences(gcstats::PhaseKind phase);
 
   void beginSweepPhase(JS::GCReason reason, AutoGCSession& session);
   void groupZonesForSweeping(JS::GCReason reason);
   MOZ_MUST_USE bool findSweepGroupEdges();
   void getNextSweepGroup();
-  IncrementalProgress markGrayReferencesInCurrentGroup(FreeOp* fop,
+  IncrementalProgress markGrayReferencesInCurrentGroup(JSFreeOp* fop,
                                                        SliceBudget& budget);
-  IncrementalProgress endMarkingSweepGroup(FreeOp* fop, SliceBudget& budget);
+  IncrementalProgress endMarkingSweepGroup(JSFreeOp* fop, SliceBudget& budget);
   void markIncomingCrossCompartmentPointers(MarkColor color);
-  IncrementalProgress beginSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
-  void sweepDebuggerOnMainThread(FreeOp* fop);
-  void sweepJitDataOnMainThread(FreeOp* fop);
-  IncrementalProgress endSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
+  IncrementalProgress beginSweepingSweepGroup(JSFreeOp* fop,
+                                              SliceBudget& budget);
+  void sweepDebuggerOnMainThread(JSFreeOp* fop);
+  void sweepJitDataOnMainThread(JSFreeOp* fop);
+  IncrementalProgress endSweepingSweepGroup(JSFreeOp* fop, SliceBudget& budget);
   IncrementalProgress performSweepActions(SliceBudget& sliceBudget);
-  IncrementalProgress sweepTypeInformation(FreeOp* fop, SliceBudget& budget);
-  IncrementalProgress releaseSweptEmptyArenas(FreeOp* fop, SliceBudget& budget);
+  IncrementalProgress sweepTypeInformation(JSFreeOp* fop, SliceBudget& budget);
+  IncrementalProgress releaseSweptEmptyArenas(JSFreeOp* fop,
+                                              SliceBudget& budget);
   void startSweepingAtomsTable();
-  IncrementalProgress sweepAtomsTable(FreeOp* fop, SliceBudget& budget);
-  IncrementalProgress sweepWeakCaches(FreeOp* fop, SliceBudget& budget);
-  IncrementalProgress finalizeAllocKind(FreeOp* fop, SliceBudget& budget);
-  IncrementalProgress sweepShapeTree(FreeOp* fop, SliceBudget& budget);
+  IncrementalProgress sweepAtomsTable(JSFreeOp* fop, SliceBudget& budget);
+  IncrementalProgress sweepWeakCaches(JSFreeOp* fop, SliceBudget& budget);
+  IncrementalProgress finalizeAllocKind(JSFreeOp* fop, SliceBudget& budget);
+  IncrementalProgress sweepShapeTree(JSFreeOp* fop, SliceBudget& budget);
   void endSweepPhase(bool lastGC);
   bool allCCVisibleZonesWereCollected() const;
-  void sweepZones(FreeOp* fop, bool destroyingRuntime);
+  void sweepZones(JSFreeOp* fop, bool destroyingRuntime);
   void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);
   void startDecommit();
   void queueZonesAndStartBackgroundSweep(ZoneList& zones);
   void sweepFromBackgroundThread(AutoLockHelperThreadState& lock);
   void startBackgroundFree();
   void freeFromBackgroundThread(AutoLockHelperThreadState& lock);
   void sweepBackgroundThings(ZoneList& zones, LifoAlloc& freeBlocks);
   void assertBackgroundSweepingFinished();
@@ -687,17 +689,17 @@ class GCRuntime {
   void computeNonIncrementalMarkingForValidation(AutoGCSession& session);
   void validateIncrementalMarking();
   void finishMarkingValidation();
 
 #ifdef DEBUG
   void checkForCompartmentMismatches();
 #endif
 
-  void callFinalizeCallbacks(FreeOp* fop, JSFinalizeStatus status) const;
+  void callFinalizeCallbacks(JSFreeOp* fop, JSFinalizeStatus status) const;
   void callWeakPointerZonesCallbacks() const;
   void callWeakPointerCompartmentCallbacks(JS::Compartment* comp) const;
   void callDoCycleCollectionCallback(JSContext* cx);
 
  public:
   JSRuntime* const rt;
 
   /* Embedders can use this zone and group however they wish. */
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -435,17 +435,17 @@ class Arena {
     hasDelayedGrayMarking_ = 0;
     nextDelayedMarkingArena_ = 0;
   }
 
   inline ArenaCellSet*& bufferedCells();
   inline size_t& atomBitmapStart();
 
   template <typename T>
-  size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
+  size_t finalize(JSFreeOp* fop, AllocKind thingKind, size_t thingSize);
 
   static void staticAsserts();
 
   void unmarkAll();
   void unmarkPreMarkedFreeCells();
 
   void arenaAllocatedDuringGC();
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -172,17 +172,17 @@ Zone::DebuggerVector* Zone::getOrCreateD
 
   debuggers = js_new<DebuggerVector>();
   if (!debuggers) {
     ReportOutOfMemory(cx);
   }
   return debuggers;
 }
 
-void Zone::sweepBreakpoints(FreeOp* fop) {
+void Zone::sweepBreakpoints(JSFreeOp* fop) {
   if (fop->runtime()->debuggerList().isEmpty()) {
     return;
   }
 
   /*
    * Sweep all compartments in a zone at the same time, since there is no way
    * to iterate over the scripts belonging to a single compartment in a zone.
    */
@@ -338,17 +338,17 @@ void Zone::checkStringWrappersAfterMovin
 }
 #endif
 
 void Zone::sweepWeakMaps() {
   /* Finalize unreachable (key,value) pairs in all weak maps. */
   WeakMapBase::sweepZone(this);
 }
 
-void Zone::discardJitCode(FreeOp* fop,
+void Zone::discardJitCode(JSFreeOp* fop,
                           ShouldDiscardBaselineCode discardBaselineCode,
                           ShouldDiscardJitScripts discardJitScripts) {
   if (!jitZone()) {
     return;
   }
 
   if (isPreservingCode()) {
     return;
@@ -535,17 +535,17 @@ void Zone::deleteEmptyCompartment(JS::Co
   MOZ_ASSERT(comp->zone() == this);
   MOZ_ASSERT(arenas.checkEmptyArenaLists());
 
   MOZ_ASSERT(compartments().length() == 1);
   MOZ_ASSERT(compartments()[0] == comp);
   MOZ_ASSERT(comp->realms().length() == 1);
 
   Realm* realm = comp->realms()[0];
-  FreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
+  JSFreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
   realm->destroy(fop);
   comp->destroy(fop);
 
   compartments().clear();
 }
 
 void Zone::setHelperThreadOwnerContext(JSContext* cx) {
   MOZ_ASSERT_IF(cx, TlsContext.get() == cx);
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -142,17 +142,17 @@ namespace JS {
 //
 // We always guarantee that a zone has at least one live compartment by refusing
 // to delete the last compartment in a live zone.
 class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
  public:
   explicit Zone(JSRuntime* rt);
   ~Zone();
   MOZ_MUST_USE bool init(bool isSystem);
-  void destroy(js::FreeOp* fop);
+  void destroy(JSFreeOp* fop);
 
   static JS::Zone* from(ZoneAllocator* zoneAlloc) {
     return static_cast<Zone*>(zoneAlloc);
   }
 
  private:
   enum class HelperThreadUse : uint32_t { None, Pending, Active };
   mozilla::Atomic<HelperThreadUse, mozilla::SequentiallyConsistent,
@@ -197,17 +197,17 @@ class Zone : public js::ZoneAllocator, p
   };
 
   enum ShouldDiscardJitScripts : bool {
     KeepJitScripts = false,
     DiscardJitScripts
   };
 
   void discardJitCode(
-      js::FreeOp* fop,
+      JSFreeOp* fop,
       ShouldDiscardBaselineCode discardBaselineCode = DiscardBaselineCode,
       ShouldDiscardJitScripts discardJitScripts = KeepJitScripts);
 
   void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                               size_t* typePool, size_t* regexpZone,
                               size_t* jitZone, size_t* baselineStubsOptimized,
                               size_t* cachedCFG, size_t* uniqueIdMap,
                               size_t* shapeCaches, size_t* atomsMarkBitmaps,
@@ -307,20 +307,20 @@ class Zone : public js::ZoneAllocator, p
   bool requireGCTracer() const;
 
   // For testing purposes, return the index of the sweep group which this zone
   // was swept in in the last GC.
   unsigned lastSweepGroupIndex() { return gcSweepGroupIndex; }
 #endif
 
   void sweepAfterMinorGC(JSTracer* trc);
-  void sweepBreakpoints(js::FreeOp* fop);
+  void sweepBreakpoints(JSFreeOp* fop);
   void sweepUniqueIds();
   void sweepWeakMaps();
-  void sweepCompartments(js::FreeOp* fop, bool keepAtleastOne, bool lastGC);
+  void sweepCompartments(JSFreeOp* fop, bool keepAtleastOne, bool lastGC);
 
   using DebuggerVector = js::Vector<js::Debugger*, 0, js::SystemAllocPolicy>;
 
  private:
   js::ZoneData<DebuggerVector*> debuggers;
 
   js::jit::JitZone* createJitZone(JSContext* cx);
 
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -506,17 +506,17 @@ void BaselineScript::writeBarrierPre(Zon
     script->trace(zone->barrierTracer());
   }
 }
 
 void BaselineScript::Trace(JSTracer* trc, BaselineScript* script) {
   script->trace(trc);
 }
 
-void BaselineScript::Destroy(FreeOp* fop, BaselineScript* script) {
+void BaselineScript::Destroy(JSFreeOp* fop, BaselineScript* script) {
   MOZ_ASSERT(!script->hasPendingIonBuilder());
 
   // This allocation is tracked by JSScript::setBaselineScript /
   // clearBaselineScript.
   fop->deleteUntracked(script);
 }
 
 void JS::DeletePolicy<js::jit::BaselineScript>::operator()(
@@ -910,17 +910,17 @@ void BaselineInterpreter::toggleCodeCove
   if (coverage::IsLCovEnabled()) {
     // Instrumentation is enabled no matter what.
     return;
   }
 
   toggleCodeCoverageInstrumentationUnchecked(enable);
 }
 
-void jit::FinishDiscardBaselineScript(FreeOp* fop, JSScript* script) {
+void jit::FinishDiscardBaselineScript(JSFreeOp* fop, JSScript* script) {
   MOZ_ASSERT(script->hasBaselineScript());
   MOZ_ASSERT(!script->jitScript()->active());
 
   BaselineScript* baseline = script->baselineScript();
   script->setBaselineScript(fop, nullptr);
   BaselineScript::Destroy(fop, baseline);
 }
 
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -284,17 +284,17 @@ struct BaselineScript final {
                              uint32_t warmUpCheckPrologueOffset,
                              uint32_t profilerEnterToggleOffset,
                              uint32_t profilerExitToggleOffset,
                              size_t retAddrEntries, size_t osrEntries,
                              size_t debugTrapEntries, size_t resumeEntries,
                              size_t traceLoggerToggleOffsetEntries);
 
   static void Trace(JSTracer* trc, BaselineScript* script);
-  static void Destroy(FreeOp* fop, BaselineScript* script);
+  static void Destroy(JSFreeOp* fop, BaselineScript* script);
 
   static inline size_t offsetOfMethod() {
     return offsetof(BaselineScript, method_);
   }
 
   void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                               size_t* data) const {
     *data += mallocSizeOf(this);
@@ -418,17 +418,17 @@ JitExecStatus EnterBaselineInterpreterAt
 bool CanBaselineInterpretScript(JSScript* script);
 
 // Called by the Baseline Interpreter to compile a script for the Baseline JIT.
 // |res| is set to the native code address in the BaselineScript to jump to, or
 // nullptr if we were unable to compile this script.
 bool BaselineCompileFromBaselineInterpreter(JSContext* cx, BaselineFrame* frame,
                                             uint8_t** res);
 
-void FinishDiscardBaselineScript(FreeOp* fop, JSScript* script);
+void FinishDiscardBaselineScript(JSFreeOp* fop, JSScript* script);
 
 void AddSizeOfBaselineData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf,
                            size_t* data);
 
 void ToggleBaselineProfiling(JSContext* cx, bool enable);
 
 void ToggleBaselineTraceLoggerScripts(JSRuntime* runtime, bool enable);
 void ToggleBaselineTraceLoggerEngine(JSRuntime* runtime, bool enable);
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -703,17 +703,17 @@ void JitCode::traceChildren(JSTracer* tr
                                   movingObjects ? Reprotect : DontReprotect);
 
     uint8_t* start = code_ + dataRelocTableOffset();
     CompactBufferReader reader(start, start + dataRelocTableBytes_);
     MacroAssembler::TraceDataRelocations(trc, this, reader);
   }
 }
 
-void JitCode::finalize(FreeOp* fop) {
+void JitCode::finalize(JSFreeOp* fop) {
   // If this jitcode had a bytecode map, it must have already been removed.
 #ifdef DEBUG
   JSRuntime* rt = fop->runtime();
   if (hasBytecodeMap_) {
     MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
     MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw()));
   }
 #endif
@@ -1032,17 +1032,17 @@ const OsiIndex* IonScript::getOsiIndex(u
 }
 
 void IonScript::Trace(JSTracer* trc, IonScript* script) {
   if (script != ION_DISABLED_SCRIPT) {
     script->trace(trc);
   }
 }
 
-void IonScript::Destroy(FreeOp* fop, IonScript* script) {
+void IonScript::Destroy(JSFreeOp* fop, IonScript* script) {
   // This allocation is tracked by JSScript::setIonScript / clearIonScript.
   fop->deleteUntracked(script);
 }
 
 void JS::DeletePolicy<js::jit::IonScript>::operator()(
     const js::jit::IonScript* script) {
   IonScript::Destroy(rt_->defaultFreeOp(), const_cast<IonScript*>(script));
 }
@@ -2488,17 +2488,17 @@ MethodStatus jit::Recompile(JSContext* c
       ForbidCompilation(cx, script);
     }
     return status;
   }
 
   return Method_Compiled;
 }
 
-static void InvalidateActivation(FreeOp* fop,
+static void InvalidateActivation(JSFreeOp* fop,
                                  const JitActivationIterator& activations,
                                  bool invalidateAll) {
   JitSpew(JitSpew_IonInvalidate, "BEGIN invalidating activation");
 
 #ifdef CHECK_OSIPOINT_REGISTERS
   if (JitOptions.checkOsiPointRegisters) {
     activations->asJit()->setCheckRegs(false);
   }
@@ -2655,17 +2655,17 @@ static void InvalidateActivation(FreeOp*
         "   ! Invalidate ionScript %p (inv count %zu) -> patching osipoint %p",
         ionScript, ionScript->invalidationCount(), (void*)osiPatchPoint.raw());
     Assembler::PatchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
   }
 
   JitSpew(JitSpew_IonInvalidate, "END invalidating activation");
 }
 
-void jit::InvalidateAll(FreeOp* fop, Zone* zone) {
+void jit::InvalidateAll(JSFreeOp* fop, Zone* zone) {
   // The caller should previously have cancelled off thread compilation.
 #ifdef DEBUG
   for (RealmsInZoneIter realm(zone); !realm.done(); realm.next()) {
     MOZ_ASSERT(!HasOffThreadIonCompile(realm));
   }
 #endif
   if (zone->isAtomsZone()) {
     return;
@@ -2686,17 +2686,17 @@ static void ClearIonScriptAfterInvalidat
   // Wait for the scripts to get warm again before doing another
   // compile, unless we are recompiling *because* a script got hot
   // (resetUses is false).
   if (resetUses) {
     script->resetWarmUpCounterToDelayIonCompilation();
   }
 }
 
-void jit::Invalidate(TypeZone& types, FreeOp* fop,
+void jit::Invalidate(TypeZone& types, JSFreeOp* fop,
                      const RecompileInfoVector& invalid, bool resetUses,
                      bool cancelOffThread) {
   JitSpew(JitSpew_IonInvalidate, "Start invalidation.");
 
   // Add an invalidation reference to all invalidated IonScripts to indicate
   // to the traversal which frames have been invalidated.
   size_t numInvalidations = 0;
   for (const RecompileInfo& info : invalid) {
@@ -2815,17 +2815,17 @@ void jit::Invalidate(JSContext* cx, JSSc
   RecompileInfoVector scripts;
   MOZ_ASSERT(script->hasIonScript());
   MOZ_RELEASE_ASSERT(scripts.reserve(1));
   scripts.infallibleEmplaceBack(script, script->ionScript()->compilationId());
 
   Invalidate(cx, scripts, resetUses, cancelOffThread);
 }
 
-void jit::FinishInvalidation(FreeOp* fop, JSScript* script) {
+void jit::FinishInvalidation(JSFreeOp* fop, JSScript* script) {
   if (!script->hasIonScript()) {
     return;
   }
 
   // In all cases, null out script->ion to avoid re-entry.
   IonScript* ion = script->ionScript();
   script->setIonScript(fop, nullptr);
 
@@ -3009,17 +3009,17 @@ size_t jit::SizeOfIonData(JSScript* scri
 
   if (script->hasIonScript()) {
     result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
   }
 
   return result;
 }
 
-void jit::DestroyJitScripts(FreeOp* fop, JSScript* script) {
+void jit::DestroyJitScripts(JSFreeOp* fop, JSScript* script) {
   if (script->hasIonScript()) {
     IonScript* ion = script->ionScript();
     script->clearIonScript(fop);
     jit::IonScript::Destroy(fop, ion);
   }
 
   if (script->hasBaselineScript()) {
     BaselineScript* baseline = script->baselineScript();
--- a/js/src/jit/Ion.h
+++ b/js/src/jit/Ion.h
@@ -164,17 +164,17 @@ enum JitExecStatus {
 
 static inline bool IsErrorStatus(JitExecStatus status) {
   return status == JitExec_Error || status == JitExec_Aborted;
 }
 
 struct EnterJitData;
 
 // Walk the stack and invalidate active Ion frames for the invalid scripts.
-void Invalidate(TypeZone& types, FreeOp* fop,
+void Invalidate(TypeZone& types, JSFreeOp* fop,
                 const RecompileInfoVector& invalid, bool resetUses = true,
                 bool cancelOffThread = true);
 void Invalidate(JSContext* cx, const RecompileInfoVector& invalid,
                 bool resetUses = true, bool cancelOffThread = true);
 void Invalidate(JSContext* cx, JSScript* script, bool resetUses = true,
                 bool cancelOffThread = true);
 
 class IonBuilder;
@@ -249,17 +249,17 @@ class MOZ_RAII AutoEnterIonBackend {
   MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 bool OffThreadCompilationAvailable(JSContext* cx);
 
 void ForbidCompilation(JSContext* cx, JSScript* script);
 
 size_t SizeOfIonData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf);
-void DestroyJitScripts(FreeOp* fop, JSScript* script);
+void DestroyJitScripts(JSFreeOp* fop, JSScript* script);
 void TraceJitScripts(JSTracer* trc, JSScript* script);
 
 bool JitSupportsSimd();
 bool JitSupportsAtomics();
 
 }  // namespace jit
 }  // namespace js
 
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -96,17 +96,17 @@ class JitCode : public gc::TenuredCell {
     const uint8_t* addr_u8 = (const uint8_t*)addr;
     return raw() <= addr_u8 && addr_u8 < rawEnd();
   }
   size_t instructionsSize() const { return insnSize_; }
   size_t bufferSize() const { return bufferSize_; }
   size_t headerSize() const { return headerSize_; }
 
   void traceChildren(JSTracer* trc);
-  void finalize(FreeOp* fop);
+  void finalize(JSFreeOp* fop);
   void setInvalidated() { invalidated_ = true; }
 
   void setHasBytecodeMap() { hasBytecodeMap_ = true; }
 
   // If this JitCode object has been, effectively, corrupted due to
   // invalidation patching, then we have to remember this so we don't try and
   // trace relocation entries that may now be corrupt.
   bool invalidated() const { return !!invalidated_; }
@@ -289,17 +289,17 @@ struct IonScript {
                         uint32_t frameSize, size_t snapshotsListSize,
                         size_t snapshotsRVATableSize, size_t recoversSize,
                         size_t bailoutEntries, size_t constants,
                         size_t safepointIndexEntries, size_t osiIndexEntries,
                         size_t icEntries, size_t runtimeSize,
                         size_t safepointsSize,
                         OptimizationLevel optimizationLevel);
   static void Trace(JSTracer* trc, IonScript* script);
-  static void Destroy(FreeOp* fop, IonScript* script);
+  static void Destroy(JSFreeOp* fop, IonScript* script);
 
   static inline size_t offsetOfMethod() { return offsetof(IonScript, method_); }
   static inline size_t offsetOfOsrEntryOffset() {
     return offsetof(IonScript, osrEntryOffset_);
   }
   static inline size_t offsetOfSkipArgCheckEntryOffset() {
     return offsetof(IonScript, skipArgCheckEntryOffset_);
   }
@@ -432,17 +432,17 @@ struct IonScript {
   bool invalidated() const { return invalidationCount_ != 0; }
 
   // Invalidate the current compilation.
   void invalidate(JSContext* cx, JSScript* script, bool resetUses,
                   const char* reason);
 
   size_t invalidationCount() const { return invalidationCount_; }
   void incrementInvalidationCount() { invalidationCount_++; }
-  void decrementInvalidationCount(FreeOp* fop) {
+  void decrementInvalidationCount(JSFreeOp* fop) {
     MOZ_ASSERT(invalidationCount_);
     invalidationCount_--;
     if (!invalidationCount_) {
       Destroy(fop, this);
     }
   }
   IonCompilationId compilationId() const { return compilationId_; }
   OptimizationLevel optimizationLevel() const { return optimizationLevel_; }
--- a/js/src/jit/JitRealm.h
+++ b/js/src/jit/JitRealm.h
@@ -679,18 +679,18 @@ class JitRealm {
   // called. This is arranged by cancelling off-thread Ion compilation at the
   // start of GC and at the start of sweeping.
   void performStubReadBarriers(uint32_t stubsToBarrier) const;
 
   size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 };
 
 // Called from Zone::discardJitCode().
-void InvalidateAll(FreeOp* fop, JS::Zone* zone);
-void FinishInvalidation(FreeOp* fop, JSScript* script);
+void InvalidateAll(JSFreeOp* fop, JS::Zone* zone);
+void FinishInvalidation(JSFreeOp* fop, JSScript* script);
 
 // This class ensures JIT code is executable on its destruction. Creators
 // must call makeWritable(), and not attempt to write to the buffer if it fails.
 //
 // AutoWritableJitCodeFallible may only fail to make code writable; it cannot
 // fail to make JIT code executable (because the creating code has no chance to
 // recover from a failed destructor).
 class MOZ_RAII AutoWritableJitCodeFallible {
--- a/js/src/jit/JitScript.cpp
+++ b/js/src/jit/JitScript.cpp
@@ -158,26 +158,26 @@ bool JSScript::createJitScript(JSContext
     InferSpew(ISpewOps, "typeSet: %sT%p%s arg%u %p", InferSpewColor(types),
               types, InferSpewColorReset(), i, this);
   }
 #endif
 
   return true;
 }
 
-void JSScript::maybeReleaseJitScript(FreeOp* fop) {
+void JSScript::maybeReleaseJitScript(JSFreeOp* fop) {
   if (!jitScript_ || zone()->types.keepJitScripts || hasBaselineScript() ||
       jitScript_->active()) {
     return;
   }
 
   releaseJitScript(fop);
 }
 
-void JSScript::releaseJitScript(FreeOp* fop) {
+void JSScript::releaseJitScript(JSFreeOp* fop) {
   MOZ_ASSERT(!hasIonScript());
 
   fop->removeCellMemory(this, jitScript_->allocBytes(), MemoryUse::JitScript);
 
   JitScript::Destroy(zone(), jitScript_);
   jitScript_ = nullptr;
   updateJitCodeRaw(fop->runtime());
 }
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -40,17 +40,17 @@
 #include "vm/StringType.h"
 
 #include "vm/ErrorObject-inl.h"
 #include "vm/JSObject-inl.h"
 #include "vm/SavedStacks-inl.h"
 
 using namespace js;
 
-static void exn_finalize(FreeOp* fop, JSObject* obj);
+static void exn_finalize(JSFreeOp* fop, JSObject* obj);
 
 static bool exn_toSource(JSContext* cx, unsigned argc, Value* vp);
 
 #define IMPLEMENT_ERROR_PROTO_CLASS(name)                        \
   {                                                              \
     js_Object_str, JSCLASS_HAS_CACHED_PROTO(JSProto_##name),     \
         JS_NULL_CLASS_OPS,                                       \
         &ErrorObject::classSpecs[JSProto_##name - JSProto_Error] \
@@ -332,17 +332,17 @@ JSString* js::ComputeStackString(JSConte
   RootedString str(cx);
   if (!BuildStackString(cx, cx->realm()->principals(), stack, &str)) {
     return nullptr;
   }
 
   return str.get();
 }
 
-static void exn_finalize(FreeOp* fop, JSObject* obj) {
+static void exn_finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
   if (JSErrorReport* report = obj->as<ErrorObject>().getErrorReport()) {
     // Bug 1560019: This allocation is not currently tracked.
     fop->deleteUntracked(report);
   }
 }
 
 JSErrorReport* js::ErrorFromException(JSContext* cx, HandleObject objArg) {
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -1120,25 +1120,16 @@ typedef struct JSDOMCallbacks DOMCallbac
 
 extern JS_FRIEND_API void SetDOMCallbacks(JSContext* cx,
                                           const DOMCallbacks* callbacks);
 
 extern JS_FRIEND_API const DOMCallbacks* GetDOMCallbacks(JSContext* cx);
 
 extern JS_FRIEND_API JSObject* GetTestingFunctions(JSContext* cx);
 
-/**
- * Helper to convert FreeOp to JSFreeOp when the definition of FreeOp is not
- * available and the compiler does not know that FreeOp inherits from
- * JSFreeOp.
- */
-inline JSFreeOp* CastToJSFreeOp(FreeOp* fop) {
-  return reinterpret_cast<JSFreeOp*>(fop);
-}
-
 /* Implemented in jsexn.cpp. */
 
 /**
  * Get an error type name from a JSExnType constant.
  * Returns nullptr for invalid arguments and JSEXN_INTERNALERR
  */
 extern JS_FRIEND_API JSFlatString* GetErrorTypeName(JSContext* cx,
                                                     int16_t exnType);
--- a/js/src/proxy/Proxy.cpp
+++ b/js/src/proxy/Proxy.cpp
@@ -710,17 +710,17 @@ void ProxyObject::trace(JSTracer* trc, J
       continue;
     }
     TraceEdge(trc, proxy->reservedSlotPtr(i), "proxy_reserved");
   }
 
   Proxy::trace(trc, obj);
 }
 
-static void proxy_Finalize(FreeOp* fop, JSObject* obj) {
+static void proxy_Finalize(JSFreeOp* fop, JSObject* obj) {
   // Suppress a bogus warning about finalize().
   JS::AutoSuppressGCAnalysis nogc;
 
   MOZ_ASSERT(obj->is<ProxyObject>());
   obj->as<ProxyObject>().handler()->finalize(fop, obj);
 
   if (!obj->as<ProxyObject>().usingInlineValueArray()) {
     // Bug 1560019: This allocation is not tracked, but is only present when
--- a/js/src/shell/OSObject.cpp
+++ b/js/src/shell/OSObject.cpp
@@ -424,17 +424,17 @@ class FileObject : public NativeObject {
       return nullptr;
     }
 
     InitReservedSlot(obj, FILE_SLOT, file, MemoryUse::FileObjectFile);
     file->acquire();
     return obj;
   }
 
-  static void finalize(FreeOp* fop, JSObject* obj) {
+  static void finalize(JSFreeOp* fop, JSObject* obj) {
     FileObject* fileObj = &obj->as<FileObject>();
     RCFile* file = fileObj->rcFile();
     fop->removeCellMemory(obj, sizeof(*file), MemoryUse::FileObjectFile);
     if (file->release()) {
       fop->deleteUntracked(file);
     }
   }
 
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -6975,17 +6975,17 @@ class StreamCacheEntry : public AtomicRe
 
 typedef RefPtr<StreamCacheEntry> StreamCacheEntryPtr;
 
 class StreamCacheEntryObject : public NativeObject {
   static const unsigned CACHE_ENTRY_SLOT = 0;
   static const ClassOps classOps_;
   static const JSPropertySpec properties_;
 
-  static void finalize(FreeOp*, JSObject* obj) {
+  static void finalize(JSFreeOp*, JSObject* obj) {
     obj->as<StreamCacheEntryObject>().cache().Release();
   }
 
   static bool cachedGetter(JSContext* cx, unsigned argc, Value* vp) {
     CallArgs args = CallArgsFromVp(argc, vp);
     if (!args.thisv().isObject() ||
         !args.thisv().toObject().is<StreamCacheEntryObject>()) {
       return false;
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -896,17 +896,17 @@ bool UnmappedArgumentsObject::obj_enumer
     if (!HasOwnProperty(cx, argsobj, id, &found)) {
       return false;
     }
   }
 
   return true;
 }
 
-void ArgumentsObject::finalize(FreeOp* fop, JSObject* obj) {
+void ArgumentsObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(!IsInsideNursery(obj));
   ArgumentsObject& argsobj = obj->as<ArgumentsObject>();
   if (argsobj.data()) {
     fop->free_(&argsobj, argsobj.maybeRareData(),
                RareArgumentsData::bytesRequired(argsobj.initialLength()),
                MemoryUse::RareArgumentsData);
     fop->free_(&argsobj, argsobj.data(),
                ArgumentsData::bytesRequired(argsobj.data()->numArgs),
--- a/js/src/vm/ArgumentsObject.h
+++ b/js/src/vm/ArgumentsObject.h
@@ -369,17 +369,17 @@ class ArgumentsObject : public NativeObj
     return mallocSizeOf(data()) + mallocSizeOf(maybeRareData());
   }
   size_t sizeOfData() const {
     return ArgumentsData::bytesRequired(data()->numArgs) +
            (maybeRareData() ? RareArgumentsData::bytesRequired(initialLength())
                             : 0);
   }
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static void trace(JSTracer* trc, JSObject* obj);
   static size_t objectMoved(JSObject* dst, JSObject* src);
 
   /* For jit use: */
   static size_t getDataSlotOffset() { return getFixedSlotOffset(DATA_SLOT); }
   static size_t getInitialLengthSlotOffset() {
     return getFixedSlotOffset(INITIAL_LENGTH_SLOT);
   }
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -933,17 +933,17 @@ SharedMem<uint8_t*> ArrayBufferObject::d
   return SharedMem<uint8_t*>::unshared(getFixedSlot(DATA_SLOT).toPrivate());
 }
 
 ArrayBufferObject::FreeInfo* ArrayBufferObject::freeInfo() const {
   MOZ_ASSERT(isExternal());
   return reinterpret_cast<FreeInfo*>(inlineDataPointer());
 }
 
-void ArrayBufferObject::releaseData(FreeOp* fop) {
+void ArrayBufferObject::releaseData(JSFreeOp* fop) {
   switch (bufferKind()) {
     case INLINE_DATA:
       // Inline data doesn't require releasing.
       break;
     case MALLOCED:
       fop->free_(this, dataPointer(), byteLength(),
                  MemoryUse::ArrayBufferContents);
       break;
@@ -1474,17 +1474,17 @@ void ArrayBufferObject::addSizeOfExcludi
       MOZ_CRASH("external buffers not currently supported");
       break;
     case BAD1:
       MOZ_CRASH("bad bufferKind()");
   }
 }
 
 /* static */
-void ArrayBufferObject::finalize(FreeOp* fop, JSObject* obj) {
+void ArrayBufferObject::finalize(JSFreeOp* fop, JSObject* obj) {
   obj->as<ArrayBufferObject>().releaseData(fop);
 }
 
 /* static */
 void ArrayBufferObject::copyData(Handle<ArrayBufferObject*> toBuffer,
                                  uint32_t toIndex,
                                  Handle<ArrayBufferObject*> fromBuffer,
                                  uint32_t fromIndex, uint32_t count) {
--- a/js/src/vm/ArrayBufferObject.h
+++ b/js/src/vm/ArrayBufferObject.h
@@ -384,17 +384,17 @@ class ArrayBufferObject : public ArrayBu
     if (isExternal()) {
       return BufferContents(dataPointer(), EXTERNAL, freeInfo()->freeFunc,
                             freeInfo()->freeUserData);
     }
     return BufferContents(dataPointer(), bufferKind());
   }
   bool hasInlineData() const { return dataPointer() == inlineDataPointer(); }
 
-  void releaseData(FreeOp* fop);
+  void releaseData(JSFreeOp* fop);
 
   BufferKind bufferKind() const {
     return BufferKind(flags() & BUFFER_KIND_MASK);
   }
 
   bool isInlineData() const { return bufferKind() == INLINE_DATA; }
   bool isMalloced() const { return bufferKind() == MALLOCED; }
   bool isNoData() const { return bufferKind() == NO_DATA; }
@@ -422,17 +422,17 @@ class ArrayBufferObject : public ArrayBu
       MutableHandle<ArrayBufferObject*> newBuf, JSContext* cx);
 #ifndef WASM_HUGE_MEMORY
   static MOZ_MUST_USE bool wasmMovingGrowToSize(
       uint32_t newSize, Handle<ArrayBufferObject*> oldBuf,
       MutableHandle<ArrayBufferObject*> newBuf, JSContext* cx);
 #endif
   uint32_t wasmBoundsCheckLimit() const;
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 
   static BufferContents createMappedContents(int fd, size_t offset,
                                              size_t length);
 
   static size_t offsetOfDataSlot() { return getFixedSlotOffset(DATA_SLOT); }
 
   void setHasTypedObjectViews() { setFlags(flags() | TYPED_OBJECT_VIEWS); }
 
--- a/js/src/vm/BigIntType.cpp
+++ b/js/src/vm/BigIntType.cpp
@@ -164,17 +164,17 @@ BigInt* BigInt::createUninitialized(JSCo
   return x;
 }
 
 void BigInt::initializeDigitsToZero() {
   auto digs = digits();
   std::uninitialized_fill_n(digs.begin(), digs.Length(), 0);
 }
 
-void BigInt::finalize(js::FreeOp* fop) {
+void BigInt::finalize(JSFreeOp* fop) {
   if (hasHeapDigits()) {
     size_t size = digitLength() * sizeof(Digit);
     fop->free_(this, heapDigits_, size, js::MemoryUse::BigIntDigits);
   }
 }
 
 js::HashNumber BigInt::hash() {
   js::HashNumber h =
--- a/js/src/vm/BigIntType.h
+++ b/js/src/vm/BigIntType.h
@@ -82,17 +82,17 @@ class BigInt final
   void setDigit(size_t idx, Digit digit) { digits()[idx] = digit; }
 
   bool isZero() const { return digitLength() == 0; }
   bool isNegative() const { return flagsField() & SignBit; }
 
   void initializeDigitsToZero();
 
   void traceChildren(JSTracer* trc);
-  void finalize(js::FreeOp* fop);
+  void finalize(JSFreeOp* fop);
   js::HashNumber hash();
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
   static BigInt* createUninitialized(JSContext* cx, size_t digitLength,
                                      bool isNegative);
   static BigInt* createFromDouble(JSContext* cx, double d);
   static BigInt* createFromUint64(JSContext* cx, uint64_t n);
   static BigInt* createFromInt64(JSContext* cx, int64_t n);
--- a/js/src/vm/Compartment.h
+++ b/js/src/vm/Compartment.h
@@ -319,17 +319,17 @@ class JS::Compartment {
                                                 js::HandleObject origObj,
                                                 js::MutableHandleObject obj);
   bool getOrCreateWrapper(JSContext* cx, js::HandleObject existing,
                           js::MutableHandleObject obj);
 
  public:
   explicit Compartment(JS::Zone* zone, bool invisibleToDebugger);
 
-  void destroy(js::FreeOp* fop);
+  void destroy(JSFreeOp* fop);
 
   MOZ_MUST_USE inline bool wrap(JSContext* cx, JS::MutableHandleValue vp);
 
   MOZ_MUST_USE bool wrap(JSContext* cx, js::MutableHandleString strp);
   MOZ_MUST_USE bool wrap(JSContext* cx, js::MutableHandle<JS::BigInt*> bi);
   MOZ_MUST_USE bool wrap(JSContext* cx, JS::MutableHandleObject obj);
   MOZ_MUST_USE bool wrap(JSContext* cx,
                          JS::MutableHandle<JS::PropertyDescriptor> desc);
@@ -375,18 +375,17 @@ class JS::Compartment {
    * These methods mark pointers that cross compartment boundaries. They are
    * called in per-zone GCs to prevent the wrappers' outgoing edges from
    * dangling (full GCs naturally follow pointers across compartments) and
    * when compacting to update cross-compartment pointers.
    */
   void traceOutgoingCrossCompartmentWrappers(JSTracer* trc);
   static void traceIncomingCrossCompartmentEdgesForZoneGC(JSTracer* trc);
 
-  void sweepRealms(js::FreeOp* fop, bool keepAtleastOne,
-                   bool destroyingRuntime);
+  void sweepRealms(JSFreeOp* fop, bool keepAtleastOne, bool destroyingRuntime);
   void sweepAfterMinorGC(JSTracer* trc);
   void sweepCrossCompartmentObjectWrappers();
 
   void fixupCrossCompartmentObjectWrappersAfterMovingGC(JSTracer* trc);
   void fixupAfterMovingGC(JSTracer* trc);
 
   MOZ_MUST_USE bool findSweepGroupEdges();
 };
--- a/js/src/vm/Instrumentation.cpp
+++ b/js/src/vm/Instrumentation.cpp
@@ -30,17 +30,17 @@ enum InstrumentationHolderSlots {
 };
 
 static RealmInstrumentation* GetInstrumentation(JSObject* obj) {
   Value v = JS_GetReservedSlot(obj, RealmInstrumentationSlot);
   return static_cast<RealmInstrumentation*>(v.toPrivate());
 }
 
 /* static */
-void RealmInstrumentation::holderFinalize(FreeOp* fop, JSObject* obj) {
+void RealmInstrumentation::holderFinalize(JSFreeOp* fop, JSObject* obj) {
   RealmInstrumentation* instrumentation = GetInstrumentation(obj);
   fop->delete_(obj, instrumentation, MemoryUse::RealmInstrumentation);
 }
 
 /* static */
 void RealmInstrumentation::holderTrace(JSTracer* trc, JSObject* obj) {
   RealmInstrumentation* instrumentation = GetInstrumentation(obj);
   instrumentation->trace(trc);
--- a/js/src/vm/Instrumentation.h
+++ b/js/src/vm/Instrumentation.h
@@ -78,17 +78,17 @@ class RealmInstrumentation {
   static const int32_t* addressOfActive(GlobalObject* global);
 
   // This is public for js_new.
   RealmInstrumentation(Zone* zone, JSObject* callback, JSObject* dbgObject,
                        uint32_t kinds);
 
   void trace(JSTracer* trc);
 
-  static void holderFinalize(FreeOp* fop, JSObject* obj);
+  static void holderFinalize(JSFreeOp* fop, JSObject* obj);
   static void holderTrace(JSTracer* trc, JSObject* obj);
 };
 
 // For use in the frontend when an opcode may or may not need instrumentation.
 enum class ShouldInstrument {
   No,
   Yes,
 };
--- a/js/src/vm/Iteration.cpp
+++ b/js/src/vm/Iteration.cpp
@@ -1073,17 +1073,17 @@ size_t PropertyIteratorObject::sizeOfMis
 
 void PropertyIteratorObject::trace(JSTracer* trc, JSObject* obj) {
   if (NativeIterator* ni =
           obj->as<PropertyIteratorObject>().getNativeIterator()) {
     ni->trace(trc);
   }
 }
 
-void PropertyIteratorObject::finalize(FreeOp* fop, JSObject* obj) {
+void PropertyIteratorObject::finalize(JSFreeOp* fop, JSObject* obj) {
   if (NativeIterator* ni =
           obj->as<PropertyIteratorObject>().getNativeIterator()) {
     fop->free_(obj, ni, ni->allocationSize(), MemoryUse::NativeIterator);
   }
 }
 
 const ClassOps PropertyIteratorObject::classOps_ = {nullptr, /* addProperty */
                                                     nullptr, /* delProperty */
--- a/js/src/vm/Iteration.h
+++ b/js/src/vm/Iteration.h
@@ -362,17 +362,17 @@ class PropertyIteratorObject : public Na
     return static_cast<js::NativeIterator*>(getPrivate());
   }
   void setNativeIterator(js::NativeIterator* ni) { setPrivate(ni); }
 
   size_t sizeOfMisc(mozilla::MallocSizeOf mallocSizeOf) const;
 
  private:
   static void trace(JSTracer* trc, JSObject* obj);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 };
 
 class ArrayIteratorObject : public NativeObject {
  public:
   static const Class class_;
 };
 
 ArrayIteratorObject* NewArrayIteratorObject(
--- a/js/src/vm/JSContext.h
+++ b/js/src/vm/JSContext.h
@@ -169,17 +169,17 @@ struct JSContext : public JS::RootingCon
   // This is reset each time we switch zone, then added to the variable in the
   // zone when we switch away from it.  This would be a js::ThreadData but we
   // need to take its address.
   uint32_t allocsThisZoneSinceMinorGC_;
 
   // Free lists for parallel allocation in the atoms zone on helper threads.
   js::ContextData<js::gc::FreeLists*> atomsZoneFreeLists_;
 
-  js::ContextData<js::FreeOp> defaultFreeOp_;
+  js::ContextData<JSFreeOp> defaultFreeOp_;
 
   // Thread that the JSContext is currently running on, if in use.
   js::Thread::Id currentThread_;
 
   js::ParseTask* parseTask_;
 
   // When a helper thread is using a context, it may need to periodically
   // free unused memory.
@@ -292,17 +292,17 @@ struct JSContext : public JS::RootingCon
   bool permanentAtomsPopulated() { return runtime_->permanentAtomsPopulated(); }
   const js::FrozenAtomSet& permanentAtoms() {
     return *runtime_->permanentAtoms();
   }
   js::WellKnownSymbols& wellKnownSymbols() {
     return *runtime_->wellKnownSymbols;
   }
   js::PropertyName* emptyString() { return runtime_->emptyString; }
-  js::FreeOp* defaultFreeOp() { return &defaultFreeOp_.ref(); }
+  JSFreeOp* defaultFreeOp() { return &defaultFreeOp_.ref(); }
   void* stackLimitAddress(JS::StackKind kind) {
     return &nativeStackLimit[kind];
   }
   void* stackLimitAddressForJitCode(JS::StackKind kind);
   uintptr_t stackLimit(JS::StackKind kind) { return nativeStackLimit[kind]; }
   uintptr_t stackLimitForJitCode(JS::StackKind kind);
   size_t gcSystemPageSize() { return js::gc::SystemPageSize(); }
 
@@ -1283,23 +1283,23 @@ class MOZ_RAII AutoUnsafeCallWithABI {
 namespace gc {
 
 // Set/unset the performing GC flag for the current thread.
 class MOZ_RAII AutoSetThreadIsPerformingGC {
   JSContext* cx;
 
  public:
   AutoSetThreadIsPerformingGC() : cx(TlsContext.get()) {
-    FreeOp* fop = cx->defaultFreeOp();
+    JSFreeOp* fop = cx->defaultFreeOp();
     MOZ_ASSERT(!fop->isCollecting());
     fop->isCollecting_ = true;
   }
 
   ~AutoSetThreadIsPerformingGC() {
-    FreeOp* fop = cx->defaultFreeOp();
+    JSFreeOp* fop = cx->defaultFreeOp();
     MOZ_ASSERT(fop->isCollecting());
     fop->isCollecting_ = false;
   }
 };
 
 // In debug builds, set/reset the GC sweeping flag for the current thread.
 struct MOZ_RAII AutoSetThreadIsSweeping {
   AutoSetThreadIsSweeping() : cx(TlsContext.get()), prevState(cx->gcSweeping) {
--- a/js/src/vm/JSObject-inl.h
+++ b/js/src/vm/JSObject-inl.h
@@ -44,17 +44,17 @@ MOZ_ALWAYS_INLINE uint32_t js::NativeObj
 }
 
 /* static */ MOZ_ALWAYS_INLINE uint32_t
 js::NativeObject::dynamicSlotsCount(Shape* shape) {
   return dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan(),
                            shape->getObjectClass());
 }
 
-inline void JSObject::finalize(js::FreeOp* fop) {
+inline void JSObject::finalize(JSFreeOp* fop) {
   js::probes::FinalizeObject(this);
 
 #ifdef DEBUG
   MOZ_ASSERT(isTenured());
   if (!IsBackgroundFinalized(asTenured().getAllocKind())) {
     /* Assert we're on the main thread. */
     MOZ_ASSERT(CurrentThreadCanAccessZone(zone()));
   }
--- a/js/src/vm/JSObject.h
+++ b/js/src/vm/JSObject.h
@@ -457,17 +457,17 @@ class JSObject : public js::gc::Cell {
   /*
    * Back to generic stuff.
    */
   MOZ_ALWAYS_INLINE bool isCallable() const;
   MOZ_ALWAYS_INLINE bool isConstructor() const;
   MOZ_ALWAYS_INLINE JSNative callHook() const;
   MOZ_ALWAYS_INLINE JSNative constructHook() const;
 
-  MOZ_ALWAYS_INLINE void finalize(js::FreeOp* fop);
+  MOZ_ALWAYS_INLINE void finalize(JSFreeOp* fop);
 
  public:
   static bool nonNativeSetProperty(JSContext* cx, js::HandleObject obj,
                                    js::HandleId id, js::HandleValue v,
                                    js::HandleValue receiver,
                                    JS::ObjectOpResult& result);
   static bool nonNativeSetElement(JSContext* cx, js::HandleObject obj,
                                   uint32_t index, js::HandleValue v,
--- a/js/src/vm/JSScript-inl.h
+++ b/js/src/vm/JSScript-inl.h
@@ -161,39 +161,39 @@ inline js::Shape* JSScript::initialEnvir
 inline JSPrincipals* JSScript::principals() { return realm()->principals(); }
 
 inline void JSScript::setBaselineScript(
     JSRuntime* rt, js::jit::BaselineScript* baselineScript) {
   setBaselineScript(rt->defaultFreeOp(), baselineScript);
 }
 
 inline void JSScript::setBaselineScript(
-    js::FreeOp* fop, js::jit::BaselineScript* baselineScript) {
+    JSFreeOp* fop, js::jit::BaselineScript* baselineScript) {
   if (hasBaselineScript()) {
     js::jit::BaselineScript::writeBarrierPre(zone(), baseline);
     clearBaselineScript(fop);
   }
   MOZ_ASSERT(!ion || ion == ION_DISABLED_SCRIPT);
 
   baseline = baselineScript;
   if (hasBaselineScript()) {
     AddCellMemory(this, baseline->allocBytes(), js::MemoryUse::BaselineScript);
   }
   resetWarmUpResetCounter();
   updateJitCodeRaw(fop->runtime());
 }
 
-inline void JSScript::clearBaselineScript(js::FreeOp* fop) {
+inline void JSScript::clearBaselineScript(JSFreeOp* fop) {
   MOZ_ASSERT(hasBaselineScript());
   fop->removeCellMemory(this, baseline->allocBytes(),
                         js::MemoryUse::BaselineScript);
   baseline = nullptr;
 }
 
-inline void JSScript::clearIonScript(js::FreeOp* fop) {
+inline void JSScript::clearIonScript(JSFreeOp* fop) {
   MOZ_ASSERT(hasIonScript());
   fop->removeCellMemory(this, ion->allocBytes(), js::MemoryUse::IonScript);
   ion = nullptr;
 }
 
 inline bool JSScript::ensureHasAnalyzedArgsUsage(JSContext* cx) {
   if (analyzedArgsUsage()) {
     return true;
--- a/js/src/vm/JSScript.cpp
+++ b/js/src/vm/JSScript.cpp
@@ -1510,17 +1510,17 @@ size_t ScriptCounts::sizeOfIncludingThis
          throwCounts_.sizeOfExcludingThis(mallocSizeOf) +
          ionCounts_->sizeOfIncludingThis(mallocSizeOf);
 }
 
 void JSScript::setIonScript(JSRuntime* rt, js::jit::IonScript* ionScript) {
   setIonScript(rt->defaultFreeOp(), ionScript);
 }
 
-void JSScript::setIonScript(FreeOp* fop, js::jit::IonScript* ionScript) {
+void JSScript::setIonScript(JSFreeOp* fop, js::jit::IonScript* ionScript) {
   MOZ_ASSERT_IF(ionScript != ION_DISABLED_SCRIPT,
                 !baselineScript()->hasPendingIonBuilder());
   if (hasIonScript()) {
     js::jit::IonScript::writeBarrierPre(zone(), ion);
     clearIonScript(fop);
   }
   ion = ionScript;
   MOZ_ASSERT_IF(hasIonScript(), hasBaselineScript());
@@ -1646,17 +1646,17 @@ bool JSScript::hasScriptName() {
   if (!realm()->scriptNameMap) {
     return false;
   }
 
   auto p = realm()->scriptNameMap->lookup(this);
   return p.found();
 }
 
-void ScriptSourceObject::finalize(FreeOp* fop, JSObject* obj) {
+void ScriptSourceObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   ScriptSourceObject* sso = &obj->as<ScriptSourceObject>();
   sso->source()->decref();
 
   // Clear the private value, calling the release hook if necessary.
   sso->setPrivate(fop->runtime(), UndefinedValue());
 }
 
@@ -4184,17 +4184,17 @@ void JSScript::addSizeOfJitScript(mozill
   }
 
   jitScript_->addSizeOfIncludingThis(mallocSizeOf, sizeOfJitScript,
                                      sizeOfBaselineFallbackStubs);
 }
 
 js::GlobalObject& JSScript::uninlinedGlobal() const { return global(); }
 
-void JSScript::finalize(FreeOp* fop) {
+void JSScript::finalize(JSFreeOp* fop) {
   // NOTE: this JSScript may be partially initialized at this point.  E.g. we
   // may have created it and partially initialized it with
   // JSScript::Create(), but not yet finished initializing it with
   // fullyInitFromEmitter().
 
   // Collect code coverage information for this script and all its inner
   // scripts, and store the aggregated information on the realm.
   MOZ_ASSERT_IF(hasScriptName(), coverage::IsLCovEnabled());
@@ -4870,17 +4870,17 @@ void JSScript::traceChildren(JSTracer* t
 
   jit::TraceJitScripts(trc, this);
 
   if (trc->isMarkingTracer()) {
     GCMarker::fromTracer(trc)->markImplicitEdges(this);
   }
 }
 
-void LazyScript::finalize(FreeOp* fop) {
+void LazyScript::finalize(JSFreeOp* fop) {
   if (lazyData_) {
     fop->free_(this, lazyData_, lazyData_->allocationSize(),
                MemoryUse::LazyScriptData);
   }
 }
 
 size_t JSScript::calculateLiveFixed(jsbytecode* pc) {
   size_t nlivefixed = numAlwaysLiveFixedSlots();
--- a/js/src/vm/JSScript.h
+++ b/js/src/vm/JSScript.h
@@ -1274,17 +1274,17 @@ class ScriptSourceObject : public Native
     return &getReservedSlot(CANONICAL_SLOT).toObject() == this;
   }
   ScriptSourceObject* unwrappedCanonical() const;
 
  public:
   static const Class class_;
 
   static void trace(JSTracer* trc, JSObject* obj);
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 
   static ScriptSourceObject* create(JSContext* cx, ScriptSource* source);
   static ScriptSourceObject* clone(JSContext* cx, HandleScriptSourceObject sso);
 
   // Initialize those properties of this ScriptSourceObject whose values
   // are provided by |options|, re-wrapping as necessary.
   static bool initFromOptions(JSContext* cx, HandleScriptSourceObject source,
                               const JS::ReadOnlyCompileOptions& options);
@@ -2611,36 +2611,36 @@ class JSScript : public js::BaseScript {
 
   js::jit::IonScript* ionScript() const {
     MOZ_ASSERT(hasIonScript());
     return ion;
   }
   js::jit::IonScript* maybeIonScript() const { return ion; }
   js::jit::IonScript* const* addressOfIonScript() const { return &ion; }
   void setIonScript(JSRuntime* rt, js::jit::IonScript* ionScript);
-  void setIonScript(js::FreeOp* fop, js::jit::IonScript* ionScript);
-  inline void clearIonScript(js::FreeOp* fop);
+  void setIonScript(JSFreeOp* fop, js::jit::IonScript* ionScript);
+  inline void clearIonScript(JSFreeOp* fop);
 
   bool hasBaselineScript() const {
     bool res = baseline && baseline != BASELINE_DISABLED_SCRIPT;
     MOZ_ASSERT_IF(!res, !ion || ion == ION_DISABLED_SCRIPT);
     return res;
   }
   bool canBaselineCompile() const {
     return baseline != BASELINE_DISABLED_SCRIPT;
   }
   js::jit::BaselineScript* baselineScript() const {
     MOZ_ASSERT(hasBaselineScript());
     return baseline;
   }
   inline void setBaselineScript(JSRuntime* rt,
                                 js::jit::BaselineScript* baselineScript);
-  inline void setBaselineScript(js::FreeOp* fop,
+  inline void setBaselineScript(JSFreeOp* fop,
                                 js::jit::BaselineScript* baselineScript);
-  inline void clearBaselineScript(js::FreeOp* fop);
+  inline void clearBaselineScript(JSFreeOp* fop);
 
   void updateJitCodeRaw(JSRuntime* rt);
 
   static size_t offsetOfBaselineScript() {
     return offsetof(JSScript, baseline);
   }
   static size_t offsetOfIonScript() { return offsetof(JSScript, ion); }
 
@@ -2746,18 +2746,18 @@ class JSScript : public js::BaseScript {
   bool isTopLevel() { return code() && !functionNonDelazifying(); }
 
   /* Ensure the script has a JitScript. */
   inline bool ensureHasJitScript(JSContext* cx, js::jit::AutoKeepJitScripts&);
 
   bool hasJitScript() const { return jitScript_ != nullptr; }
   js::jit::JitScript* jitScript() { return jitScript_; }
 
-  void maybeReleaseJitScript(js::FreeOp* fop);
-  void releaseJitScript(js::FreeOp* fop);
+  void maybeReleaseJitScript(JSFreeOp* fop);
+  void releaseJitScript(JSFreeOp* fop);
 
   inline js::GlobalObject& global() const;
   inline bool hasGlobal(const js::GlobalObject* global) const;
   js::GlobalObject& uninlinedGlobal() const;
 
   uint32_t bodyScopeIndex() const {
     return immutableScriptData()->bodyScopeIndex;
   }
@@ -3041,17 +3041,17 @@ class JSScript : public js::BaseScript {
   // invariants of debuggee compartments, scripts, and frames.
   inline bool isDebuggee() const;
 
   // Access the flag for whether this script has a DebugScript in its realm's
   // map. This should only be used by the DebugScript class.
   bool hasDebugScript() const { return hasFlag(MutableFlags::HasDebugScript); }
   void setHasDebugScript(bool b) { setFlag(MutableFlags::HasDebugScript, b); }
 
-  void finalize(js::FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   static const JS::TraceKind TraceKind = JS::TraceKind::Script;
 
   void traceChildren(JSTracer* trc);
 
   // A helper class to prevent relazification of the given function's script
   // while it's holding on to it.  This class automatically roots the script.
   class AutoDelazify;
@@ -3455,17 +3455,17 @@ class LazyScript : public BaseScript {
   // The enclosing JSScript can be GCed later if the enclosing scope is not
   // FunctionScope or ModuleScope.
   bool enclosingScriptHasEverBeenCompiled() const {
     return hasEnclosingScope();
   }
 
   friend class GCMarker;
   void traceChildren(JSTracer* trc);
-  void finalize(js::FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   static const JS::TraceKind TraceKind = JS::TraceKind::LazyScript;
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
     return mallocSizeOf(lazyData_);
   }
 };
 
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -42,17 +42,17 @@ ObjectGroup::ObjectGroup(const Class* cl
     : clasp_(clasp), proto_(proto), realm_(realm), flags_(initialFlags) {
   /* Windows may not appear on prototype chains. */
   MOZ_ASSERT_IF(proto.isObject(), !IsWindow(proto.toObject()));
   MOZ_ASSERT(JS::StringIsASCII(clasp->name));
 
   setGeneration(zone()->types.generation);
 }
 
-void ObjectGroup::finalize(FreeOp* fop) {
+void ObjectGroup::finalize(JSFreeOp* fop) {
   if (auto newScript = newScriptDontCheckGeneration()) {
     newScript->clear();
     fop->delete_(this, newScript, newScript->gcMallocBytes(),
                  MemoryUse::ObjectGroupAddendum);
   }
   if (maybePreliminaryObjectsDontCheckGeneration()) {
     maybePreliminaryObjectsDontCheckGeneration()->clear();
   }
--- a/js/src/vm/ObjectGroup.h
+++ b/js/src/vm/ObjectGroup.h
@@ -427,17 +427,17 @@ class ObjectGroup : public gc::TenuredCe
     MOZ_ASSERT(generation <=
                (OBJECT_FLAG_GENERATION_MASK >> OBJECT_FLAG_GENERATION_SHIFT));
     flags_ &= ~OBJECT_FLAG_GENERATION_MASK;
     flags_ |= generation << OBJECT_FLAG_GENERATION_SHIFT;
   }
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
-  void finalize(FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   static const JS::TraceKind TraceKind = JS::TraceKind::ObjectGroup;
 
  public:
   const ObjectGroupFlags* addressOfFlags() const { return &flags_; }
 
   inline uint32_t basePropertyCount(const AutoSweepObjectGroup& sweep);
   inline uint32_t basePropertyCountDontCheckGeneration();
--- a/js/src/vm/PIC.cpp
+++ b/js/src/vm/PIC.cpp
@@ -288,30 +288,30 @@ void js::ForOfPIC::Chain::trace(JSTracer
             "ForOfPIC ArrayIterator.prototype.next builtin.");
 
   if (trc->isMarkingTracer()) {
     // Free all the stubs in the chain.
     freeAllStubs(trc->runtime()->defaultFreeOp());
   }
 }
 
-static void ForOfPIC_finalize(FreeOp* fop, JSObject* obj) {
+static void ForOfPIC_finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
   if (ForOfPIC::Chain* chain =
           ForOfPIC::fromJSObject(&obj->as<NativeObject>())) {
     chain->finalize(fop, obj);
   }
 }
 
-void js::ForOfPIC::Chain::finalize(FreeOp* fop, JSObject* obj) {
+void js::ForOfPIC::Chain::finalize(JSFreeOp* fop, JSObject* obj) {
   freeAllStubs(fop);
   fop->delete_(obj, this, MemoryUse::ForOfPIC);
 }
 
-void js::ForOfPIC::Chain::freeAllStubs(FreeOp* fop) {
+void js::ForOfPIC::Chain::freeAllStubs(JSFreeOp* fop) {
   Stub* stub = stubs_;
   while (stub) {
     Stub* next = stub->next();
     fop->delete_(picObject_, stub, MemoryUse::ForOfPICStub);
     stub = next;
   }
   stubs_ = nullptr;
 }
--- a/js/src/vm/PIC.h
+++ b/js/src/vm/PIC.h
@@ -182,17 +182,17 @@ struct ForOfPIC {
     // Try to optimize this chain for an object.
     bool tryOptimizeArray(JSContext* cx, HandleArrayObject array,
                           bool* optimized);
 
     // Check if %ArrayIteratorPrototype% still uses the default "next" method.
     bool tryOptimizeArrayIteratorNext(JSContext* cx, bool* optimized);
 
     void trace(JSTracer* trc);
-    void finalize(FreeOp* fop, JSObject* obj);
+    void finalize(JSFreeOp* fop, JSObject* obj);
 
    private:
     // Check if the global array-related objects have not been messed with
     // in a way that would disable this PIC.
     bool isArrayStateStillSane();
 
     // Check if ArrayIterator.next is still optimizable.
     inline bool isArrayNextStillSane() {
@@ -206,17 +206,17 @@ struct ForOfPIC {
     bool hasMatchingStub(ArrayObject* obj);
 
     // Reset the PIC and all info associated with it.
     void reset(JSContext* cx);
 
     // Erase the stub chain.
     void eraseChain(JSContext* cx);
 
-    void freeAllStubs(FreeOp* fop);
+    void freeAllStubs(JSFreeOp* fop);
   };
 
   // Class for object that holds ForOfPIC chain.
   static const Class class_;
 
   static NativeObject* createForOfPICObject(JSContext* cx,
                                             Handle<GlobalObject*> global);
 
--- a/js/src/vm/Realm.h
+++ b/js/src/vm/Realm.h
@@ -457,17 +457,17 @@ class JS::Realm : public JS::shadow::Rea
   Realm(const Realm&) = delete;
   void operator=(const Realm&) = delete;
 
  public:
   Realm(JS::Compartment* comp, const JS::RealmOptions& options);
   ~Realm();
 
   MOZ_MUST_USE bool init(JSContext* cx, JSPrincipals* principals);
-  void destroy(js::FreeOp* fop);
+  void destroy(JSFreeOp* fop);
   void clearTables();
 
   void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                               size_t* tiAllocationSiteTables,
                               size_t* tiArrayTypeTables,
                               size_t* tiObjectTypeTables, size_t* realmObject,
                               size_t* realmTables, size_t* innerViews,
                               size_t* lazyArrayBuffers,
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -920,17 +920,17 @@ void RegExpShared::discardJitCode() {
   for (auto& comp : compilationArray) {
     comp.jitCode = nullptr;
   }
 
   // We can also purge the tables used by JIT code.
   tables.clearAndFree();
 }
 
-void RegExpShared::finalize(FreeOp* fop) {
+void RegExpShared::finalize(JSFreeOp* fop) {
   for (auto& comp : compilationArray) {
     if (comp.byteCode) {
       size_t length = comp.byteCodeLength();
       fop->free_(this, comp.byteCode, length, MemoryUse::RegExpSharedBytecode);
     }
   }
   tables.~JitCodeTables();
 }
--- a/js/src/vm/RegExpShared.h
+++ b/js/src/vm/RegExpShared.h
@@ -180,17 +180,17 @@ class RegExpShared : public gc::TenuredC
   }
   bool isCompiled() const {
     return isCompiled(Normal, true) || isCompiled(Normal, false) ||
            isCompiled(MatchOnly, true) || isCompiled(MatchOnly, false);
   }
 
   void traceChildren(JSTracer* trc);
   void discardJitCode();
-  void finalize(FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   static size_t offsetOfSource() { return offsetof(RegExpShared, source); }
 
   static size_t offsetOfFlags() { return offsetof(RegExpShared, flags); }
 
   static size_t offsetOfParenCount() {
     return offsetof(RegExpShared, parenCount);
   }
--- a/js/src/vm/RegExpStatics.cpp
+++ b/js/src/vm/RegExpStatics.cpp
@@ -15,17 +15,17 @@ using namespace js;
 
 /*
  * RegExpStatics allocates memory -- in order to keep the statics stored
  * per-global and not leak, we create a js::Class to wrap the C++ instance and
  * provide an appropriate finalizer. We lazily create and store an instance of
  * that js::Class in a global reserved slot.
  */
 
-static void resc_finalize(FreeOp* fop, JSObject* obj) {
+static void resc_finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   RegExpStatics* res =
       static_cast<RegExpStatics*>(obj->as<RegExpStaticsObject>().getPrivate());
   fop->delete_(obj, res, MemoryUse::RegExpStatics);
 }
 
 static void resc_trace(JSTracer* trc, JSObject* obj) {
   void* pdata = obj->as<RegExpStaticsObject>().getPrivate();
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -685,20 +685,20 @@ struct JSRuntime {
 
   void lockGC() { gc.lockGC(); }
 
   void unlockGC() { gc.unlockGC(); }
 
   js::WriteOnceData<js::PropertyName*> emptyString;
 
  private:
-  js::MainThreadData<js::FreeOp*> defaultFreeOp_;
+  js::MainThreadData<JSFreeOp*> defaultFreeOp_;
 
  public:
-  js::FreeOp* defaultFreeOp() {
+  JSFreeOp* defaultFreeOp() {
     MOZ_ASSERT(defaultFreeOp_);
     return defaultFreeOp_;
   }
 
 #if !EXPOSE_INTL_API
   /* Number localization, used by jsnum.cpp. */
   js::WriteOnceData<const char*> thousandsSeparator;
   js::WriteOnceData<const char*> decimalSeparator;
--- a/js/src/vm/SavedFrame.h
+++ b/js/src/vm/SavedFrame.h
@@ -36,17 +36,17 @@ class SavedFrame : public NativeObject {
   static bool columnProperty(JSContext* cx, unsigned argc, Value* vp);
   static bool functionDisplayNameProperty(JSContext* cx, unsigned argc,
                                           Value* vp);
   static bool asyncCauseProperty(JSContext* cx, unsigned argc, Value* vp);
   static bool asyncParentProperty(JSContext* cx, unsigned argc, Value* vp);
   static bool parentProperty(JSContext* cx, unsigned argc, Value* vp);
   static bool toStringMethod(JSContext* cx, unsigned argc, Value* vp);
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
 
   // Convenient getters for SavedFrame's reserved slots for use from C++.
   JSAtom* getSource();
   uint32_t getSourceId();
   uint32_t getLine();
   uint32_t getColumn();
   JSAtom* getFunctionDisplayName();
   JSAtom* getAsyncCause();
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -395,17 +395,17 @@ const Class SavedFrame::protoClass_ = {
     JS_PSG("column", SavedFrame::columnProperty, 0),
     JS_PSG("functionDisplayName", SavedFrame::functionDisplayNameProperty, 0),
     JS_PSG("asyncCause", SavedFrame::asyncCauseProperty, 0),
     JS_PSG("asyncParent", SavedFrame::asyncParentProperty, 0),
     JS_PSG("parent", SavedFrame::parentProperty, 0),
     JS_PS_END};
 
 /* static */
-void SavedFrame::finalize(FreeOp* fop, JSObject* obj) {
+void SavedFrame::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->onMainThread());
   JSPrincipals* p = obj->as<SavedFrame>().getPrincipals();
   if (p) {
     JSRuntime* rt = obj->runtimeFromMainThread();
     JS_DropPrincipals(rt->mainContextFromOwnThread(), p);
   }
 }
 
--- a/js/src/vm/Scope.cpp
+++ b/js/src/vm/Scope.cpp
@@ -458,17 +458,17 @@ Scope* Scope::clone(JSContext* cx, Handl
     case ScopeKind::WasmInstance:
       MOZ_CRASH("NYI");
       break;
   }
 
   return nullptr;
 }
 
-void Scope::finalize(FreeOp* fop) {
+void Scope::finalize(JSFreeOp* fop) {
   MOZ_ASSERT(CurrentThreadIsGCSweeping());
   applyScopeDataTyped([this, fop](auto data) {
     fop->delete_(this, data, SizeOfAllocatedData(data), MemoryUse::ScopeData);
   });
   data_ = nullptr;
 }
 
 size_t Scope::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
--- a/js/src/vm/Scope.h
+++ b/js/src/vm/Scope.h
@@ -377,17 +377,17 @@ class Scope : public js::gc::TenuredCell
       }
     }
     return false;
   }
 
   static Scope* clone(JSContext* cx, HandleScope scope, HandleScope enclosing);
 
   void traceChildren(JSTracer* trc);
-  void finalize(FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
   void dump();
 };
 
 /** Empty base class for scope Data classes to inherit from. */
 class BaseScopeData {};
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -176,17 +176,17 @@ bool Shape::hashify(JSContext* cx, Shape
   base->maybePurgeCache(cx->defaultFreeOp());
   base->setTable(table.release());
   // TODO: The contents of ShapeTable is not currently tracked, only the object
   // itself.
   AddCellMemory(base, sizeof(ShapeTable), MemoryUse::ShapeCache);
   return true;
 }
 
-void ShapeCachePtr::maybePurgeCache(FreeOp* fop, BaseShape* base) {
+void ShapeCachePtr::maybePurgeCache(JSFreeOp* fop, BaseShape* base) {
   if (isTable()) {
     ShapeTable* table = getTablePointer();
     if (table->freeList() == SHAPE_INVALID_SLOT) {
       fop->delete_(base, getTablePointer(), MemoryUse::ShapeCache);
       p = 0;
     }
   } else if (isIC()) {
     fop->delete_<ShapeIC>(base, getICPointer(), MemoryUse::ShapeCache);
@@ -300,17 +300,17 @@ void ShapeTable::trace(JSTracer* trc) {
       TraceManuallyBarrieredEdge(trc, &shape, "ShapeTable shape");
       if (shape != entry.shape()) {
         entry.setPreservingCollision(shape);
       }
     }
   }
 }
 
-inline void ShapeCachePtr::destroy(FreeOp* fop, BaseShape* base) {
+inline void ShapeCachePtr::destroy(JSFreeOp* fop, BaseShape* base) {
   if (isTable()) {
     fop->delete_(base, getTablePointer(), MemoryUse::ShapeCache);
   } else if (isIC()) {
     fop->delete_(base, getICPointer(), MemoryUse::ShapeCache);
   }
   p = 0;
 }
 
@@ -1678,17 +1678,17 @@ void Zone::checkBaseShapeTableAfterMovin
 
     BaseShapeSet::Ptr ptr = baseShapes().lookup(base);
     MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
   }
 }
 
 #endif  // JSGC_HASH_TABLE_CHECKS
 
-void BaseShape::finalize(FreeOp* fop) {
+void BaseShape::finalize(JSFreeOp* fop) {
   if (cache_.isInitialized()) {
     cache_.destroy(fop, this);
   }
 }
 
 inline InitialShapeEntry::InitialShapeEntry() : shape(nullptr), proto() {}
 
 inline InitialShapeEntry::InitialShapeEntry(Shape* shape,
@@ -1789,17 +1789,17 @@ bool PropertyTree::insertChild(JSContext
     ReportOutOfMemory(cx);
     return false;
   }
 
   child->setParent(parent);
   return true;
 }
 
-void Shape::removeChild(FreeOp* fop, Shape* child) {
+void Shape::removeChild(JSFreeOp* fop, Shape* child) {
   MOZ_ASSERT(!child->inDictionary());
   MOZ_ASSERT(child->parent == this);
 
   KidsPointer* kidp = &kids;
 
   if (kidp->isShape()) {
     MOZ_ASSERT(kidp->toShape() == child);
     kidp->setNull();
@@ -1877,17 +1877,17 @@ MOZ_ALWAYS_INLINE Shape* PropertyTree::i
   return shape;
 }
 
 Shape* PropertyTree::getChild(JSContext* cx, Shape* parent,
                               Handle<StackShape> child) {
   return inlinedGetChild(cx, parent, child);
 }
 
-void Shape::sweep(FreeOp* fop) {
+void Shape::sweep(JSFreeOp* fop) {
   /*
    * We detach the child from the parent if the parent is reachable.
    *
    * This test depends on shape arenas not being freed until after we finish
    * incrementally sweeping them. If that were not the case the parent pointer
    * could point to a marked cell that had been deallocated and then
    * reallocated, since allocating a cell in a zone that is being marked will
    * set the mark bit for that cell.
@@ -1898,17 +1898,17 @@ void Shape::sweep(FreeOp* fop) {
         parent->listp = nullptr;
       }
     } else {
       parent->removeChild(fop, this);
     }
   }
 }
 
-void Shape::finalize(FreeOp* fop) {
+void Shape::finalize(JSFreeOp* fop) {
   if (!inDictionary() && kids.isHash()) {
     fop->delete_(this, kids.toHash(), MemoryUse::ShapeKids);
   }
 }
 
 void Shape::fixupDictionaryShapeAfterMovingGC() {
   if (!listp) {
     return;
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -510,19 +510,19 @@ class ShapeCachePtr {
 
     // Double check that pointer is 4 byte aligned.
     MOZ_ASSERT((icptr & CACHETYPE_MASK) == 0);
 
     icptr |= static_cast<uintptr_t>(CacheType::IC);
     p = icptr;
   }
 
-  void destroy(FreeOp* fop, BaseShape* base);
+  void destroy(JSFreeOp* fop, BaseShape* base);
 
-  void maybePurgeCache(FreeOp* fop, BaseShape* base);
+  void maybePurgeCache(JSFreeOp* fop, BaseShape* base);
 
   void trace(JSTracer* trc);
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
     size_t size = 0;
     if (isIC()) {
       size = getICPointer()->sizeOfIncludingThis(mallocSizeOf);
     } else if (isTable()) {
@@ -664,17 +664,17 @@ class BaseShape : public gc::TenuredCell
 
   /* For owned BaseShapes, the shape's shape table. */
   ShapeCachePtr cache_;
 
   BaseShape(const BaseShape& base) = delete;
   BaseShape& operator=(const BaseShape& other) = delete;
 
  public:
-  void finalize(FreeOp* fop);
+  void finalize(JSFreeOp* fop);
 
   explicit inline BaseShape(const StackBaseShape& base);
 
   /* Not defined: BaseShapes must not be stack allocated. */
   ~BaseShape();
 
   const Class* clasp() const { return clasp_; }
 
@@ -735,17 +735,17 @@ class BaseShape : public gc::TenuredCell
     return (cache_.isIC()) ? cache_.getICPointer() : nullptr;
   }
 
   ShapeIC* maybeIC(const JS::AutoCheckCannotGC&) const {
     MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
     return (cache_.isIC()) ? cache_.getICPointer() : nullptr;
   }
 
-  void maybePurgeCache(FreeOp* fop) { cache_.maybePurgeCache(fop, this); }
+  void maybePurgeCache(JSFreeOp* fop) { cache_.maybePurgeCache(fop, this); }
 
   uint32_t slotSpan() const {
     MOZ_ASSERT(isOwned());
     return slotSpan_;
   }
   void setSlotSpan(uint32_t slotSpan) {
     MOZ_ASSERT(isOwned());
     slotSpan_ = slotSpan;
@@ -1348,19 +1348,19 @@ class Shape : public gc::TenuredCell {
   }
 
 #ifdef DEBUG
   void dump(js::GenericPrinter& out) const;
   void dump() const;
   void dumpSubtree(int level, js::GenericPrinter& out) const;
 #endif
 
-  void sweep(FreeOp* fop);
-  void finalize(FreeOp* fop);
-  void removeChild(FreeOp* fop, Shape* child);
+  void sweep(JSFreeOp* fop);
+  void finalize(JSFreeOp* fop);
+  void removeChild(JSFreeOp* fop, Shape* child);
 
   static const JS::TraceKind TraceKind = JS::TraceKind::Shape;
 
   void traceChildren(JSTracer* trc);
 
   MOZ_ALWAYS_INLINE Shape* search(JSContext* cx, jsid id);
   MOZ_ALWAYS_INLINE Shape* searchLinear(jsid id);
 
--- a/js/src/vm/SharedArrayObject.cpp
+++ b/js/src/vm/SharedArrayObject.cpp
@@ -269,17 +269,17 @@ void SharedArrayBufferObject::dropRawBuf
 }
 
 SharedArrayRawBuffer* SharedArrayBufferObject::rawBufferObject() const {
   Value v = getReservedSlot(RAWBUF_SLOT);
   MOZ_ASSERT(!v.isUndefined());
   return reinterpret_cast<SharedArrayRawBuffer*>(v.toPrivate());
 }
 
-void SharedArrayBufferObject::Finalize(FreeOp* fop, JSObject* obj) {
+void SharedArrayBufferObject::Finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(fop->maybeOnHelperThread());
 
   SharedArrayBufferObject& buf = obj->as<SharedArrayBufferObject>();
 
   // Detect the case of failure during SharedArrayBufferObject creation,
   // which causes a SharedArrayRawBuffer to never be attached.
   Value v = buf.getReservedSlot(RAWBUF_SLOT);
   if (!v.isUndefined()) {
--- a/js/src/vm/SharedArrayObject.h
+++ b/js/src/vm/SharedArrayObject.h
@@ -182,17 +182,17 @@ class SharedArrayBufferObject : public A
 
   // Create a SharedArrayBufferObject using an existing SharedArrayRawBuffer,
   // recording the given length in the SharedArrayBufferObject.
   static SharedArrayBufferObject* New(JSContext* cx,
                                       SharedArrayRawBuffer* buffer,
                                       uint32_t length,
                                       HandleObject proto = nullptr);
 
-  static void Finalize(FreeOp* fop, JSObject* obj);
+  static void Finalize(JSFreeOp* fop, JSObject* obj);
 
   static void addSizeOfExcludingThis(JSObject* obj,
                                      mozilla::MallocSizeOf mallocSizeOf,
                                      JS::ClassInfo* info);
 
   static void copyData(Handle<SharedArrayBufferObject*> toBuffer,
                        uint32_t toIndex,
                        Handle<SharedArrayBufferObject*> fromBuffer,
--- a/js/src/vm/StringType-inl.h
+++ b/js/src/vm/StringType-inl.h
@@ -406,29 +406,29 @@ inline JSLinearString* js::StaticStrings
     return nullptr;
   }
   if (c < UNIT_STATIC_LIMIT) {
     return getUnit(c);
   }
   return js::NewInlineString<CanGC>(cx, mozilla::Range<const char16_t>(&c, 1));
 }
 
-MOZ_ALWAYS_INLINE void JSString::finalize(js::FreeOp* fop) {
+MOZ_ALWAYS_INLINE void JSString::finalize(JSFreeOp* fop) {
   /* FatInline strings are in a different arena. */
   MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_STRING);
   MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_ATOM);
 
   if (isFlat()) {
     asFlat().finalize(fop);
   } else {
     MOZ_ASSERT(isDependent() || isRope());
   }
 }
 
-inline void JSFlatString::finalize(js::FreeOp* fop) {
+inline void JSFlatString::finalize(JSFreeOp* fop) {
   MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_STRING);
   MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_ATOM);
 
   if (!isInline()) {
     fop->free_(this, nonInlineCharsRaw(), allocSize(),
                js::MemoryUse::StringContents);
   }
 }
@@ -437,31 +437,31 @@ inline size_t JSFlatString::allocSize() 
   MOZ_ASSERT(!isInline());
 
   size_t charSize =
       hasLatin1Chars() ? sizeof(JS::Latin1Char) : sizeof(char16_t);
   size_t count = isExtensible() ? asExtensible().capacity() : length();
   return (count + 1) * charSize;
 }
 
-inline void JSFatInlineString::finalize(js::FreeOp* fop) {
+inline void JSFatInlineString::finalize(JSFreeOp* fop) {
   MOZ_ASSERT(getAllocKind() == js::gc::AllocKind::FAT_INLINE_STRING);
   MOZ_ASSERT(isInline());
 
   // Nothing to do.
 }
 
-inline void js::FatInlineAtom::finalize(js::FreeOp* fop) {
+inline void js::FatInlineAtom::finalize(JSFreeOp* fop) {
   MOZ_ASSERT(JSString::isAtom());
   MOZ_ASSERT(getAllocKind() == js::gc::AllocKind::FAT_INLINE_ATOM);
 
   // Nothing to do.
 }
 
-inline void JSExternalString::finalize(js::FreeOp* fop) {
+inline void JSExternalString::finalize(JSFreeOp* fop) {
   if (!JSString::isExternal()) {
     // This started out as an external string, but was turned into a
     // non-external string by JSExternalString::ensureFlat.
     asFlat().finalize(fop);
     return;
   }
 
   size_t nbytes = (length() + 1) * sizeof(char16_t);
--- a/js/src/vm/StringType.h
+++ b/js/src/vm/StringType.h
@@ -541,17 +541,17 @@ class JSString : public js::gc::CellWith
   inline bool hasBase() const { return flags() & HAS_BASE_BIT; }
 
   inline JSLinearString* base() const;
 
   void traceBase(JSTracer* trc);
 
   /* Only called by the GC for strings with the AllocKind::STRING kind. */
 
-  inline void finalize(js::FreeOp* fop);
+  inline void finalize(JSFreeOp* fop);
 
   /* Gets the number of bytes that the chars take on the heap. */
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);
 
   // Make offset accessors public.
   using Base::offsetOfFlags;
   using Base::offsetOfLength;
@@ -977,17 +977,17 @@ class JSFlatString : public JSLinearStri
    * operation changes the string to the JSAtom type, in place.
    */
   MOZ_ALWAYS_INLINE JSAtom* morphAtomizedStringIntoAtom(js::HashNumber hash);
   MOZ_ALWAYS_INLINE JSAtom* morphAtomizedStringIntoPermanentAtom(
       js::HashNumber hash);
 
   inline size_t allocSize() const;
 
-  inline void finalize(js::FreeOp* fop);
+  inline void finalize(JSFreeOp* fop);
 
 #if defined(DEBUG) || defined(JS_JITSPEW)
   void dumpRepresentation(js::GenericPrinter& out, int indent) const;
 #endif
 };
 
 static_assert(sizeof(JSFlatString) == sizeof(JSString),
               "string subclasses must be binary-compatible with JSString");
@@ -1110,17 +1110,17 @@ class JSFatInlineString : public JSInlin
   template <typename CharT>
   inline CharT* init(size_t length);
 
   template <typename CharT>
   static bool lengthFits(size_t length);
 
   // Only called by the GC for strings with the AllocKind::FAT_INLINE_STRING
   // kind.
-  MOZ_ALWAYS_INLINE void finalize(js::FreeOp* fop);
+  MOZ_ALWAYS_INLINE void finalize(JSFreeOp* fop);
 };
 
 static_assert(sizeof(JSFatInlineString) % js::gc::CellAlignBytes == 0,
               "fat inline strings shouldn't waste space up to the next cell "
               "boundary");
 
 class JSExternalString : public JSLinearString {
   void init(const char16_t* chars, size_t length, const JSStringFinalizer* fin);
@@ -1140,17 +1140,17 @@ class JSExternalString : public JSLinear
   }
 
   // External chars are never allocated inline or in the nursery, so we can
   // safely expose this without requiring an AutoCheckCannotGC argument.
   const char16_t* twoByteChars() const { return rawTwoByteChars(); }
 
   // Only called by the GC for strings with the AllocKind::EXTERNAL_STRING
   // kind.
-  inline void finalize(js::FreeOp* fop);
+  inline void finalize(JSFreeOp* fop);
 
   // Free the external chars and allocate a new buffer, converting this to a
   // flat string (which still lives in an AllocKind::EXTERNAL_STRING
   // arena).
   JSFlatString* ensureFlat(JSContext* cx);
 
 #if defined(DEBUG) || defined(JS_JITSPEW)
   void dumpRepresentation(js::GenericPrinter& out, int indent) const;
@@ -1231,17 +1231,17 @@ class FatInlineAtom : public JSAtom {
  protected:  // Silence Clang unused-field warning.
   char inlineStorage_[sizeof(JSFatInlineString) - sizeof(JSString)];
   HashNumber hash_;
 
  public:
   HashNumber hash() const { return hash_; }
   void initHash(HashNumber hash) { hash_ = hash; }
 
-  inline void finalize(js::FreeOp* fop);
+  inline void finalize(JSFreeOp* fop);
 };
 
 static_assert(
     sizeof(FatInlineAtom) == sizeof(JSFatInlineString) + sizeof(uint64_t),
     "FatInlineAtom must have size of a fat inline string + HashNumber, "
     "aligned to gc::CellAlignBytes");
 
 }  // namespace js
--- a/js/src/vm/SymbolType.h
+++ b/js/src/vm/SymbolType.h
@@ -84,17 +84,17 @@ class Symbol : public js::gc::TenuredCel
   }
 
   static const JS::TraceKind TraceKind = JS::TraceKind::Symbol;
   inline void traceChildren(JSTracer* trc) {
     if (description_) {
       js::TraceManuallyBarrieredEdge(trc, &description_, "description");
     }
   }
-  inline void finalize(js::FreeOp*) {}
+  inline void finalize(JSFreeOp*) {}
 
   static MOZ_ALWAYS_INLINE void writeBarrierPre(Symbol* thing) {
     if (thing && !thing->isWellKnownSymbol()) {
       thing->asTenured().writeBarrierPre(thing);
     }
   }
 
   size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
--- a/js/src/vm/TypeInference-inl.h
+++ b/js/src/vm/TypeInference-inl.h
@@ -413,25 +413,25 @@ struct MOZ_RAII AutoEnterAnalysis {
   mozilla::Maybe<AutoClearTypeInferenceStateOnOOM> oom;
 
   // Pending recompilations to perform before execution of JIT code can resume.
   RecompileInfoVector pendingRecompiles;
 
   // Prevent us from calling the objectMetadataCallback.
   js::AutoSuppressAllocationMetadataBuilder suppressMetadata;
 
-  FreeOp* freeOp;
+  JSFreeOp* freeOp;
   Zone* zone;
 
   explicit AutoEnterAnalysis(JSContext* cx)
       : suppressGC(cx), suppressMetadata(cx) {
     init(cx->defaultFreeOp(), cx->zone());
   }
 
-  AutoEnterAnalysis(FreeOp* fop, Zone* zone)
+  AutoEnterAnalysis(JSFreeOp* fop, Zone* zone)
       : suppressGC(TlsContext.get()), suppressMetadata(zone) {
     init(fop, zone);
   }
 
   ~AutoEnterAnalysis() {
     if (this != zone->types.activeAnalysis) {
       return;
     }
@@ -439,17 +439,17 @@ struct MOZ_RAII AutoEnterAnalysis {
     zone->types.activeAnalysis = nullptr;
 
     if (!pendingRecompiles.empty()) {
       zone->types.processPendingRecompiles(freeOp, pendingRecompiles);
     }
   }
 
  private:
-  void init(FreeOp* fop, Zone* zone) {
+  void init(JSFreeOp* fop, Zone* zone) {
 #ifdef JS_CRASH_DIAGNOSTICS
     MOZ_RELEASE_ASSERT(CurrentThreadCanAccessZone(zone));
 #endif
     this->freeOp = fop;
     this->zone = zone;
 
     if (!zone->types.activeAnalysis) {
       oom.emplace(zone);
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -2646,17 +2646,17 @@ bool TemporaryTypeSet::propertyNeedsBarr
   return false;
 }
 
 bool js::ClassCanHaveExtraProperties(const Class* clasp) {
   return clasp->getResolve() || clasp->getOpsLookupProperty() ||
          clasp->getOpsGetProperty() || IsTypedArrayClass(clasp);
 }
 
-void TypeZone::processPendingRecompiles(FreeOp* fop,
+void TypeZone::processPendingRecompiles(JSFreeOp* fop,
                                         RecompileInfoVector& recompiles) {
   MOZ_ASSERT(!recompiles.empty());
 
   // Steal the list of scripts to recompile, to make sure we don't try to
   // recursively recompile them. Note: the move constructor will not reset the
   // length if the Vector is using inline storage, so we also use clear().
   RecompileInfoVector pending(std::move(recompiles));
   recompiles.clear();
@@ -4481,17 +4481,17 @@ AutoClearTypeInferenceStateOnOOM::AutoCl
   MOZ_ASSERT(!TlsContext.get()->inUnsafeCallWithABI);
   zone->types.setSweepingTypes(true);
 }
 
 AutoClearTypeInferenceStateOnOOM::~AutoClearTypeInferenceStateOnOOM() {
   if (zone->types.hadOOMSweepingTypes()) {
     gc::AutoSetThreadIsSweeping threadIsSweeping;
     JSRuntime* rt = zone->runtimeFromMainThread();
-    FreeOp fop(rt);
+    JSFreeOp fop(rt);
     js::CancelOffThreadIonCompile(rt);
     zone->setPreservingCode(false);
     zone->discardJitCode(&fop, Zone::KeepBaselineCode);
     zone->types.clearAllNewScriptsOnOOM();
   }
 
   zone->types.setSweepingTypes(false);
 }
--- a/js/src/vm/TypeInference.h
+++ b/js/src/vm/TypeInference.h
@@ -257,17 +257,17 @@ class TypeZone {
   void beginSweep();
   void endSweep(JSRuntime* rt);
   void clearAllNewScriptsOnOOM();
 
   /* Mark a script as needing recompilation once inference has finished. */
   void addPendingRecompile(JSContext* cx, const RecompileInfo& info);
   void addPendingRecompile(JSContext* cx, JSScript* script);
 
-  void processPendingRecompiles(FreeOp* fop, RecompileInfoVector& recompiles);
+  void processPendingRecompiles(JSFreeOp* fop, RecompileInfoVector& recompiles);
 
   bool isSweepingTypes() const { return sweepingTypes; }
   void setSweepingTypes(bool sweeping) {
     MOZ_RELEASE_ASSERT(sweepingTypes != sweeping);
     MOZ_ASSERT_IF(sweeping, !oomSweepingTypes);
     sweepingTypes = sweeping;
     oomSweepingTypes = false;
   }
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -144,17 +144,17 @@ bool TypedArrayObject::ensureHasBuffer(J
 void TypedArrayObject::assertZeroLengthArrayData() const {
   if (length() == 0 && !hasBuffer()) {
     uint8_t* end = fixedData(TypedArrayObject::FIXED_DATA_START);
     MOZ_ASSERT(end[0] == ZeroLengthArrayData);
   }
 }
 #endif
 
-void TypedArrayObject::finalize(FreeOp* fop, JSObject* obj) {
+void TypedArrayObject::finalize(JSFreeOp* fop, JSObject* obj) {
   MOZ_ASSERT(!IsInsideNursery(obj));
   TypedArrayObject* curObj = &obj->as<TypedArrayObject>();
 
   // Template objects or discarded objects (which didn't have enough room
   // for inner elements) don't have anything to free.
   if (!curObj->elementsRaw()) {
     return;
   }
--- a/js/src/vm/TypedArrayObject.h
+++ b/js/src/vm/TypedArrayObject.h
@@ -153,17 +153,17 @@ class TypedArrayObject : public ArrayBuf
    * when not inlined from Ion.
    */
   static constexpr uint32_t SINGLETON_BYTE_LENGTH = 1024 * 1024 * 10;
 
   static bool isOriginalLengthGetter(Native native);
 
   static bool isOriginalByteOffsetGetter(Native native);
 
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static size_t objectMoved(JSObject* obj, JSObject* old);
 
   /* Initialization bits */
 
   template <Value ValueGetter(const TypedArrayObject* tarr)>
   static bool GetterImpl(JSContext* cx, const CallArgs& args) {
     MOZ_ASSERT(is(args.thisv()));
     args.rval().set(
--- a/js/src/wasm/WasmDebug.cpp
+++ b/js/src/wasm/WasmDebug.cpp
@@ -122,17 +122,17 @@ bool DebugState::incrementStepperCount(J
     uint32_t offset = callSite.returnAddressOffset();
     if (codeRange.begin() <= offset && offset <= codeRange.end()) {
       toggleDebugTrap(offset, true);
     }
   }
   return true;
 }
 
-bool DebugState::decrementStepperCount(FreeOp* fop, uint32_t funcIndex) {
+bool DebugState::decrementStepperCount(JSFreeOp* fop, uint32_t funcIndex) {
   const CodeRange& codeRange =
       codeRanges(Tier::Debug)[funcToCodeRangeIndex(funcIndex)];
   MOZ_ASSERT(codeRange.isFunction());
 
   MOZ_ASSERT(!stepperCounters_.empty());
   StepperCounters::Ptr p = stepperCounters_.lookup(funcIndex);
   MOZ_ASSERT(p);
   if (--p->value()) {
@@ -223,26 +223,26 @@ WasmBreakpointSite* DebugState::getOrCre
   }
   return site;
 }
 
 bool DebugState::hasBreakpointSite(uint32_t offset) {
   return breakpointSites_.has(offset);
 }
 
-void DebugState::destroyBreakpointSite(FreeOp* fop, Instance* instance,
+void DebugState::destroyBreakpointSite(JSFreeOp* fop, Instance* instance,
                                        uint32_t offset) {
   WasmBreakpointSiteMap::Ptr p = breakpointSites_.lookup(offset);
   MOZ_ASSERT(p);
   fop->delete_(instance->objectUnbarriered(), p->value(),
                MemoryUse::BreakpointSite);
   breakpointSites_.remove(p);
 }
 
-void DebugState::clearBreakpointsIn(FreeOp* fop, WasmInstanceObject* instance,
+void DebugState::clearBreakpointsIn(JSFreeOp* fop, WasmInstanceObject* instance,
                                     js::Debugger* dbg, JSObject* handler) {
   MOZ_ASSERT(instance);
   if (breakpointSites_.empty()) {
     return;
   }
   for (WasmBreakpointSiteMap::Enum e(breakpointSites_); !e.empty();
        e.popFront()) {
     WasmBreakpointSite* site = e.front().value();
@@ -257,17 +257,17 @@ void DebugState::clearBreakpointsIn(Free
     }
     if (site->isEmpty()) {
       fop->delete_(instance, site, MemoryUse::BreakpointSite);
       e.removeFront();
     }
   }
 }
 
-void DebugState::clearAllBreakpoints(FreeOp* fop,
+void DebugState::clearAllBreakpoints(JSFreeOp* fop,
                                      WasmInstanceObject* instance) {
   clearBreakpointsIn(fop, instance, nullptr, nullptr);
 }
 
 void DebugState::toggleDebugTrap(uint32_t offset, bool enabled) {
   MOZ_ASSERT(offset);
   uint8_t* trap = code_->segment(Tier::Debug).base() + offset;
   const Uint32Vector& farJumpOffsets =
--- a/js/src/wasm/WasmDebug.h
+++ b/js/src/wasm/WasmDebug.h
@@ -89,27 +89,28 @@ class DebugState {
 
   bool hasBreakpointTrapAtOffset(uint32_t offset);
   void toggleBreakpointTrap(JSRuntime* rt, uint32_t offset, bool enabled);
   WasmBreakpointSite* getBreakpointSite(JSContext* cx, uint32_t offset) const;
   WasmBreakpointSite* getOrCreateBreakpointSite(JSContext* cx,
                                                 Instance* instance,
                                                 uint32_t offset);
   bool hasBreakpointSite(uint32_t offset);
-  void destroyBreakpointSite(FreeOp* fop, Instance* instance, uint32_t offset);
-  void clearBreakpointsIn(FreeOp* fp, WasmInstanceObject* instance,
+  void destroyBreakpointSite(JSFreeOp* fop, Instance* instance,
+                             uint32_t offset);
+  void clearBreakpointsIn(JSFreeOp* fp, WasmInstanceObject* instance,
                           js::Debugger* dbg, JSObject* handler);
-  void clearAllBreakpoints(FreeOp* fp, WasmInstanceObject* instance);
+  void clearAllBreakpoints(JSFreeOp* fp, WasmInstanceObject* instance);
 
   // When the Code is debug-enabled, single-stepping mode can be toggled on
   // the granularity of individual functions.
 
   bool stepModeEnabled(uint32_t funcIndex) const;
   bool incrementStepperCount(JSContext* cx, uint32_t funcIndex);
-  bool decrementStepperCount(FreeOp* fop, uint32_t funcIndex);
+  bool decrementStepperCount(JSFreeOp* fop, uint32_t funcIndex);
 
   // Stack inspection helpers.
 
   bool debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals,
                           size_t* argsLength);
   ExprType debugGetResultType(uint32_t funcIndex);
   bool getGlobal(Instance& instance, uint32_t globalIndex,
                  MutableHandleValue vp);
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -1961,17 +1961,17 @@ JSString* Instance::createDisplayURL(JSC
 }
 
 WasmBreakpointSite* Instance::getOrCreateBreakpointSite(JSContext* cx,
                                                         uint32_t offset) {
   MOZ_ASSERT(debugEnabled());
   return debug().getOrCreateBreakpointSite(cx, this, offset);
 }
 
-void Instance::destroyBreakpointSite(FreeOp* fop, uint32_t offset) {
+void Instance::destroyBreakpointSite(JSFreeOp* fop, uint32_t offset) {
   MOZ_ASSERT(debugEnabled());
   return debug().destroyBreakpointSite(fop, this, offset);
 }
 
 void Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                              Metadata::SeenSet* seenMetadata,
                              ShareableBytes::SeenSet* seenBytes,
                              Code::SeenSet* seenCode,
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -52,17 +52,17 @@ class Instance {
   const UniqueTlsData tlsData_;
   GCPtrWasmMemoryObject memory_;
   const SharedTableVector tables_;
   DataSegmentVector passiveDataSegments_;
   ElemSegmentVector passiveElemSegments_;
   const UniqueDebugState maybeDebug_;
   StructTypeDescrVector structTypeDescrs_;
 
-  friend void Zone::sweepBreakpoints(js::FreeOp*);
+  friend void Zone::sweepBreakpoints(JSFreeOp*);
 
   // Internal helpers:
   const void** addressOfFuncTypeId(const FuncTypeIdDesc& funcTypeId) const;
   FuncImportTls& funcImportTls(const FuncImport& fi);
   TableTls& tableTls(const TableDesc& td) const;
 
   // Only WasmInstanceObject can call the private trace function.
   friend class js::WasmInstanceObject;
@@ -164,17 +164,17 @@ class Instance {
 
   void initElems(uint32_t tableIndex, const ElemSegment& seg,
                  uint32_t dstOffset, uint32_t srcOffset, uint32_t len);
 
   // Debugger support:
 
   JSString* createDisplayURL(JSContext* cx);
   WasmBreakpointSite* getOrCreateBreakpointSite(JSContext* cx, uint32_t offset);
-  void destroyBreakpointSite(FreeOp* fop, uint32_t offset);
+  void destroyBreakpointSite(JSFreeOp* fop, uint32_t offset);
 
   // about:memory reporting:
 
   void addSizeOfMisc(MallocSizeOf mallocSizeOf, Metadata::SeenSet* seenMetadata,
                      ShareableBytes::SeenSet* seenBytes,
                      Code::SeenSet* seenCode, Table::SeenSet* seenTables,
                      size_t* code, size_t* data) const;
 
--- a/js/src/wasm/WasmJS.cpp
+++ b/js/src/wasm/WasmJS.cpp
@@ -680,17 +680,17 @@ const JSFunctionSpec WasmModuleObject::m
 const JSFunctionSpec WasmModuleObject::static_methods[] = {
     JS_FN("imports", WasmModuleObject::imports, 1, JSPROP_ENUMERATE),
     JS_FN("exports", WasmModuleObject::exports, 1, JSPROP_ENUMERATE),
     JS_FN("customSections", WasmModuleObject::customSections, 2,
           JSPROP_ENUMERATE),
     JS_FS_END};
 
 /* static */
-void WasmModuleObject::finalize(FreeOp* fop, JSObject* obj) {
+void WasmModuleObject::finalize(JSFreeOp* fop, JSObject* obj) {
   const Module& module = obj->as<WasmModuleObject>().module();
   obj->zone()->decJitMemory(module.codeLength(module.code().stableTier()));
   fop->release(obj, &module, module.gcMallocBytesExcludingCode(),
                MemoryUse::WasmModule);
 }
 
 static bool IsModuleObject(JSObject* obj, const Module** module) {
   WasmModuleObject* mobj = obj->maybeUnwrapIf<WasmModuleObject>();
@@ -1224,17 +1224,17 @@ const JSFunctionSpec WasmInstanceObject:
 const JSFunctionSpec WasmInstanceObject::static_methods[] = {JS_FS_END};
 
 bool WasmInstanceObject::isNewborn() const {
   MOZ_ASSERT(is<WasmInstanceObject>());
   return getReservedSlot(INSTANCE_SLOT).isUndefined();
 }
 
 /* static */
-void WasmInstanceObject::finalize(FreeOp* fop, JSObject* obj) {
+void WasmInstanceObject::finalize(JSFreeOp* fop, JSObject* obj) {
   WasmInstanceObject& instance = obj->as<WasmInstanceObject>();
   fop->delete_(obj, &instance.exports(), MemoryUse::WasmInstanceExports);
   fop->delete_(obj, &instance.scopes(), MemoryUse::WasmInstanceScopes);
   fop->delete_(obj, &instance.indirectGlobals(),
                MemoryUse::WasmInstanceGlobals);
   if (!instance.isNewborn()) {
     fop->delete_(obj, &instance.instance(), MemoryUse::WasmInstanceInstance);
   }
@@ -1614,17 +1614,17 @@ const ClassOps WasmMemoryObject::classOp
 const Class WasmMemoryObject::class_ = {
     "WebAssembly.Memory",
     JSCLASS_DELAY_METADATA_BUILDER |
         JSCLASS_HAS_RESERVED_SLOTS(WasmMemoryObject::RESERVED_SLOTS) |
         JSCLASS_FOREGROUND_FINALIZE,
     &WasmMemoryObject::classOps_};
 
 /* static */
-void WasmMemoryObject::finalize(FreeOp* fop, JSObject* obj) {
+void WasmMemoryObject::finalize(JSFreeOp* fop, JSObject* obj) {
   WasmMemoryObject& memory = obj->as<WasmMemoryObject>();
   if (memory.hasObservers()) {
     fop->delete_(obj, &memory.observers(), MemoryUse::WasmMemoryObservers);
   }
 }
 
 /* static */
 WasmMemoryObject* WasmMemoryObject::create(
@@ -1963,17 +1963,17 @@ const Class WasmTableObject::class_ = {
     &WasmTableObject::classOps_};
 
 bool WasmTableObject::isNewborn() const {
   MOZ_ASSERT(is<WasmTableObject>());
   return getReservedSlot(TABLE_SLOT).isUndefined();
 }
 
 /* static */
-void WasmTableObject::finalize(FreeOp* fop, JSObject* obj) {
+void WasmTableObject::finalize(JSFreeOp* fop, JSObject* obj) {
   WasmTableObject& tableObj = obj->as<WasmTableObject>();
   if (!tableObj.isNewborn()) {
     auto& table = tableObj.table();
     fop->release(obj, &table, table.gcMallocBytes(), MemoryUse::WasmTableTable);
   }
 }
 
 /* static */
@@ -2364,17 +2364,17 @@ void WasmGlobalObject::trace(JSTracer* t
     case ValType::Ref:
       MOZ_CRASH("Ref NYI");
     case ValType::NullRef:
       MOZ_CRASH("NullRef not expressible");
   }
 }
 
 /* static */
-void WasmGlobalObject::finalize(FreeOp* fop, JSObject* obj) {
+void WasmGlobalObject::finalize(JSFreeOp* fop, JSObject* obj) {
   WasmGlobalObject* global = reinterpret_cast<WasmGlobalObject*>(obj);
   if (!global->isNewborn()) {
     fop->delete_(obj, global->cell(), MemoryUse::WasmGlobalCell);
   }
 }
 
 /* static */
 WasmGlobalObject* WasmGlobalObject::create(JSContext* cx, HandleVal hval,
@@ -3419,17 +3419,17 @@ class CompileStreamTask : public Promise
 // the Promise<Response> to resolve to a (hopefully) Promise.
 class ResolveResponseClosure : public NativeObject {
   static const unsigned COMPILE_ARGS_SLOT = 0;
   static const unsigned PROMISE_OBJ_SLOT = 1;
   static const unsigned INSTANTIATE_SLOT = 2;
   static const unsigned IMPORT_OBJ_SLOT = 3;
   static const ClassOps classOps_;
 
-  static void finalize(FreeOp* fop, JSObject* obj) {
+  static void finalize(JSFreeOp* fop, JSObject* obj) {
     auto& closure = obj->as<ResolveResponseClosure>();
     fop->release(obj, &closure.compileArgs(),
                  MemoryUse::WasmResolveResponseClosure);
   }
 
  public:
   static const unsigned RESERVED_SLOTS = 4;
   static const Class class_;
--- a/js/src/wasm/WasmJS.h
+++ b/js/src/wasm/WasmJS.h
@@ -122,17 +122,17 @@ JSObject* InitWebAssemblyClass(JSContext
 
 // The class of WebAssembly.Module. Each WasmModuleObject owns a
 // wasm::Module. These objects are used both as content-facing JS objects and as
 // internal implementation details of asm.js.
 
 class WasmModuleObject : public NativeObject {
   static const unsigned MODULE_SLOT = 0;
   static const ClassOps classOps_;
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static bool imports(JSContext* cx, unsigned argc, Value* vp);
   static bool exports(JSContext* cx, unsigned argc, Value* vp);
   static bool customSections(JSContext* cx, unsigned argc, Value* vp);
 
  public:
   static const unsigned RESERVED_SLOTS = 1;
   static const Class class_;
   static const JSPropertySpec properties[];
@@ -157,17 +157,17 @@ class WasmModuleObject : public NativeOb
 STATIC_ASSERT_ANYREF_IS_JSOBJECT;
 
 class WasmGlobalObject : public NativeObject {
   static const unsigned TYPE_SLOT = 0;
   static const unsigned MUTABLE_SLOT = 1;
   static const unsigned CELL_SLOT = 2;
 
   static const ClassOps classOps_;
-  static void finalize(FreeOp*, JSObject* obj);
+  static void finalize(JSFreeOp*, JSObject* obj);
   static void trace(JSTracer* trc, JSObject* obj);
 
   static bool valueGetterImpl(JSContext* cx, const CallArgs& args);
   static bool valueGetter(JSContext* cx, unsigned argc, Value* vp);
   static bool valueSetterImpl(JSContext* cx, const CallArgs& args);
   static bool valueSetter(JSContext* cx, unsigned argc, Value* vp);
 
  public:
@@ -213,17 +213,17 @@ class WasmInstanceObject : public Native
   static const unsigned SCOPES_SLOT = 3;
   static const unsigned INSTANCE_SCOPE_SLOT = 4;
   static const unsigned GLOBALS_SLOT = 5;
 
   static const ClassOps classOps_;
   static bool exportsGetterImpl(JSContext* cx, const CallArgs& args);
   static bool exportsGetter(JSContext* cx, unsigned argc, Value* vp);
   bool isNewborn() const;
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static void trace(JSTracer* trc, JSObject* obj);
 
   // ExportMap maps from function index to exported function object.
   // This allows the instance to lazily create exported function
   // objects on demand (instead up-front for all table elements) while
   // correctly preserving observable function object identity.
   using ExportMap = GCHashMap<uint32_t, HeapPtr<JSFunction*>,
                               DefaultHasher<uint32_t>, ZoneAllocPolicy>;
@@ -282,17 +282,17 @@ class WasmInstanceObject : public Native
 
 // The class of WebAssembly.Memory. A WasmMemoryObject references an ArrayBuffer
 // or SharedArrayBuffer object which owns the actual memory.
 
 class WasmMemoryObject : public NativeObject {
   static const unsigned BUFFER_SLOT = 0;
   static const unsigned OBSERVERS_SLOT = 1;
   static const ClassOps classOps_;
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static bool bufferGetterImpl(JSContext* cx, const CallArgs& args);
   static bool bufferGetter(JSContext* cx, unsigned argc, Value* vp);
   static bool growImpl(JSContext* cx, const CallArgs& args);
   static bool grow(JSContext* cx, unsigned argc, Value* vp);
   static uint32_t growShared(HandleWasmMemoryObject memory, uint32_t delta);
 
   using InstanceSet =
       JS::WeakCache<GCHashSet<WeakHeapPtrWasmInstanceObject,
@@ -342,17 +342,17 @@ class WasmMemoryObject : public NativeOb
 // The class of WebAssembly.Table. A WasmTableObject holds a refcount on a
 // wasm::Table, allowing a Table to be shared between multiple Instances
 // (eventually between multiple threads).
 
 class WasmTableObject : public NativeObject {
   static const unsigned TABLE_SLOT = 0;
   static const ClassOps classOps_;
   bool isNewborn() const;
-  static void finalize(FreeOp* fop, JSObject* obj);
+  static void finalize(JSFreeOp* fop, JSObject* obj);
   static void trace(JSTracer* trc, JSObject* obj);
   static bool lengthGetterImpl(JSContext* cx, const CallArgs& args);
   static bool lengthGetter(JSContext* cx, unsigned argc, Value* vp);
   static bool getImpl(JSContext* cx, const CallArgs& args);
   static bool get(JSContext* cx, unsigned argc, Value* vp);
   static bool setImpl(JSContext* cx, const CallArgs& args);
   static bool set(JSContext* cx, unsigned argc, Value* vp);
   static bool growImpl(JSContext* cx, const CallArgs& args);
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -1239,17 +1239,17 @@ class XPCWrappedNativeProto final {
   }
 
   nsIClassInfo* GetClassInfo() const { return mClassInfo; }
 
   XPCNativeSet* GetSet() const { return mSet; }
 
   nsIXPCScriptable* GetScriptable() const { return mScriptable; }
 
-  void JSProtoObjectFinalized(js::FreeOp* fop, JSObject* obj);
+  void JSProtoObjectFinalized(JSFreeOp* fop, JSObject* obj);
   void JSProtoObjectMoved(JSObject* obj, const JSObject* old);
 
   void SystemIsBeingShutDown();
 
   void DebugDump(int16_t depth);
 
   void TraceSelf(JSTracer* trc) {
     if (mJSProtoObject) {