Bug 1075591: Make js::gc::TenuredCell::asTenured return a TenuredCell &, not a TenuredCell *. r=terrence
author Jim Blandy <jimb@mozilla.com>
Thu, 02 Oct 2014 10:19:11 +0100
changeset 208373 a59c2ddcf04cae9dc2e3a7702c8703b7430c3ff3
parent 208372 c70ca7e5474135b799f0bc26f5d8ff9c66faaf1b
child 208374 9f9bbf84c3e8174278336e322643a4fc697e45eb
push id 49907
push user jblandy@mozilla.com
push date Thu, 02 Oct 2014 09:19:55 +0000
treeherder mozilla-inbound@a59c2ddcf04c
reviewers terrence
bugs 1075591
milestone 35.0a1
js/src/gc/Heap.h
js/src/gc/Iteration.cpp
js/src/gc/Marking.cpp
js/src/gc/Tracer.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonMacroAssembler.cpp
js/src/jit/MIR.cpp
js/src/jit/Recover.cpp
js/src/jit/VMFunctions.cpp
js/src/jsfriendapi.cpp
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jsobj.cpp
js/src/jsobjinlines.h
js/src/jsscript.cpp
js/src/jsweakmap.cpp
js/src/proxy/Wrapper.cpp
js/src/vm/ObjectImpl.h
js/src/vm/RegExpObject.cpp
js/src/vm/SelfHosting.cpp
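
The substantive change is the signature of Cell::asTenured in js/src/gc/Heap.h below; every other hunk is a mechanical call-site update from "->" to ".", plus taking the address of the result where a pointer is still required (as in the static barrier calls in vm/ObjectImpl.h). The following is a minimal stand-alone sketch of the pattern, not SpiderMonkey code: Cell, TenuredCell, getAllocKind, and readBarrier here are simplified stand-ins for the real classes and methods.

// Sketch only: asTenured() now returns a TenuredCell & instead of a
// TenuredCell *, so callers use '.' rather than '->', and code that still
// needs a pointer takes the address of the returned reference explicitly.

#include <cassert>

struct TenuredCell;                 // stand-in for js::gc::TenuredCell

struct Cell {                       // stand-in for js::gc::Cell
    bool isTenured() const { return true; }   // assume tenured in this sketch

    // Before: TenuredCell *asTenured();
    // After:
    TenuredCell &asTenured();
};

struct TenuredCell : Cell {
    int getAllocKind() const { return 0; }               // placeholder kind
    static void readBarrier(TenuredCell *cell) { (void) cell; }
};

TenuredCell &
Cell::asTenured()
{
    assert(isTenured());
    return *static_cast<TenuredCell *>(this);            // dereference, as in Heap.h
}

int main()
{
    TenuredCell obj;

    // Old call sites: obj.asTenured()->getAllocKind();
    // New call sites:
    int kind = obj.asTenured().getAllocKind();

    // APIs that still take a pointer (e.g. the static barrier methods touched
    // in vm/ObjectImpl.h) now receive the address of the reference:
    TenuredCell::readBarrier(&obj.asTenured());

    return kind;
}
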
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -139,18 +139,18 @@ static const size_t MAX_BACKGROUND_FINAL
 
 class TenuredCell;
 
 // A GC cell is the base class for all GC things.
 struct Cell
 {
   public:
     MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
-    MOZ_ALWAYS_INLINE const TenuredCell *asTenured() const;
-    MOZ_ALWAYS_INLINE TenuredCell *asTenured();
+    MOZ_ALWAYS_INLINE const TenuredCell &asTenured() const;
+    MOZ_ALWAYS_INLINE TenuredCell &asTenured();
 
     inline JSRuntime *runtimeFromMainThread() const;
     inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const;
 
     // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
     // thread can easily lead to races. Use this method very carefully.
     inline JSRuntime *runtimeFromAnyThread() const;
     inline JS::shadow::Runtime *shadowRuntimeFromAnyThread() const;
@@ -1141,28 +1141,28 @@ static void
 AssertValidColor(const TenuredCell *thing, uint32_t color)
 {
 #ifdef DEBUG
     ArenaHeader *aheader = thing->arenaHeader();
     MOZ_ASSERT(color < aheader->getThingSize() / CellSize);
 #endif
 }
 
-MOZ_ALWAYS_INLINE const TenuredCell *
+MOZ_ALWAYS_INLINE const TenuredCell &
 Cell::asTenured() const
 {
     MOZ_ASSERT(isTenured());
-    return static_cast<const TenuredCell *>(this);
+    return *static_cast<const TenuredCell *>(this);
 }
 
-MOZ_ALWAYS_INLINE TenuredCell *
+MOZ_ALWAYS_INLINE TenuredCell &
 Cell::asTenured()
 {
     MOZ_ASSERT(isTenured());
-    return static_cast<TenuredCell *>(this);
+    return *static_cast<TenuredCell *>(this);
 }
 
 inline JSRuntime *
 Cell::runtimeFromMainThread() const
 {
     JSRuntime *rt = chunk()->info.trailer.runtime;
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     return rt;
@@ -1378,17 +1378,17 @@ TenuredCell::writeBarrierPostRemove(Tenu
 }
 
 #ifdef DEBUG
 bool
 Cell::isAligned() const
 {
     if (!isTenured())
         return true;
-    return asTenured()->isAligned();
+    return asTenured().isAligned();
 }
 
 bool
 TenuredCell::isAligned() const
 {
     return Arena::isAligned(address(), arenaHeader()->getThingSize());
 }
 #endif
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -114,17 +114,17 @@ void
 js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
 {
     zone->runtimeFromMainThread()->gc.evictNursery();
     AutoPrepareForTracing prep(zone->runtimeFromMainThread(), SkipAtoms);
 
     for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
         for (ZoneCellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
             JSObject *obj = i.get<JSObject>();
-            if (obj->asTenured()->isMarked(GRAY))
+            if (obj->asTenured().isMarked(GRAY))
                 cellCallback(data, obj);
         }
     }
 }
 
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSRuntime *rt, void *data,
                        JSIterateCompartmentCallback compartmentCallback)
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -225,17 +225,17 @@ CheckMarkedThing(JSTracer *trc, T **thin
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
      * ArenaHeader may not be synced with the real one in ArenaLists.
      */
     MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
-                  !InFreeList(thing->asTenured()->arenaHeader(), thing));
+                  !InFreeList(thing->asTenured().arenaHeader(), thing));
 #endif
 }
 
 /*
  * We only set the maybeAlive flag for objects and scripts. It's assumed that,
  * if a compartment is alive, then it will have at least some live object or
 * script in it. Even if we get this wrong, the worst that will happen is that
  * scheduledForDestruction will be set on the compartment, which will cause some
@@ -445,24 +445,24 @@ IsMarked(T **thingp)
 #endif
     {
         if (IsInsideNursery(*thingp)) {
             Nursery &nursery = rt->gc.nursery;
             return nursery.getForwardedPointer(thingp);
         }
     }
 #endif  // JSGC_GENERATIONAL
-    Zone *zone = (*thingp)->asTenured()->zone();
+    Zone *zone = (*thingp)->asTenured().zone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
 #ifdef JSGC_COMPACTING
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
 #endif
-    return (*thingp)->asTenured()->isMarked();
+    return (*thingp)->asTenured().isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
@@ -490,29 +490,29 @@ IsAboutToBeFinalized(T **thingp)
         if (rt->isHeapMinorCollecting()) {
             if (IsInsideNursery(thing))
                 return !nursery.getForwardedPointer(thingp);
             return false;
         }
     }
 #endif  // JSGC_GENERATIONAL
 
-    Zone *zone = thing->asTenured()->zone();
+    Zone *zone = thing->asTenured().zone();
     if (zone->isGCSweeping()) {
         /*
          * We should return false for things that have been allocated during
          * incremental sweeping, but this possibility doesn't occur at the moment
         * because this function is only called at the very start of sweeping a
          * compartment group and during minor gc. Rather than do the extra check,
          * we just assert that it's not necessary.
          */
         MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(),
-                      !thing->asTenured()->arenaHeader()->allocatedDuringIncremental);
+                      !thing->asTenured().arenaHeader()->allocatedDuringIncremental);
 
-        return !thing->asTenured()->isMarked();
+        return !thing->asTenured().isMarked();
     }
 #ifdef JSGC_COMPACTING
     else if (zone->isGCCompacting() && IsForwarded(thing)) {
         *thingp = Forwarded(thing);
         return false;
     }
 #endif
 
@@ -661,17 +661,17 @@ DeclMarkerImpl(TypeObject, js::types::Ty
 
 void
 gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
     DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
     MOZ_ASSERT_IF(cell->isTenured(),
-                  kind == MapAllocToTraceKind(cell->asTenured()->getAllocKind()));
+                  kind == MapAllocToTraceKind(cell->asTenured().getAllocKind()));
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
         break;
       case JSTRACE_STRING:
         MarkInternal(trc, reinterpret_cast<JSString **>(thingp));
         break;
       case JSTRACE_SYMBOL:
@@ -943,40 +943,40 @@ ShouldMarkCrossCompartment(JSTracer *trc
 
     uint32_t color = AsGCMarker(trc)->getMarkColor();
     MOZ_ASSERT(color == BLACK || color == GRAY);
 
     if (IsInsideNursery(cell)) {
         MOZ_ASSERT(color == BLACK);
         return false;
     }
-    TenuredCell *tenured = cell->asTenured();
+    TenuredCell &tenured = cell->asTenured();
 
-    JS::Zone *zone = tenured->zone();
+    JS::Zone *zone = tenured.zone();
     if (color == BLACK) {
         /*
          * Having black->gray edges violates our promise to the cycle
          * collector. This can happen if we're collecting a compartment and it
          * has an edge to an uncollected compartment: it's possible that the
          * source and destination of the cross-compartment edge should be gray,
          * but the source was marked black by the conservative scanner.
          */
-        if (tenured->isMarked(GRAY)) {
+        if (tenured.isMarked(GRAY)) {
             MOZ_ASSERT(!zone->isCollecting());
             trc->runtime()->gc.setFoundBlackGrayEdges();
         }
         return zone->isGCMarking();
     } else {
         if (zone->isGCMarkingBlack()) {
             /*
              * The destination compartment is not being marked gray now,
              * but it will be later, so record the cell so it can be marked gray
              * at the appropriate time.
              */
-            if (!tenured->isMarked())
+            if (!tenured.isMarked())
                 DelayCrossCompartmentGrayMarking(src);
             return false;
         }
         return zone->isGCMarkingGray();
     }
 }
 
 void
@@ -1036,17 +1036,17 @@ gc::IsCellAboutToBeFinalized(Cell **thin
     JS_COMPARTMENT_ASSERT_STR(rt, sym)
 
 static void
 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     MOZ_ASSERT(!IsInsideNursery(thing));
 
-    if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
+    if (thing->asTenured().markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 /*
  * PushMarkStack for BaseShape unpacks its children directly onto the mark
  * stack. For a pre-barrier between incremental slices, this may result in
  * objects in the nursery getting pushed onto the mark stack. It is safe to
  * ignore these objects because they will be marked by the matching
@@ -1054,27 +1054,27 @@ PushMarkStack(GCMarker *gcmarker, Object
  */
 static void
 MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
 {
     DebugOnly<JSRuntime *> rt = gcmarker->runtime();
     JS_COMPARTMENT_ASSERT(rt, thing);
     MOZ_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(thing));
 
-    if (!IsInsideNursery(thing) && thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
+    if (!IsInsideNursery(thing) && thing->asTenured().markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     MOZ_ASSERT(!IsInsideNursery(thing));
 
-    if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
+    if (thing->asTenured().markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
     MOZ_ASSERT(!IsInsideNursery(thing));
@@ -1704,17 +1704,17 @@ GCMarker::processMarkStackTop(SliceBudge
                 MOZ_ASSERT(runtime()->isAtomsZone(str->zone()) || str->zone() == obj->zone());
                 if (str->markIfUnmarked())
                     ScanString(this, str);
             }
         } else if (v.isObject()) {
             JSObject *obj2 = &v.toObject();
             JS_COMPARTMENT_ASSERT(runtime(), obj2);
             MOZ_ASSERT(obj->compartment() == obj2->compartment());
-            if (obj2->asTenured()->markIfUnmarked(getMarkColor())) {
+            if (obj2->asTenured().markIfUnmarked(getMarkColor())) {
                 pushValueArray(obj, vp, end);
                 obj = obj2;
                 goto scan_obj;
             }
         } else if (v.isSymbol()) {
             JS::Symbol *sym = v.toSymbol();
             if (!sym->isWellKnownSymbol()) {
                 JS_COMPARTMENT_ASSERT_SYM(runtime(), sym);
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -582,17 +582,17 @@ GCMarker::markDelayedChildren(SliceBudge
 }
 
 #ifdef DEBUG
 void
 GCMarker::checkZone(void *p)
 {
     MOZ_ASSERT(started);
     DebugOnly<Cell *> cell = static_cast<Cell *>(p);
-    MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured()->zone()->isCollecting());
+    MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
 }
 #endif
 
 bool
 GCMarker::hasBufferedGrayRoots() const
 {
     return grayBufferState == GRAY_BUFFER_OK;
 }
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -4006,17 +4006,17 @@ static const VMFunction NewGCThingParInf
 
 bool
 CodeGenerator::emitAllocateGCThingPar(LInstruction *lir, Register objReg, Register cxReg,
                                       Register tempReg1, Register tempReg2, JSObject *templateObj)
 {
     MOZ_ASSERT(lir->mirRaw());
     MOZ_ASSERT(lir->mirRaw()->isInstruction());
 
-    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured().getAllocKind();
 #ifdef JSGC_FJGENERATIONAL
     OutOfLineCode *ool = oolCallVM(NewGCThingParInfo, lir,
                                    (ArgList(), Imm32(allocKind)), StoreRegisterTo(objReg));
     if (!ool)
         return false;
 #else
     OutOfLineNewGCThingPar *ool = new(alloc()) OutOfLineNewGCThingPar(lir, allocKind, objReg, cxReg);
     if (!ool || !addOutOfLineCode(ool, lir->mirRaw()->toInstruction()))
@@ -4188,17 +4188,17 @@ typedef JSObject *(*NewGCObjectFn)(JSCon
                                    gc::InitialHeap initialHeap);
 static const VMFunction NewGCObjectInfo =
     FunctionInfo<NewGCObjectFn>(js::jit::NewGCObject);
 
 bool
 CodeGenerator::visitCreateThisWithTemplate(LCreateThisWithTemplate *lir)
 {
     JSObject *templateObject = lir->mir()->templateObject();
-    gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObject->asTenured().getAllocKind();
     gc::InitialHeap initialHeap = lir->mir()->initialHeap();
     Register objReg = ToRegister(lir->output());
     Register tempReg = ToRegister(lir->temp());
 
     OutOfLineCode *ool = oolCallVM(NewGCObjectInfo, lir,
                                    (ArgList(), Imm32(allocKind), Imm32(initialHeap)),
                                    StoreRegisterTo(objReg));
     if (!ool)
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -6772,17 +6772,17 @@ IonBuilder::ensureDefiniteTypeSet(MDefin
 
 static size_t
 NumFixedSlots(JSObject *object)
 {
     // Note: we can't use object->numFixedSlots() here, as this will read the
     // shape and can race with the main thread if we are building off thread.
     // The allocation kind and object class (which goes through the type) can
     // be read freely, however.
-    gc::AllocKind kind = object->asTenured()->getAllocKind();
+    gc::AllocKind kind = object->asTenured().getAllocKind();
     return gc::GetGCKindSlots(kind, object->getClass());
 }
 
 bool
 IonBuilder::getStaticName(JSObject *staticObject, PropertyName *name, bool *psucceeded,
                           MDefinition *lexicalCheck)
 {
     jsid id = NameToId(name);
--- a/js/src/jit/IonMacroAssembler.cpp
+++ b/js/src/jit/IonMacroAssembler.cpp
@@ -590,28 +590,28 @@ void
 MacroAssembler::newGCThing(Register result, Register temp, JSObject *templateObj,
                             gc::InitialHeap initialHeap, Label *fail)
 {
     // This method does not initialize the object: if external slots get
     // allocated into |temp|, there is no easy way for us to ensure the caller
     // frees them. Instead just assert this case does not happen.
     MOZ_ASSERT(!templateObj->numDynamicSlots());
 
-    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured().getAllocKind();
     MOZ_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
 
     allocateObject(result, temp, allocKind, templateObj->numDynamicSlots(), initialHeap, fail);
 }
 
 void
 MacroAssembler::createGCObject(Register obj, Register temp, JSObject *templateObj,
                                gc::InitialHeap initialHeap, Label *fail, bool initFixedSlots)
 {
     uint32_t nDynamicSlots = templateObj->numDynamicSlots();
-    gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObj->asTenured().getAllocKind();
     MOZ_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
 
     // Arrays with copy on write elements do not need fixed space for an
     // elements header. The template object, which owns the original elements,
     // might have another allocation kind.
     if (templateObj->denseElementsAreCopyOnWrite())
         allocKind = gc::FINALIZE_OBJECT0_BACKGROUND;
 
@@ -731,17 +731,17 @@ MacroAssembler::newGCTenuredThingPar(Reg
     // tempReg1->head.first = tempReg2;
     storePtr(tempReg2, Address(tempReg1, gc::FreeList::offsetOfFirst()));
 }
 
 void
 MacroAssembler::newGCThingPar(Register result, Register cx, Register tempReg1, Register tempReg2,
                               JSObject *templateObject, Label *fail)
 {
-    gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObject->asTenured().getAllocKind();
     MOZ_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
     MOZ_ASSERT(!templateObject->numDynamicSlots());
 
     newGCThingPar(result, cx, tempReg1, tempReg2, allocKind, fail);
 }
 
 void
 MacroAssembler::newGCStringPar(Register result, Register cx, Register tempReg1, Register tempReg2,
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -3239,17 +3239,17 @@ MObjectState::Copy(TempAllocator &alloc,
 }
 
 bool
 MNewArray::shouldUseVM() const
 {
     MOZ_ASSERT(count() < JSObject::NELEMENTS_LIMIT);
 
     size_t arraySlots =
-        gc::GetGCKindSlots(templateObject()->asTenured()->getAllocKind()) - ObjectElements::VALUES_PER_HEADER;
+        gc::GetGCKindSlots(templateObject()->asTenured().getAllocKind()) - ObjectElements::VALUES_PER_HEADER;
 
     // Allocate space using the VMCall when mir hints it needs to get allocated
     // immediately, but only when data doesn't fit the available array slots.
     bool allocating = allocatingBehaviour() != NewArray_Unallocating && count() > arraySlots;
 
     return templateObject()->hasSingletonType() || allocating;
 }
 
--- a/js/src/jit/Recover.cpp
+++ b/js/src/jit/Recover.cpp
@@ -1115,17 +1115,17 @@ RCreateThisWithTemplate::recover(JSConte
 {
     RootedObject templateObject(cx, &iter.read().toObject());
 
     // Use AutoEnterAnalysis to avoid invoking the object metadata callback
     // while bailing out, which could try to walk the stack.
     types::AutoEnterAnalysis enter(cx);
 
     // See CodeGenerator::visitCreateThisWithTemplate
-    gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
+    gc::AllocKind allocKind = templateObject->asTenured().getAllocKind();
     gc::InitialHeap initialHeap = tenuredHeap_ ? gc::TenuredHeap : gc::DefaultHeap;
     JSObject *resultObject = JSObject::copy(cx, allocKind, initialHeap, templateObject);
     if (!resultObject)
         return false;
 
     RootedValue result(cx);
     result.setObject(*resultObject);
     iter.storeInstructionResult(result);
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1127,19 +1127,19 @@ AssertValidObjectPtr(JSContext *cx, JSOb
     MOZ_ASSERT(obj->compartment() == cx->compartment());
     MOZ_ASSERT(obj->runtimeFromMainThread() == cx->runtime());
 
     MOZ_ASSERT_IF(!obj->hasLazyType(),
                   obj->type()->clasp() == obj->lastProperty()->getObjectClass());
 
     if (obj->isTenured()) {
         MOZ_ASSERT(obj->isAligned());
-        gc::AllocKind kind = obj->asTenured()->getAllocKind();
+        gc::AllocKind kind = obj->asTenured().getAllocKind();
         MOZ_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
-        MOZ_ASSERT(obj->asTenured()->zone() == cx->zone());
+        MOZ_ASSERT(obj->asTenured().zone() == cx->zone());
     }
 }
 
 void
 AssertValidStringPtr(JSContext *cx, JSString *str)
 {
     // We can't closely inspect strings from another runtime.
     if (str->runtimeFromAnyThread() != cx->runtime()) {
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -647,17 +647,17 @@ js::GCThingTraceKind(void *thing)
 }
 
 JS_FRIEND_API(void)
 js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
 {
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             gc::Cell *thing = e.front().key().wrapped;
-            if (thing->isTenured() && thing->asTenured()->isMarked(gc::GRAY))
+            if (thing->isTenured() && thing->asTenured().isMarked(gc::GRAY))
                 callback(closure, thing);
         }
     }
 }
 
 JS_FRIEND_API(JSObject *)
 js::GetWeakmapKeyDelegate(JSObject *key)
 {
@@ -1184,17 +1184,17 @@ JS::IncrementalReferenceBarrier(void *pt
     if (kind == JSTRACE_STRING && StringIsPermanentAtom(static_cast<JSString *>(ptr)))
         return;
 
     gc::Cell *cell = static_cast<gc::Cell *>(ptr);
 
 #ifdef DEBUG
     Zone *zone = kind == JSTRACE_OBJECT
                  ? static_cast<JSObject *>(cell)->zone()
-                 : cell->asTenured()->zone();
+                 : cell->asTenured().zone();
     MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
 #endif
 
     if (kind == JSTRACE_OBJECT)
         JSObject::writeBarrierPre(static_cast<JSObject*>(cell));
     else if (kind == JSTRACE_STRING)
         JSString::writeBarrierPre(static_cast<JSString*>(cell));
     else if (kind == JSTRACE_SYMBOL)
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -456,17 +456,17 @@ class JSFunction : public JSObject
 
   private:
     inline js::FunctionExtended *toExtended();
     inline const js::FunctionExtended *toExtended() const;
 
   public:
     inline bool isExtended() const {
         JS_STATIC_ASSERT(FinalizeKind != ExtendedFinalizeKind);
-        MOZ_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (asTenured()->getAllocKind() == ExtendedFinalizeKind));
+        MOZ_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (asTenured().getAllocKind() == ExtendedFinalizeKind));
         return !!(flags() & EXTENDED);
     }
 
     /*
      * Accessors for data stored in extended functions. Use setExtendedSlot if
      * the function has already been initialized. Otherwise use
      * initExtendedSlot.
      */
@@ -479,17 +479,17 @@ class JSFunction : public JSObject
     static bool setTypeForScriptedFunction(js::ExclusiveContext *cx, js::HandleFunction fun,
                                            bool singleton = false);
 
     /* GC support. */
     js::gc::AllocKind getAllocKind() const {
         js::gc::AllocKind kind = FinalizeKind;
         if (isExtended())
             kind = ExtendedFinalizeKind;
-        MOZ_ASSERT_IF(isTenured(), kind == asTenured()->getAllocKind());
+        MOZ_ASSERT_IF(isTenured(), kind == asTenured().getAllocKind());
         return kind;
     }
 };
 
 extern JSString *
 fun_toStringHelper(JSContext *cx, js::HandleObject obj, unsigned indent);
 
 inline JSFunction::Flags
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -498,17 +498,17 @@ Arena::finalize(FreeOp *fop, AllocKind t
     uintptr_t lastThing = thingsEnd() - thingSize;
 
     FreeSpan newListHead;
     FreeSpan *newListTail = &newListHead;
     size_t nmarked = 0;
 
     for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
         T *t = i.get<T>();
-        if (t->asTenured()->isMarked()) {
+        if (t->asTenured().isMarked()) {
             uintptr_t thing = reinterpret_cast<uintptr_t>(t);
             if (thing != firstThingOrSuccessorOfLastMarkedThing) {
                 // We just finished passing over one or more free things,
                 // so record a new FreeSpan.
                 newListTail->initBoundsUnchecked(firstThingOrSuccessorOfLastMarkedThing,
                                                  thing - thingSize);
                 newListTail = newListTail->nextSpanUnchecked();
             }
@@ -4194,39 +4194,39 @@ DropStringWrappers(JSRuntime *rt)
  */
 
 void
 JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
 {
     for (js::WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey::Kind kind = e.front().key().kind;
         MOZ_ASSERT(kind != CrossCompartmentKey::StringWrapper);
-        TenuredCell *other = e.front().key().wrapped->asTenured();
+        TenuredCell &other = e.front().key().wrapped->asTenured();
         if (kind == CrossCompartmentKey::ObjectWrapper) {
             /*
              * Add edge to wrapped object compartment if wrapped object is not
             * marked black, to indicate that the wrapper compartment must not be swept
             * after the wrapped compartment.
              */
-            if (!other->isMarked(BLACK) || other->isMarked(GRAY)) {
-                JS::Zone *w = other->zone();
+            if (!other.isMarked(BLACK) || other.isMarked(GRAY)) {
+                JS::Zone *w = other.zone();
                 if (w->isGCMarking())
                     finder.addEdgeTo(w);
             }
         } else {
             MOZ_ASSERT(kind == CrossCompartmentKey::DebuggerScript ||
                        kind == CrossCompartmentKey::DebuggerSource ||
                        kind == CrossCompartmentKey::DebuggerObject ||
                        kind == CrossCompartmentKey::DebuggerEnvironment);
             /*
             * Add edge for debugger object wrappers, to ensure (in conjunction
             * with the call to Debugger::findCompartmentEdges below) that debugger
              * and debuggee objects are always swept in the same group.
              */
-            JS::Zone *w = other->zone();
+            JS::Zone *w = other.zone();
             if (w->isGCMarking())
                 finder.addEdgeTo(w);
         }
     }
 
     Debugger::findCompartmentEdges(zone(), finder);
 }
 
@@ -4457,21 +4457,21 @@ MarkIncomingCrossCompartmentPointers(JSR
         for (JSObject *src = c->gcIncomingGrayPointers;
              src;
              src = NextIncomingCrossCompartmentPointer(src, unlinkList))
         {
             JSObject *dst = CrossCompartmentPointerReferent(src);
             MOZ_ASSERT(dst->compartment() == c);
 
             if (color == GRAY) {
-                if (IsObjectMarked(&src) && src->asTenured()->isMarked(GRAY))
+                if (IsObjectMarked(&src) && src->asTenured().isMarked(GRAY))
                     MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
                                            "cross-compartment gray pointer");
             } else {
-                if (IsObjectMarked(&src) && !src->asTenured()->isMarked(GRAY))
+                if (IsObjectMarked(&src) && !src->asTenured().isMarked(GRAY))
                     MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
                                            "cross-compartment black pointer");
             }
         }
 
         if (unlinkList)
             c->gcIncomingGrayPointers = nullptr;
     }
@@ -6336,27 +6336,27 @@ AutoDisableProxyCheck::~AutoDisableProxy
 {
     gc.enableStrictProxyChecking();
 }
 
 JS_FRIEND_API(void)
 JS::AssertGCThingMustBeTenured(JSObject *obj)
 {
     MOZ_ASSERT(obj->isTenured() &&
-               (!IsNurseryAllocable(obj->asTenured()->getAllocKind()) || obj->getClass()->finalize));
+               (!IsNurseryAllocable(obj->asTenured().getAllocKind()) || obj->getClass()->finalize));
 }
 
 JS_FRIEND_API(void)
 js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
 {
     MOZ_ASSERT(cell);
     if (IsInsideNursery(cell))
         MOZ_ASSERT(kind == JSTRACE_OBJECT);
     else
-        MOZ_ASSERT(MapAllocToTraceKind(cell->asTenured()->getAllocKind()) == kind);
+        MOZ_ASSERT(MapAllocToTraceKind(cell->asTenured().getAllocKind()) == kind);
 }
 
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -36,17 +36,17 @@ ThreadSafeContext::isThreadLocal(T thing
     if (cx->nursery().isInsideNewspace(thing))
         return true;
 #endif
 
     // Global invariant
     MOZ_ASSERT(!IsInsideNursery(thing));
 
     // The thing is not in the nursery, but is it in the private tenured area?
-    if (allocator_->arenas.containsArena(runtime_, thing->asTenured()->arenaHeader()))
+    if (allocator_->arenas.containsArena(runtime_, thing->asTenured().arenaHeader()))
     {
         // GC should be suppressed in preparation for mutating thread local
         // objects, as we don't want to trip any barriers.
         MOZ_ASSERT(!thing->zoneFromAnyThread()->needsIncrementalBarrier());
         MOZ_ASSERT(!thing->runtimeFromAnyThread()->needsIncrementalBarrier());
 
         return true;
     }
@@ -87,17 +87,17 @@ inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     MOZ_ASSERT(thing);
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
     if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
 #endif
-    return MapAllocToTraceKind(cell->asTenured()->getAllocKind());
+    return MapAllocToTraceKind(cell->asTenured().getAllocKind());
 }
 
 inline void
 GCRuntime::poke()
 {
     poked = true;
 
 #ifdef JS_GC_ZEAL
@@ -541,17 +541,17 @@ static inline void
 CheckIncrementalZoneState(ThreadSafeContext *cx, T *t)
 {
 #ifdef DEBUG
     if (!cx->isJSContext())
         return;
 
     Zone *zone = cx->asJSContext()->zone();
     MOZ_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
-                  t->asTenured()->arenaHeader()->allocatedDuringIncremental);
+                  t->asTenured().arenaHeader()->allocatedDuringIncremental);
 #endif
 }
 
 /*
  * Allocate a new GC thing. After a successful allocation the caller must
  * fully initialize the thing before calling any function that can potentially
  * trigger GC. This will ensure that GC tracing never sees junk values stored
  * in the partially initialized thing.
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2050,17 +2050,17 @@ js::DeepCloneObjectLiteral(JSContext *cx
     RootedValue v(cx);
     RootedObject deepObj(cx);
 
     if (obj->is<ArrayObject>()) {
         clone = NewDenseUnallocatedArray(cx, obj->as<ArrayObject>().length(), nullptr, newKind);
     } else {
         // Object literals are tenured by default, as they are held by the JSScript.
         MOZ_ASSERT(obj->isTenured());
-        AllocKind kind = obj->asTenured()->getAllocKind();
+        AllocKind kind = obj->asTenured().getAllocKind();
         Rooted<TypeObject*> typeObj(cx, obj->getType(cx));
         if (!typeObj)
             return nullptr;
         RootedObject parent(cx, obj->getParent());
         clone = NewObjectWithGivenProto(cx, &JSObject::class_, TaggedProto(typeObj->proto().toObject()),
                                         parent, kind, newKind);
     }
 
@@ -2158,17 +2158,17 @@ js::XDRObjectLiteral(XDRState<mode> *xdr
 
     } else {
         // Code the alloc kind of the object.
         AllocKind kind;
         {
             if (mode == XDR_ENCODE) {
                 MOZ_ASSERT(obj->getClass() == &JSObject::class_);
                 MOZ_ASSERT(obj->isTenured());
-                kind = obj->asTenured()->getAllocKind();
+                kind = obj->asTenured().getAllocKind();
             }
 
             if (!xdr->codeEnum32(&kind))
                 return false;
 
             if (mode == XDR_DECODE)
                 obj.set(NewBuiltinClassInstance(cx, &JSObject::class_, kind, js::MaybeSingletonObject));
         }
@@ -2356,17 +2356,17 @@ js::XDRObjectLiteral(XDRState<XDR_ENCODE
 template bool
 js::XDRObjectLiteral(XDRState<XDR_DECODE> *xdr, MutableHandleObject obj);
 
 JSObject *
 js::CloneObjectLiteral(JSContext *cx, HandleObject parent, HandleObject srcObj)
 {
     if (srcObj->getClass() == &JSObject::class_) {
         AllocKind kind = GetBackgroundAllocKind(GuessObjectGCKind(srcObj->numFixedSlots()));
-        MOZ_ASSERT_IF(srcObj->isTenured(), kind == srcObj->asTenured()->getAllocKind());
+        MOZ_ASSERT_IF(srcObj->isTenured(), kind == srcObj->asTenured().getAllocKind());
 
         JSObject *proto = cx->global()->getOrCreateObjectPrototype(cx);
         if (!proto)
             return nullptr;
         Rooted<TypeObject*> typeObj(cx, cx->getNewType(&JSObject::class_, TaggedProto(proto)));
         if (!typeObj)
             return nullptr;
 
@@ -2386,17 +2386,17 @@ js::CloneObjectLiteral(JSContext *cx, Ha
     RootedId id(cx);
     RootedValue value(cx);
     for (size_t i = 0; i < length; i++) {
         // The only markable values in copy on write arrays are atoms, which
         // can be freely copied between compartments.
         value = srcObj->getDenseElement(i);
         MOZ_ASSERT_IF(value.isMarkable(),
                       value.toGCThing()->isTenured() &&
-                      cx->runtime()->isAtomsZone(value.toGCThing()->asTenured()->zone()));
+                      cx->runtime()->isAtomsZone(value.toGCThing()->asTenured().zone()));
 
         id = INT_TO_JSID(i);
         if (!JSObject::defineGeneric(cx, res, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
             return nullptr;
     }
 
     if (!ObjectElements::MakeElementsCopyOnWrite(cx, res))
         return nullptr;
@@ -2471,26 +2471,26 @@ JSObject::ReserveForTradeGuts(JSContext 
      * inline slots. The fixed slots will be updated in place during TradeGuts.
      * Non-native objects need to be reshaped according to the new count.
      */
     if (a->isNative()) {
         if (!a->generateOwnShape(cx))
             MOZ_CRASH();
     } else {
         reserved.newbshape = EmptyShape::getInitialShape(cx, aClass, aProto, a->getParent(), a->getMetadata(),
-                                                         b->asTenured()->getAllocKind());
+                                                         b->asTenured().getAllocKind());
         if (!reserved.newbshape)
             MOZ_CRASH();
     }
     if (b->isNative()) {
         if (!b->generateOwnShape(cx))
             MOZ_CRASH();
     } else {
         reserved.newashape = EmptyShape::getInitialShape(cx, bClass, bProto, b->getParent(), b->getMetadata(),
-                                                         a->asTenured()->getAllocKind());
+                                                         a->asTenured().getAllocKind());
         if (!reserved.newashape)
             MOZ_CRASH();
     }
 
     /* The avals/bvals vectors hold all original values from the objects. */
 
     if (!reserved.avals.reserve(a->slotSpan()))
         MOZ_CRASH();
@@ -2680,18 +2680,18 @@ JSObject::TradeGuts(JSContext *cx, JSObj
 #endif
 }
 
 /* Use this method with extreme caution. It trades the guts of two objects. */
 bool
 JSObject::swap(JSContext *cx, HandleObject a, HandleObject b)
 {
     // Ensure swap doesn't cause a finalizer to not be run.
-    MOZ_ASSERT(IsBackgroundFinalized(a->asTenured()->getAllocKind()) ==
-               IsBackgroundFinalized(b->asTenured()->getAllocKind()));
+    MOZ_ASSERT(IsBackgroundFinalized(a->asTenured().getAllocKind()) ==
+               IsBackgroundFinalized(b->asTenured().getAllocKind()));
     MOZ_ASSERT(a->compartment() == b->compartment());
 
     unsigned r = NotifyGCPreSwap(a, b);
 
     TradeGutsReserved reserved(cx);
     if (!ReserveForTradeGuts(cx, a, b, reserved)) {
         NotifyGCPostSwap(b, a, r);
         return false;
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -79,17 +79,17 @@ JSObject::unwatch(JSContext *cx, JS::Han
 
 inline void
 JSObject::finalize(js::FreeOp *fop)
 {
     js::probes::FinalizeObject(this);
 
 #ifdef DEBUG
     MOZ_ASSERT(isTenured());
-    if (!IsBackgroundFinalized(asTenured()->getAllocKind())) {
+    if (!IsBackgroundFinalized(asTenured().getAllocKind())) {
         /* Assert we're on the main thread. */
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(fop->runtime()));
     }
 #endif
     const js::Class *clasp = getClass();
     if (clasp->finalize)
         clasp->finalize(fop, this);
 
@@ -1094,17 +1094,17 @@ NewObjectScriptedCall(JSContext *cx, Mut
 static inline JSObject *
 CopyInitializerObject(JSContext *cx, HandleObject baseobj, NewObjectKind newKind = GenericObject)
 {
     MOZ_ASSERT(baseobj->getClass() == &JSObject::class_);
     MOZ_ASSERT(!baseobj->inDictionaryMode());
 
     gc::AllocKind allocKind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
     allocKind = gc::GetBackgroundAllocKind(allocKind);
-    MOZ_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->asTenured()->getAllocKind());
+    MOZ_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->asTenured().getAllocKind());
     RootedObject obj(cx);
     obj = NewBuiltinClassInstance(cx, &JSObject::class_, allocKind, newKind);
     if (!obj)
         return nullptr;
 
     RootedObject metadata(cx, obj->getMetadata());
     RootedShape lastProp(cx, baseobj->lastProperty());
     if (!JSObject::setLastProperty(cx, obj, lastProp))
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -2911,17 +2911,17 @@ Rebase(JSScript *dst, JSScript *src, T *
 
 JSScript *
 js::CloneScript(JSContext *cx, HandleObject enclosingScope, HandleFunction fun, HandleScript src,
                 NewObjectKind newKind /* = GenericObject */)
 {
     /* NB: Keep this in sync with XDRScript. */
 
     /* Some embeddings are not careful to use ExposeObjectToActiveJS as needed. */
-    MOZ_ASSERT(!src->sourceObject()->asTenured()->isMarked(gc::GRAY));
+    MOZ_ASSERT(!src->sourceObject()->asTenured().isMarked(gc::GRAY));
 
     uint32_t nconsts   = src->hasConsts()   ? src->consts()->length   : 0;
     uint32_t nobjects  = src->hasObjects()  ? src->objects()->length  : 0;
     uint32_t nregexps  = src->hasRegexps()  ? src->regexps()->length  : 0;
     uint32_t ntrynotes = src->hasTrynotes() ? src->trynotes()->length : 0;
     uint32_t nblockscopes = src->hasBlockScopes() ? src->blockScopes()->length : 0;
 
     /* Script data */
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -175,17 +175,17 @@ ObjectValueMap::findZoneEdges()
      * For unmarked weakmap keys with delegates in a different zone, add a zone
      * edge to ensure that the delegate zone does finish marking after the key
      * zone.
      */
     JS::AutoSuppressGCAnalysis nogc;
     Zone *mapZone = compartment->zone();
     for (Range r = all(); !r.empty(); r.popFront()) {
         JSObject *key = r.front().key();
-        if (key->asTenured()->isMarked(BLACK) && !key->asTenured()->isMarked(GRAY))
+        if (key->asTenured().isMarked(BLACK) && !key->asTenured().isMarked(GRAY))
             continue;
         JSWeakmapKeyDelegateOp op = key->getClass()->ext.weakmapKeyDelegateOp;
         if (!op)
             continue;
         JSObject *delegate = op(key);
         if (!delegate)
             continue;
         Zone *delegateZone = delegate->zone();
--- a/js/src/proxy/Wrapper.cpp
+++ b/js/src/proxy/Wrapper.cpp
@@ -160,10 +160,10 @@ bool Wrapper::finalizeInBackground(Value
      * Make the 'background-finalized-ness' of the wrapper the same as the
      * wrapped object, to allow transplanting between them.
      *
      * If the wrapped object is in the nursery then we know it doesn't have a
      * finalizer, and so background finalization is ok.
      */
     if (IsInsideNursery(&priv.toObject()))
         return true;
-    return IsBackgroundFinalized(priv.toObject().asTenured()->getAllocKind());
+    return IsBackgroundFinalized(priv.toObject().asTenured().getAllocKind());
 }
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -819,17 +819,17 @@ class ObjectImpl : public gc::Cell
      * capacity is not stored explicitly, and the allocated size of the slot
      * array is kept in sync with this count.
      */
     static uint32_t dynamicSlotsCount(uint32_t nfixed, uint32_t span, const Class *clasp);
 
     /* Memory usage functions. */
     size_t tenuredSizeOfThis() const {
         MOZ_ASSERT(isTenured());
-        return js::gc::Arena::thingSize(asTenured()->getAllocKind());
+        return js::gc::Arena::thingSize(asTenured().getAllocKind());
     }
 
     /* Elements accessors. */
 
     ObjectElements * getElementsHeader() const {
         return ObjectElements::fromElements(elements);
     }
 
@@ -979,24 +979,24 @@ class ObjectImpl : public gc::Cell
     static size_t getPrivateDataOffset(size_t nfixed) { return getFixedSlotOffset(nfixed); }
     static size_t offsetOfSlots() { return offsetof(ObjectImpl, slots); }
 };
 
 /* static */ MOZ_ALWAYS_INLINE void
 ObjectImpl::readBarrier(ObjectImpl *obj)
 {
     if (!isNullLike(obj) && obj->isTenured())
-        obj->asTenured()->readBarrier(obj->asTenured());
+        obj->asTenured().readBarrier(&obj->asTenured());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
     if (!isNullLike(obj) && obj->isTenured())
-        obj->asTenured()->writeBarrierPre(obj->asTenured());
+        obj->asTenured().writeBarrierPre(&obj->asTenured());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *cellp)
 {
     MOZ_ASSERT(cellp);
 #ifdef JSGC_GENERATIONAL
     if (IsNullTaggedPointer(obj))
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -247,17 +247,17 @@ RegExpObject::trace(JSTracer *trc, JSObj
     // be collected. To detect this we need to test all the following
     // conditions, since:
     //   1. During TraceRuntime, isHeapBusy() is true, but the tracer might not
     //      be a marking tracer.
     //   2. When a write barrier executes, IS_GC_MARKING_TRACER is true, but
     //      isHeapBusy() will be false.
     if (trc->runtime()->isHeapBusy() &&
         IS_GC_MARKING_TRACER(trc) &&
-        !obj->asTenured()->zone()->isPreservingCode())
+        !obj->asTenured().zone()->isPreservingCode())
     {
         obj->setPrivate(nullptr);
     } else {
         shared->trace(trc);
     }
 }
 
 const Class RegExpObject::class_ = {
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -1284,17 +1284,17 @@ CloneObject(JSContext *cx, HandleObject 
         if (!str)
             return nullptr;
         clone = StringObject::create(cx, str);
     } else if (selfHostedObject->is<ArrayObject>()) {
         clone = NewDenseEmptyArray(cx, nullptr, TenuredObject);
     } else {
         MOZ_ASSERT(selfHostedObject->isNative());
         clone = NewObjectWithGivenProto(cx, selfHostedObject->getClass(), TaggedProto(nullptr), cx->global(),
-                                        selfHostedObject->asTenured()->getAllocKind(),
+                                        selfHostedObject->asTenured().getAllocKind(),
                                         SingletonObject);
     }
     if (!clone)
         return nullptr;
     if (!CloneProperties(cx, selfHostedObject, clone))
         return nullptr;
     return clone;
 }