Backed out changeset 465793bc6fcf (bug 1180017) for suspicion of causing bug 1188799
author:      Carsten "Tomcat" Book <cbook@mozilla.com>
date:        Wed, 29 Jul 2015 15:27:12 +0200
changeset:   286814:b65c048414f701e3526f13719ca97f15951eeefb
parent:      286813:90500c5ebface5c24378fcc9ea1d050dcdbd6f1b
child:       286815:bf7d6315fd6b616ebec2e0ae7febeeaf53168b4f
push id:     5067
push user:   raliiev@mozilla.com
push date:   Mon, 21 Sep 2015 14:04:52 +0000
treeherder:  mozilla-beta@14221ffe5b2f
bugs:        1180017, 1188799
milestone:   42.0a1
backs out:   465793bc6fcf71beafea0bbf87f4d7ec23305f63
files:
dom/base/nsWrapperCache.cpp
dom/xbl/nsXBLDocumentInfo.cpp
js/public/HeapAPI.h
js/public/TraceKind.h
js/public/TracingAPI.h
js/src/gc/Marking.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Tracer.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsweakmap.h
js/src/vm/UbiNode.cpp
xpcom/base/CycleCollectedJSRuntime.cpp
xpcom/glue/nsCycleCollectionParticipant.cpp
--- a/dom/base/nsWrapperCache.cpp
+++ b/dom/base/nsWrapperCache.cpp
@@ -96,18 +96,18 @@ private:
   JSObject* mWrapper;
 };
 
 static void
 DebugWrapperTraceCallback(JS::GCCellPtr aPtr, const char* aName, void* aClosure)
 {
   DebugWrapperTraversalCallback* callback =
     static_cast<DebugWrapperTraversalCallback*>(aClosure);
-  if (aPtr.is<JSObject>()) {
-    callback->NoteJSObject(&aPtr.as<JSObject>());
+  if (aPtr.isObject()) {
+    callback->NoteJSObject(aPtr.toObject());
   }
 }
 
 void
 nsWrapperCache::CheckCCWrapperTraversal(void* aScriptObjectHolder,
                                         nsScriptObjectTracer* aTracer)
 {
   JSObject* wrapper = GetWrapper();
--- a/dom/xbl/nsXBLDocumentInfo.cpp
+++ b/dom/xbl/nsXBLDocumentInfo.cpp
@@ -72,17 +72,17 @@ NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(nsX
       iter.UserData()->Trace(aCallbacks, aClosure);
     }
   }
 NS_IMPL_CYCLE_COLLECTION_TRACE_END
 
 static void
 UnmarkXBLJSObject(JS::GCCellPtr aPtr, const char* aName, void* aClosure)
 {
-  JS::ExposeObjectToActiveJS(&aPtr.as<JSObject>());
+  JS::ExposeObjectToActiveJS(aPtr.toObject());
 }
 
 static PLDHashOperator
 UnmarkProtos(const nsACString &aKey, nsXBLPrototypeBinding *aProto, void* aClosure)
 {
   aProto->Trace(TraceCallbackFunc(UnmarkXBLJSObject), nullptr);
   return PL_DHASH_NEXT;
 }
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -158,52 +158,63 @@ class JS_FRIEND_API(GCCellPtr)
   public:
     // Construction from a void* and trace kind.
     GCCellPtr(void* gcthing, JS::TraceKind traceKind) : ptr(checkedCast(gcthing, traceKind)) {}
 
     // Automatically construct a null GCCellPtr from nullptr.
     MOZ_IMPLICIT GCCellPtr(decltype(nullptr)) : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}
 
     // Construction from an explicit type.
-    template <typename T>
-    explicit GCCellPtr(T* p) : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) { }
-    explicit GCCellPtr(JSFunction* p) : ptr(checkedCast(p, JS::TraceKind::Object)) { }
+    explicit GCCellPtr(JSObject* obj) : ptr(checkedCast(obj, JS::TraceKind::Object)) { }
+    explicit GCCellPtr(JSFunction* fun) : ptr(checkedCast(fun, JS::TraceKind::Object)) { }
+    explicit GCCellPtr(JSString* str) : ptr(checkedCast(str, JS::TraceKind::String)) { }
     explicit GCCellPtr(JSFlatString* str) : ptr(checkedCast(str, JS::TraceKind::String)) { }
+    explicit GCCellPtr(JS::Symbol* sym) : ptr(checkedCast(sym, JS::TraceKind::Symbol)) { }
+    explicit GCCellPtr(JSScript* script) : ptr(checkedCast(script, JS::TraceKind::Script)) { }
     explicit GCCellPtr(const Value& v);
 
     JS::TraceKind kind() const {
         JS::TraceKind traceKind = JS::TraceKind(ptr & OutOfLineTraceKindMask);
         if (uintptr_t(traceKind) != OutOfLineTraceKindMask)
             return traceKind;
         return outOfLineKind();
     }
 
     // Allow GCCellPtr to be used in a boolean context.
     explicit operator bool() const {
         MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
         return asCell();
     }
 
     // Simplify checks to the kind.
-    template <typename T>
-    bool is() const { return kind() == JS::MapTypeToTraceKind<T>::kind; }
+    bool isObject() const { return kind() == JS::TraceKind::Object; }
+    bool isScript() const { return kind() == JS::TraceKind::Script; }
+    bool isString() const { return kind() == JS::TraceKind::String; }
+    bool isSymbol() const { return kind() == JS::TraceKind::Symbol; }
+    bool isShape() const { return kind() == JS::TraceKind::Shape; }
+    bool isObjectGroup() const { return kind() == JS::TraceKind::ObjectGroup; }
 
     // Conversions to more specific types must match the kind. Access to
     // further refined types is not allowed directly from a GCCellPtr.
-    template <typename T>
-    T& as() const {
-        MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
-        // We can't use static_cast here, because the fact that JSObject
-        // inherits from js::gc::Cell is not part of the public API.
-        return *reinterpret_cast<T*>(asCell());
+    JSObject* toObject() const {
+        MOZ_ASSERT(kind() == JS::TraceKind::Object);
+        return reinterpret_cast<JSObject*>(asCell());
+    }
+    JSString* toString() const {
+        MOZ_ASSERT(kind() == JS::TraceKind::String);
+        return reinterpret_cast<JSString*>(asCell());
     }
-
-    // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
-    // (It would be more symmetrical with |to| for this to return a |Cell&|, but
-    // the result can be |nullptr|, and null references are undefined behavior.)
+    JSScript* toScript() const {
+        MOZ_ASSERT(kind() == JS::TraceKind::Script);
+        return reinterpret_cast<JSScript*>(asCell());
+    }
+    Symbol* toSymbol() const {
+        MOZ_ASSERT(kind() == JS::TraceKind::Symbol);
+        return reinterpret_cast<Symbol*>(asCell());
+    }
     js::gc::Cell* asCell() const {
         return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
     }
 
     // The CC's trace logger needs an identity that is XPIDL serializable.
     uint64_t unsafeAsInteger() const {
         return static_cast<uint64_t>(unsafeAsUIntPtr());
     }
--- a/js/public/TraceKind.h
+++ b/js/public/TraceKind.h
@@ -2,28 +2,16 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef js_TraceKind_h
 #define js_TraceKind_h
 
-#include "js/TypeDecls.h"
-
-// Forward declarations of all the types a TraceKind can denote.
-namespace js {
-class BaseShape;
-class LazyScript;
-class ObjectGroup;
-namespace jit {
-class JitCode;
-} // namespace jit
-} // namespace js
-
 namespace JS {
 
 // When tracing a thing, the GC needs to know about the layout of the object it
 // is looking at. There are a fixed number of different layouts that the GC
 // knows about. The "trace kind" is a static map which tells which layout a GC
 // thing has.
 //
 // Although this map is public, the details are completely hidden. Not all of
@@ -54,87 +42,11 @@ enum class TraceKind
     JitCode = 0x1F,
     LazyScript = 0x2F
 };
 const static uintptr_t OutOfLineTraceKindMask = 0x07;
 static_assert(uintptr_t(JS::TraceKind::BaseShape) & OutOfLineTraceKindMask, "mask bits are set");
 static_assert(uintptr_t(JS::TraceKind::JitCode) & OutOfLineTraceKindMask, "mask bits are set");
 static_assert(uintptr_t(JS::TraceKind::LazyScript) & OutOfLineTraceKindMask, "mask bits are set");
 
-#define JS_FOR_EACH_TRACEKIND(D) \
- /* PrettyName       TypeName           AddToCCKind */ \
-    D(BaseShape,     js::BaseShape,     true) \
-    D(JitCode,       js::jit::JitCode,  true) \
-    D(LazyScript,    js::LazyScript,    true) \
-    D(Object,        JSObject,          true) \
-    D(ObjectGroup,   js::ObjectGroup,   true) \
-    D(Script,        JSScript,          true) \
-    D(Shape,         js::Shape,         true) \
-    D(String,        JSString,          false) \
-    D(Symbol,        JS::Symbol,        false)
-
-// Map from base trace type to the trace kind.
-template <typename T> struct MapTypeToTraceKind {};
-#define JS_EXPAND_DEF(name, type, _) \
-    template <> struct MapTypeToTraceKind<type> { \
-        static const JS::TraceKind kind = JS::TraceKind::name; \
-    };
-JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
-#undef JS_EXPAND_DEF
-
-// Fortunately, few places in the system need to deal with fully abstract
-// cells. In those places that do, we generally want to move to a layout
-// templated function as soon as possible. This template wraps the upcast
-// for that dispatch.
-//
-// Given a call:
-//
-//    DispatchTraceKindTyped(f, thing, traceKind, ... args)
-//
-// Downcast the |void *thing| to the specific type designated by |traceKind|,
-// and pass it to the functor |f| along with |... args|, forwarded. Pass the
-// type designated by |traceKind| as the functor's template argument. The
-// |thing| parameter is optional; without it, we simply pass through |... args|.
-
-// GCC and Clang require an explicit template declaration in front of the
-// specialization of operator() because it is a dependent template. MSVC, on
-// the other hand, gets very confused if we have a |template| token there.
-#ifdef _MSC_VER
-# define JS_DEPENDENT_TEMPLATE_HINT
-#else
-# define JS_DEPENDENT_TEMPLATE_HINT template
-#endif
-template <typename F, typename... Args>
-auto
-DispatchTraceKindTyped(F f, JS::TraceKind traceKind, Args&&... args)
-  -> decltype(f. JS_DEPENDENT_TEMPLATE_HINT operator()<JSObject>(mozilla::Forward<Args>(args)...))
-{
-    switch (traceKind) {
-#define JS_EXPAND_DEF(name, type, _) \
-      case JS::TraceKind::name: \
-        return f. JS_DEPENDENT_TEMPLATE_HINT operator()<type>(mozilla::Forward<Args>(args)...);
-      JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
-#undef JS_EXPAND_DEF
-      default:
-          MOZ_CRASH("Invalid trace kind in DispatchTraceKindTyped.");
-    }
-}
-#undef JS_DEPENDENT_TEMPLATE_HINT
-
-template <typename F, typename... Args>
-auto
-DispatchTraceKindTyped(F f, void* thing, JS::TraceKind traceKind, Args&&... args)
-  -> decltype(f(reinterpret_cast<JSObject*>(0), mozilla::Forward<Args>(args)...))
-{
-    switch (traceKind) {
-#define JS_EXPAND_DEF(name, type, _) \
-      case JS::TraceKind::name: \
-          return f(static_cast<type*>(thing), mozilla::Forward<Args>(args)...);
-      JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
-#undef JS_EXPAND_DEF
-      default:
-          MOZ_CRASH("Invalid trace kind in DispatchTraceKindTyped.");
-    }
-}
-
 } // namespace JS
 
 #endif // js_TraceKind_h
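
A brief note on the mask above: GCCellPtr packs the trace kind into the low bits of the pointer, and kind() (in HeapAPI.h above) returns those bits directly unless they all match OutOfLineTraceKindMask, in which case outOfLineKind() consults the arena. The values shown for JitCode (0x1F) and LazyScript (0x2F) deliberately hit that fallback, which the static_asserts above sanity-check; the in-line kinds must therefore use low-bit patterns other than 0x07, or kind() could not return them directly. Two illustrative checks (not part of the patch):

    static_assert((0x1F & 0x07) == 0x07, "JitCode overlaps every mask bit");    // JS::TraceKind::JitCode
    static_assert((0x2F & 0x07) == 0x07, "LazyScript overlaps every mask bit"); // JS::TraceKind::LazyScript
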
--- a/js/public/TracingAPI.h
+++ b/js/public/TracingAPI.h
@@ -21,16 +21,25 @@ template <typename T> class Heap;
 template <typename T> class TenuredHeap;
 
 // Returns a static string equivalent of |kind|.
 JS_FRIEND_API(const char*)
 GCTraceKindToAscii(JS::TraceKind kind);
 
 } // namespace JS
 
+namespace js {
+class BaseShape;
+class LazyScript;
+class ObjectGroup;
+namespace jit {
+class JitCode;
+} // namespace jit
+} // namespace js
+
 enum WeakMapTraceKind {
     DoNotTraceWeakMaps = 0,
     TraceWeakMapValues = 1,
     TraceWeakMapKeysValues = 2
 };
 
 class JS_PUBLIC_API(JSTracer)
 {
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -30,18 +30,16 @@
 
 #include "gc/Nursery-inl.h"
 #include "vm/String-inl.h"
 #include "vm/UnboxedObject-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
-using JS::MapTypeToTraceKind;
-
 using mozilla::ArrayLength;
 using mozilla::DebugOnly;
 using mozilla::IsBaseOf;
 using mozilla::IsSame;
 using mozilla::MakeRange;
 using mozilla::PodCopy;
 
 // Tracing Overview
@@ -252,17 +250,17 @@ template <>
 void
 CheckTracedThing<jsid>(JSTracer* trc, jsid id)
 {
     DispatchIdTyped(CheckTracedFunctor<jsid>(), id, trc);
 }
 
 #define IMPL_CHECK_TRACED_THING(_, type, __) \
     template void CheckTracedThing<type*>(JSTracer*, type*);
-JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
+FOR_EACH_GC_LAYOUT(IMPL_CHECK_TRACED_THING);
 #undef IMPL_CHECK_TRACED_THING
 } // namespace js
 
 static bool
 ShouldMarkCrossCompartment(JSTracer* trc, JSObject* src, Cell* cell)
 {
     if (!trc->isMarkingTracer())
         return true;
@@ -401,17 +399,17 @@ template <typename T,
                         : IsBaseOf<Shape, T>::value        ? JS::TraceKind::Shape
                         : IsBaseOf<BaseShape, T>::value    ? JS::TraceKind::BaseShape
                         : IsBaseOf<jit::JitCode, T>::value ? JS::TraceKind::JitCode
                         : IsBaseOf<LazyScript, T>::value   ? JS::TraceKind::LazyScript
                         :                                    JS::TraceKind::ObjectGroup>
 struct BaseGCType;
 #define IMPL_BASE_GC_TYPE(name, type_, _) \
     template <typename T> struct BaseGCType<T, JS::TraceKind:: name> { typedef type_ type; };
-JS_FOR_EACH_TRACEKIND(IMPL_BASE_GC_TYPE);
+FOR_EACH_GC_LAYOUT(IMPL_BASE_GC_TYPE);
 #undef IMPL_BASE_GC_TYPE
 
 // Our barrier templates are parameterized on the pointer types so that we can
 // share the definitions with Value and jsid. Thus, we need to strip the
 // pointer before sending the type to BaseGCType and re-add it on the other
 // side. As such:
 template <typename T> struct PtrBaseGCType {};
 template <> struct PtrBaseGCType<Value> { typedef Value type; };
@@ -551,17 +549,17 @@ struct TraceRootFunctor {
 
 void
 js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name)
 {
     MOZ_ASSERT(thingp);
     if (!*thingp)
         return;
     TraceRootFunctor f;
-    DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
+    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
 }
 
 // A typed functor adaptor for TraceManuallyBarrieredEdge.
 struct TraceManuallyBarrieredEdgeFunctor {
     template <typename T>
     void operator()(JSTracer* trc, Cell** thingp, const char* name) {
         TraceManuallyBarrieredEdge(trc, reinterpret_cast<T**>(thingp), name);
     }
@@ -569,29 +567,29 @@ struct TraceManuallyBarrieredEdgeFunctor
 
 void
 js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name)
 {
     MOZ_ASSERT(thingp);
     if (!*thingp)
         return;
     TraceManuallyBarrieredEdgeFunctor f;
-    DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
+    CallTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
 }
 
 // This method is responsible for dynamic dispatch to the real tracer
 // implementation. Consider replacing this choke point with virtual dispatch:
 // a sufficiently smart C++ compiler may be able to devirtualize some paths.
 template <typename T>
 void
 DispatchToTracer(JSTracer* trc, T* thingp, const char* name)
 {
 #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
     static_assert(
-            JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
+            FOR_EACH_GC_LAYOUT(IS_SAME_TYPE_OR)
             mozilla::IsSame<T, JS::Value>::value ||
             mozilla::IsSame<T, jsid>::value,
             "Only the base cell layout types are allowed into marking/tracing internals");
 #undef IS_SAME_TYPE_OR
     if (trc->isMarkingTracer())
         return DoMarking(static_cast<GCMarker*>(trc), *thingp);
     if (trc->isTenuringTracer())
         return static_cast<TenuringTracer*>(trc)->traverse(thingp);
@@ -1728,17 +1726,17 @@ struct PushArenaFunctor {
     template <typename T> void operator()(GCMarker* gcmarker, ArenaHeader* aheader) {
         PushArenaTyped<T>(gcmarker, aheader);
     }
 };
 
 void
 gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader)
 {
-    DispatchTraceKindTyped(PushArenaFunctor(), MapAllocToTraceKind(aheader->getAllocKind()), gcmarker, aheader);
+    CallTyped(PushArenaFunctor(), MapAllocToTraceKind(aheader->getAllocKind()), gcmarker, aheader);
 }
 
 #ifdef DEBUG
 void
 GCMarker::checkZone(void* p)
 {
     MOZ_ASSERT(started);
     DebugOnly<Cell*> cell = static_cast<Cell*>(p);
@@ -2113,17 +2111,17 @@ js::TenuringTracer::moveElementsToTenure
 /*** IsMarked / IsAboutToBeFinalized **************************************************************/
 
 template <typename T>
 static inline void
 CheckIsMarkedThing(T* thingp)
 {
 #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
     static_assert(
-            JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
+            FOR_EACH_GC_LAYOUT(IS_SAME_TYPE_OR)
             false, "Only the base cell layout types are allowed into marking/tracing internals");
 #undef IS_SAME_TYPE_OR
 
 #ifdef DEBUG
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -521,17 +521,17 @@ BufferGrayRootsTracer::onChild(const JS:
     gc::TenuredCell* tenured = gc::TenuredCell::fromPointer(thing.asCell());
 
     Zone* zone = tenured->zone();
     if (zone->isCollecting()) {
         // See the comment on SetMaybeAliveFlag to see why we only do this for
         // objects and scripts. We rely on gray root buffering for this to work,
         // but we only need to worry about uncollected dead compartments during
         // incremental GCs (when we do gray root buffering).
-        DispatchTraceKindTyped(SetMaybeAliveFunctor(), tenured, thing.kind());
+        CallTyped(SetMaybeAliveFunctor(), tenured, thing.kind());
 
         if (!zone->gcGrayRoots.append(tenured))
             bufferingGrayRootsFailed = true;
     }
 }
 
 void
 GCRuntime::markBufferedGrayRoots(JS::Zone* zone)
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -48,17 +48,17 @@ DoCallback(JS::CallbackTracer* trc, T* t
 {
     CheckTracedThing(trc, *thingp);
     JS::AutoTracingName ctx(trc, name);
     trc->dispatchToOnEdge(thingp);
     return *thingp;
 }
 #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(name, type, _) \
     template type* DoCallback<type*>(JS::CallbackTracer*, type**, const char*);
-JS_FOR_EACH_TRACEKIND(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS);
+FOR_EACH_GC_LAYOUT(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS);
 #undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
 
 template <typename S>
 struct DoCallbackFunctor : public IdentityDefaultAdaptor<S> {
     template <typename T> S operator()(T* t, JS::CallbackTracer* trc, const char* name) {
         return js::gc::RewrapValueOrId<S, T*>::wrap(DoCallback(trc, &t, name));
     }
 };
@@ -188,17 +188,17 @@ struct TraceChildrenFunctor {
     }
 };
 
 void
 js::TraceChildren(JSTracer* trc, void* thing, JS::TraceKind kind)
 {
     MOZ_ASSERT(thing);
     TraceChildrenFunctor f;
-    DispatchTraceKindTyped(f, kind, trc, thing);
+    CallTyped(f, kind, trc, thing);
 }
 
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer* trc)
 {
     AssertHeapIsIdle(trc->runtime());
     TraceRuntime(trc);
 }
@@ -326,33 +326,33 @@ struct ObjectGroupCycleCollectorTracer :
 
     JS::CallbackTracer* innerTracer;
     Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
 };
 
 void
 ObjectGroupCycleCollectorTracer::onChild(const JS::GCCellPtr& thing)
 {
-    if (thing.is<JSObject>() || thing.is<JSScript>()) {
+    if (thing.isObject() || thing.isScript()) {
         // Invoke the inner cycle collector callback on this child. It will not
         // recurse back into TraceChildren.
         innerTracer->onChild(thing);
         return;
     }
 
-    if (thing.is<ObjectGroup>()) {
+    if (thing.isObjectGroup()) {
         // If this group is required to be in an ObjectGroup chain, trace it
         // via the provided worklist rather than continuing to recurse.
-        ObjectGroup& group = thing.as<ObjectGroup>();
-        if (group.maybeUnboxedLayout()) {
+        ObjectGroup* group = static_cast<ObjectGroup*>(thing.asCell());
+        if (group->maybeUnboxedLayout()) {
             for (size_t i = 0; i < seen.length(); i++) {
-                if (seen[i] == &group)
+                if (seen[i] == group)
                     return;
             }
-            if (seen.append(&group) && worklist.append(&group)) {
+            if (seen.append(group) && worklist.append(group)) {
                 return;
             } else {
                 // If append fails, keep tracing normally. The worst that will
                 // happen is we end up overrecursing.
             }
         }
     }
 
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -200,25 +200,25 @@ JS_FRIEND_API(bool)
 JS_WrapPropertyDescriptor(JSContext* cx, JS::MutableHandle<js::PropertyDescriptor> desc)
 {
     return cx->compartment()->wrap(cx, desc);
 }
 
 JS_FRIEND_API(void)
 JS_TraceShapeCycleCollectorChildren(JS::CallbackTracer* trc, JS::GCCellPtr shape)
 {
-    MOZ_ASSERT(shape.is<Shape>());
-    TraceCycleCollectorChildren(trc, &shape.as<Shape>());
+    MOZ_ASSERT(shape.isShape());
+    TraceCycleCollectorChildren(trc, static_cast<Shape*>(shape.asCell()));
 }
 
 JS_FRIEND_API(void)
 JS_TraceObjectGroupCycleCollectorChildren(JS::CallbackTracer* trc, JS::GCCellPtr group)
 {
-    MOZ_ASSERT(group.is<ObjectGroup>());
-    TraceCycleCollectorChildren(trc, &group.as<ObjectGroup>());
+    MOZ_ASSERT(group.isObjectGroup());
+    TraceCycleCollectorChildren(trc, static_cast<ObjectGroup*>(group.asCell()));
 }
 
 static bool
 DefineHelpProperty(JSContext* cx, HandleObject obj, const char* prop, const char* value)
 {
     RootedAtom atom(cx, Atomize(cx, value, strlen(value)));
     if (!atom)
         return false;
@@ -847,18 +847,18 @@ struct DumpHeapTracer : public JS::Callb
     DumpHeapTracer(FILE* fp, JSRuntime* rt)
       : JS::CallbackTracer(rt, DoNotTraceWeakMaps),
         js::WeakMapTracer(rt), prefix(""), output(fp)
     {}
 
   private:
     void trace(JSObject* map, JS::GCCellPtr key, JS::GCCellPtr value) override {
         JSObject* kdelegate = nullptr;
-        if (key.is<JSObject>())
-            kdelegate = js::GetWeakmapKeyDelegate(&key.as<JSObject>());
+        if (key.isObject())
+            kdelegate = js::GetWeakmapKeyDelegate(key.toObject());
 
         fprintf(output, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n",
                 map, key.asCell(), kdelegate, value.asCell());
     }
 
     void onChild(const JS::GCCellPtr& thing) override;
 };
 
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3716,17 +3716,17 @@ struct MaybeCompartmentFunctor {
     template <typename T> JSCompartment* operator()(T* t) { return t->maybeCompartment(); }
 };
 
 void
 CompartmentCheckTracer::onChild(const JS::GCCellPtr& thing)
 {
     TenuredCell* tenured = TenuredCell::fromPointer(thing.asCell());
 
-    JSCompartment* comp = DispatchTraceKindTyped(MaybeCompartmentFunctor(), tenured, thing.kind());
+    JSCompartment* comp = CallTyped(MaybeCompartmentFunctor(), tenured, thing.kind());
     if (comp && compartment) {
         MOZ_ASSERT(comp == compartment || runtime()->isAtomsCompartment(comp) ||
                    (srcKind == JS::TraceKind::Object &&
                     InCrossCompartmentMap(static_cast<JSObject*>(src), tenured, thing.kind())));
     } else {
         MOZ_ASSERT(tenured->zone() == zone || tenured->zone()->isAtomsZone());
     }
 }
@@ -3739,18 +3739,17 @@ GCRuntime::checkForCompartmentMismatches
 
     CompartmentCheckTracer trc(rt);
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         trc.zone = zone;
         for (auto thingKind : AllAllocKinds()) {
             for (ZoneCellIterUnderGC i(zone, thingKind); !i.done(); i.next()) {
                 trc.src = i.getCell();
                 trc.srcKind = MapAllocToTraceKind(thingKind);
-                trc.compartment = DispatchTraceKindTyped(MaybeCompartmentFunctor(),
-                                                         trc.src, trc.srcKind);
+                trc.compartment = CallTyped(MaybeCompartmentFunctor(), trc.src, trc.srcKind);
                 JS_TraceChildren(&trc, trc.src, trc.srcKind);
             }
         }
     }
 }
 #endif
 
 static void
@@ -6968,17 +6967,17 @@ JS::AutoAssertGCCallback::AutoAssertGCCa
     MOZ_ASSERT(obj->runtimeFromMainThread()->isHeapCollecting());
 }
 
 JS_FRIEND_API(const char*)
 JS::GCTraceKindToAscii(JS::TraceKind kind)
 {
     switch(kind) {
 #define MAP_NAME(name, _0, _1) case JS::TraceKind::name: return #name;
-JS_FOR_EACH_TRACEKIND(MAP_NAME);
+FOR_EACH_GC_LAYOUT(MAP_NAME);
 #undef MAP_NAME
       default: return "Invalid";
     }
 }
 
 JS::GCCellPtr::GCCellPtr(const Value& v)
   : ptr(0)
 {
@@ -6998,18 +6997,18 @@ JS::GCCellPtr::outOfLineKind() const
     MOZ_ASSERT((ptr & OutOfLineTraceKindMask) == OutOfLineTraceKindMask);
     MOZ_ASSERT(asCell()->isTenured());
     return MapAllocToTraceKind(asCell()->asTenured().getAllocKind());
 }
 
 bool
 JS::GCCellPtr::mayBeOwnedByOtherRuntime() const
 {
-    return (is<JSString>() && as<JSString>().isPermanentAtom()) ||
-           (is<Symbol>() && as<Symbol>().isWellKnownSymbol());
+    return (isString() && toString()->isPermanentAtom()) ||
+           (isSymbol() && toSymbol()->isWellKnownSymbol());
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
 js::gc::CheckHashTablesAfterMovingGC(JSRuntime* rt)
 {
     /*
      * Check that internal hash tables no longer have any pointers to things
@@ -7192,17 +7191,17 @@ struct IncrementalReferenceBarrierFuncto
 };
 
 JS_PUBLIC_API(void)
 JS::IncrementalReferenceBarrier(GCCellPtr thing)
 {
     if (!thing)
         return;
 
-    DispatchTraceKindTyped(IncrementalReferenceBarrierFunctor(), thing.asCell(), thing.kind());
+    CallTyped(IncrementalReferenceBarrierFunctor(), thing.asCell(), thing.kind());
 }
 
 JS_PUBLIC_API(void)
 JS::IncrementalValueBarrier(const Value& v)
 {
     js::HeapValue::writeBarrierPre(v);
 }
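
As a reading aid for the GCTraceKindToAscii switch above: FOR_EACH_GC_LAYOUT (reinstated in jsgc.h below) is an X-macro, so FOR_EACH_GC_LAYOUT(MAP_NAME) stamps out one case per GC layout. A hand-written, abbreviated version of what the function compiles down to (illustrative only, trimmed to three of the nine kinds):

    #include "js/TraceKind.h"

    // Three of the cases generated by the macro expansion; the real function
    // covers every FOR_EACH_GC_LAYOUT entry.
    static const char*
    TraceKindNameSketch(JS::TraceKind kind)
    {
        switch (kind) {
          case JS::TraceKind::BaseShape: return "BaseShape";
          case JS::TraceKind::Object:    return "Object";
          case JS::TraceKind::String:    return "String";
          default:                       return "Invalid";
        }
    }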
 
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -18,16 +18,28 @@
 #include "jslock.h"
 
 #include "js/GCAPI.h"
 #include "js/SliceBudget.h"
 #include "js/Vector.h"
 
 #include "vm/NativeObject.h"
 
+#define FOR_EACH_GC_LAYOUT(D) \
+ /* PrettyName       TypeName           AddToCCKind */ \
+    D(BaseShape,     js::BaseShape,     true) \
+    D(JitCode,       js::jit::JitCode,  true) \
+    D(LazyScript,    js::LazyScript,    true) \
+    D(Object,        JSObject,          true) \
+    D(ObjectGroup,   js::ObjectGroup,   true) \
+    D(Script,        JSScript,          true) \
+    D(Shape,         js::Shape,         true) \
+    D(String,        JSString,          false) \
+    D(Symbol,        JS::Symbol,        false)
+
 namespace js {
 
 unsigned GetCPUCount();
 
 enum ThreadType
 {
     MainThread,
     BackgroundThread
@@ -46,16 +58,25 @@ struct FinalizePhase;
 enum State {
     NO_INCREMENTAL,
     MARK_ROOTS,
     MARK,
     SWEEP,
     COMPACT
 };
 
+// Map from base trace type to the trace kind.
+template <typename T> struct MapTypeToTraceKind {};
+#define EXPAND_DEF(name, type, _) \
+    template <> struct MapTypeToTraceKind<type> { \
+        static const JS::TraceKind kind = JS::TraceKind::name; \
+    };
+FOR_EACH_GC_LAYOUT(EXPAND_DEF);
+#undef EXPAND_DEF
+
 /* Map from C++ type to alloc kind. JSObject does not have a 1:1 mapping, so must use Arena::thingSize. */
 template <typename T> struct MapTypeToFinalizeKind {};
 template <> struct MapTypeToFinalizeKind<JSScript>          { static const AllocKind kind = AllocKind::SCRIPT; };
 template <> struct MapTypeToFinalizeKind<LazyScript>        { static const AllocKind kind = AllocKind::LAZY_SCRIPT; };
 template <> struct MapTypeToFinalizeKind<Shape>             { static const AllocKind kind = AllocKind::SHAPE; };
 template <> struct MapTypeToFinalizeKind<AccessorShape>     { static const AllocKind kind = AllocKind::ACCESSOR_SHAPE; };
 template <> struct MapTypeToFinalizeKind<BaseShape>         { static const AllocKind kind = AllocKind::BASE_SHAPE; };
 template <> struct MapTypeToFinalizeKind<ObjectGroup>       { static const AllocKind kind = AllocKind::OBJECT_GROUP; };
@@ -63,17 +84,17 @@ template <> struct MapTypeToFinalizeKind
 template <> struct MapTypeToFinalizeKind<JSString>          { static const AllocKind kind = AllocKind::STRING; };
 template <> struct MapTypeToFinalizeKind<JSExternalString>  { static const AllocKind kind = AllocKind::EXTERNAL_STRING; };
 template <> struct MapTypeToFinalizeKind<JS::Symbol>        { static const AllocKind kind = AllocKind::SYMBOL; };
 template <> struct MapTypeToFinalizeKind<jit::JitCode>      { static const AllocKind kind = AllocKind::JITCODE; };
 
 template <typename T> struct ParticipatesInCC {};
 #define EXPAND_PARTICIPATES_IN_CC(_, type, addToCCKind) \
     template <> struct ParticipatesInCC<type> { static const bool value = addToCCKind; };
-JS_FOR_EACH_TRACEKIND(EXPAND_PARTICIPATES_IN_CC)
+FOR_EACH_GC_LAYOUT(EXPAND_PARTICIPATES_IN_CC)
 #undef EXPAND_PARTICIPATES_IN_CC
 
 static inline bool
 IsNurseryAllocable(AllocKind kind)
 {
     MOZ_ASSERT(IsValidAllocKind(kind));
     static const bool map[] = {
         true,      /* AllocKind::FUNCTION */
@@ -151,16 +172,63 @@ CanBeFinalizedInBackground(AllocKind kin
      * AllocKind::OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
      * IsBackgroundFinalized is called to prevent recursively incrementing
      * the alloc kind; kind may already be a background finalize kind.
      */
     return (!IsBackgroundFinalized(kind) &&
             (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
 }
 
+// Fortunately, few places in the system need to deal with fully abstract
+// cells. In those places that do, we generally want to move to a layout
+// templated function as soon as possible. This template wraps the upcast
+// for that dispatch.
+//
+// Call the functor |F f| with template parameter of the layout type.
+
+// GCC and Clang require an explicit template declaration in front of the
+// specialization of operator() because it is a dependent template. MSVC, on
+// the other hand, gets very confused if we have a |template| token there.
+#ifdef _MSC_VER
+# define DEPENDENT_TEMPLATE_HINT
+#else
+# define DEPENDENT_TEMPLATE_HINT template
+#endif
+template <typename F, typename... Args>
+auto
+CallTyped(F f, JS::TraceKind traceKind, Args&&... args)
+  -> decltype(f. DEPENDENT_TEMPLATE_HINT operator()<JSObject>(mozilla::Forward<Args>(args)...))
+{
+    switch (traceKind) {
+#define EXPAND_DEF(name, type, _) \
+      case JS::TraceKind::name: \
+        return f. DEPENDENT_TEMPLATE_HINT operator()<type>(mozilla::Forward<Args>(args)...);
+      FOR_EACH_GC_LAYOUT(EXPAND_DEF);
+#undef EXPAND_DEF
+      default:
+          MOZ_CRASH("Invalid trace kind in CallTyped.");
+    }
+}
+#undef DEPENDENT_TEMPLATE_HINT
+
+template <typename F, typename... Args>
+auto
+CallTyped(F f, void* thing, JS::TraceKind traceKind, Args&&... args)
+  -> decltype(f(reinterpret_cast<JSObject*>(0), mozilla::Forward<Args>(args)...))
+{
+    switch (traceKind) {
+#define EXPAND_DEF(name, type, _) \
+      case JS::TraceKind::name: \
+          return f(static_cast<type*>(thing), mozilla::Forward<Args>(args)...);
+      FOR_EACH_GC_LAYOUT(EXPAND_DEF);
+#undef EXPAND_DEF
+      default:
+          MOZ_CRASH("Invalid trace kind in CallTyped.");
+    }
+}
 /* Capacity for slotsToThingKind */
 const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
 
 extern const AllocKind slotsToThingKind[];
 
 /* Get the best kind to use when making an object with the given slot count. */
 static inline AllocKind
 GetGCObjectKind(size_t numSlots)
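
To make the CallTyped dispatch above concrete: callers hand in a functor whose operator() is templated on the layout type — the same shape as PushArenaFunctor and MaybeCompartmentFunctor in the hunks above — and CallTyped picks the instantiation that matches the runtime JS::TraceKind. A minimal, purely illustrative functor written against the declarations restored in this header:

    // Illustrative only (not part of this changeset). Round-trips a dynamic
    // trace kind through the static MapTypeToTraceKind table via CallTyped.
    struct KindOfFunctor {
        template <typename T>
        JS::TraceKind operator()() { return js::gc::MapTypeToTraceKind<T>::kind; }
    };

    static JS::TraceKind
    RoundTripKind(JS::TraceKind kind)
    {
        // Expands to KindOfFunctor::operator()<JSObject>(), <JSString>(), ...
        // depending on |kind|; invalid kinds hit the MOZ_CRASH default.
        return js::gc::CallTyped(KindOfFunctor(), kind);
    }

So RoundTripKind(JS::TraceKind::Object) simply yields JS::TraceKind::Object again; the point is only to show the functor shape CallTyped expects.
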
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -246,18 +246,18 @@ class WeakMap : public HashMap<Key, Valu
 
     /* memberOf can be nullptr, which means that the map is not part of a JSObject. */
     void traceMappings(WeakMapTracer* tracer) {
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
             gc::Cell* key = gc::ToMarkable(r.front().key());
             gc::Cell* value = gc::ToMarkable(r.front().value());
             if (key && value) {
                 tracer->trace(memberOf,
-                              JS::GCCellPtr(r.front().key().get()),
-                              JS::GCCellPtr(r.front().value().get()));
+                              JS::GCCellPtr(r.front().key()),
+                              JS::GCCellPtr(r.front().value()));
             }
         }
     }
 
     /* Rekey an entry when moved, ensuring we do not trigger barriers. */
     void entryMoved(Enum& e, const Key& k) {
         e.rekeyFront(k);
     }
--- a/js/src/vm/UbiNode.cpp
+++ b/js/src/vm/UbiNode.cpp
@@ -28,17 +28,16 @@
 #include "vm/String.h"
 #include "vm/Symbol.h"
 
 #include "jsobjinlines.h"
 #include "vm/Debugger-inl.h"
 
 using mozilla::Some;
 using mozilla::UniquePtr;
-using JS::DispatchTraceKindTyped;
 using JS::HandleValue;
 using JS::Value;
 using JS::ZoneSet;
 using JS::ubi::Concrete;
 using JS::ubi::Edge;
 using JS::ubi::EdgeRange;
 using JS::ubi::Node;
 using JS::ubi::SimpleEdge;
@@ -63,17 +62,17 @@ Concrete<void>::size(mozilla::MallocSize
 }
 
 struct Node::ConstructFunctor : public js::BoolDefaultAdaptor<Value, false> {
     template <typename T> bool operator()(T* t, Node* node) { node->construct(t); return true; }
 };
 
 Node::Node(const JS::GCCellPtr &thing)
 {
-    DispatchTraceKindTyped(ConstructFunctor(), thing.asCell(), thing.kind(), this);
+    js::gc::CallTyped(ConstructFunctor(), thing.asCell(), thing.kind(), this);
 }
 
 Node::Node(HandleValue value)
 {
     if (!DispatchValueTyped(ConstructFunctor(), value, this))
         construct<void>(nullptr);
 }
 
@@ -113,19 +112,19 @@ class SimpleEdgeVectorTracer : public JS
     bool wantNames;
 
     void onChild(const JS::GCCellPtr& thing) override {
         if (!okay)
             return;
 
         // Don't trace permanent atoms and well-known symbols that are owned by
         // a parent JSRuntime.
-        if (thing.is<JSString>() && thing.as<JSString>().isPermanentAtom())
+        if (thing.isString() && thing.toString()->isPermanentAtom())
             return;
-        if (thing.is<JS::Symbol>() && thing.as<JS::Symbol>().isWellKnownSymbol())
+        if (thing.isSymbol() && thing.toSymbol()->isWellKnownSymbol())
             return;
 
         char16_t* name16 = nullptr;
         if (wantNames) {
             // Ask the tracer to compute an edge name for us.
             char buffer[1024];
             getTracingEdgeName(buffer, sizeof(buffer));
             const char* name = buffer;
@@ -200,17 +199,17 @@ TracerConcrete<Referent>::zone() const
 template<typename Referent>
 UniquePtr<EdgeRange>
 TracerConcrete<Referent>::edges(JSContext* cx, bool wantNames) const {
     UniquePtr<SimpleEdgeRange, JS::DeletePolicy<SimpleEdgeRange>> range(
       cx->new_<SimpleEdgeRange>(cx));
     if (!range)
         return nullptr;
 
-    if (!range->init(cx, ptr, JS::MapTypeToTraceKind<Referent>::kind, wantNames))
+    if (!range->init(cx, ptr, ::js::gc::MapTypeToTraceKind<Referent>::kind, wantNames))
         return nullptr;
 
     return UniquePtr<EdgeRange>(range.release());
 }
 
 template<typename Referent>
 JSCompartment*
 TracerConcreteWithCompartment<Referent>::compartment() const
--- a/xpcom/base/CycleCollectedJSRuntime.cpp
+++ b/xpcom/base/CycleCollectedJSRuntime.cpp
@@ -129,17 +129,17 @@ struct NoteWeakMapChildrenTracer : publi
   JSObject* mMap;
   JS::GCCellPtr mKey;
   JSObject* mKeyDelegate;
 };
 
 void
 NoteWeakMapChildrenTracer::onChild(const JS::GCCellPtr& aThing)
 {
-  if (aThing.is<JSString>()) {
+  if (aThing.isString()) {
     return;
   }
 
   if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
     return;
   }
 
   if (AddToCCKind(aThing.kind())) {
@@ -163,17 +163,17 @@ struct NoteWeakMapsTracer : public js::W
 
 void
 NoteWeakMapsTracer::trace(JSObject* aMap, JS::GCCellPtr aKey,
                           JS::GCCellPtr aValue)
 {
   // If nothing that could be held alive by this entry is marked gray, return.
   if ((!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
       MOZ_LIKELY(!mCb.WantAllTraces())) {
-    if (!aValue || !JS::GCThingIsMarkedGray(aValue) || aValue.is<JSString>()) {
+    if (!aValue || !JS::GCThingIsMarkedGray(aValue) || aValue.isString()) {
       return;
     }
   }
 
   // The cycle collector can only properly reason about weak maps if it can
   // reason about the liveness of their keys, which in turn requires that
   // the key can be represented in the cycle collector graph.  All existing
   // uses of weak maps use either objects or scripts as keys, which are okay.
@@ -183,29 +183,29 @@ NoteWeakMapsTracer::trace(JSObject* aMap
   // representable in the cycle collector graph, we treat it as marked.  This
   // can cause leaks, but is preferable to ignoring the binding, which could
   // cause the cycle collector to free live objects.
   if (!AddToCCKind(aKey.kind())) {
     aKey = nullptr;
   }
 
   JSObject* kdelegate = nullptr;
-  if (aKey.is<JSObject>()) {
-    kdelegate = js::GetWeakmapKeyDelegate(&aKey.as<JSObject>());
+  if (aKey.isObject()) {
+    kdelegate = js::GetWeakmapKeyDelegate(aKey.toObject());
   }
 
   if (AddToCCKind(aValue.kind())) {
     mCb.NoteWeakMapping(aMap, aKey, kdelegate, aValue);
   } else {
     mChildTracer.mTracedAny = false;
     mChildTracer.mMap = aMap;
     mChildTracer.mKey = aKey;
     mChildTracer.mKeyDelegate = kdelegate;
 
-    if (aValue.is<JSString>()) {
+    if (aValue.isString()) {
       JS_TraceChildren(&mChildTracer, aValue.asCell(), aValue.kind());
     }
 
     // The delegate could hold alive the key, so report something to the CC
     // if we haven't already.
     if (!mChildTracer.mTracedAny &&
         aKey && JS::GCThingIsMarkedGray(aKey) && kdelegate) {
       mCb.NoteWeakMapping(aMap, aKey, kdelegate, nullptr);
@@ -239,18 +239,18 @@ struct FixWeakMappingGrayBitsTracer : pu
     if (!delegateMightNeedMarking && !valueMightNeedMarking) {
       return;
     }
 
     if (!AddToCCKind(aKey.kind())) {
       aKey = nullptr;
     }
 
-    if (delegateMightNeedMarking && aKey.is<JSObject>()) {
-      JSObject* kdelegate = js::GetWeakmapKeyDelegate(&aKey.as<JSObject>());
+    if (delegateMightNeedMarking && aKey.isObject()) {
+      JSObject* kdelegate = js::GetWeakmapKeyDelegate(aKey.toObject());
       if (kdelegate && !JS::ObjectIsMarkedGray(kdelegate)) {
         if (JS::UnmarkGrayGCThingRecursively(aKey)) {
           mAnyMarked = true;
         }
       }
     }
 
     if (aValue && JS::GCThingIsMarkedGray(aValue) &&
@@ -338,31 +338,31 @@ TraversalTracer::onChild(const JS::GCCel
    * use special APIs to handle such chains iteratively.
    */
   if (AddToCCKind(aThing.kind())) {
     if (MOZ_UNLIKELY(mCb.WantDebugInfo())) {
       char buffer[200];
       getTracingEdgeName(buffer, sizeof(buffer));
       mCb.NoteNextEdgeName(buffer);
     }
-    if (aThing.is<JSObject>()) {
-      mCb.NoteJSObject(&aThing.as<JSObject>());
+    if (aThing.isObject()) {
+      mCb.NoteJSObject(aThing.toObject());
     } else {
-      mCb.NoteJSScript(&aThing.as<JSScript>());
+      mCb.NoteJSScript(aThing.toScript());
     }
-  } else if (aThing.is<js::Shape>()) {
+  } else if (aThing.isShape()) {
     // The maximum depth of traversal when tracing a Shape is unbounded, due to
     // the parent pointers on the shape.
     JS_TraceShapeCycleCollectorChildren(this, aThing);
-  } else if (aThing.is<js::ObjectGroup>()) {
+  } else if (aThing.isObjectGroup()) {
     // The maximum depth of traversal when tracing an ObjectGroup is unbounded,
     // due to information attached to the groups which can lead other groups to
     // be traced.
     JS_TraceObjectGroupCycleCollectorChildren(this, aThing);
-  } else if (!aThing.is<JSString>()) {
+  } else if (!aThing.isString()) {
     JS_TraceChildren(this, aThing.asCell(), aThing.kind());
   }
 }
 
 static void
 NoteJSChildGrayWrapperShim(void* aData, JS::GCCellPtr aThing)
 {
   TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
@@ -478,18 +478,18 @@ CycleCollectedJSRuntime::DescribeGCThing
 {
   if (!aCb.WantDebugInfo()) {
     aCb.DescribeGCedNode(aIsMarked, "JS Object");
     return;
   }
 
   char name[72];
   uint64_t compartmentAddress = 0;
-  if (aThing.is<JSObject>()) {
-    JSObject* obj = &aThing.as<JSObject>();
+  if (aThing.isObject()) {
+    JSObject* obj = aThing.toObject();
     compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
     const js::Class* clasp = js::GetObjectClass(obj);
 
     // Give the subclass a chance to do something
     if (DescribeCustomObjects(obj, clasp, name)) {
       // Nothing else to do!
     } else if (js::IsFunctionObject(obj)) {
       JSFunction* fun = JS_GetObjectFunction(obj);
@@ -577,18 +577,18 @@ CycleCollectedJSRuntime::TraverseGCThing
   if (!isMarkedGray && !aCb.WantAllTraces()) {
     return;
   }
 
   if (aTs == TRAVERSE_FULL) {
     NoteGCThingJSChildren(aThing, aCb);
   }
 
-  if (aThing.is<JSObject>()) {
-    JSObject* obj = &aThing.as<JSObject>();
+  if (aThing.isObject()) {
+    JSObject* obj = aThing.toObject();
     NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
   }
 }
 
 struct TraverseObjectShimClosure
 {
   nsCycleCollectionTraversalCallback& cb;
   CycleCollectedJSRuntime* self;
@@ -630,17 +630,17 @@ CycleCollectedJSRuntime::TraverseZone(JS
 }
 
 /* static */ void
 CycleCollectedJSRuntime::TraverseObjectShim(void* aData, JS::GCCellPtr aThing)
 {
   TraverseObjectShimClosure* closure =
     static_cast<TraverseObjectShimClosure*>(aData);
 
-  MOZ_ASSERT(aThing.is<JSObject>());
+  MOZ_ASSERT(aThing.isObject());
   closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP,
                                  aThing, closure->cb);
 }
 
 void
 CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
 {
   // NB: This is here just to preserve the existing XPConnect order. I doubt it
--- a/xpcom/glue/nsCycleCollectionParticipant.cpp
+++ b/xpcom/glue/nsCycleCollectionParticipant.cpp
@@ -18,20 +18,20 @@
 
 void
 nsScriptObjectTracer::NoteJSChild(JS::GCCellPtr aGCThing, const char* aName,
                                   void* aClosure)
 {
   nsCycleCollectionTraversalCallback* cb =
     static_cast<nsCycleCollectionTraversalCallback*>(aClosure);
   NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*cb, aName);
-  if (aGCThing.is<JSObject>()) {
-    cb->NoteJSObject(&aGCThing.as<JSObject>());
-  } else if (aGCThing.is<JSScript>()) {
-    cb->NoteJSScript(&aGCThing.as<JSScript>());
+  if (aGCThing.isObject()) {
+    cb->NoteJSObject(aGCThing.toObject());
+  } else if (aGCThing.isScript()) {
+    cb->NoteJSScript(aGCThing.toScript());
   } else {
     MOZ_ASSERT(!mozilla::AddToCCKind(aGCThing.kind()));
   }
 }
 
 NS_IMETHODIMP_(void)
 nsXPCOMCycleCollectionParticipant::Root(void* aPtr)
 {
@@ -92,38 +92,38 @@ TraceCallbackFunc::Trace(JS::Heap<jsid>*
     mCallback(JSID_TO_GCTHING(*aPtr), aName, aClosure);
   }
 }
 
 void
 TraceCallbackFunc::Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
                          void* aClosure) const
 {
-  mCallback(JS::GCCellPtr(aPtr->get()), aName, aClosure);
+  mCallback(JS::GCCellPtr(*aPtr), aName, aClosure);
 }
 
 void
 TraceCallbackFunc::Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
                          void* aClosure) const
 {
-  mCallback(JS::GCCellPtr(aPtr->getPtr()), aName, aClosure);
+  mCallback(JS::GCCellPtr(*aPtr), aName, aClosure);
 }
 
 void
 TraceCallbackFunc::Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
                          void* aClosure) const
 {
-  mCallback(JS::GCCellPtr(aPtr->get()), aName, aClosure);
+  mCallback(JS::GCCellPtr(*aPtr), aName, aClosure);
 }
 
 void
 TraceCallbackFunc::Trace(JS::Heap<JSString*>* aPtr, const char* aName,
                          void* aClosure) const
 {
-  mCallback(JS::GCCellPtr(aPtr->get()), aName, aClosure);
+  mCallback(JS::GCCellPtr(*aPtr), aName, aClosure);
 }
 
 void
 TraceCallbackFunc::Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
                          void* aClosure) const
 {
-  mCallback(JS::GCCellPtr(aPtr->get()), aName, aClosure);
+  mCallback(JS::GCCellPtr(*aPtr), aName, aClosure);
 }