Backed out 7 changesets (bug 1147588, bug 1147669, bug 1147533, bug 1147665, bug 1135985, bug 1147180, bug 1147670) for frequent 10.10 devtools-2 GC assertion failures
author Phil Ringnalda <philringnalda@gmail.com>
Sat, 28 Mar 2015 10:41:32 -0700
changeset 236378 99415fbccf8375d8f7b0e4f85a6b040f83adb969
parent 236377 5b892d8ef4538ea84378ebe4a352c49d8b9aa366
child 236379 87e10bac6fd1332ac4a62eae3f9ef673a5dab7f0
push id 28497
push user philringnalda@gmail.com
push date Sun, 29 Mar 2015 03:30:34 +0000
treeherder mozilla-central@02f2f4c75007
bugs 1147588, 1147669, 1147533, 1147665, 1135985, 1147180, 1147670
milestone 39.0a1
backs out 0bd8c4ec41fa8c4df77bfafe5508f9924be3f650
35ad82ff9ee770505514230b39864694cd045a75
ffe7f2e2a0f0099870373f80a102c11860d00c4e
37c42cff2c013c8451224b8ea9d6bd8d958d4fc8
be4138f208bad246fac8a198a56c2aec1e27dacc
6bbd529bd995bc89ac9b064bf33b81d3ce6cd342
853e3ad56dadff940baea514c999b9fb055755fb
Backed out 7 changesets (bug 1147588, bug 1147669, bug 1147533, bug 1147665, bug 1135985, bug 1147180, bug 1147670) for frequent 10.10 devtools-2 GC assertion failures CLOSED TREE

Backed out changeset 0bd8c4ec41fa (bug 1147670)
Backed out changeset 35ad82ff9ee7 (bug 1147669)
Backed out changeset ffe7f2e2a0f0 (bug 1147665)
Backed out changeset 37c42cff2c01 (bug 1147588)
Backed out changeset be4138f208ba (bug 1147533)
Backed out changeset 6bbd529bd995 (bug 1147180)
Backed out changeset 853e3ad56dad (bug 1135985)
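For orientation: the patches being backed out had replaced SpiderMonkey's per-type marking entry points (gc::MarkValue, gc::MarkId, gc::MarkValueUnbarriered, and friends) with a single family of generic tracing templates (TraceEdge, TraceRoot, TraceManuallyBarrieredEdge), and the diff below restores the per-type form throughout the engine. A minimal standalone sketch of the two API shapes, using illustrative stand-in types rather than the real engine types:

    #include <cstdio>

    struct Tracer {};
    struct Value {};
    struct Id {};

    // Generic style (backed out): one template, instantiated per edge type.
    template <typename T>
    void TraceEdge(Tracer *, T *, const char *name) { std::printf("trace %s\n", name); }

    // Type-named style (restored): one entry point per kind of GC thing.
    void MarkValue(Tracer *, Value *, const char *name) { std::printf("mark value %s\n", name); }
    void MarkId(Tracer *, Id *, const char *name)       { std::printf("mark id %s\n", name); }

    int main() {
        Tracer trc; Value v; Id id;
        TraceEdge(&trc, &v, "key");    // generic form
        MarkValue(&trc, &v, "key");    // restored form
        MarkId(&trc, &id, "propid");
        return 0;
    }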
js/public/TracingAPI.h
js/src/builtin/MapObject.cpp
js/src/builtin/TypedObject.cpp
js/src/gc/Barrier.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/RootMarking.cpp
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Tracer.cpp
js/src/jit/BaselineFrame.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/Ion.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitcodeMap.cpp
js/src/jit/RematerializedFrame.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/shared/Assembler-x86-shared.cpp
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jscntxt.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsfun.cpp
js/src/jsobj.cpp
js/src/jspropertytree.cpp
js/src/jsscript.cpp
js/src/jswatchpoint.cpp
js/src/jsweakmap.h
js/src/proxy/Proxy.cpp
js/src/vm/ArgumentsObject.cpp
js/src/vm/ArgumentsObject.h
js/src/vm/ArrayBufferObject.cpp
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/JSONParser.cpp
js/src/vm/ObjectGroup.cpp
js/src/vm/PIC.cpp
js/src/vm/RegExpObject.cpp
js/src/vm/SavedStacks.cpp
js/src/vm/ScopeObject.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/Stack.cpp
js/src/vm/TypeInference.cpp
js/src/vm/TypeInference.h
--- a/js/public/TracingAPI.h
+++ b/js/public/TracingAPI.h
@@ -114,27 +114,25 @@ class JS_PUBLIC_API(JSTracer)
         debugPrintIndex_ = index;
     }
 
     void setTracingIndex(const char *name, size_t index) {
         setTracingDetails(nullptr, (void *)name, index);
     }
 
     void setTracingName(const char *name) {
-        setTracingDetails(nullptr, (void *)name, InvalidIndex);
+        setTracingDetails(nullptr, (void *)name, size_t(-1));
     }
 
     // Remove the currently set tracing details.
     void clearTracingDetails() {
         debugPrinter_ = nullptr;
         debugPrintArg_ = nullptr;
     }
 
-    const static size_t InvalidIndex = size_t(-1);
-
     // Return true if tracing details are currently set.
     bool hasTracingDetails() const;
 
     // Get the string set with the most recent call to setTracingName or return
     // fallback if a name printer function has been installed.
     const char *tracingName(const char *fallback) const;
 
     // Build a description of this edge in the heap graph. This call may invoke
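The hunk above drops the named JSTracer::InvalidIndex constant and goes back to spelling the sentinel as size_t(-1) directly. A standalone sketch (illustrative type, not the real JSTracer) showing that the two spellings denote the same all-ones value:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    struct Tracer {
        // Named form (removed by this backout).
        static constexpr std::size_t InvalidIndex = std::size_t(-1);
    };

    int main() {
        assert(Tracer::InvalidIndex == std::size_t(-1));  // raw form (restored by this backout)
        assert(Tracer::InvalidIndex == SIZE_MAX);         // equivalently, the maximum size_t
        return 0;
    }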
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -832,17 +832,17 @@ HashableValue::operator==(const Hashable
     return b;
 }
 
 HashableValue
 HashableValue::mark(JSTracer *trc) const
 {
     HashableValue hv(*this);
     trc->setTracingLocation((void *)this);
-    TraceEdge(trc, &hv.value, "key");
+    gc::MarkValue(trc, &hv.value, "key");
     return hv;
 }
 
 
 /*** MapIterator *********************************************************************************/
 
 namespace {
 
@@ -1101,17 +1101,17 @@ MarkKey(Range &r, const HashableValue &k
 }
 
 void
 MapObject::mark(JSTracer *trc, JSObject *obj)
 {
     if (ValueMap *map = obj->as<MapObject>().getData()) {
         for (ValueMap::Range r = map->all(); !r.empty(); r.popFront()) {
             MarkKey(r, r.front().key, trc);
-            TraceEdge(trc, &r.front().value, "value");
+            gc::MarkValue(trc, &r.front().value, "value");
         }
     }
 }
 
 struct UnbarrieredHashPolicy {
     typedef Value Lookup;
     static HashNumber hash(const Lookup &v) { return v.asRawBits(); }
     static bool match(const Value &k, const Lookup &l) { return k == l; }
@@ -1127,17 +1127,17 @@ class OrderedHashTableRef : public gc::B
 
   public:
     explicit OrderedHashTableRef(TableType *t, const Value &k) : table(t), key(k) {}
 
     void mark(JSTracer *trc) {
         MOZ_ASSERT(UnbarrieredHashPolicy::hash(key) ==
                    HashableValue::Hasher::hash(*reinterpret_cast<HashableValue*>(&key)));
         Value prior = key;
-        TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
+        gc::MarkValueUnbarriered(trc, &key, "ordered hash table key");
         table->rekeyOneEntry(prior, key);
     }
 };
 
 inline static void
 WriteBarrierPost(JSRuntime *rt, ValueMap *map, const Value &key)
 {
     typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap;
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -2947,32 +2947,34 @@ class MemoryTracingVisitor {
 } // anonymous namespace
 
 void
 MemoryTracingVisitor::visitReference(ReferenceTypeDescr &descr, uint8_t *mem)
 {
     switch (descr.type()) {
       case ReferenceTypeDescr::TYPE_ANY:
       {
-        HeapValue *heapValue = reinterpret_cast<js::HeapValue *>(mem);
-        TraceEdge(trace_, heapValue, "reference-val");
+        js::HeapValue *heapValue = reinterpret_cast<js::HeapValue *>(mem);
+        gc::MarkValue(trace_, heapValue, "reference-val");
         return;
       }
 
       case ReferenceTypeDescr::TYPE_OBJECT:
       {
-        HeapPtrObject *objectPtr = reinterpret_cast<js::HeapPtrObject *>(mem);
+        js::HeapPtrObject *objectPtr =
+            reinterpret_cast<js::HeapPtrObject *>(mem);
         if (*objectPtr)
             gc::MarkObject(trace_, objectPtr, "reference-obj");
         return;
       }
 
       case ReferenceTypeDescr::TYPE_STRING:
       {
-        HeapPtrString *stringPtr = reinterpret_cast<js::HeapPtrString *>(mem);
+        js::HeapPtrString *stringPtr =
+            reinterpret_cast<js::HeapPtrString *>(mem);
         if (*stringPtr)
             gc::MarkString(trace_, stringPtr, "reference-str");
         return;
       }
     }
 
     MOZ_CRASH("Invalid kind");
 }
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -228,17 +228,17 @@ template <> struct MapTypeToTraceKind<Ac
 template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<SharedTypedArrayObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
 template <> struct MapTypeToTraceKind<jit::JitCode>     { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
 template <> struct MapTypeToTraceKind<ObjectGroup>      { static const JSGCTraceKind kind = JSTRACE_OBJECT_GROUP; };
 
 // Direct value access used by the write barriers and the jits.
 void
-MarkValueForBarrier(JSTracer *trc, Value *v, const char *name);
+MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
 
 // These three declarations are also present in gc/Marking.h, via the DeclMarker
 // macro.  Not great, but hard to avoid.
 void
 MarkStringUnbarriered(JSTracer *trc, JSString **str, const char *name);
 void
 MarkSymbolUnbarriered(JSTracer *trc, JS::Symbol **sym, const char *name);
 
@@ -340,17 +340,17 @@ struct InternalGCMethods<Value>
     }
 
   private:
     static void preBarrierImpl(Zone *zone, Value v) {
         JS::shadow::Zone *shadowZone = JS::shadow::Zone::asShadowZone(zone);
         if (shadowZone->needsIncrementalBarrier()) {
             MOZ_ASSERT_IF(v.isMarkable(), shadowRuntimeFromMainThread(v)->needsIncrementalBarrier());
             Value tmp(v);
-            js::gc::MarkValueForBarrier(shadowZone->barrierTracer(), &tmp, "write barrier");
+            js::gc::MarkValueUnbarriered(shadowZone->barrierTracer(), &tmp, "write barrier");
             MOZ_ASSERT(tmp == v);
         }
     }
 
   public:
     static void postBarrier(Value *vp) {
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
         if (vp->isObject()) {
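The preBarrierImpl hunk above only renames the helper it calls (MarkValueUnbarriered instead of MarkValueForBarrier), but it is a convenient place to recall the pattern: before a barriered field is overwritten during incremental marking, the old value is marked so the collector cannot lose an object it has not yet visited. A minimal standalone sketch of that pre-write-barrier pattern, with illustrative types standing in for the real Zone/HeapValue machinery:

    #include <cstdio>

    struct Thing { bool marked = false; };

    struct Zone {
        bool needsIncrementalBarrier = false;
    };

    // Stand-in for marking a raw, unbarriered edge (cf. gc::MarkValueUnbarriered).
    static void MarkUnbarriered(Thing *t) { if (t) t->marked = true; }

    struct HeapField {
        Thing *ptr = nullptr;
        Zone *zone = nullptr;

        void set(Thing *next) {
            // Pre-barrier: shade the prior referent while incremental marking is active.
            if (zone && zone->needsIncrementalBarrier)
                MarkUnbarriered(ptr);
            ptr = next;
            // (A real engine would also run a post-barrier for generational GC.)
        }
    };

    int main() {
        Zone zone; zone.needsIncrementalBarrier = true;
        Thing oldThing, newThing;
        HeapField f; f.zone = &zone; f.ptr = &oldThing;
        f.set(&newThing);
        std::printf("old value marked: %d\n", oldThing.marked ? 1 : 0);  // prints 1
        return 0;
    }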
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -2,20 +2,17 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "gc/Marking.h"
 
 #include "mozilla/DebugOnly.h"
-#include "mozilla/IntegerRange.h"
-#include "mozilla/TypeTraits.h"
 
-#include "jsgc.h"
 #include "jsprf.h"
 
 #include "gc/GCInternals.h"
 #include "jit/IonCode.h"
 #include "js/SliceBudget.h"
 #include "vm/ArgumentsObject.h"
 #include "vm/ArrayObject.h"
 #include "vm/ScopeObject.h"
@@ -29,19 +26,16 @@
 
 #include "gc/Nursery-inl.h"
 #include "vm/String-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 using mozilla::DebugOnly;
-using mozilla::IsBaseOf;
-using mozilla::IsSame;
-using mozilla::MakeRange;
 
 void * const js::NullPtr::constNullValue = nullptr;
 
 JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
 
 /*
  * There are two mostly separate mark paths. The first is a fast path used
  * internally in the GC. The second is a slow path used for root marking and
@@ -148,71 +142,62 @@ IsThingPoisoned(T *thing)
 
 static GCMarker *
 AsGCMarker(JSTracer *trc)
 {
     MOZ_ASSERT(trc->isMarkingTracer());
     return static_cast<GCMarker *>(trc);
 }
 
-template <typename T> bool ThingIsPermanentAtomOrWellKnownSymbol(T *thing) { return false; }
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSString>(JSString *str) {
-    return str->isPermanentAtom();
-}
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSFlatString>(JSFlatString *str) {
-    return str->isPermanentAtom();
-}
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSLinearString>(JSLinearString *str) {
-    return str->isPermanentAtom();
-}
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSAtom>(JSAtom *atom) {
-    return atom->isPermanent();
-}
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<PropertyName>(PropertyName *name) {
-    return name->isPermanent();
-}
-template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JS::Symbol>(JS::Symbol *sym) {
-    return sym->isWellKnownSymbol();
-}
+template <typename T> bool ThingIsPermanentAtom(T *thing) { return false; }
+template <> bool ThingIsPermanentAtom<JSString>(JSString *str) { return str->isPermanentAtom(); }
+template <> bool ThingIsPermanentAtom<JSFlatString>(JSFlatString *str) { return str->isPermanentAtom(); }
+template <> bool ThingIsPermanentAtom<JSLinearString>(JSLinearString *str) { return str->isPermanentAtom(); }
+template <> bool ThingIsPermanentAtom<JSAtom>(JSAtom *atom) { return atom->isPermanent(); }
+template <> bool ThingIsPermanentAtom<PropertyName>(PropertyName *name) { return name->isPermanent(); }
+template <> bool ThingIsPermanentAtom<JS::Symbol>(JS::Symbol *sym) { return sym->isWellKnownSymbol(); }
 
 template<typename T>
 static inline void
-CheckMarkedThing(JSTracer *trc, T thing)
+CheckMarkedThing(JSTracer *trc, T **thingp)
 {
 #ifdef DEBUG
     MOZ_ASSERT(trc);
-    MOZ_ASSERT(thing);
+    MOZ_ASSERT(thingp);
+
+    T *thing = *thingp;
+    MOZ_ASSERT(*thingp);
 
     thing = MaybeForwarded(thing);
 
     /* This function uses data that's not available in the nursery. */
     if (IsInsideNursery(thing))
         return;
 
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !Nursery::IsMinorCollectionTracer(trc),
-                  !IsForwarded(thing));
+                  !IsForwarded(*thingp));
 
     /*
      * Permanent atoms are not associated with this runtime, but will be ignored
      * during marking.
      */
-    if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
+    if (ThingIsPermanentAtom(thing))
         return;
 
     Zone *zone = thing->zoneFromAnyThread();
     JSRuntime *rt = trc->runtime();
 
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
 
     MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
+    MOZ_ASSERT(trc->hasTracingDetails());
 
     MOZ_ASSERT(thing->isAligned());
-    MOZ_ASSERT(MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind ==
-               GetGCThingTraceKind(thing));
+    MOZ_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
 
     /*
      * Do not check IsMarkingTracer directly -- it should only be used in paths
      * where we cannot be the gray buffering tracer.
      */
     bool isGcMarkingTracer = trc->isMarkingTracer();
 
     MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferingGrayRoots(trc));
@@ -236,47 +221,16 @@ CheckMarkedThing(JSTracer *trc, T thing)
      * background sweeping may be running and concurrently modifiying the free
      * list.
      */
     MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(),
                   !InFreeList(thing->asTenured().arenaHeader(), thing));
 #endif
 }
 
-template<>
-void
-CheckMarkedThing<Value>(JSTracer *trc, Value val)
-{
-#ifdef DEBUG
-    if (val.isString())
-        CheckMarkedThing(trc, val.toString());
-    else if (val.isObject())
-        CheckMarkedThing(trc, &val.toObject());
-    else if (val.isSymbol())
-        CheckMarkedThing(trc, val.toSymbol());
-#endif
-}
-
-template <>
-void
-CheckMarkedThing<jsid>(JSTracer *trc, jsid id)
-{
-#ifdef DEBUG
-    if (JSID_IS_STRING(id))
-        CheckMarkedThing(trc, JSID_TO_STRING(id));
-    else if (JSID_IS_SYMBOL(id))
-        CheckMarkedThing(trc, JSID_TO_SYMBOL(id));
-#endif
-}
-
-#define JS_ROOT_MARKING_ASSERT(trc) \
-    MOZ_ASSERT_IF(trc->isMarkingTracer(), \
-                  trc->runtime()->gc.state() == NO_INCREMENTAL || \
-                  trc->runtime()->gc.state() == MARK_ROOTS);
-
 /*
  * We only set the maybeAlive flag for objects and scripts. It's assumed that,
  * if a compartment is alive, then it will have at least some live object or
  * script it in. Even if we get this wrong, the worst that will happen is that
  * scheduledForDestruction will be set on the compartment, which will cause some
  * extra GC activity to try to free the compartment.
  */
 template<typename T>
@@ -301,380 +255,39 @@ SetMaybeAliveFlag(NativeObject *thing)
 
 template<>
 void
 SetMaybeAliveFlag(JSScript *thing)
 {
     thing->compartment()->maybeAlive = true;
 }
 
-#define FOR_EACH_GC_LAYOUT(D) \
-    D(Object, JSObject) \
-    D(String, JSString) \
-    D(Symbol, JS::Symbol) \
-    D(Script, JSScript) \
-    D(Shape, js::Shape) \
-    D(BaseShape, js::BaseShape) \
-    D(JitCode, js::jit::JitCode) \
-    D(LazyScript, js::LazyScript) \
-    D(ObjectGroup, js::ObjectGroup)
-
-// A C++ version of JSGCTraceKind
-enum class TraceKind {
-#define NAMES(name, _) name,
-FOR_EACH_GC_LAYOUT(NAMES)
-#undef NAMES
-};
-
-#define FOR_EACH_GC_POINTER_TYPE(D) \
-    D(BaseShape *) \
-    D(UnownedBaseShape *) \
-    D(jit::JitCode *) \
-    D(NativeObject *) \
-    D(ArrayObject *) \
-    D(ArgumentsObject *) \
-    D(ArrayBufferObject *) \
-    D(ArrayBufferObjectMaybeShared *) \
-    D(ArrayBufferViewObject *) \
-    D(DebugScopeObject *) \
-    D(GlobalObject *) \
-    D(JSObject *) \
-    D(JSFunction *) \
-    D(NestedScopeObject *) \
-    D(PlainObject *) \
-    D(SavedFrame *) \
-    D(ScopeObject *) \
-    D(SharedArrayBufferObject *) \
-    D(SharedTypedArrayObject *) \
-    D(JSScript *) \
-    D(LazyScript *) \
-    D(Shape *) \
-    D(JSAtom *) \
-    D(JSString *) \
-    D(JSFlatString *) \
-    D(JSLinearString *) \
-    D(PropertyName *) \
-    D(JS::Symbol *) \
-    D(js::ObjectGroup *) \
-    D(Value) \
-    D(jsid)
-
-// The second parameter to BaseGCType is derived automatically based on T. The
-// relation here is that for any T, the TraceKind will automatically,
-// statically select the correct Cell layout for marking. Below, we instantiate
-// each override with a declaration of the most derived layout type.
-//
-// Usage:
-//   BaseGCType<T>::type
-//
-// Examples:
-//   BaseGCType<JSFunction>::type => JSObject
-//   BaseGCType<UnownedBaseShape>::type => BaseShape
-//   etc.
-template <typename T,
-          TraceKind = IsBaseOf<JSObject, T>::value     ? TraceKind::Object
-                    : IsBaseOf<JSString, T>::value     ? TraceKind::String
-                    : IsBaseOf<JS::Symbol, T>::value   ? TraceKind::Symbol
-                    : IsBaseOf<JSScript, T>::value     ? TraceKind::Script
-                    : IsBaseOf<Shape, T>::value        ? TraceKind::Shape
-                    : IsBaseOf<BaseShape, T>::value    ? TraceKind::BaseShape
-                    : IsBaseOf<jit::JitCode, T>::value ? TraceKind::JitCode
-                    : IsBaseOf<LazyScript, T>::value   ? TraceKind::LazyScript
-                    :                                    TraceKind::ObjectGroup>
-struct BaseGCType;
-#define IMPL_BASE_GC_TYPE(name, type_) \
-    template <typename T> struct BaseGCType<T, TraceKind:: name> { typedef type_ type; };
-FOR_EACH_GC_LAYOUT(IMPL_BASE_GC_TYPE);
-#undef IMPL_BASE_GC_TYPE
-
-// Our barrier templates are parameterized on the pointer types so that we can
-// share the definitions with Value and jsid. Thus, we need to strip the
-// pointer before sending the type to BaseGCType and re-add it on the other
-// side. As such:
-template <typename T> struct PtrBaseGCType {};
-template <> struct PtrBaseGCType<Value> { typedef Value type; };
-template <> struct PtrBaseGCType<jsid> { typedef jsid type; };
-template <typename T> struct PtrBaseGCType<T *> { typedef typename BaseGCType<T>::type *type; };
-
-template <typename T> void DispatchToTracer(JSTracer *trc, T *thingp, const char *name, size_t i);
-template <typename T> void DoTracing(JS::CallbackTracer *trc, T *thingp, const char *name, size_t i);
-template <typename T> void DoMarking(GCMarker *gcmarker, T thing);
-static bool ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell);
-static bool ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Value val);
-
-template <typename T>
-void
-js::TraceEdge(JSTracer *trc, BarrieredBase<T> *thingp, const char *name)
-{
-    auto layout = reinterpret_cast<typename PtrBaseGCType<T>::type *>(thingp->unsafeGet());
-    DispatchToTracer(trc, layout, name, JSTracer::InvalidIndex);
-}
-
-template <typename T>
-void
-js::TraceManuallyBarrieredEdge(JSTracer *trc, T *thingp, const char *name)
-{
-    auto layout = reinterpret_cast<typename PtrBaseGCType<T>::type *>(thingp);
-    DispatchToTracer(trc, layout, name, JSTracer::InvalidIndex);
-}
-
-template <typename T>
-void
-js::TraceRoot(JSTracer *trc, T *thingp, const char *name)
-{
-    JS_ROOT_MARKING_ASSERT(trc);
-    auto layout = reinterpret_cast<typename PtrBaseGCType<T>::type *>(thingp);
-    DispatchToTracer(trc, layout, name, JSTracer::InvalidIndex);
-}
-
-template <typename T>
-void
-js::TraceRange(JSTracer *trc, size_t len, BarrieredBase<T> *thingp, const char *name)
-{
-    for (auto i : MakeRange(len)) {
-        auto layout = reinterpret_cast<typename PtrBaseGCType<T>::type *>(&thingp[i]);
-        DispatchToTracer(trc, layout, name, i);
-    }
-}
-
-template <typename T>
-void
-js::TraceRootRange(JSTracer *trc, size_t len, T *thingp, const char *name)
-{
-    JS_ROOT_MARKING_ASSERT(trc);
-    for (auto i : MakeRange(len)) {
-        auto layout = reinterpret_cast<typename PtrBaseGCType<T>::type *>(&thingp[i]);
-        DispatchToTracer(trc, layout, name, i);
-    }
-}
-
-// Instantiate a copy of the Tracing templates for each derived type.
-#define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
-    template void js::TraceEdge<type>(JSTracer *, BarrieredBase<type> *, const char *); \
-    template void js::TraceManuallyBarrieredEdge<type>(JSTracer *, type *, const char *); \
-    template void js::TraceRoot<type>(JSTracer *, type *, const char *); \
-    template void js::TraceRange<type>(JSTracer *, size_t, BarrieredBase<type> *, const char *); \
-    template void js::TraceRootRange<type>(JSTracer *, size_t, type *, const char *);
-FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
-#undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
-
-template <typename T>
-void
-js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer *trc, JSObject *src, T *dst,
-                                               const char *name)
-{
-    if (ShouldMarkCrossCompartment(trc, src, *dst))
-        DispatchToTracer(trc, dst, name, -1);
-}
-template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(JSTracer *, JSObject *,
-                                                                        JSObject **, const char *);
-template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSScript*>(JSTracer *, JSObject *,
-                                                                        JSScript **, const char *);
-
-template <typename T>
-void
-js::TraceCrossCompartmentEdge(JSTracer *trc, JSObject *src, BarrieredBase<T> *dst, const char *name)
-{
-    if (ShouldMarkCrossCompartment(trc, src, dst->get()))
-        DispatchToTracer(trc, dst->unsafeGet(), name, -1);
-}
-template void js::TraceCrossCompartmentEdge<Value>(JSTracer *, JSObject *, BarrieredBase<Value> *,
-                                                   const char *);
-
-// This method is responsible for dynamic dispatch to the real tracer
-// implementation. Consider replacing this choke point with virtual dispatch:
-// a sufficiently smart C++ compiler may be able to devirtualize some paths.
-template <typename T>
-void
-DispatchToTracer(JSTracer *trc, T *thingp, const char *name, size_t i)
-{
-#define IS_SAME_TYPE_OR(name, type) mozilla::IsSame<type *, T>::value ||
-    static_assert(
-            FOR_EACH_GC_LAYOUT(IS_SAME_TYPE_OR)
-            mozilla::IsSame<T, JS::Value>::value ||
-            mozilla::IsSame<T, jsid>::value,
-            "Only the base cell layout types are allowed into marking/tracing internals");
-#undef IS_SAME_TYPE_OR
-    CheckMarkedThing(trc, *thingp);
-
-    if (trc->isMarkingTracer())
-        return DoMarking(static_cast<GCMarker*>(trc), *thingp);
-    return DoTracing(static_cast<JS::CallbackTracer*>(trc), thingp, name, i);
-}
-
-template <typename T>
-static inline bool
-MustSkipMarking(T thing)
-{
-    // Don't mark things outside a zone if we are in a per-zone GC.
-    return !thing->zone()->isGCMarking();
-}
-
-template <>
-bool
-MustSkipMarking<JSObject*>(JSObject *obj)
-{
-    // We may mark a Nursery thing outside the context of the
-    // MinorCollectionTracer because of a pre-barrier. The pre-barrier is not
-    // needed in this case because we perform a minor collection before each
-    // incremental slice.
-    if (IsInsideNursery(obj))
-        return true;
-
-    // Don't mark things outside a zone if we are in a per-zone GC. It is
-    // faster to check our own arena header, which we can do since we know that
-    // the object is tenured.
-    return !TenuredCell::fromPointer(obj)->zone()->isGCMarking();
-}
-
-template <>
-bool
-MustSkipMarking<JSString*>(JSString *str)
-{
-    // Don't mark permanent atoms, as they may be associated with another
-    // runtime. Note that PushMarkStack() also checks this, but we need to not
-    // run the isGCMarking test from off-main-thread, so have to check it here
-    // too.
-    return str->isPermanentAtom() ||
-           !str->zone()->isGCMarking();
-}
-
-template <>
-bool
-MustSkipMarking<JS::Symbol*>(JS::Symbol *sym)
-{
-    // As for JSString, don't touch a globally owned well-known symbol from
-    // off-main-thread.
-    return sym->isWellKnownSymbol() ||
-           !sym->zone()->isGCMarking();
-}
-
-template <typename T>
-void
-DoMarking(GCMarker *gcmarker, T thing)
-{
-    // Do per-type marking precondition checks.
-    if (MustSkipMarking(thing))
-        return;
-
-    PushMarkStack(gcmarker, thing);
-
-    // Mark the compartment as live.
-    SetMaybeAliveFlag(thing);
-}
-
-template <>
-void
-DoMarking<Value>(GCMarker *gcmarker, Value val)
-{
-    if (val.isString())
-        DoMarking(gcmarker, val.toString());
-    else if (val.isObject())
-        DoMarking(gcmarker, &val.toObject());
-    else if (val.isSymbol())
-        DoMarking(gcmarker, val.toSymbol());
-    else
-        gcmarker->clearTracingDetails();
-}
-
-template <>
-void
-DoMarking<jsid>(GCMarker *gcmarker, jsid id)
-{
-    if (JSID_IS_STRING(id))
-        DoMarking(gcmarker, JSID_TO_STRING(id));
-    else if (JSID_IS_SYMBOL(id))
-        DoMarking(gcmarker, JSID_TO_SYMBOL(id));
-    else
-        gcmarker->clearTracingDetails();
-}
-
-template <typename T>
-void
-DoTracing(JS::CallbackTracer *trc, T *thingp, const char *name, size_t i)
-{
-    JSGCTraceKind kind = MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind;
-    trc->setTracingIndex(name, i);
-    trc->invoke((void **)thingp, kind);
-    trc->unsetTracingLocation();
-}
-
-template <>
-void
-DoTracing<Value>(JS::CallbackTracer *trc, Value *vp, const char *name, size_t i)
-{
-    if (vp->isObject()) {
-        JSObject *prior = &vp->toObject();
-        JSObject *obj = prior;
-        DoTracing(trc, &obj, name, i);
-        if (obj != prior)
-            vp->setObjectOrNull(obj);
-    } else if (vp->isString()) {
-        JSString *prior = vp->toString();
-        JSString *str = prior;
-        DoTracing(trc, &str, name, i);
-        if (str != prior)
-            vp->setString(str);
-    } else if (vp->isSymbol()) {
-        JS::Symbol *prior = vp->toSymbol();
-        JS::Symbol *sym = prior;
-        DoTracing(trc, &sym, name, i);
-        if (sym != prior)
-            vp->setSymbol(sym);
-    } else {
-        /* Unset realLocation manually if we do not call MarkInternal. */
-        trc->unsetTracingLocation();
-    }
-}
-
-template <>
-void
-DoTracing<jsid>(JS::CallbackTracer *trc, jsid *idp, const char *name, size_t i)
-{
-    if (JSID_IS_STRING(*idp)) {
-        JSString *prior = JSID_TO_STRING(*idp);
-        JSString *str = prior;
-        DoTracing(trc, &str, name, i);
-        if (str != prior)
-            *idp = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
-    } else if (JSID_IS_SYMBOL(*idp)) {
-        JS::Symbol *prior = JSID_TO_SYMBOL(*idp);
-        JS::Symbol *sym = prior;
-        DoTracing(trc, &sym, name, i);
-        if (sym != prior)
-            *idp = SYMBOL_TO_JSID(sym);
-    } else {
-        /* Unset realLocation manually if we do not call MarkInternal. */
-        trc->unsetTracingLocation();
-    }
-}
-
 template<typename T>
 static void
 MarkInternal(JSTracer *trc, T **thingp)
 {
+    CheckMarkedThing(trc, thingp);
     T *thing = *thingp;
-    CheckMarkedThing(trc, thing);
 
     if (trc->isMarkingTracer()) {
         /*
          * We may mark a Nursery thing outside the context of the
          * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
          * not needed in this case because we perform a minor collection before
          * each incremental slice.
          */
         if (IsInsideNursery(thing))
             return;
 
         /*
          * Don't mark permanent atoms, as they may be associated with another
          * runtime. Note that PushMarkStack() also checks this, but the tests
          * and maybeAlive write below should only be done on the main thread.
          */
-        if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
+        if (ThingIsPermanentAtom(thing))
             return;
 
         /*
          * Don't mark things outside a compartment if we are in a
          * per-compartment GC.
          */
         if (!thing->zone()->isGCMarking())
             return;
@@ -684,16 +297,21 @@ MarkInternal(JSTracer *trc, T **thingp)
     } else {
         trc->asCallbackTracer()->invoke((void **)thingp, MapTypeToTraceKind<T>::kind);
         trc->unsetTracingLocation();
     }
 
     trc->clearTracingDetails();
 }
 
+#define JS_ROOT_MARKING_ASSERT(trc) \
+    MOZ_ASSERT_IF(trc->isMarkingTracer(), \
+                  trc->runtime()->gc.state() == NO_INCREMENTAL || \
+                  trc->runtime()->gc.state() == MARK_ROOTS);
+
 namespace js {
 namespace gc {
 
 template <typename T>
 void
 MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
 {
     trc->setTracingName(name);
@@ -710,17 +328,17 @@ Mark(JSTracer *trc, BarrieredBase<T*> *t
 
 void
 MarkPermanentAtom(JSTracer *trc, JSAtom *atom, const char *name)
 {
     trc->setTracingName(name);
 
     MOZ_ASSERT(atom->isPermanent());
 
-    CheckMarkedThing(trc, atom);
+    CheckMarkedThing(trc, &atom);
 
     if (trc->isMarkingTracer()) {
         // Atoms do not refer to other GC things so don't need to go on the mark stack.
         // Additionally, PushMarkStack will ignore permanent atoms.
         atom->markIfUnmarked();
     } else {
         void *thing = atom;
         trc->asCallbackTracer()->invoke(&thing, JSTRACE_STRING);
@@ -735,17 +353,17 @@ void
 MarkWellKnownSymbol(JSTracer *trc, JS::Symbol *sym)
 {
     if (!sym)
         return;
 
     trc->setTracingName("wellKnownSymbols");
 
     MOZ_ASSERT(sym->isWellKnownSymbol());
-    CheckMarkedThing(trc, sym);
+    CheckMarkedThing(trc, &sym);
     if (trc->isMarkingTracer()) {
         // Permanent atoms are marked before well-known symbols.
         MOZ_ASSERT(sym->description()->isMarked());
         sym->markIfUnmarked();
     } else {
         void *thing = sym;
         trc->asCallbackTracer()->invoke(&thing, JSTRACE_SYMBOL);
         MOZ_ASSERT(thing == sym);
@@ -791,59 +409,66 @@ MarkRootRange(JSTracer *trc, size_t len,
         }
     }
 }
 
 namespace js {
 namespace gc {
 
 template <typename T>
-static inline void
-CheckIsMarkedThing(T **thingp)
+static bool
+IsMarked(T **thingp)
 {
-#ifdef DEBUG
-    MOZ_ASSERT(thingp);
-    MOZ_ASSERT(*thingp);
-    JSRuntime *rt = (*thingp)->runtimeFromAnyThread();
-    MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
-                  CurrentThreadCanAccessRuntime(rt) ||
-                  (rt->isHeapCollecting() && rt->gc.state() == SWEEP));
-#endif
+    MOZ_ASSERT_IF(!ThingIsPermanentAtom(*thingp),
+                  CurrentThreadCanAccessRuntime((*thingp)->runtimeFromMainThread()));
+    return IsMarkedFromAnyThread(thingp);
 }
 
 template <typename T>
 static bool
-IsMarked(T **thingp)
+IsMarkedFromAnyThread(T **thingp)
 {
-    CheckIsMarkedThing(thingp);
+    MOZ_ASSERT(thingp);
+    MOZ_ASSERT(*thingp);
     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
 
     if (IsInsideNursery(*thingp)) {
-        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-        return rt->gc.nursery.getForwardedPointer(thingp);
+        Nursery &nursery = rt->gc.nursery;
+        return nursery.getForwardedPointer(thingp);
     }
 
     Zone *zone = (*thingp)->asTenured().zoneFromAnyThread();
     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
         return true;
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
     return (*thingp)->asTenured().isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
-    CheckIsMarkedThing(thingp);
+    MOZ_ASSERT_IF(!ThingIsPermanentAtom(*thingp),
+                  CurrentThreadCanAccessRuntime((*thingp)->runtimeFromMainThread()));
+    return IsAboutToBeFinalizedFromAnyThread(thingp);
+}
+
+template <typename T>
+static bool
+IsAboutToBeFinalizedFromAnyThread(T **thingp)
+{
+    MOZ_ASSERT(thingp);
+    MOZ_ASSERT(*thingp);
+
     T *thing = *thingp;
     JSRuntime *rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
-    if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && !TlsPerThreadData.get()->associatedWith(rt))
+    if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
         return false;
 
     Nursery &nursery = rt->gc.nursery;
     MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
     if (rt->isHeapMinorCollecting()) {
         if (IsInsideNursery(thing))
             return !nursery.getForwardedPointer(thingp);
         return false;
@@ -915,34 +540,52 @@ Mark##base##Range(JSTracer *trc, size_t 
                                                                                                   \
 void                                                                                              \
 Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name)                    \
 {                                                                                                 \
     MarkRootRange<type>(trc, len, vec, name);                                                     \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
+Is##base##MarkedFromAnyThread(type **thingp)                                                      \
+{                                                                                                 \
+    return IsMarkedFromAnyThread<type>(thingp);                                                   \
+}                                                                                                 \
+                                                                                                  \
+bool                                                                                              \
 Is##base##Marked(type **thingp)                                                                   \
 {                                                                                                 \
     return IsMarked<type>(thingp);                                                                \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
+Is##base##MarkedFromAnyThread(BarrieredBase<type*> *thingp)                                       \
+{                                                                                                 \
+    return IsMarkedFromAnyThread<type>(thingp->unsafeGet());                                      \
+}                                                                                                 \
+                                                                                                  \
+bool                                                                                              \
 Is##base##Marked(BarrieredBase<type*> *thingp)                                                    \
 {                                                                                                 \
     return IsMarked<type>(thingp->unsafeGet());                                                   \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
 Is##base##AboutToBeFinalized(type **thingp)                                                       \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp);                                                    \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
+Is##base##AboutToBeFinalizedFromAnyThread(type **thingp)                                          \
+{                                                                                                 \
+    return IsAboutToBeFinalizedFromAnyThread<type>(thingp);                                       \
+}                                                                                                 \
+                                                                                                  \
+bool                                                                                              \
 Is##base##AboutToBeFinalized(BarrieredBase<type*> *thingp)                                        \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
 }                                                                                                 \
                                                                                                   \
 type *                                                                                            \
 Update##base##IfRelocated(JSRuntime *rt, BarrieredBase<type*> *thingp)                            \
 {                                                                                                 \
@@ -1050,16 +693,82 @@ gc::MarkGCThingRoot(JSTracer *trc, void 
 }
 
 void
 gc::MarkGCThingUnbarriered(JSTracer *trc, void **thingp, const char *name)
 {
     MarkGCThingInternal(trc, thingp, name);
 }
 
+/*** ID Marking ***/
+
+static inline void
+MarkIdInternal(JSTracer *trc, jsid *id)
+{
+    if (JSID_IS_STRING(*id)) {
+        JSString *str = JSID_TO_STRING(*id);
+        JSString *prior = str;
+        trc->setTracingLocation((void *)id);
+        MarkInternal(trc, &str);
+        if (str != prior)
+            *id = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
+    } else if (JSID_IS_SYMBOL(*id)) {
+        JS::Symbol *sym = JSID_TO_SYMBOL(*id);
+        JS::Symbol *prior = sym;
+        trc->setTracingLocation((void *)id);
+        MarkInternal(trc, &sym);
+        if (sym != prior)
+            *id = SYMBOL_TO_JSID(sym);
+    } else {
+        /* Unset realLocation manually if we do not call MarkInternal. */
+        trc->unsetTracingLocation();
+    }
+}
+
+void
+gc::MarkId(JSTracer *trc, BarrieredBase<jsid> *id, const char *name)
+{
+    trc->setTracingName(name);
+    MarkIdInternal(trc, id->unsafeGet());
+}
+
+void
+gc::MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
+{
+    JS_ROOT_MARKING_ASSERT(trc);
+    trc->setTracingName(name);
+    MarkIdInternal(trc, id);
+}
+
+void
+gc::MarkIdUnbarriered(JSTracer *trc, jsid *id, const char *name)
+{
+    trc->setTracingName(name);
+    MarkIdInternal(trc, id);
+}
+
+void
+gc::MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
+{
+    for (size_t i = 0; i < len; ++i) {
+        trc->setTracingIndex(name, i);
+        MarkIdInternal(trc, vec[i].unsafeGet());
+    }
+}
+
+void
+gc::MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
+{
+    JS_ROOT_MARKING_ASSERT(trc);
+    for (size_t i = 0; i < len; ++i) {
+        trc->setTracingIndex(name, i);
+        MarkIdInternal(trc, &vec[i]);
+    }
+}
+
 /*** Value Marking ***/
 
 static inline void
 MarkValueInternal(JSTracer *trc, Value *v)
 {
     if (v->isMarkable()) {
         MOZ_ASSERT(v->toGCThing());
         void *thing = v->toGCThing();
@@ -1082,16 +791,50 @@ MarkValueInternal(JSTracer *trc, Value *
                 v->setSymbol(sym);
         }
     } else {
         /* Unset realLocation manually if we do not call MarkInternal. */
         trc->unsetTracingLocation();
     }
 }
 
+void
+gc::MarkValue(JSTracer *trc, BarrieredBase<Value> *v, const char *name)
+{
+    trc->setTracingName(name);
+    MarkValueInternal(trc, v->unsafeGet());
+}
+
+void
+gc::MarkValueRoot(JSTracer *trc, Value *v, const char *name)
+{
+    JS_ROOT_MARKING_ASSERT(trc);
+    trc->setTracingName(name);
+    MarkValueInternal(trc, v);
+}
+
+void
+gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredBase<Value> *vec, const char *name)
+{
+    for (size_t i = 0; i < len; ++i) {
+        trc->setTracingIndex(name, i);
+        MarkValueInternal(trc, vec[i].unsafeGet());
+    }
+}
+
+void
+gc::MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
+{
+    JS_ROOT_MARKING_ASSERT(trc);
+    for (size_t i = 0; i < len; ++i) {
+        trc->setTracingIndex(name, i);
+        MarkValueInternal(trc, &vec[i]);
+    }
+}
+
 bool
 gc::IsValueMarked(Value *v)
 {
     MOZ_ASSERT(v->isMarkable());
     bool rv;
     if (v->isString()) {
         JSString *str = (JSString *)v->toGCThing();
         rv = IsMarked<JSString>(&str);
@@ -1126,16 +869,38 @@ gc::IsValueAboutToBeFinalized(Value *v)
         MOZ_ASSERT(v->isSymbol());
         JS::Symbol *sym = v->toSymbol();
         rv = IsAboutToBeFinalized<JS::Symbol>(&sym);
         v->setSymbol(sym);
     }
     return rv;
 }
 
+bool
+gc::IsValueAboutToBeFinalizedFromAnyThread(Value *v)
+{
+    MOZ_ASSERT(v->isMarkable());
+    bool rv;
+    if (v->isString()) {
+        JSString *str = (JSString *)v->toGCThing();
+        rv = IsAboutToBeFinalizedFromAnyThread<JSString>(&str);
+        v->setString(str);
+    } else if (v->isObject()) {
+        JSObject *obj = (JSObject *)v->toGCThing();
+        rv = IsAboutToBeFinalizedFromAnyThread<JSObject>(&obj);
+        v->setObject(*obj);
+    } else {
+        MOZ_ASSERT(v->isSymbol());
+        JS::Symbol *sym = v->toSymbol();
+        rv = IsAboutToBeFinalizedFromAnyThread<JS::Symbol>(&sym);
+        v->setSymbol(sym);
+    }
+    return rv;
+}
+
 /*** Type Marking ***/
 
 void
 TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
 {
     JS_ROOT_MARKING_ASSERT(trc);
     MarkTypeUnbarriered(trc, v, name);
 }
@@ -1159,16 +924,32 @@ TypeSet::MarkTypeUnbarriered(JSTracer *t
 
 bool
 gc::IsSlotMarked(HeapSlot *s)
 {
     return IsMarked(s);
 }
 
 void
+gc::MarkSlot(JSTracer *trc, HeapSlot *s, const char *name)
+{
+    trc->setTracingName(name);
+    MarkValueInternal(trc, s->unsafeGet());
+}
+
+void
+gc::MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name)
+{
+    for (size_t i = 0; i < len; ++i) {
+        trc->setTracingIndex(name, i);
+        MarkValueInternal(trc, vec[i].unsafeGet());
+    }
+}
+
+void
 gc::MarkObjectSlots(JSTracer *trc, NativeObject *obj, uint32_t start, uint32_t nslots)
 {
     MOZ_ASSERT(obj->isNative());
     for (uint32_t i = start; i < (start + nslots); ++i) {
         trc->setTracingDetails(GetObjectSlotName, obj, i);
         MarkValueInternal(trc, obj->getSlotRef(i).unsafeGet());
     }
 }
@@ -1212,29 +993,45 @@ ShouldMarkCrossCompartment(JSTracer *trc
             if (!tenured.isMarked())
                 DelayCrossCompartmentGrayMarking(src);
             return false;
         }
         return zone->isGCMarkingGray();
     }
 }
 
-static bool
-ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Value val)
+void
+gc::MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject *src, JSObject **dst, const char *name)
 {
-    return val.isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)val.toGCThing());
+    if (ShouldMarkCrossCompartment(trc, src, *dst))
+        MarkObjectUnbarriered(trc, dst, name);
+}
+
+void
+gc::MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSObject *src, JSScript **dst,
+                                          const char *name)
+{
+    if (ShouldMarkCrossCompartment(trc, src, *dst))
+        MarkScriptUnbarriered(trc, dst, name);
+}
+
+void
+gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapValue *dst, const char *name)
+{
+    if (dst->isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)dst->toGCThing()))
+        MarkValue(trc, dst, name);
 }
 
 /*** Special Marking ***/
 
 void
-gc::MarkValueForBarrier(JSTracer *trc, Value *v, const char *name)
+gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
 {
-    MOZ_ASSERT(!trc->runtime()->isHeapBusy());
-    TraceManuallyBarrieredEdge(trc, v, name);
+    trc->setTracingName(name);
+    MarkValueInternal(trc, v);
 }
 
 /*** Push Mark Stack ***/
 
 /*
  * PushMarkStack for BaseShape unpacks its children directly onto the mark
  * stack. For a pre-barrier between incremental slices, this may result in
  * objects in the nursery getting pushed onto the mark stack. It is safe to
@@ -1479,17 +1276,17 @@ gc::MarkCycleCollectorChildren(JSTracer 
     MarkObjectUnbarriered(trc, &global, "global");
 
     do {
         MOZ_ASSERT(global == shape->compartment()->unsafeUnbarrieredMaybeGlobal());
 
         MOZ_ASSERT(shape->base());
         shape->base()->assertConsistency();
 
-        TraceEdge(trc, &shape->propidRef(), "propid");
+        MarkId(trc, &shape->propidRef(), "propid");
 
         if (shape->hasGetterObject()) {
             JSObject *tmp = shape->getterObject();
             MarkObjectUnbarriered(trc, &tmp, "getter");
             MOZ_ASSERT(tmp == shape->getterObject());
         }
 
         if (shape->hasSetterObject()) {
@@ -1503,17 +1300,17 @@ gc::MarkCycleCollectorChildren(JSTracer 
 }
 
 static void
 ScanObjectGroup(GCMarker *gcmarker, ObjectGroup *group)
 {
     unsigned count = group->getPropertyCount();
     for (unsigned i = 0; i < count; i++) {
         if (ObjectGroup::Property *prop = group->getProperty(i))
-            DoMarking(gcmarker, prop->id.get());
+            MarkId(gcmarker, &prop->id, "ObjectGroup property id");
     }
 
     if (group->proto().isObject())
         gcmarker->traverse(group->proto().toObject());
 
     group->compartment()->mark();
 
     if (GlobalObject *global = group->compartment()->unsafeUnbarrieredMaybeGlobal())
@@ -1539,17 +1336,17 @@ ScanObjectGroup(GCMarker *gcmarker, Obje
 }
 
 static void
 gc::MarkChildren(JSTracer *trc, ObjectGroup *group)
 {
     unsigned count = group->getPropertyCount();
     for (unsigned i = 0; i < count; i++) {
         if (ObjectGroup::Property *prop = group->getProperty(i))
-            TraceEdge(trc, &prop->id, "group_property");
+            MarkId(trc, &prop->id, "group_property");
     }
 
     if (group->proto().isObject())
         MarkObject(trc, &group->protoRaw(), "group_proto");
 
     if (group->newScript())
         group->newScript()->trace(trc);
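Among the machinery removed above is the BaseGCType/PtrBaseGCType mapping, which statically folds every derived GC type down to the base cell layout the marking internals are written against. A standalone sketch of the same compile-time dispatch idea, using illustrative stand-in types rather than the real SpiderMonkey classes:

    #include <type_traits>

    struct Object {};            // base layout, in the spirit of JSObject
    struct Function : Object {}; // derived type, in the spirit of JSFunction
    struct String {};            // another base layout, in the spirit of JSString
    struct Atom : String {};     // derived type, in the spirit of JSAtom

    // Map any T to the layout type the marking code understands.
    template <typename T>
    struct BaseGCType {
        using type = typename std::conditional<
            std::is_base_of<Object, T>::value, Object,
            typename std::conditional<
                std::is_base_of<String, T>::value, String, T>::type>::type;
    };

    static_assert(std::is_same<BaseGCType<Function>::type, Object>::value, "Function-like -> Object layout");
    static_assert(std::is_same<BaseGCType<Atom>::type, String>::value, "Atom-like -> String layout");
    static_assert(std::is_same<BaseGCType<Object>::type, Object>::value, "base layout maps to itself");

    int main() { return 0; }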
 
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -32,61 +32,16 @@ class UnownedBaseShape;
 template<class> class HeapPtr;
 
 namespace jit {
 class JitCode;
 struct IonScript;
 struct VMFunction;
 }
 
-/*** Tracing ***/
-
-// Trace through an edge in the live object graph on behalf of tracing. The
-// effect of tracing the edge depends on the JSTracer being used.
-template <typename T>
-void
-TraceEdge(JSTracer *trc, BarrieredBase<T> *thingp, const char *name);
-
-// Trace through a "root" edge. These edges are the initial edges in the object
-// graph traversal. Root edges are asserted to only be traversed in the initial
-// phase of a GC.
-template <typename T>
-void
-TraceRoot(JSTracer *trc, T *thingp, const char *name);
-
-// Like TraceEdge, but for edges that do not use one of the automatic barrier
-// classes and, thus, must be treated specially for moving GC. This method is
-// separate from TraceEdge to make accidental use of such edges more obvious.
-template <typename T>
-void
-TraceManuallyBarrieredEdge(JSTracer *trc, T *thingp, const char *name);
-
-// Trace all edges contained in the given array.
-template <typename T>
-void
-TraceRange(JSTracer *trc, size_t len, BarrieredBase<T> *thingp, const char *name);
-
-// Trace all root edges in the given array.
-template <typename T>
-void
-TraceRootRange(JSTracer *trc, size_t len, T *thingp, const char *name);
-
-// Trace an edge that crosses compartment boundaries. If the compartment of the
-// destination thing is not being GC'd, then the edge will not be traced.
-template <typename T>
-void
-TraceCrossCompartmentEdge(JSTracer *trc, JSObject *src, BarrieredBase<T> *dst,
-                          const char *name);
-
-// As above but with manual barriers.
-template <typename T>
-void
-TraceManuallyBarrieredCrossCompartmentEdge(JSTracer *trc, JSObject *src, T *dst,
-                                           const char *name);
-
 namespace gc {
 
 /*** Object Marking ***/
 
 /*
  * These functions expose marking functionality for all of the different GC
  * thing kinds. For each GC thing, there are several variants. As an example,
  * these are the variants generated for JSObject. They are listed from most to
@@ -133,17 +88,20 @@ namespace gc {
 #define DeclMarker(base, type)                                                                    \
 void Mark##base(JSTracer *trc, BarrieredBase<type*> *thing, const char *name);                    \
 void Mark##base##Root(JSTracer *trc, type **thingp, const char *name);                            \
 void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name);                     \
 void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const char *name);       \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);            \
 bool Is##base##Marked(type **thingp);                                                             \
 bool Is##base##Marked(BarrieredBase<type*> *thingp);                                              \
+bool Is##base##MarkedFromAnyThread(type **thingp);                                                \
+bool Is##base##MarkedFromAnyThread(BarrieredBase<type*> *thingp);                                 \
 bool Is##base##AboutToBeFinalized(type **thingp);                                                 \
+bool Is##base##AboutToBeFinalizedFromAnyThread(type **thingp);                                    \
 bool Is##base##AboutToBeFinalized(BarrieredBase<type*> *thingp);                                  \
 type *Update##base##IfRelocated(JSRuntime *rt, BarrieredBase<type*> *thingp);                     \
 type *Update##base##IfRelocated(JSRuntime *rt, type **thingp);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(BaseShape, UnownedBaseShape)
 DeclMarker(JitCode, jit::JitCode)
 DeclMarker(Object, NativeObject)
@@ -201,50 +159,165 @@ void
 MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind);
 
 void
 MarkGCThingRoot(JSTracer *trc, void **thingp, const char *name);
 
 void
 MarkGCThingUnbarriered(JSTracer *trc, void **thingp, const char *name);
 
+/*** ID Marking ***/
+
+void
+MarkId(JSTracer *trc, BarrieredBase<jsid> *id, const char *name);
+
+void
+MarkIdRoot(JSTracer *trc, jsid *id, const char *name);
+
+void
+MarkIdUnbarriered(JSTracer *trc, jsid *id, const char *name);
+
+void
+MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name);
+
+void
+MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
+
 /*** Value Marking ***/
 
+void
+MarkValue(JSTracer *trc, BarrieredBase<Value> *v, const char *name);
+
+void
+MarkValueRange(JSTracer *trc, size_t len, BarrieredBase<Value> *vec, const char *name);
+
+inline void
+MarkValueRange(JSTracer *trc, HeapValue *begin, HeapValue *end, const char *name)
+{
+    return MarkValueRange(trc, end - begin, begin, name);
+}
+
+void
+MarkValueRoot(JSTracer *trc, Value *v, const char *name);
+
+void
+MarkThingOrValueUnbarriered(JSTracer *trc, uintptr_t *word, const char *name);
+
+void
+MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name);
+
+inline void
+MarkValueRootRange(JSTracer *trc, Value *begin, Value *end, const char *name)
+{
+    MarkValueRootRange(trc, end - begin, begin, name);
+}
+
 bool
 IsValueMarked(Value *v);
 
 bool
 IsValueAboutToBeFinalized(Value *v);
 
+bool
+IsValueAboutToBeFinalizedFromAnyThread(Value *v);
+
 /*** Slot Marking ***/
 
 bool
 IsSlotMarked(HeapSlot *s);
 
 void
+MarkSlot(JSTracer *trc, HeapSlot *s, const char *name);
+
+void
+MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name);
+
+void
 MarkObjectSlots(JSTracer *trc, NativeObject *obj, uint32_t start, uint32_t nslots);
 
+void
+MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject *src, JSObject **dst_obj,
+                                      const char *name);
+
+void
+MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSObject *src, JSScript **dst_script,
+                                      const char *name);
+
+/*
+ * Mark a value that may be in a different compartment from the compartment
+ * being GC'd. (Although it won't be marked if it's in the wrong compartment.)
+ */
+void
+MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapValue *dst_slot, const char *name);
+
+
 /*** Special Cases ***/
 
 /*
  * Trace through the shape and any shapes it contains to mark
  * non-shape children. This is exposed to the JS API as
  * JS_TraceShapeCycleCollectorChildren.
  */
 void
 MarkCycleCollectorChildren(JSTracer *trc, Shape *shape);
 
 void
 PushArena(GCMarker *gcmarker, ArenaHeader *aheader);
 
 /*** Generic ***/
 
-template <typename T>
-static bool
-IsMarked(T **thingp);
+/*
+ * The Mark() functions interface should only be used by code that must be
+ * templated.  Other uses should use the more specific, type-named functions.
+ */
+
+inline void
+Mark(JSTracer *trc, BarrieredBase<Value> *v, const char *name)
+{
+    MarkValue(trc, v, name);
+}
+
+inline void
+Mark(JSTracer *trc, BarrieredBase<JSObject*> *o, const char *name)
+{
+    MarkObject(trc, o, name);
+}
+
+inline void
+Mark(JSTracer *trc, BarrieredBase<JSScript*> *o, const char *name)
+{
+    MarkScript(trc, o, name);
+}
+
+inline void
+Mark(JSTracer *trc, HeapPtrJitCode *code, const char *name)
+{
+    MarkJitCode(trc, code, name);
+}
+
+/* For use by WeakMap's HashKeyRef instantiation. */
+inline void
+Mark(JSTracer *trc, JSObject **objp, const char *name)
+{
+    MarkObjectUnbarriered(trc, objp, name);
+}
+
+/* For use by Debugger::WeakMap's missingScopes HashKeyRef instantiation. */
+inline void
+Mark(JSTracer *trc, NativeObject **obj, const char *name)
+{
+    MarkObjectUnbarriered(trc, obj, name);
+}
+
+/* For use by Debugger::WeakMap's liveScopes HashKeyRef instantiation. */
+inline void
+Mark(JSTracer *trc, ScopeObject **obj, const char *name)
+{
+    MarkObjectUnbarriered(trc, obj, name);
+}
 
 inline bool
 IsMarked(BarrieredBase<Value> *v)
 {
     if (!v->isMarkable())
         return true;
     return IsValueMarked(v->unsafeGet());
 }
@@ -306,40 +379,16 @@ ToMarkable(const Value &v)
 }
 
 inline Cell *
 ToMarkable(Cell *cell)
 {
     return cell;
 }
 
-/*
- * HashKeyRef represents a reference to a HashMap key. This should normally
- * be used through the HashTableWriteBarrierPost function.
- */
-template <typename Map, typename Key>
-class HashKeyRef : public BufferableRef
-{
-    Map *map;
-    Key key;
-
-  public:
-    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}
-
-    void mark(JSTracer *trc) {
-        Key prior = key;
-        typename Map::Ptr p = map->lookup(key);
-        if (!p)
-            return;
-        trc->setTracingLocation(&*p);
-        TraceManuallyBarrieredEdge(trc, &key, "HashKeyRef");
-        map->rekeyIfMoved(prior, key);
-    }
-};
-
 } /* namespace gc */
 
 void
 TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind);
 
 bool
 UnmarkGrayShapeRecursively(Shape *shape);
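The Marking.h hunk above restores the overloaded gc::Mark() entry points reserved for "code that must be templated". A minimal standalone sketch of why such an overload set is useful: generic container code calls one name and overload resolution picks the per-type marker. Types and names below are illustrative only:

    #include <cstdio>

    struct Tracer {};
    struct Value {};
    struct Object {};

    static void Mark(Tracer *, Value *, const char *name)  { std::printf("value  %s\n", name); }
    static void Mark(Tracer *, Object *, const char *name) { std::printf("object %s\n", name); }

    // Generic container code does not need to know which kind of thing it holds.
    template <typename T>
    struct Box {
        T field;
        void trace(Tracer *trc) { Mark(trc, &field, "Box::field"); }
    };

    int main() {
        Tracer trc;
        Box<Value> v;  v.trace(&trc);   // resolves to the Value overload
        Box<Object> o; o.trace(&trc);   // resolves to the Object overload
        return 0;
    }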
 
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -99,18 +99,18 @@ MarkExactStackRootsAcrossTypes(T context
     MarkExactStackRootList<BaseShape *, MarkBaseShapeRoot>(trc, context, "exact-baseshape");
     MarkExactStackRootList<ObjectGroup *, MarkObjectGroupRoot>(
         trc, context, "exact-objectgroup");
     MarkExactStackRootList<JSString *, MarkStringRoot>(trc, context, "exact-string");
     MarkExactStackRootList<JS::Symbol *, MarkSymbolRoot>(trc, context, "exact-symbol");
     MarkExactStackRootList<jit::JitCode *, MarkJitCodeRoot>(trc, context, "exact-jitcode");
     MarkExactStackRootList<JSScript *, MarkScriptRoot>(trc, context, "exact-script");
     MarkExactStackRootList<LazyScript *, MarkLazyScriptRoot>(trc, context, "exact-lazy-script");
-    MarkExactStackRootList<jsid, TraceRoot>(trc, context, "exact-id");
-    MarkExactStackRootList<Value, TraceRoot>(trc, context, "exact-value");
+    MarkExactStackRootList<jsid, MarkIdRoot>(trc, context, "exact-id");
+    MarkExactStackRootList<Value, MarkValueRoot>(trc, context, "exact-value");
     MarkExactStackRootList<TypeSet::Type, TypeSet::MarkTypeRoot>(trc, context, "TypeSet::Type");
     MarkExactStackRootList<Bindings, MarkBindingsRoot>(trc, context, "Bindings");
     MarkExactStackRootList<JSPropertyDescriptor, MarkPropertyDescriptorRoot>(
         trc, context, "JSPropertyDescriptor");
 }
 
 static void
 MarkExactStackRoots(JSRuntime* rt, JSTracer *trc)
@@ -119,58 +119,58 @@ MarkExactStackRoots(JSRuntime* rt, JSTra
         MarkExactStackRootsAcrossTypes<JSContext*>(cx.get(), trc);
     MarkExactStackRootsAcrossTypes<PerThreadData*>(&rt->mainThread, trc);
 }
 
 void
 JS::AutoIdArray::trace(JSTracer *trc)
 {
     MOZ_ASSERT(tag_ == IDARRAY);
-    TraceRange(trc, idArray->length, idArray->begin(), "JSAutoIdArray.idArray");
+    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
 }
 
 inline void
 AutoGCRooter::trace(JSTracer *trc)
 {
     switch (tag_) {
       case PARSER:
         frontend::MarkParser(trc, this);
         return;
 
       case IDARRAY: {
         JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
-        TraceRange(trc, ida->length, ida->begin(), "JS::AutoIdArray.idArray");
+        MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
         return;
       }
 
       case DESCVECTOR: {
         AutoPropertyDescriptorVector::VectorImpl &descriptors =
             static_cast<AutoPropertyDescriptorVector *>(this)->vector;
         for (size_t i = 0, len = descriptors.length(); i < len; i++)
             descriptors[i].trace(trc);
         return;
       }
 
       case VALVECTOR: {
         AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
-        TraceRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
+        MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
         return;
       }
 
       case IDVECTOR: {
         AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
-        TraceRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
+        MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
         return;
       }
 
       case IDVALVECTOR: {
         AutoIdValueVector::VectorImpl &vector = static_cast<AutoIdValueVector *>(this)->vector;
         for (size_t i = 0; i < vector.length(); i++) {
-            TraceRoot(trc, &vector[i].id, "js::AutoIdValueVector id");
-            TraceRoot(trc, &vector[i].value, "js::AutoIdValueVector value");
+            MarkIdRoot(trc, &vector[i].id, "js::AutoIdValueVector id");
+            MarkValueRoot(trc, &vector[i].value, "js::AutoIdValueVector value");
         }
         return;
       }
 
       case SHAPEVECTOR: {
         AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
         MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
                            "js::AutoShapeVector.vector");
@@ -202,17 +202,17 @@ AutoGCRooter::trace(JSTracer *trc)
       }
 
       case VALARRAY: {
         /*
          * We don't know the template size parameter, but we can safely treat it
          * as an AutoValueArray<1> because the length is stored separately.
          */
         AutoValueArray<1> *array = static_cast<AutoValueArray<1> *>(this);
-        TraceRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
+        MarkValueRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
         return;
       }
 
       case SCRIPTVECTOR: {
         AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
         MarkScriptRootRange(trc, vector.length(), vector.begin(), "js::AutoScriptVector.vector");
         return;
       }
@@ -262,49 +262,49 @@ AutoGCRooter::trace(JSTracer *trc)
 
       case IONMASM: {
         static_cast<js::jit::MacroAssembler::AutoRooter *>(this)->masm()->trace(trc);
         return;
       }
 
       case WRAPPER: {
         /*
-         * We need to use TraceManuallyBarrieredEdge here because we mark
-         * wrapper roots in every slice. This is because of some rule-breaking
-         * in RemapAllWrappersForObject; see comment there.
+         * We need to use MarkValueUnbarriered here because we mark wrapper
+         * roots in every slice. This is because of some rule-breaking in
+         * RemapAllWrappersForObject; see comment there.
          */
-        TraceManuallyBarrieredEdge(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
-                                   "JS::AutoWrapperRooter.value");
+        MarkValueUnbarriered(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
+                             "JS::AutoWrapperRooter.value");
         return;
       }
 
       case WRAPVECTOR: {
         AutoWrapperVector::VectorImpl &vector = static_cast<AutoWrapperVector *>(this)->vector;
         /*
-         * We need to use TraceManuallyBarrieredEdge here because we mark
-         * wrapper roots in every slice. This is because of some rule-breaking
-         * in RemapAllWrappersForObject; see comment there.
+         * We need to use MarkValueUnbarriered here because we mark wrapper
+         * roots in every slice. This is because of some rule-breaking in
+         * RemapAllWrappersForObject; see comment there.
          */
         for (WrapperValue *p = vector.begin(); p < vector.end(); p++)
-            TraceManuallyBarrieredEdge(trc, &p->get(), "js::AutoWrapperVector.vector");
+            MarkValueUnbarriered(trc, &p->get(), "js::AutoWrapperVector.vector");
         return;
       }
 
       case JSONPARSER:
         static_cast<js::JSONParserBase *>(this)->trace(trc);
         return;
 
       case CUSTOM:
         static_cast<JS::CustomAutoRooter *>(this)->trace(trc);
         return;
     }
 
     MOZ_ASSERT(tag_ >= 0);
     if (Value *vp = static_cast<AutoArrayRooter *>(this)->array)
-        TraceRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
+        MarkValueRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
 }
 
 /* static */ void
 AutoGCRooter::traceAll(JSTracer *trc)
 {
     for (ContextIter cx(trc->runtime()); !cx.done(); cx.next())
         traceAllInContext(&*cx, trc);
 }
@@ -318,40 +318,40 @@ AutoGCRooter::traceAllWrappers(JSTracer 
                 gcr->trace(trc);
         }
     }
 }
 
 void
 AutoHashableValueRooter::trace(JSTracer *trc)
 {
-    TraceRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter");
+    MarkValueRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter");
 }
 
 void
 StackShape::trace(JSTracer *trc)
 {
     if (base)
         MarkBaseShapeRoot(trc, (BaseShape**) &base, "StackShape base");
 
-    TraceRoot(trc, (jsid*) &propid, "StackShape id");
+    MarkIdRoot(trc, (jsid*) &propid, "StackShape id");
 
     if ((attrs & JSPROP_GETTER) && rawGetter)
         MarkObjectRoot(trc, (JSObject**)&rawGetter, "StackShape getter");
 
     if ((attrs & JSPROP_SETTER) && rawSetter)
         MarkObjectRoot(trc, (JSObject**)&rawSetter, "StackShape setter");
 }
 
 void
 JSPropertyDescriptor::trace(JSTracer *trc)
 {
     if (obj)
         MarkObjectRoot(trc, &obj, "Descriptor::obj");
-    TraceRoot(trc, &value, "Descriptor::value");
+    MarkValueRoot(trc, &value, "Descriptor::value");
     if ((attrs & JSPROP_GETTER) && getter) {
         JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, getter);
         MarkObjectRoot(trc, &tmp, "Descriptor::get");
         getter = JS_DATA_TO_FUNC_PTR(JSGetterOp, tmp);
     }
     if ((attrs & JSPROP_SETTER) && setter) {
         JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, setter);
         MarkObjectRoot(trc, &tmp, "Descriptor::set");
@@ -401,20 +401,20 @@ js::gc::MarkPersistentRootedChains(JSTra
     PersistentRootedMarker<JSObject*>::markChainIfNotNull<MarkObjectRoot>(
         trc, rt->objectPersistentRooteds, "PersistentRooted<JSObject *>");
     PersistentRootedMarker<JSScript*>::markChainIfNotNull<MarkScriptRoot>(
         trc, rt->scriptPersistentRooteds, "PersistentRooted<JSScript *>");
     PersistentRootedMarker<JSString*>::markChainIfNotNull<MarkStringRoot>(
         trc, rt->stringPersistentRooteds, "PersistentRooted<JSString *>");
 
     // Mark the PersistentRooted chains of types that are never null.
-    PersistentRootedMarker<jsid>::markChain<TraceRoot>(trc, rt->idPersistentRooteds,
-                                                       "PersistentRooted<jsid>");
-    PersistentRootedMarker<Value>::markChain<TraceRoot>(trc, rt->valuePersistentRooteds,
-                                                        "PersistentRooted<Value>");
+    PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
+                                                        "PersistentRooted<jsid>");
+    PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
+                                                            "PersistentRooted<Value>");
 }
 
 void
 js::gc::GCRuntime::markRuntime(JSTracer *trc,
                                TraceOrMarkRuntime traceOrMark,
                                TraceRootsOrUsedSaved rootsSource)
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTS);
@@ -441,17 +441,17 @@ js::gc::GCRuntime::markRuntime(JSTracer 
 
         if (!rt->isBeingDestroyed()) {
             MarkExactStackRoots(rt, trc);
             rt->markSelfHostingGlobal(trc);
         }
 
         for (RootRange r = rootsHash.all(); !r.empty(); r.popFront()) {
             const RootEntry &entry = r.front();
-            TraceRoot(trc, entry.key(), entry.value());
+            MarkValueRoot(trc, entry.key(), entry.value());
         }
 
         MarkPersistentRootedChains(trc);
     }
 
     if (rt->asyncStackForNewActivations)
         MarkObjectRoot(trc, &rt->asyncStackForNewActivations,
                        "asyncStackForNewActivations");
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -31,18 +31,18 @@ StoreBuffer::SlotsEdge::mark(JSTracer *t
 
     if (IsInsideNursery(obj))
         return;
 
     if (kind() == ElementKind) {
         int32_t initLen = obj->getDenseInitializedLength();
         int32_t clampedStart = Min(start_, initLen);
         int32_t clampedEnd = Min(start_ + count_, initLen);
-        TraceRange(trc, clampedEnd - clampedStart,
-                   static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart), "element");
+        gc::MarkArraySlots(trc, clampedEnd - clampedStart,
+                           obj->getDenseElements() + clampedStart, "element");
     } else {
         int32_t start = Min(uint32_t(start_), obj->slotSpan());
         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
         MOZ_ASSERT(end >= start);
         MarkObjectSlots(trc, obj, start, end - start);
     }
 }
 
@@ -73,17 +73,17 @@ StoreBuffer::CellPtrEdge::mark(JSTracer 
 }
 
 void
 StoreBuffer::ValueEdge::mark(JSTracer *trc) const
 {
     if (!deref())
         return;
 
-    TraceRoot(trc, edge, "store buffer edge");
+    MarkValueRoot(trc, edge, "store buffer edge");
 }
 
 /*** MonoTypeBuffer ***/
 
 template <typename T>
 void
 StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer *owner, JSTracer *trc)
 {
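
For reference, the element case of SlotsEdge::mark() above clamps the recorded range to the dense initialized length, so a shrunk array never causes uninitialized slots to be traced. A standalone restatement of that clamping, using only calls that appear in the hunk (the helper itself is illustrative, not part of the tree):

// Illustrative helper mirroring SlotsEdge::mark()'s element path: clamp the
// buffered [start, start + count) range to what is actually initialized
// before handing it to the marker.
static void
ClampAndMarkDenseElements(JSTracer *trc, js::NativeObject *obj, int32_t start, int32_t count)
{
    int32_t initLen = obj->getDenseInitializedLength();
    int32_t clampedStart = mozilla::Min(start, initLen);        // never start past the end
    int32_t clampedEnd = mozilla::Min(start + count, initLen);  // never trace uninitialized slots
    js::gc::MarkArraySlots(trc, clampedEnd - clampedStart,
                           obj->getDenseElements() + clampedStart, "element");
}
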
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -29,16 +29,40 @@ namespace gc {
  */
 class BufferableRef
 {
   public:
     virtual void mark(JSTracer *trc) = 0;
     bool maybeInRememberedSet(const Nursery &) const { return true; }
 };
 
+/*
+ * HashKeyRef represents a reference to a HashMap key. This should normally
+ * be used through the HashTableWriteBarrierPost function.
+ */
+template <typename Map, typename Key>
+class HashKeyRef : public BufferableRef
+{
+    Map *map;
+    Key key;
+
+  public:
+    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}
+
+    void mark(JSTracer *trc) {
+        Key prior = key;
+        typename Map::Ptr p = map->lookup(key);
+        if (!p)
+            return;
+        trc->setTracingLocation(&*p);
+        Mark(trc, &key, "HashKeyRef");
+        map->rekeyIfMoved(prior, key);
+    }
+};
+
 typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;
 
 /* The size of a single block of store buffer storage space. */
 static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */
 
 /*
  * The StoreBuffer observes all writes that occur in the system and performs
  * efficient filtering of them to derive a remembered set for nursery GC.
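
HashKeyRef, moved back into StoreBuffer.h above, is the generic-buffer entry that lets a minor GC rekey a hash map whose key object was moved out of the nursery. A hedged sketch of how a write barrier would enqueue one; the storeBuffer access path and the putGeneric call are assumptions about surrounding GC code, not shown in this patch:

// Sketch only: after storing a possibly-nursery-allocated key into a map,
// record a HashKeyRef so the next minor GC can look the entry up again and
// rekey it if the key was moved. Exact runtime plumbing is assumed here.
template <typename Map, typename Key>
static void
PostBarrierHashMapKey(JSRuntime *rt, Map *map, const Key &key)
{
    if (js::gc::IsInsideNursery(key))
        rt->gc.storeBuffer.putGeneric(js::gc::HashKeyRef<Map, Key>(map, key));
}
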
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -27,23 +27,23 @@
 
 using namespace js;
 using namespace js::gc;
 using mozilla::DebugOnly;
 
 JS_PUBLIC_API(void)
 JS_CallUnbarrieredValueTracer(JSTracer *trc, Value *valuep, const char *name)
 {
-    TraceManuallyBarrieredEdge(trc, valuep, name);
+    MarkValueUnbarriered(trc, valuep, name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallUnbarrieredIdTracer(JSTracer *trc, jsid *idp, const char *name)
 {
-    TraceManuallyBarrieredEdge(trc, idp, name);
+    MarkIdUnbarriered(trc, idp, name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallUnbarrieredObjectTracer(JSTracer *trc, JSObject **objp, const char *name)
 {
     MarkObjectUnbarriered(trc, objp, name);
 }
 
@@ -57,23 +57,23 @@ JS_PUBLIC_API(void)
 JS_CallUnbarrieredScriptTracer(JSTracer *trc, JSScript **scriptp, const char *name)
 {
     MarkScriptUnbarriered(trc, scriptp, name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallValueTracer(JSTracer *trc, JS::Heap<JS::Value> *valuep, const char *name)
 {
-    TraceManuallyBarrieredEdge(trc, valuep->unsafeGet(), name);
+    MarkValueUnbarriered(trc, valuep->unsafeGet(), name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallIdTracer(JSTracer *trc, JS::Heap<jsid> *idp, const char *name)
 {
-    TraceManuallyBarrieredEdge(trc, idp->unsafeGet(), name);
+    MarkIdUnbarriered(trc, idp->unsafeGet(), name);
 }
 
 JS_PUBLIC_API(void)
 JS_CallObjectTracer(JSTracer *trc, JS::Heap<JSObject *> *objp, const char *name)
 {
     MarkObjectUnbarriered(trc, objp->unsafeGet(), name);
 }
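
These JS_Call*Tracer entry points are the public faces of the markers above; an embedder's class trace hook reports its JS::Heap<> fields through them. A short usage sketch (WidgetPrivate and its slot layout are invented for illustration; only the JS_Call*Tracer calls come from this patch):

// Embedder-side sketch: report JS::Heap<> members to the tracer via the
// public entry points defined in this file.
struct WidgetPrivate
{
    JS::Heap<JSObject *> callback;
    JS::Heap<JS::Value> cachedValue;
};

static void
WidgetTraceHook(JSTracer *trc, JSObject *obj)
{
    WidgetPrivate *priv = static_cast<WidgetPrivate *>(JS_GetPrivate(obj));
    if (!priv)
        return;
    JS_CallObjectTracer(trc, &priv->callback, "widget callback");
    JS_CallValueTracer(trc, &priv->cachedValue, "widget cached value");
}
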
 
--- a/js/src/jit/BaselineFrame.cpp
+++ b/js/src/jit/BaselineFrame.cpp
@@ -18,40 +18,40 @@ using namespace js;
 using namespace js::jit;
 
 static void
 MarkLocals(BaselineFrame *frame, JSTracer *trc, unsigned start, unsigned end)
 {
     if (start < end) {
         // Stack grows down.
         Value *last = frame->valueSlot(end - 1);
-        TraceRootRange(trc, end - start, last, "baseline-stack");
+        gc::MarkValueRootRange(trc, end - start, last, "baseline-stack");
     }
 }
 
 void
 BaselineFrame::trace(JSTracer *trc, JitFrameIterator &frameIterator)
 {
     replaceCalleeToken(MarkCalleeToken(trc, calleeToken()));
 
-    TraceRoot(trc, &thisValue(), "baseline-this");
+    gc::MarkValueRoot(trc, &thisValue(), "baseline-this");
 
     // Mark actual and formal args.
     if (isNonEvalFunctionFrame()) {
         unsigned numArgs = js::Max(numActualArgs(), numFormalArgs());
-        TraceRootRange(trc, numArgs, argv(), "baseline-args");
+        gc::MarkValueRootRange(trc, numArgs, argv(), "baseline-args");
     }
 
     // Mark scope chain, if it exists.
     if (scopeChain_)
         gc::MarkObjectRoot(trc, &scopeChain_, "baseline-scopechain");
 
     // Mark return value.
     if (hasReturnValue())
-        TraceRoot(trc, returnValue().address(), "baseline-rval");
+        gc::MarkValueRoot(trc, returnValue().address(), "baseline-rval");
 
     if (isEvalFrame())
         gc::MarkScriptRoot(trc, &evalScript_, "baseline-evalscript");
 
     if (hasArgsObj())
         gc::MarkObjectRoot(trc, &argsObj_, "baseline-args-obj");
 
     // Mark locals and stack values.
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -331,17 +331,17 @@ ICStub::trace(JSTracer *trc)
       case ICStub::GetName_Scope5:
         static_cast<ICGetName_Scope<5>*>(this)->traceScopes(trc);
         break;
       case ICStub::GetName_Scope6:
         static_cast<ICGetName_Scope<6>*>(this)->traceScopes(trc);
         break;
       case ICStub::GetIntrinsic_Constant: {
         ICGetIntrinsic_Constant *constantStub = toGetIntrinsic_Constant();
-        TraceEdge(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
+        gc::MarkValue(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
         break;
       }
       case ICStub::GetProp_Primitive: {
         ICGetProp_Primitive *propStub = toGetProp_Primitive();
         MarkShape(trc, &propStub->protoShape(), "baseline-getprop-primitive-stub-shape");
         break;
       }
       case ICStub::GetProp_Native: {
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -861,17 +861,17 @@ IonScript::trace(JSTracer *trc)
 {
     if (method_)
         MarkJitCode(trc, &method_, "method");
 
     if (deoptTable_)
         MarkJitCode(trc, &deoptTable_, "deoptimizationTable");
 
     for (size_t i = 0; i < numConstants(); i++)
-        TraceEdge(trc, &getConstant(i), "constant");
+        gc::MarkValue(trc, &getConstant(i), "constant");
 }
 
 /* static */ void
 IonScript::writeBarrierPre(Zone *zone, IonScript *ionScript)
 {
     if (zone->needsIncrementalBarrier())
         ionScript->trace(zone->barrierTracer());
 }
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1002,21 +1002,21 @@ MarkThisAndArguments(JSTracer *trc, JitF
     if (CalleeTokenIsFunction(layout->calleeToken())) {
         JSFunction *fun = CalleeTokenToFunction(layout->calleeToken());
         nformals = fun->nonLazyScript()->argumentsHasVarBinding() ? 0 : fun->nargs();
     }
 
     Value *argv = layout->argv();
 
     // Trace |this|.
-    TraceRoot(trc, argv, "ion-thisv");
+    gc::MarkValueRoot(trc, argv, "ion-thisv");
 
     // Trace actual arguments beyond the formals. Note + 1 for thisv.
     for (size_t i = nformals + 1; i < nargs + 1; i++)
-        TraceRoot(trc, &argv[i], "ion-argv");
+        gc::MarkValueRoot(trc, &argv[i], "ion-argv");
 }
 
 static void
 MarkThisAndArguments(JSTracer *trc, const JitFrameIterator &frame)
 {
     JitFrameLayout *layout = frame.jsFrame();
     MarkThisAndArguments(trc, layout);
 }
@@ -1063,39 +1063,39 @@ MarkIonJSFrame(JSTracer *trc, const JitF
 
     while (safepoint.getGcSlot(&entry)) {
         uintptr_t *ref = layout->slotRef(entry);
         gc::MarkGCThingRoot(trc, reinterpret_cast<void **>(ref), "ion-gc-slot");
     }
 
     while (safepoint.getValueSlot(&entry)) {
         Value *v = (Value *)layout->slotRef(entry);
-        TraceRoot(trc, v, "ion-gc-slot");
+        gc::MarkValueRoot(trc, v, "ion-gc-slot");
     }
 
     uintptr_t *spill = frame.spillBase();
     LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
     LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
     for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
         --spill;
         if (gcRegs.has(*iter))
             gc::MarkGCThingRoot(trc, reinterpret_cast<void **>(spill), "ion-gc-spill");
         else if (valueRegs.has(*iter))
-            TraceRoot(trc, reinterpret_cast<Value *>(spill), "ion-value-spill");
+            gc::MarkValueRoot(trc, reinterpret_cast<Value *>(spill), "ion-value-spill");
     }
 
 #ifdef JS_NUNBOX32
     LAllocation type, payload;
     while (safepoint.getNunboxSlot(&type, &payload)) {
         jsval_layout layout;
         layout.s.tag = (JSValueTag)ReadAllocation(frame, &type);
         layout.s.payload.uintptr = ReadAllocation(frame, &payload);
 
         Value v = IMPL_TO_JSVAL(layout);
-        TraceRoot(trc, &v, "ion-torn-value");
+        gc::MarkValueRoot(trc, &v, "ion-torn-value");
 
         if (v != IMPL_TO_JSVAL(layout)) {
             // GC moved the value, replace the stored payload.
             layout = JSVAL_TO_IMPL(v);
             WriteAllocation(frame, &payload, layout.s.payload.uintptr);
         }
     }
 #endif
@@ -1260,17 +1260,17 @@ MarkJitExitFrameCopiedArguments(JSTracer
     if (f->outParam == Type_Handle)
         doubleArgs -= sizeof(Value);
     doubleArgs -= f->doubleByRefArgs() * sizeof(double);
 
     for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
         if (f->argProperties(explicitArg) == VMFunction::DoubleByRef) {
             // Arguments with double size can only have RootValue type.
             if (f->argRootType(explicitArg) == VMFunction::RootValue)
-                TraceRoot(trc, reinterpret_cast<Value*>(doubleArgs), "ion-vm-args");
+                gc::MarkValueRoot(trc, reinterpret_cast<Value*>(doubleArgs), "ion-vm-args");
             else
                 MOZ_ASSERT(f->argRootType(explicitArg) == VMFunction::RootNone);
             doubleArgs += sizeof(double);
         }
     }
 }
 #else
 static void
@@ -1298,68 +1298,68 @@ MarkJitExitFrame(JSTracer *trc, const Ji
 
     // This corresponds to the case where we have built a fake exit frame in
     // CodeGenerator.cpp which handles the case of a native function call. We
     // need to mark the argument vector of the function call.
     if (frame.isExitFrameLayout<NativeExitFrameLayout>()) {
         NativeExitFrameLayout *native = frame.exitFrame()->as<NativeExitFrameLayout>();
         size_t len = native->argc() + 2;
         Value *vp = native->vp();
-        TraceRootRange(trc, len, vp, "ion-native-args");
+        gc::MarkValueRootRange(trc, len, vp, "ion-native-args");
         return;
     }
 
     if (frame.isExitFrameLayout<IonOOLNativeExitFrameLayout>()) {
         IonOOLNativeExitFrameLayout *oolnative =
             frame.exitFrame()->as<IonOOLNativeExitFrameLayout>();
         gc::MarkJitCodeRoot(trc, oolnative->stubCode(), "ion-ool-native-code");
-        TraceRoot(trc, oolnative->vp(), "iol-ool-native-vp");
+        gc::MarkValueRoot(trc, oolnative->vp(), "iol-ool-native-vp");
         size_t len = oolnative->argc() + 1;
-        TraceRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
+        gc::MarkValueRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
         return;
     }
 
     if (frame.isExitFrameLayout<IonOOLPropertyOpExitFrameLayout>() ||
         frame.isExitFrameLayout<IonOOLSetterOpExitFrameLayout>())
     {
         // A SetterOp frame is a different size, but that's the only relevant
         // difference between the two. The fields that need marking are all in
         // the common base class.
         IonOOLPropertyOpExitFrameLayout *oolgetter =
             frame.isExitFrameLayout<IonOOLPropertyOpExitFrameLayout>()
             ? frame.exitFrame()->as<IonOOLPropertyOpExitFrameLayout>()
             : frame.exitFrame()->as<IonOOLSetterOpExitFrameLayout>();
         gc::MarkJitCodeRoot(trc, oolgetter->stubCode(), "ion-ool-property-op-code");
-        TraceRoot(trc, oolgetter->vp(), "ion-ool-property-op-vp");
-        TraceRoot(trc, oolgetter->id(), "ion-ool-property-op-id");
+        gc::MarkValueRoot(trc, oolgetter->vp(), "ion-ool-property-op-vp");
+        gc::MarkIdRoot(trc, oolgetter->id(), "ion-ool-property-op-id");
         gc::MarkObjectRoot(trc, oolgetter->obj(), "ion-ool-property-op-obj");
         return;
     }
 
     if (frame.isExitFrameLayout<IonOOLProxyExitFrameLayout>()) {
         IonOOLProxyExitFrameLayout *oolproxy = frame.exitFrame()->as<IonOOLProxyExitFrameLayout>();
         gc::MarkJitCodeRoot(trc, oolproxy->stubCode(), "ion-ool-proxy-code");
-        TraceRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
-        TraceRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
+        gc::MarkValueRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
+        gc::MarkIdRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
         gc::MarkObjectRoot(trc, oolproxy->proxy(), "ion-ool-proxy-proxy");
         gc::MarkObjectRoot(trc, oolproxy->receiver(), "ion-ool-proxy-receiver");
         return;
     }
 
     if (frame.isExitFrameLayout<IonDOMExitFrameLayout>()) {
         IonDOMExitFrameLayout *dom = frame.exitFrame()->as<IonDOMExitFrameLayout>();
         gc::MarkObjectRoot(trc, dom->thisObjAddress(), "ion-dom-args");
         if (dom->isMethodFrame()) {
             IonDOMMethodExitFrameLayout *method =
                 reinterpret_cast<IonDOMMethodExitFrameLayout *>(dom);
             size_t len = method->argc() + 2;
             Value *vp = method->vp();
-            TraceRootRange(trc, len, vp, "ion-dom-args");
+            gc::MarkValueRootRange(trc, len, vp, "ion-dom-args");
         } else {
-            TraceRoot(trc, dom->vp(), "ion-dom-args");
+            gc::MarkValueRoot(trc, dom->vp(), "ion-dom-args");
         }
         return;
     }
 
     if (frame.isExitFrameLayout<LazyLinkExitFrameLayout>()) {
         LazyLinkExitFrameLayout *ll = frame.exitFrame()->as<LazyLinkExitFrameLayout>();
         JitFrameLayout *layout = ll->jsFrame();
 
@@ -1397,17 +1397,17 @@ MarkJitExitFrame(JSTracer *trc, const Ji
           case VMFunction::RootString:
           case VMFunction::RootPropertyName:
             gc::MarkStringRoot(trc, reinterpret_cast<JSString**>(argBase), "ion-vm-args");
             break;
           case VMFunction::RootFunction:
             gc::MarkObjectRoot(trc, reinterpret_cast<JSFunction**>(argBase), "ion-vm-args");
             break;
           case VMFunction::RootValue:
-            TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
+            gc::MarkValueRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
             break;
           case VMFunction::RootCell:
             gc::MarkGCThingRoot(trc, reinterpret_cast<void **>(argBase), "ion-vm-args");
             break;
         }
 
         switch (f->argProperties(explicitArg)) {
           case VMFunction::WordByValue:
@@ -1431,17 +1431,17 @@ MarkJitExitFrame(JSTracer *trc, const Ji
           case VMFunction::RootString:
           case VMFunction::RootPropertyName:
             gc::MarkStringRoot(trc, footer->outParam<JSString *>(), "ion-vm-out");
             break;
           case VMFunction::RootFunction:
             gc::MarkObjectRoot(trc, footer->outParam<JSFunction *>(), "ion-vm-out");
             break;
           case VMFunction::RootValue:
-            TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
+            gc::MarkValueRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
             break;
           case VMFunction::RootCell:
             gc::MarkGCThingRoot(trc, footer->outParam<void *>(), "ion-vm-out");
             break;
         }
     }
 
     MarkJitExitFrameCopiedArguments(trc, f, footer);
@@ -1450,17 +1450,17 @@ MarkJitExitFrame(JSTracer *trc, const Ji
 static void
 MarkRectifierFrame(JSTracer *trc, const JitFrameIterator &frame)
 {
     // Mark thisv.
     //
     // Baseline JIT code generated as part of the ICCall_Fallback stub may use
     // it if we're calling a constructor that returns a primitive value.
     RectifierFrameLayout *layout = (RectifierFrameLayout *)frame.fp();
-    TraceRoot(trc, &layout->argv()[0], "ion-thisv");
+    gc::MarkValueRoot(trc, &layout->argv()[0], "ion-thisv");
 }
 
 static void
 MarkJitActivation(JSTracer *trc, const JitActivationIterator &activations)
 {
     JitActivation *activation = activations->asJit();
 
 #ifdef CHECK_OSIPOINT_REGISTERS
@@ -1700,17 +1700,17 @@ RInstructionResults::operator [](size_t 
     return (*results_)[index];
 }
 
 void
 RInstructionResults::trace(JSTracer *trc)
 {
     // Note: The vector necessarily exists; otherwise this object would not have
     // been stored on the activation from which the trace function is called.
-    TraceRange(trc, results_->length(), results_->begin(), "ion-recover-results");
+    gc::MarkValueRange(trc, results_->length(), results_->begin(), "ion-recover-results");
 }
 
 
 SnapshotIterator::SnapshotIterator(IonScript *ionScript, SnapshotOffset snapshotOffset,
                                    JitFrameLayout *fp, const MachineState &machine)
   : snapshot_(ionScript->snapshots(),
               snapshotOffset,
               ionScript->snapshotsRVATableSize(),
@@ -2078,17 +2078,17 @@ SnapshotIterator::traceAllocation(JSTrac
     if (!allocationReadable(alloc, RM_AlwaysDefault))
         return;
 
     Value v = allocationValue(alloc, RM_AlwaysDefault);
     if (!v.isMarkable())
         return;
 
     Value copy = v;
-    TraceRoot(trc, &v, "ion-typed-reg");
+    gc::MarkValueRoot(trc, &v, "ion-typed-reg");
     if (v != copy) {
         MOZ_ASSERT(SameType(v, copy));
         writeAllocationValuePayload(alloc, v);
     }
 }
 
 const RResumePoint *
 SnapshotIterator::resumePoint() const
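
Both the nunbox torn-value code and SnapshotIterator::traceAllocation above use the same idiom for values living in raw frame or register storage: trace a local copy, then write the payload back only if the GC relocated it. A minimal restatement of that idiom (the helper name is invented):

// Copy, trace, write back: for a Value stored in raw memory the GC does not
// know about, trace a copy and propagate the update only if the referent
// moved. Illustrative helper, not part of the patch.
static void
TraceRawSlotValue(JSTracer *trc, js::Value *rawSlot)
{
    js::Value copy = *rawSlot;
    js::gc::MarkValueRoot(trc, &copy, "raw-slot value");
    if (copy != *rawSlot)
        *rawSlot = copy;    // moving GC updated the traced copy
}
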
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -801,87 +801,87 @@ JitcodeGlobalTable::sweep(JSRuntime *rt)
         else
             entry->sweep();
     }
 }
 
 bool
 JitcodeGlobalEntry::BaseEntry::markJitcodeIfUnmarked(JSTracer *trc)
 {
-    if (!IsJitCodeMarked(&jitcode_)) {
+    if (!IsJitCodeMarkedFromAnyThread(&jitcode_)) {
         MarkJitCodeUnbarriered(trc, &jitcode_, "jitcodglobaltable-baseentry-jitcode");
         return true;
     }
     return false;
 }
 
 bool
 JitcodeGlobalEntry::BaseEntry::isJitcodeMarkedFromAnyThread()
 {
-    return IsJitCodeMarked(&jitcode_) ||
+    return IsJitCodeMarkedFromAnyThread(&jitcode_) ||
            jitcode_->arenaHeader()->allocatedDuringIncremental;
 }
 
 bool
 JitcodeGlobalEntry::BaseEntry::isJitcodeAboutToBeFinalized()
 {
     return IsJitCodeAboutToBeFinalized(&jitcode_);
 }
 
 bool
 JitcodeGlobalEntry::BaselineEntry::markIfUnmarked(JSTracer *trc)
 {
-    if (!IsScriptMarked(&script_)) {
+    if (!IsScriptMarkedFromAnyThread(&script_)) {
         MarkScriptUnbarriered(trc, &script_, "jitcodeglobaltable-baselineentry-script");
         return true;
     }
     return false;
 }
 
 void
 JitcodeGlobalEntry::BaselineEntry::sweep()
 {
     MOZ_ALWAYS_FALSE(IsScriptAboutToBeFinalized(&script_));
 }
 
 bool
 JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
 {
-    return IsScriptMarked(&script_) ||
+    return IsScriptMarkedFromAnyThread(&script_) ||
            script_->arenaHeader()->allocatedDuringIncremental;
 }
 
 bool
 JitcodeGlobalEntry::IonEntry::markIfUnmarked(JSTracer *trc)
 {
     bool markedAny = false;
 
     for (unsigned i = 0; i < numScripts(); i++) {
-        if (!IsScriptMarked(&sizedScriptList()->pairs[i].script)) {
+        if (!IsScriptMarkedFromAnyThread(&sizedScriptList()->pairs[i].script)) {
             MarkScriptUnbarriered(trc, &sizedScriptList()->pairs[i].script,
                                   "jitcodeglobaltable-ionentry-script");
             markedAny = true;
         }
     }
 
     if (!optsAllTypes_)
         return markedAny;
 
     for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
          iter != optsAllTypes_->end(); iter++)
     {
-        if (!TypeSet::IsTypeMarked(&iter->type)) {
+        if (!TypeSet::IsTypeMarkedFromAnyThread(&iter->type)) {
             TypeSet::MarkTypeUnbarriered(trc, &iter->type, "jitcodeglobaltable-ionentry-type");
             markedAny = true;
         }
-        if (iter->hasAllocationSite() && !IsScriptMarked(&iter->script)) {
+        if (iter->hasAllocationSite() && !IsScriptMarkedFromAnyThread(&iter->script)) {
             MarkScriptUnbarriered(trc, &iter->script,
                                   "jitcodeglobaltable-ionentry-type-addendum-script");
             markedAny = true;
-        } else if (iter->hasConstructor() && !IsObjectMarked(&iter->constructor)) {
+        } else if (iter->hasConstructor() && !IsObjectMarkedFromAnyThread(&iter->constructor)) {
             MarkObjectUnbarriered(trc, &iter->constructor,
                                   "jitcodeglobaltable-ionentry-type-addendum-constructor");
             markedAny = true;
         }
     }
 
     return markedAny;
 }
@@ -907,30 +907,30 @@ JitcodeGlobalEntry::IonEntry::sweep()
             MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(&iter->constructor));
     }
 }
 
 bool
 JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
 {
     for (unsigned i = 0; i < numScripts(); i++) {
-        if (!IsScriptMarked(&sizedScriptList()->pairs[i].script) &&
+        if (!IsScriptMarkedFromAnyThread(&sizedScriptList()->pairs[i].script) &&
             !sizedScriptList()->pairs[i].script->arenaHeader()->allocatedDuringIncremental)
         {
             return false;
         }
     }
 
     if (!optsAllTypes_)
         return true;
 
     for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
          iter != optsAllTypes_->end(); iter++)
     {
-        if (!TypeSet::IsTypeMarked(&iter->type) &&
+        if (!TypeSet::IsTypeMarkedFromAnyThread(&iter->type) &&
             !TypeSet::IsTypeAllocatedDuringIncremental(iter->type))
         {
             return false;
         }
     }
 
     return true;
 }
--- a/js/src/jit/RematerializedFrame.cpp
+++ b/js/src/jit/RematerializedFrame.cpp
@@ -144,19 +144,20 @@ RematerializedFrame::initFunctionScopeOb
     return true;
 }
 
 void
 RematerializedFrame::mark(JSTracer *trc)
 {
     gc::MarkScriptRoot(trc, &script_, "remat ion frame script");
     gc::MarkObjectRoot(trc, &scopeChain_, "remat ion frame scope chain");
-    TraceRoot(trc, &returnValue_, "remat ion frame return value");
-    TraceRoot(trc, &thisValue_, "remat ion frame this");
-    TraceRootRange(trc, numActualArgs_ + script_->nfixed(), slots_, "remat ion frame stack");
+    gc::MarkValueRoot(trc, &returnValue_, "remat ion frame return value");
+    gc::MarkValueRoot(trc, &thisValue_, "remat ion frame this");
+    gc::MarkValueRootRange(trc, slots_, slots_ + numActualArgs_ + script_->nfixed(),
+                           "remat ion frame stack");
 }
 
 void
 RematerializedFrame::dump()
 {
     fprintf(stderr, " Rematerialized Ion Frame%s\n", inlined() ? " (inlined)" : "");
     if (isFunctionFrame()) {
         fprintf(stderr, "  callee fun: ");
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1238,17 +1238,17 @@ bool
 ObjectIsCallable(JSObject *obj)
 {
     return obj->isCallable();
 }
 
 void
 MarkValueFromIon(JSRuntime *rt, Value *vp)
 {
-    TraceManuallyBarrieredEdge(&rt->gc.marker, vp, "write barrier");
+    gc::MarkValueUnbarriered(&rt->gc.marker, vp, "write barrier");
 }
 
 void
 MarkStringFromIon(JSRuntime *rt, JSString **stringp)
 {
     if (*stringp)
         gc::MarkStringUnbarriered(&rt->gc.marker, stringp, "write barrier");
 }
--- a/js/src/jit/shared/Assembler-x86-shared.cpp
+++ b/js/src/jit/shared/Assembler-x86-shared.cpp
@@ -56,17 +56,17 @@ TraceDataRelocations(JSTracer *trc, uint
 #ifdef JS_PUNBOX64
         // All pointers on x64 will have the top bits cleared. If those bits
         // are not cleared, this must be a Value.
         uintptr_t *word = reinterpret_cast<uintptr_t *>(ptr);
         if (*word >> JSVAL_TAG_SHIFT) {
             jsval_layout layout;
             layout.asBits = *word;
             Value v = IMPL_TO_JSVAL(layout);
-            TraceManuallyBarrieredEdge(trc, &v, "ion-masm-value");
+            gc::MarkValueUnbarriered(trc, &v, "ion-masm-value");
             *word = JSVAL_TO_IMPL(v).asBits;
             continue;
         }
 #endif
 
         // The low bit shouldn't be set. If it is, we probably got a dummy
         // pointer inserted by CodeGenerator::visitNurseryObject, but we
         // shouldn't be able to trigger GC before those are patched to their
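
The punboxing check above relies on a layout invariant worth spelling out: on 64-bit platforms raw GC pointers keep the bits above JSVAL_TAG_SHIFT clear, while boxed Values carry their tag there, so one shift tells the relocation tracer which kind of word it is patching. As a sketch (helper name invented):

// Illustrative predicate for the invariant used above: a relocated word is a
// boxed Value exactly when it has bits set above JSVAL_TAG_SHIFT.
static bool
WordHoldsBoxedValue(uintptr_t word)
{
#ifdef JS_PUNBOX64
    return (word >> JSVAL_TAG_SHIFT) != 0;
#else
    return false;   // nunbox platforms split type and payload across words
#endif
}
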
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -250,17 +250,17 @@ void
 JSRuntime::sweepAtoms()
 {
     if (!atoms_)
         return;
 
     for (AtomSet::Enum e(*atoms_); !e.empty(); e.popFront()) {
         AtomStateEntry entry = e.front();
         JSAtom *atom = entry.asPtr();
-        bool isDying = IsStringAboutToBeFinalized(&atom);
+        bool isDying = IsStringAboutToBeFinalizedFromAnyThread(&atom);
 
         /* A pinned or interned key cannot be finalized. */
         MOZ_ASSERT_IF(hasContexts() && entry.isTagged(), !isDying);
 
         if (isDying)
             e.removeFront();
     }
 }
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -17,21 +17,16 @@
 #include "vm/CommonPropertyNames.h"
 
 class JSAtom;
 class JSAutoByteString;
 
 struct JSIdArray {
     int length;
     js::HeapId vector[1];    /* actually, length jsid words */
-
-    js::HeapId *begin() { return vector; }
-    const js::HeapId *begin() const { return vector; }
-    js::HeapId *end() { return vector + length; }
-    const js::HeapId *end() const { return vector + length; }
 };
 
 namespace js {
 
 JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
 
 static MOZ_ALWAYS_INLINE js::HashNumber
 HashId(jsid id)
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -1108,17 +1108,17 @@ JSContext::sizeOfIncludingThis(mozilla::
 
 void
 JSContext::mark(JSTracer *trc)
 {
     /* Stack frames and slots are traced by StackSpace::mark. */
 
     /* Mark other roots-by-definition in the JSContext. */
     if (isExceptionPending())
-        TraceRoot(trc, &unwrappedException_, "unwrapped exception");
+        MarkValueRoot(trc, &unwrappedException_, "unwrapped exception");
 
     TraceCycleDetectionSet(trc, cycleDetectorSet);
 
     if (compartment_)
         compartment_->mark();
 }
 
 void *
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -202,26 +202,25 @@ class WrapperMapRef : public BufferableR
 
   public:
     WrapperMapRef(WrapperMap *map, const CrossCompartmentKey &key)
       : map(map), key(key) {}
 
     void mark(JSTracer *trc) {
         CrossCompartmentKey prior = key;
         if (key.debugger)
-            TraceManuallyBarrieredEdge(trc, &key.debugger, "CCW debugger");
+            Mark(trc, &key.debugger, "CCW debugger");
         if (key.kind == CrossCompartmentKey::ObjectWrapper ||
             key.kind == CrossCompartmentKey::DebuggerObject ||
             key.kind == CrossCompartmentKey::DebuggerEnvironment ||
             key.kind == CrossCompartmentKey::DebuggerSource)
         {
             MOZ_ASSERT(IsInsideNursery(key.wrapped) ||
                        key.wrapped->asTenured().getTraceKind() == JSTRACE_OBJECT);
-            TraceManuallyBarrieredEdge(trc, reinterpret_cast<JSObject**>(&key.wrapped),
-                                       "CCW wrapped object");
+            Mark(trc, reinterpret_cast<JSObject**>(&key.wrapped), "CCW wrapped object");
         }
         if (key.debugger == prior.debugger && key.wrapped == prior.wrapped)
             return;
 
         /* Look for the original entry, which might have been removed. */
         WrapperMap::Ptr p = map->lookup(prior);
         if (!p)
             return;
@@ -487,17 +486,17 @@ JSCompartment::markCrossCompartmentWrapp
         Value v = e.front().value();
         if (e.front().key().kind == CrossCompartmentKey::ObjectWrapper) {
             ProxyObject *wrapper = &v.toObject().as<ProxyObject>();
 
             /*
              * We have a cross-compartment wrapper. Its private pointer may
              * point into the compartment being collected, so we should mark it.
              */
-            TraceEdge(trc, wrapper->slotOfPrivate(), "cross-compartment wrapper");
+            MarkValue(trc, wrapper->slotOfPrivate(), "cross-compartment wrapper");
         }
     }
 }
 
 void
 JSCompartment::trace(JSTracer *trc)
 {
     savedStacks_.trace(trc);
@@ -529,28 +528,28 @@ void
 JSCompartment::sweepSavedStacks()
 {
     savedStacks_.sweep(runtimeFromAnyThread());
 }
 
 void
 JSCompartment::sweepGlobalObject(FreeOp *fop)
 {
-    if (global_.unbarrieredGet() && IsObjectAboutToBeFinalized(global_.unsafeGet())) {
+    if (global_.unbarrieredGet() && IsObjectAboutToBeFinalizedFromAnyThread(global_.unsafeGet())) {
         if (isDebuggee())
             Debugger::detachAllDebuggersFromGlobal(fop, global_);
         global_.set(nullptr);
     }
 }
 
 void
 JSCompartment::sweepSelfHostingScriptSource()
 {
     if (selfHostingScriptSource.unbarrieredGet() &&
-        IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
+        IsObjectAboutToBeFinalizedFromAnyThread((JSObject **) selfHostingScriptSource.unsafeGet()))
     {
         selfHostingScriptSource.set(nullptr);
     }
 }
 
 void
 JSCompartment::sweepJitCompartment(FreeOp *fop)
 {
@@ -587,17 +586,17 @@ JSCompartment::sweepWeakMaps()
 void
 JSCompartment::sweepNativeIterators()
 {
     /* Sweep list of native iterators. */
     NativeIterator *ni = enumerators->next();
     while (ni != enumerators) {
         JSObject *iterObj = ni->iterObj();
         NativeIterator *next = ni->next();
-        if (gc::IsObjectAboutToBeFinalized(&iterObj))
+        if (gc::IsObjectAboutToBeFinalizedFromAnyThread(&iterObj))
             ni->unlink();
         ni = next;
     }
 }
 
 /*
  * Remove dead wrappers from the table. We must sweep all compartments, since
  * string entries in the crossCompartmentWrappers table are not marked during
@@ -612,34 +611,34 @@ JSCompartment::sweepCrossCompartmentWrap
         bool keyDying;
         switch (key.kind) {
           case CrossCompartmentKey::ObjectWrapper:
           case CrossCompartmentKey::DebuggerObject:
           case CrossCompartmentKey::DebuggerEnvironment:
           case CrossCompartmentKey::DebuggerSource:
               MOZ_ASSERT(IsInsideNursery(key.wrapped) ||
                          key.wrapped->asTenured().getTraceKind() == JSTRACE_OBJECT);
-              keyDying = IsObjectAboutToBeFinalized(
+              keyDying = IsObjectAboutToBeFinalizedFromAnyThread(
                   reinterpret_cast<JSObject**>(&key.wrapped));
               break;
           case CrossCompartmentKey::StringWrapper:
               MOZ_ASSERT(key.wrapped->asTenured().getTraceKind() == JSTRACE_STRING);
-              keyDying = IsStringAboutToBeFinalized(
+              keyDying = IsStringAboutToBeFinalizedFromAnyThread(
                   reinterpret_cast<JSString**>(&key.wrapped));
               break;
           case CrossCompartmentKey::DebuggerScript:
               MOZ_ASSERT(key.wrapped->asTenured().getTraceKind() == JSTRACE_SCRIPT);
-              keyDying = IsScriptAboutToBeFinalized(
+              keyDying = IsScriptAboutToBeFinalizedFromAnyThread(
                   reinterpret_cast<JSScript**>(&key.wrapped));
               break;
           default:
               MOZ_CRASH("Unknown key kind");
         }
-        bool valDying = IsValueAboutToBeFinalized(e.front().value().unsafeGet());
-        bool dbgDying = key.debugger && IsObjectAboutToBeFinalized(&key.debugger);
+        bool valDying = IsValueAboutToBeFinalizedFromAnyThread(e.front().value().unsafeGet());
+        bool dbgDying = key.debugger && IsObjectAboutToBeFinalizedFromAnyThread(&key.debugger);
         if (keyDying || valDying || dbgDying) {
             MOZ_ASSERT(key.kind != CrossCompartmentKey::StringWrapper);
             e.removeFront();
         } else if (key.wrapped != e.front().key().wrapped ||
                    key.debugger != e.front().key().debugger)
         {
             e.rekeyFront(key);
         }
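
WrapperMapRef::mark() above and HashKeyRef in StoreBuffer.h follow the same recipe for hash tables whose keys can be moved by the GC: remember the prior key, trace a copy, then re-look-up under the prior key and rekey the live entry. A generic restatement of that recipe (illustrative; lookup and rekeyIfMoved come from the HashMap API already used in this patch):

// Generic sketch of the rekey-after-move recipe used by WrapperMapRef and
// HashKeyRef: trace a copy of the key, then fix up the table if it moved.
// TraceKey stands in for whatever marker matches the key type.
template <typename Map, typename Key, void (*TraceKey)(JSTracer *, Key *, const char *)>
static void
TraceAndRekeyEntry(JSTracer *trc, Map *map, Key key)
{
    Key prior = key;
    typename Map::Ptr p = map->lookup(key);
    if (!p)
        return;                       // entry already removed; nothing to fix
    TraceKey(trc, &key, "table key");
    map->rekeyIfMoved(prior, key);    // no-op if the GC did not move the key
}
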
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -361,17 +361,17 @@ struct JSCompartment
             if (!wrap(cx, vec[i]))
                 return false;
         }
         return true;
     };
 
     bool putWrapper(JSContext *cx, const js::CrossCompartmentKey& wrapped, const js::Value& wrapper);
 
-    js::WrapperMap::Ptr lookupWrapper(const js::Value& wrapped) const {
+    js::WrapperMap::Ptr lookupWrapper(const js::Value& wrapped) {
         return crossCompartmentWrappers.lookup(js::CrossCompartmentKey(wrapped));
     }
 
     void removeWrapper(js::WrapperMap::Ptr p) {
         crossCompartmentWrappers.remove(p);
     }
 
     struct WrapperEnum : public js::WrapperMap::Enum {
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -716,18 +716,18 @@ fun_hasInstance(JSContext *cx, HandleObj
     *bp = isDelegate;
     return true;
 }
 
 inline void
 JSFunction::trace(JSTracer *trc)
 {
     if (isExtended()) {
-        TraceRange(trc, ArrayLength(toExtended()->extendedSlots),
-                   (HeapValue *)toExtended()->extendedSlots, "nativeReserved");
+        MarkValueRange(trc, ArrayLength(toExtended()->extendedSlots),
+                       toExtended()->extendedSlots, "nativeReserved");
     }
 
     if (atom_)
         MarkString(trc, &atom_, "atom");
 
     if (isInterpreted()) {
         // Functions can be marked as interpreted despite having no script
         // yet at some points when parsing, and can be lazy with no lazy script
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -4085,15 +4085,15 @@ JSObject::markChildren(JSTracer *trc)
             if (nobj->denseElementsAreCopyOnWrite()) {
                 HeapPtrNativeObject &owner = nobj->getElementsHeader()->ownerObject();
                 if (owner != nobj) {
                     MarkObject(trc, &owner, "objectElementsOwner");
                     break;
                 }
             }
 
-            TraceRange(trc,
-                       nobj->getDenseInitializedLength(),
-                       static_cast<HeapSlot*>(nobj->getDenseElementsAllowCopyOnWrite()),
-                       "objectElements");
+            gc::MarkArraySlots(trc,
+                               nobj->getDenseInitializedLength(),
+                               nobj->getDenseElementsAllowCopyOnWrite(),
+                               "objectElements");
         } while (false);
     }
 }
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -317,17 +317,17 @@ ShapeGetterSetterRef::mark(JSTracer *trc
     // pointers.
 
     JSObject *obj = *objp;
     JSObject *prior = obj;
     if (!prior)
         return;
 
     trc->setTracingLocation(&*prior);
-    TraceManuallyBarrieredEdge(trc, &obj, "AccessorShape getter or setter");
+    gc::Mark(trc, &obj, "AccessorShape getter or setter");
     if (obj == *objp)
         return;
 
     Shape *parent = shape->parent;
     if (shape->inDictionary() || !parent->kids.isHash()) {
         *objp = obj;
         return;
     }
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -3429,17 +3429,17 @@ JSScript::markChildren(JSTracer *trc)
 
     if (hasRegexps()) {
         ObjectArray *objarray = regexps();
         MarkObjectRange(trc, objarray->length, objarray->vector, "objects");
     }
 
     if (hasConsts()) {
         ConstArray *constarray = consts();
-        TraceRange(trc, constarray->length, constarray->vector, "consts");
+        MarkValueRange(trc, constarray->length, constarray->vector, "consts");
     }
 
     if (sourceObject()) {
         MOZ_ASSERT(MaybeForwarded(sourceObject())->compartment() == compartment());
         MarkObject(trc, &sourceObject_, "sourceObject");
     }
 
     if (functionNonDelazifying())
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -167,17 +167,17 @@ WatchpointMap::markIteratively(JSTracer 
                 MarkObject(trc, const_cast<PreBarrieredObject *>(&entry.key().object),
                            "held Watchpoint object");
                 marked = true;
             }
 
             MOZ_ASSERT(JSID_IS_STRING(priorKeyId) ||
                        JSID_IS_INT(priorKeyId) ||
                        JSID_IS_SYMBOL(priorKeyId));
-            TraceEdge(trc, const_cast<PreBarrieredId *>(&entry.key().id), "WatchKey::id");
+            MarkId(trc, const_cast<PreBarrieredId *>(&entry.key().id), "WatchKey::id");
 
             if (entry.value().closure && !IsObjectMarked(&entry.value().closure)) {
                 MarkObject(trc, &entry.value().closure, "Watchpoint::closure");
                 marked = true;
             }
 
             /* We will sweep this entry in sweepAll if !objectIsLive. */
             if (priorKeyObj != entry.key().object || priorKeyId != entry.key().id)
@@ -193,17 +193,17 @@ WatchpointMap::markAll(JSTracer *trc)
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry &entry = e.front();
         WatchKey key = entry.key();
         WatchKey prior = key;
         MOZ_ASSERT(JSID_IS_STRING(prior.id) || JSID_IS_INT(prior.id) || JSID_IS_SYMBOL(prior.id));
 
         MarkObject(trc, const_cast<PreBarrieredObject *>(&key.object),
                    "held Watchpoint object");
-        TraceEdge(trc, const_cast<PreBarrieredId *>(&key.id), "WatchKey::id");
+        MarkId(trc, const_cast<PreBarrieredId *>(&key.id), "WatchKey::id");
         MarkObject(trc, &entry.value().closure, "Watchpoint::closure");
 
         if (prior.object != key.object || prior.id != key.id)
             e.rekeyFront(key);
     }
 }
 
 void
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -156,33 +156,33 @@ class WeakMap : public HashMap<Key, Valu
 
   private:
     void exposeGCThingToActiveJS(const JS::Value &v) const { JS::ExposeValueToActiveJS(v); }
     void exposeGCThingToActiveJS(JSObject *obj) const { JS::ExposeObjectToActiveJS(obj); }
 
     bool markValue(JSTracer *trc, Value *x) {
         if (gc::IsMarked(x))
             return false;
-        TraceEdge(trc, x, "WeakMap entry value");
+        gc::Mark(trc, x, "WeakMap entry value");
         MOZ_ASSERT(gc::IsMarked(x));
         return true;
     }
 
     void nonMarkingTraceKeys(JSTracer *trc) {
         for (Enum e(*this); !e.empty(); e.popFront()) {
             Key key(e.front().key());
-            TraceEdge(trc, &key, "WeakMap entry key");
+            gc::Mark(trc, &key, "WeakMap entry key");
             if (key != e.front().key())
                 entryMoved(e, key);
         }
     }
 
     void nonMarkingTraceValues(JSTracer *trc) {
         for (Range r = Base::all(); !r.empty(); r.popFront())
-            TraceEdge(trc, &r.front().value(), "WeakMap entry value");
+            gc::Mark(trc, &r.front().value(), "WeakMap entry value");
     }
 
     bool keyNeedsMark(JSObject *key) {
         if (JSWeakmapKeyDelegateOp op = key->getClass()->ext.weakmapKeyDelegateOp) {
             JSObject *delegate = op(key);
             /*
              * Check if the delegate is marked with any color to properly handle
              * gray marking when the key's delegate is black and the map is
@@ -203,18 +203,18 @@ class WeakMap : public HashMap<Key, Valu
             /* If the entry is live, ensure its key and value are marked. */
             Key key(e.front().key());
             if (gc::IsMarked(const_cast<Key *>(&key))) {
                 if (markValue(trc, &e.front().value()))
                     markedAny = true;
                 if (e.front().key() != key)
                     entryMoved(e, key);
             } else if (keyNeedsMark(key)) {
-                TraceEdge(trc, &e.front().value(), "WeakMap entry value");
-                TraceEdge(trc, &key, "proxy-preserved WeakMap entry key");
+                gc::Mark(trc, &e.front().value(), "WeakMap entry value");
+                gc::Mark(trc, &key, "proxy-preserved WeakMap entry key");
                 if (e.front().key() != key)
                     entryMoved(e, key);
                 markedAny = true;
             }
             key.unsafeSet(nullptr);
         }
         return markedAny;
     }
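
markEntries() above only reports whether it marked anything new because weak map marking has to run to a fixpoint: marking one entry's value can make another map's key (or key delegate) reachable. A simplified sketch of that outer loop; the per-map 'next' link and the markIteratively signature are assumptions about surrounding GC code, not shown here:

// Simplified, assumption-laden sketch of the ephemeron fixpoint that drives
// the markIteratively/markEntries machinery above: keep re-scanning every
// weak map until a full pass marks nothing new.
static void
MarkWeakMapsToFixpoint(JSTracer *trc, js::WeakMapBase *firstMap)
{
    bool markedAny;
    do {
        markedAny = false;
        for (js::WeakMapBase *m = firstMap; m; m = m->next)   // 'next' link assumed
            markedAny |= m->markIteratively(trc);
    } while (markedAny);
}
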
--- a/js/src/proxy/Proxy.cpp
+++ b/js/src/proxy/Proxy.cpp
@@ -628,25 +628,25 @@ ProxyObject::trace(JSTracer *trc, JSObje
             MOZ_ASSERT(p);
             MOZ_ASSERT(*p->value().unsafeGet() == ObjectValue(*proxy));
         }
     }
 #endif
 
     // Note: If you add new slots here, make sure to change
     // nuke() to cope.
-    TraceCrossCompartmentEdge(trc, obj, proxy->slotOfPrivate(), "private");
-    TraceEdge(trc, proxy->slotOfExtra(0), "extra0");
+    MarkCrossCompartmentSlot(trc, obj, proxy->slotOfPrivate(), "private");
+    MarkValue(trc, proxy->slotOfExtra(0), "extra0");
 
     /*
      * The GC can use the second reserved slot to link the cross compartment
      * wrappers into a linked list, in which case we don't want to trace it.
      */
     if (!proxy->is<CrossCompartmentWrapperObject>())
-        TraceEdge(trc, proxy->slotOfExtra(1), "extra1");
+        MarkValue(trc, proxy->slotOfExtra(1), "extra1");
 
     Proxy::trace(trc, obj);
 }
 
 JSObject *
 js::proxy_WeakmapKeyDelegate(JSObject *obj)
 {
     MOZ_ASSERT(obj->is<ProxyObject>());
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -538,18 +538,18 @@ ArgumentsObject::finalize(FreeOp *fop, J
     fop->free_(reinterpret_cast<void *>(obj->as<ArgumentsObject>().data()));
 }
 
 void
 ArgumentsObject::trace(JSTracer *trc, JSObject *obj)
 {
     ArgumentsObject &argsobj = obj->as<ArgumentsObject>();
     ArgumentsData *data = argsobj.data();
-    TraceEdge(trc, &data->callee, js_callee_str);
-    TraceRange(trc, data->numArgs, data->begin(), js_arguments_str);
+    MarkValue(trc, &data->callee, js_callee_str);
+    MarkValueRange(trc, data->numArgs, data->args, js_arguments_str);
     MarkScriptUnbarriered(trc, &data->script, "script");
 }
 
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
  * stack frame with their corresponding property values in the frame's
  * arguments object.
--- a/js/src/vm/ArgumentsObject.h
+++ b/js/src/vm/ArgumentsObject.h
@@ -57,22 +57,16 @@ struct ArgumentsData
      * aliased by the CallObject. In such cases, the CallObject holds the
      * canonical value so any element access to the arguments object should load
      * the value out of the CallObject (which is pointed to by MAYBE_CALL_SLOT).
      */
     HeapValue   args[1];
 
     /* For jit use: */
     static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
-
-    /* Iterate args. */
-    HeapValue *begin() { return args; }
-    const HeapValue *begin() const { return args; }
-    HeapValue *end() { return args + numArgs; }
-    const HeapValue *end() const { return args + numArgs; }
 };
 
 // Maximum supported value of arguments.length. This bounds the maximum
 // number of arguments that can be supplied to Function.prototype.apply.
 // This value also bounds the number of elements parsed in an array
 // initialiser.
 static const unsigned ARGS_LENGTH_MAX = 500 * 1000;
 
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -1085,22 +1085,22 @@ InnerViewTable::removeViews(ArrayBufferO
     MOZ_ASSERT(p);
 
     map.remove(p);
 }
 
 bool
 InnerViewTable::sweepEntry(JSObject **pkey, ViewVector &views)
 {
-    if (IsObjectAboutToBeFinalized(pkey))
+    if (IsObjectAboutToBeFinalizedFromAnyThread(pkey))
         return true;
 
     MOZ_ASSERT(!views.empty());
     for (size_t i = 0; i < views.length(); i++) {
-        if (IsObjectAboutToBeFinalized(&views[i])) {
+        if (IsObjectAboutToBeFinalizedFromAnyThread(&views[i])) {
             views[i--] = views.back();
             views.popBack();
         }
     }
 
     return views.empty();
 }
 
@@ -1172,17 +1172,17 @@ InnerViewTable::sizeOfExcludingThis(mozi
  * a custom tracer to move the object's data pointer if its owner was moved and
  * stores its data inline.
  */
 /* static */ void
 ArrayBufferViewObject::trace(JSTracer *trc, JSObject *objArg)
 {
     NativeObject *obj = &objArg->as<NativeObject>();
     HeapSlot &bufSlot = obj->getReservedSlotRef(TypedArrayLayout::BUFFER_SLOT);
-    TraceEdge(trc, &bufSlot, "typedarray.buffer");
+    MarkSlot(trc, &bufSlot, "typedarray.buffer");
 
     // Update obj's data pointer if it moved.
     if (bufSlot.isObject()) {
         ArrayBufferObject &buf = AsArrayBuffer(MaybeForwarded(&bufSlot.toObject()));
         int32_t offset = obj->getReservedSlot(TypedArrayLayout::BYTEOFFSET_SLOT).toInt32();
         MOZ_ASSERT(buf.dataPointer() != nullptr);
 
         if (buf.forInlineTypedObject()) {
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -4245,17 +4245,17 @@ GetScriptReferent(JSObject *obj)
     return static_cast<JSScript *>(obj->as<NativeObject>().getPrivate());
 }
 
 void
 DebuggerScript_trace(JSTracer *trc, JSObject *obj)
 {
     /* This comes from a private pointer, so no barrier needed. */
     if (JSScript *script = GetScriptReferent(obj)) {
-        TraceManuallyBarrieredCrossCompartmentEdge(trc, obj, &script, "Debugger.Script referent");
+        MarkCrossCompartmentScriptUnbarriered(trc, obj, &script, "Debugger.Script referent");
         obj->as<NativeObject>().setPrivateUnbarriered(script);
     }
 }
 
 const Class DebuggerScript_class = {
     "Script",
     JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
@@ -5267,18 +5267,17 @@ GetSourceReferent(JSObject *obj)
 void
 DebuggerSource_trace(JSTracer *trc, JSObject *obj)
 {
     /*
      * There is a barrier on private pointers, so the Unbarriered marking
      * is okay.
      */
     if (JSObject *referent = GetSourceReferent(obj)) {
-        TraceManuallyBarrieredCrossCompartmentEdge(trc, obj, &referent,
-                                                   "Debugger.Source referent");
+        MarkCrossCompartmentObjectUnbarriered(trc, obj, &referent, "Debugger.Source referent");
         obj->as<NativeObject>().setPrivateUnbarriered(referent);
     }
 }
 
 const Class DebuggerSource_class = {
     "Source",
     JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSOURCE_COUNT),
@@ -6384,18 +6383,17 @@ static const JSFunctionSpec DebuggerFram
 void
 DebuggerObject_trace(JSTracer *trc, JSObject *obj)
 {
     /*
      * There is a barrier on private pointers, so the Unbarriered marking
      * is okay.
      */
     if (JSObject *referent = (JSObject *) obj->as<NativeObject>().getPrivate()) {
-        TraceManuallyBarrieredCrossCompartmentEdge(trc, obj, &referent,
-                                                   "Debugger.Object referent");
+        MarkCrossCompartmentObjectUnbarriered(trc, obj, &referent, "Debugger.Object referent");
         obj->as<NativeObject>().setPrivateUnbarriered(referent);
     }
 }
 
 const Class DebuggerObject_class = {
     "Object",
     JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
@@ -7294,18 +7292,17 @@ static const JSFunctionSpec DebuggerObje
 void
 DebuggerEnv_trace(JSTracer *trc, JSObject *obj)
 {
     /*
      * There is a barrier on private pointers, so the Unbarriered marking
      * is okay.
      */
     if (Env *referent = (JSObject *) obj->as<NativeObject>().getPrivate()) {
-        TraceManuallyBarrieredCrossCompartmentEdge(trc, obj, &referent,
-                                                   "Debugger.Environment referent");
+        MarkCrossCompartmentObjectUnbarriered(trc, obj, &referent, "Debugger.Environment referent");
         obj->as<NativeObject>().setPrivateUnbarriered(referent);
     }
 }
 
 const Class DebuggerEnv_class = {
     "Environment",
     JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -119,17 +119,17 @@ class DebuggerWeakMap : private WeakMap<
     }
 
   public:
     template <void (traceValueEdges)(JSTracer *, JSObject *)>
     void markCrossCompartmentEdges(JSTracer *tracer) {
         for (Enum e(*static_cast<Base *>(this)); !e.empty(); e.popFront()) {
             traceValueEdges(tracer, e.front().value());
             Key key = e.front().key();
-            TraceEdge(tracer, &key, "Debugger WeakMap key");
+            gc::Mark(tracer, &key, "Debugger WeakMap key");
             if (key != e.front().key())
                 e.rekeyFront(key);
             key.unsafeSet(nullptr);
         }
     }
 
     bool hasKeyInZone(JS::Zone *zone) {
         CountMap::Ptr p = zoneCounts.lookup(zone);
--- a/js/src/vm/JSONParser.cpp
+++ b/js/src/vm/JSONParser.cpp
@@ -42,22 +42,22 @@ JSONParserBase::~JSONParserBase()
 
 void
 JSONParserBase::trace(JSTracer *trc)
 {
     for (size_t i = 0; i < stack.length(); i++) {
         if (stack[i].state == FinishArrayElement) {
             ElementVector &elements = stack[i].elements();
             for (size_t j = 0; j < elements.length(); j++)
-                TraceRoot(trc, &elements[j], "JSONParser element");
+                gc::MarkValueRoot(trc, &elements[j], "JSONParser element");
         } else {
             PropertyVector &properties = stack[i].properties();
             for (size_t j = 0; j < properties.length(); j++) {
-                TraceRoot(trc, &properties[j].value, "JSONParser property value");
-                TraceRoot(trc, &properties[j].id, "JSONParser property id");
+                gc::MarkValueRoot(trc, &properties[j].value, "JSONParser property value");
+                gc::MarkIdRoot(trc, &properties[j].id, "JSONParser property id");
             }
         }
     }
 }
 
 template <typename CharT>
 void
 JSONParser<CharT>::getTextPosition(uint32_t *column, uint32_t *line)
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -412,17 +412,17 @@ class ObjectGroupCompartment::NewTableRe
   public:
     NewTableRef(NewTable *table, const Class *clasp, JSObject *proto, JSObject *associated)
         : table(table), clasp(clasp), proto(proto), associated(associated)
     {}
 
     void mark(JSTracer *trc) {
         JSObject *prior = proto;
         trc->setTracingLocation(&*prior);
-        TraceManuallyBarrieredEdge(trc, &proto, "newObjectGroups set prototype");
+        Mark(trc, &proto, "newObjectGroups set prototype");
         if (prior == proto)
             return;
 
         NewTable::Ptr p = table->lookup(NewTable::Lookup(clasp, TaggedProto(prior),
                                                          TaggedProto(proto),
                                                          associated));
         if (!p)
             return;
@@ -1400,63 +1400,63 @@ ObjectGroupCompartment::sweep(FreeOp *fo
     if (arrayObjectTable) {
         for (ArrayObjectTable::Enum e(*arrayObjectTable); !e.empty(); e.popFront()) {
             ArrayObjectKey key = e.front().key();
             MOZ_ASSERT(key.type.isUnknown() || !key.type.isSingleton());
 
             bool remove = false;
             if (!key.type.isUnknown() && key.type.isGroup()) {
                 ObjectGroup *group = key.type.groupNoBarrier();
-                if (IsObjectGroupAboutToBeFinalized(&group))
+                if (IsObjectGroupAboutToBeFinalizedFromAnyThread(&group))
                     remove = true;
                 else
                     key.type = TypeSet::ObjectType(group);
             }
             if (key.proto && key.proto != TaggedProto::LazyProto &&
-                IsObjectAboutToBeFinalized(&key.proto))
+                IsObjectAboutToBeFinalizedFromAnyThread(&key.proto))
             {
                 remove = true;
             }
-            if (IsObjectGroupAboutToBeFinalized(e.front().value().unsafeGet()))
+            if (IsObjectGroupAboutToBeFinalizedFromAnyThread(e.front().value().unsafeGet()))
                 remove = true;
 
             if (remove)
                 e.removeFront();
             else if (key != e.front().key())
                 e.rekeyFront(key);
         }
     }
 
     if (plainObjectTable) {
         for (PlainObjectTable::Enum e(*plainObjectTable); !e.empty(); e.popFront()) {
             const PlainObjectKey &key = e.front().key();
             PlainObjectEntry &entry = e.front().value();
 
             bool remove = false;
-            if (IsObjectGroupAboutToBeFinalized(entry.group.unsafeGet()))
+            if (IsObjectGroupAboutToBeFinalizedFromAnyThread(entry.group.unsafeGet()))
                 remove = true;
-            if (IsShapeAboutToBeFinalized(entry.shape.unsafeGet()))
+            if (IsShapeAboutToBeFinalizedFromAnyThread(entry.shape.unsafeGet()))
                 remove = true;
             for (unsigned i = 0; !remove && i < key.nproperties; i++) {
                 if (JSID_IS_STRING(key.properties[i])) {
                     JSString *str = JSID_TO_STRING(key.properties[i]);
-                    if (IsStringAboutToBeFinalized(&str))
+                    if (IsStringAboutToBeFinalizedFromAnyThread(&str))
                         remove = true;
                     MOZ_ASSERT(AtomToId((JSAtom *)str) == key.properties[i]);
                 } else if (JSID_IS_SYMBOL(key.properties[i])) {
                     JS::Symbol *sym = JSID_TO_SYMBOL(key.properties[i]);
-                    if (IsSymbolAboutToBeFinalized(&sym))
+                    if (IsSymbolAboutToBeFinalizedFromAnyThread(&sym))
                         remove = true;
                 }
 
                 MOZ_ASSERT(!entry.types[i].isSingleton());
                 ObjectGroup *group = nullptr;
                 if (entry.types[i].isGroup()) {
                     group = entry.types[i].groupNoBarrier();
-                    if (IsObjectGroupAboutToBeFinalized(&group))
+                    if (IsObjectGroupAboutToBeFinalizedFromAnyThread(&group))
                         remove = true;
                     else if (group != entry.types[i].groupNoBarrier())
                         entry.types[i] = TypeSet::ObjectType(group);
                 }
             }
 
             if (remove) {
                 js_free(key.properties);
@@ -1464,18 +1464,18 @@ ObjectGroupCompartment::sweep(FreeOp *fo
                 e.removeFront();
             }
         }
     }
 
     if (allocationSiteTable) {
         for (AllocationSiteTable::Enum e(*allocationSiteTable); !e.empty(); e.popFront()) {
             AllocationSiteKey key = e.front().key();
-            bool keyDying = IsScriptAboutToBeFinalized(&key.script);
-            bool valDying = IsObjectGroupAboutToBeFinalized(e.front().value().unsafeGet());
+            bool keyDying = IsScriptAboutToBeFinalizedFromAnyThread(&key.script);
+            bool valDying = IsObjectGroupAboutToBeFinalizedFromAnyThread(e.front().value().unsafeGet());
             if (keyDying || valDying)
                 e.removeFront();
             else if (key.script != e.front().key().script)
                 e.rekeyFront(key);
         }
     }
 
     sweepNewTable(defaultNewTable);
@@ -1483,18 +1483,18 @@ ObjectGroupCompartment::sweep(FreeOp *fo
 }
 
 void
 ObjectGroupCompartment::sweepNewTable(NewTable *table)
 {
     if (table && table->initialized()) {
         for (NewTable::Enum e(*table); !e.empty(); e.popFront()) {
             NewEntry entry = e.front();
-            if (IsObjectGroupAboutToBeFinalized(entry.group.unsafeGet()) ||
-                (entry.associated && IsObjectAboutToBeFinalized(&entry.associated)))
+            if (IsObjectGroupAboutToBeFinalizedFromAnyThread(entry.group.unsafeGet()) ||
+                (entry.associated && IsObjectAboutToBeFinalizedFromAnyThread(&entry.associated)))
             {
                 e.removeFront();
             } else {
                 /* Any rekeying necessary is handled by fixupNewObjectGroupTable() below. */
                 MOZ_ASSERT(entry.group.unbarrieredGet() == e.front().group.unbarrieredGet());
                 MOZ_ASSERT(entry.associated == e.front().associated);
             }
         }
--- a/js/src/vm/PIC.cpp
+++ b/js/src/vm/PIC.cpp
@@ -255,18 +255,18 @@ js::ForOfPIC::Chain::mark(JSTracer *trc)
         return;
 
     gc::MarkObject(trc, &arrayProto_, "ForOfPIC Array.prototype.");
     gc::MarkObject(trc, &arrayIteratorProto_, "ForOfPIC ArrayIterator.prototype.");
 
     gc::MarkShape(trc, &arrayProtoShape_, "ForOfPIC Array.prototype shape.");
     gc::MarkShape(trc, &arrayIteratorProtoShape_, "ForOfPIC ArrayIterator.prototype shape.");
 
-    TraceEdge(trc, &canonicalIteratorFunc_, "ForOfPIC ArrayValues builtin.");
-    TraceEdge(trc, &canonicalNextFunc_, "ForOfPIC ArrayIterator.prototype.next builtin.");
+    gc::MarkValue(trc, &canonicalIteratorFunc_, "ForOfPIC ArrayValues builtin.");
+    gc::MarkValue(trc, &canonicalNextFunc_, "ForOfPIC ArrayIterator.prototype.next builtin.");
 
     // Free all the stubs in the chain.
     while (stubs_)
         removeStub(stubs_, nullptr);
 }
 
 void
 js::ForOfPIC::Chain::sweep(FreeOp *fop)
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -907,35 +907,35 @@ RegExpCompartment::sweep(JSRuntime *rt)
         // restarted while in progress (i.e. performing a full GC in the
         // middle of an incremental GC) or if a RegExpShared referenced via the
         // stack is traced but is not in a zone being collected.
         //
         // Because of this we only treat the marked_ bit as a hint, and destroy
         // the RegExpShared if it was accidentally marked earlier but wasn't
         // marked by the current trace.
         bool keep = shared->marked() &&
-                    IsStringMarked(&shared->source);
+                    IsStringMarkedFromAnyThread(&shared->source);
         for (size_t i = 0; i < ArrayLength(shared->compilationArray); i++) {
             RegExpShared::RegExpCompilation &compilation = shared->compilationArray[i];
             if (compilation.jitCode &&
-                IsJitCodeAboutToBeFinalized(compilation.jitCode.unsafeGet()))
+                IsJitCodeAboutToBeFinalizedFromAnyThread(compilation.jitCode.unsafeGet()))
             {
                 keep = false;
             }
         }
         if (keep || rt->isHeapCompacting()) {
             shared->clearMarked();
         } else {
             js_delete(shared);
             e.removeFront();
         }
     }
 
     if (matchResultTemplateObject_ &&
-        IsObjectAboutToBeFinalized(matchResultTemplateObject_.unsafeGet()))
+        IsObjectAboutToBeFinalizedFromAnyThread(matchResultTemplateObject_.unsafeGet()))
     {
         matchResultTemplateObject_.set(nullptr);
     }
 }
 
 bool
 RegExpCompartment::get(JSContext *cx, JSAtom *source, RegExpFlag flags, RegExpGuard *g)
 {
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -812,17 +812,17 @@ SavedStacks::saveCurrentStack(JSContext 
 void
 SavedStacks::sweep(JSRuntime *rt)
 {
     if (frames.initialized()) {
         for (SavedFrame::Set::Enum e(frames); !e.empty(); e.popFront()) {
             JSObject *obj = e.front().unbarrieredGet();
             JSObject *temp = obj;
 
-            if (IsObjectAboutToBeFinalized(&obj)) {
+            if (IsObjectAboutToBeFinalizedFromAnyThread(&obj)) {
                 e.removeFront();
             } else {
                 SavedFrame *frame = &obj->as<SavedFrame>();
                 bool parentMoved = frame->parentMoved();
 
                 if (parentMoved) {
                     frame->updatePrivateParent();
                 }
@@ -1080,17 +1080,17 @@ SavedStacks::createFrameFromLookup(JSCon
  * Remove entries from the table whose JSScript is being collected.
  */
 void
 SavedStacks::sweepPCLocationMap()
 {
     for (PCLocationMap::Enum e(pcLocationMap); !e.empty(); e.popFront()) {
         PCKey key = e.front().key();
         JSScript *script = key.script.get();
-        if (IsScriptAboutToBeFinalized(&script)) {
+        if (IsScriptAboutToBeFinalizedFromAnyThread(&script)) {
             e.removeFront();
         } else if (script != key.script.get()) {
             key.script = script;
             e.rekeyFront(key);
         }
     }
 }
 
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1180,17 +1180,17 @@ MissingScopeKey::match(MissingScopeKey s
 {
     return sk1.frame_ == sk2.frame_ && sk1.staticScope_ == sk2.staticScope_;
 }
 
 void
 LiveScopeVal::sweep()
 {
     if (staticScope_)
-        MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(staticScope_.unsafeGet()));
+        MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalizedFromAnyThread(staticScope_.unsafeGet()));
 }
 
 // Live ScopeIter values may be added to DebugScopes::liveScopes, as
 // LiveScopeVal instances.  They need to have write barriers when they are added
 // to the hash table, but no barriers when rehashing inside GC.  It's a nasty
 // hack, but the important thing is that LiveScopeVal and MissingScopeKey need to
 // alias each other.
 void
@@ -1878,17 +1878,17 @@ void
 DebugScopes::sweep(JSRuntime *rt)
 {
     /*
      * missingScopes points to debug scopes weakly so that debug scopes can be
      * released more eagerly.
      */
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
         DebugScopeObject **debugScope = e.front().value().unsafeGet();
-        if (IsObjectAboutToBeFinalized(debugScope)) {
+        if (IsObjectAboutToBeFinalizedFromAnyThread(debugScope)) {
             /*
              * Note that onPopCall and onPopBlock rely on missingScopes to find
              * scope objects that we synthesized for the debugger's sake, and
              * clean up the synthetic scope objects' entries in liveScopes. So
             * if we remove an entry from missingScopes here, we must also
              * remove the corresponding liveScopes entry.
              *
              * Since the DebugScopeObject is the only thing using its scope
@@ -1916,17 +1916,17 @@ DebugScopes::sweep(JSRuntime *rt)
         ScopeObject *scope = e.front().key();
 
         e.front().value().sweep();
 
         /*
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
          * no longer reachable via its DebugScopeObject.
          */
-        if (IsObjectAboutToBeFinalized(&scope))
+        if (IsObjectAboutToBeFinalizedFromAnyThread(&scope))
             e.removeFront();
         else if (scope != e.front().key())
             e.rekeyFront(scope);
     }
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1246,17 +1246,17 @@ BaseShape::assertConsistency()
 void
 JSCompartment::sweepBaseShapeTable()
 {
     if (!baseShapes.initialized())
         return;
 
     for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
         UnownedBaseShape *base = e.front().unbarrieredGet();
-        if (IsBaseShapeAboutToBeFinalized(&base)) {
+        if (IsBaseShapeAboutToBeFinalizedFromAnyThread(&base)) {
             e.removeFront();
         } else if (base != e.front().unbarrieredGet()) {
             ReadBarriered<UnownedBaseShape *> b(base);
             e.rekeyFront(base, b);
         }
     }
 }
 
@@ -1346,20 +1346,18 @@ class InitialShapeSetRef : public Buffer
           clasp(clasp),
           proto(proto),
           nfixed(nfixed),
           objectFlags(objectFlags)
     {}
 
     void mark(JSTracer *trc) {
         TaggedProto priorProto = proto;
-        if (proto.isObject()) {
-            TraceManuallyBarrieredEdge(trc, reinterpret_cast<JSObject**>(&proto),
-                                       "initialShapes set proto");
-        }
+        if (proto.isObject())
+            Mark(trc, reinterpret_cast<JSObject**>(&proto), "initialShapes set proto");
         if (proto == priorProto)
             return;
 
         /* Find the original entry, which must still be present. */
         InitialShapeEntry::Lookup lookup(clasp, priorProto, nfixed, objectFlags);
         InitialShapeSet::Ptr p = set->lookup(lookup);
         MOZ_ASSERT(p);
 
@@ -1531,18 +1529,18 @@ EmptyShape::insertInitialShape(Exclusive
 void
 JSCompartment::sweepInitialShapeTable()
 {
     if (initialShapes.initialized()) {
         for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
             const InitialShapeEntry &entry = e.front();
             Shape *shape = entry.shape.unbarrieredGet();
             JSObject *proto = entry.proto.raw();
-            if (IsShapeAboutToBeFinalized(&shape) ||
-                (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto)))
+            if (IsShapeAboutToBeFinalizedFromAnyThread(&shape) ||
+                (entry.proto.isObject() && IsObjectAboutToBeFinalizedFromAnyThread(&proto)))
             {
                 e.removeFront();
             } else {
                 if (shape != entry.shape.unbarrieredGet() || proto != entry.proto.raw()) {
                     ReadBarrieredShape readBarrieredShape(shape);
                     InitialShapeEntry newKey(readBarrieredShape, TaggedProto(proto));
                     e.rekeyFront(newKey.getLookup(), newKey);
                 }
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -1356,17 +1356,17 @@ Shape::searchLinear(jsid id)
 
     return nullptr;
 }
 
 inline void
 Shape::markChildren(JSTracer *trc)
 {
     MarkBaseShape(trc, &base_, "base");
-    TraceEdge(trc, &propidRef(), "propid");
+    gc::MarkId(trc, &propidRef(), "propid");
     if (parent)
         MarkShape(trc, &parent, "parent");
 
     if (hasGetterObject())
         gc::MarkObjectUnbarriered(trc, &asAccessorShape().getterObj, "getter");
 
     if (hasSetterObject())
         gc::MarkObjectUnbarriered(trc, &asAccessorShape().setterObj, "setter");
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -340,24 +340,24 @@ InterpreterFrame::mark(JSTracer *trc)
         if (isEvalFrame())
             gc::MarkScriptUnbarriered(trc, &u.evalScript, "eval script");
     } else {
         gc::MarkScriptUnbarriered(trc, &exec.script, "script");
     }
     if (trc->isMarkingTracer())
         script()->compartment()->zone()->active = true;
     if (hasReturnValue())
-        TraceManuallyBarrieredEdge(trc, &rval_, "rval");
+        gc::MarkValueUnbarriered(trc, &rval_, "rval");
 }
 
 void
 InterpreterFrame::markValues(JSTracer *trc, unsigned start, unsigned end)
 {
     if (start < end)
-        TraceRootRange(trc, end - start, slots() + start, "vm_stack");
+        gc::MarkValueRootRange(trc, end - start, slots() + start, "vm_stack");
 }
 
 void
 InterpreterFrame::markValues(JSTracer *trc, Value *sp, jsbytecode *pc)
 {
     MOZ_ASSERT(sp >= slots());
 
     JSScript *script = this->script();
@@ -391,20 +391,20 @@ InterpreterFrame::markValues(JSTracer *t
 
         // Mark live locals.
         markValues(trc, 0, nlivefixed);
     }
 
     if (hasArgs()) {
         // Mark callee, |this| and arguments.
         unsigned argc = Max(numActualArgs(), numFormalArgs());
-        TraceRootRange(trc, argc + 2, argv_ - 2, "fp argv");
+        gc::MarkValueRootRange(trc, argc + 2, argv_ - 2, "fp argv");
     } else {
         // Mark callee and |this|
-        TraceRootRange(trc, 2, ((Value *)this) - 2, "stack callee and this");
+        gc::MarkValueRootRange(trc, 2, ((Value *)this) - 2, "stack callee and this");
     }
 }
 
 static void
 MarkInterpreterActivation(JSTracer *trc, InterpreterActivation *act)
 {
     for (InterpreterFrameIterator frames(act); !frames.done(); ++frames) {
         InterpreterFrame *fp = frames.frame();
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -704,26 +704,26 @@ TypeSet::readBarrier(const TypeSet *type
                 (void) key->singleton();
             else
                 (void) key->group();
         }
     }
 }
 
 /* static */ bool
-TypeSet::IsTypeMarked(TypeSet::Type *v)
+TypeSet::IsTypeMarkedFromAnyThread(TypeSet::Type *v)
 {
     bool rv;
     if (v->isSingletonUnchecked()) {
         JSObject *obj = v->singletonNoBarrier();
-        rv = IsObjectMarked(&obj);
+        rv = IsObjectMarkedFromAnyThread(&obj);
         *v = TypeSet::ObjectType(obj);
     } else if (v->isGroupUnchecked()) {
         ObjectGroup *group = v->groupNoBarrier();
-        rv = IsObjectGroupMarked(&group);
+        rv = IsObjectGroupMarkedFromAnyThread(&group);
         *v = TypeSet::ObjectType(group);
     } else {
         rv = true;
     }
     return rv;
 }
 
 /* static */ bool
--- a/js/src/vm/TypeInference.h
+++ b/js/src/vm/TypeInference.h
@@ -525,17 +525,17 @@ class TypeSet
 
     // Get the type of a possibly optimized out or uninitialized let value.
     // This generally only happens on unconditional type monitors on bailing
     // out of Ion, such as for argument and local types.
     static inline Type GetMaybeUntrackedValueType(const Value &val);
 
     static void MarkTypeRoot(JSTracer *trc, Type *v, const char *name);
     static void MarkTypeUnbarriered(JSTracer *trc, Type *v, const char *name);
-    static bool IsTypeMarked(Type *v);
+    static bool IsTypeMarkedFromAnyThread(Type *v);
     static bool IsTypeAllocatedDuringIncremental(Type v);
     static bool IsTypeAboutToBeFinalized(Type *v);
 };
 
 /*
  * A constraint which listens to additions to a type set and propagates those
  * changes to other type sets.
  */