Bug 1010655 - Always use the faster version of IsInsideNursery when possible; r=jonco
authorTerrence Cole <terrence@mozilla.com>
Wed, 14 May 2014 19:48:09 -0700
changeset 196484 a6cf64544f9b9c82e3bde274db80bd2a2fa3be4e
parent 196483 24ddf040a705131f673b7b6ce7defb811c9b8973
child 196485 a7a36e1dca08277aba3620c788bdb10edb0b7d5f
push id: unknown
push user: unknown
push date: unknown
reviewers: jonco
bugs: 1010655
milestone: 32.0a1
js/public/GCAPI.h
js/public/HeapAPI.h
js/public/Id.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Nursery-inl.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Verifier.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonMacroAssembler.cpp
js/src/jit/MCallOptimize.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/shared/Assembler-shared.h
js/src/jit/x64/Assembler-x64.h
js/src/jit/x86/Assembler-x86.h
js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
js/src/jsapi-tests/testIsInsideNursery.cpp
js/src/jscompartment.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsinfer.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jspubtd.h
js/src/jsweakmap.cpp
js/src/jswrapper.cpp
js/src/vm/ArrayBufferObject.cpp
js/src/vm/ArrayBufferObject.h
js/src/vm/Runtime.cpp
js/src/vm/ScopeObject.cpp
js/src/vm/Shape.cpp
xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
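
The core of the change is visible in the HeapAPI.h hunk below: the overload IsInsideNursery(const JS::shadow::Runtime *, const void *), which compared an address against the runtime's gcNurseryStart_/gcNurseryEnd_ fields, is removed, and callers are migrated to IsInsideNursery(const js::gc::Cell *), which needs only the cell address: it masks the address down to its owning chunk and reads the chunk's location word. Callers that hold an arbitrary pointer rather than a Cell (slot buffers, typed-array data) switch to Nursery::isInside(const void *), which keeps the range check but reads bounds now stored on the Nursery itself. The practical win is that the fast check needs no runtime or shadow-runtime pointer at all. A minimal standalone sketch of the two checks, with illustrative constants standing in for js::gc::ChunkMask, ChunkLocationOffset and ChunkLocationNursery (the tail of the real fast check is cut off in the hunk below, so the final comparison here is an assumption):

    #include <cstdint>

    // Illustrative constants; the engine's real values live in js/public/HeapAPI.h.
    static const uintptr_t ChunkSize = uintptr_t(1) << 20;
    static const uintptr_t ChunkMask = ChunkSize - 1;
    static const uintptr_t ChunkLocationOffset = ChunkSize - sizeof(uintptr_t);
    static const uintptr_t ChunkLocationNursery = 0;

    // Old, slower form (removed by this patch): requires the runtime's nursery
    // bounds, so every caller first needs a (shadow) runtime pointer in hand.
    static bool IsInsideNurserySlow(uintptr_t nurseryStart, uintptr_t nurseryEnd,
                                    const void *p)
    {
        return uintptr_t(p) >= nurseryStart && uintptr_t(p) < nurseryEnd;
    }

    // New, faster form: derive the owning chunk from the cell address alone and
    // read the location word every chunk stores at a fixed offset.
    static bool IsInsideNurseryFast(const void *cell)
    {
        if (!cell)
            return false;
        uintptr_t addr = uintptr_t(cell);
        addr &= ~ChunkMask;               // round down to the chunk base
        addr |= ChunkLocationOffset;      // address of the chunk's location word
        return *reinterpret_cast<const uintptr_t *>(addr) == ChunkLocationNursery;
    }
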
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -460,17 +460,17 @@ ExposeGCThingToActiveJS(void *thing, JSG
 
     shadow::Runtime *rt = js::gc::GetGCThingRuntime(thing);
 #ifdef JSGC_GENERATIONAL
     /*
      * GC things residing in the nursery cannot be gray: they have no mark bits.
      * All live objects in the nursery are moved to tenured at the beginning of
      * each GC slice, so the gray marker never sees nursery things.
      */
-    if (js::gc::IsInsideNursery(rt, thing))
+    if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
         return;
 #endif
     if (IsIncrementalBarrierNeededOnGCThing(rt, thing, kind))
         IncrementalReferenceBarrier(thing, kind);
     else if (GCThingIsMarkedGray(thing))
         UnmarkGrayGCThingRecursively(thing, kind);
 }
 
@@ -493,17 +493,17 @@ ExposeObjectToActiveJS(JSObject *obj)
 static MOZ_ALWAYS_INLINE void
 MarkGCThingAsLive(JSRuntime *rt_, void *thing, JSGCTraceKind kind)
 {
     shadow::Runtime *rt = shadow::Runtime::asShadowRuntime(rt_);
 #ifdef JSGC_GENERATIONAL
     /*
      * Any object in the nursery will not be freed during any GC running at that time.
      */
-    if (js::gc::IsInsideNursery(rt, thing))
+    if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
         return;
 #endif
     if (IsIncrementalBarrierNeededOnGCThing(rt, thing, kind))
         IncrementalReferenceBarrier(thing, kind);
 }
 
 static MOZ_ALWAYS_INLINE void
 MarkStringAsLive(Zone *zone, JSString *string)
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -56,24 +56,75 @@ static const uint32_t GRAY = 1;
 
 /*
  * Constants used to indicate whether a chunk is part of the tenured heap or the
  * nursery.
  */
 const uintptr_t ChunkLocationNursery = 0;
 const uintptr_t ChunkLocationTenuredHeap = 1;
 
+#ifdef JS_DEBUG
+/* When downcasting, ensure we are actually the right type. */
+extern JS_FRIEND_API(void)
+AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind);
+#else
+inline void
+AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind) {}
+#endif
+
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
 struct Zone;
-} /* namespace JS */
+
+/*
+ * We cannot expose the class hierarchy: the implementation is hidden. Instead
+ * we provide cast functions with strong debug-mode assertions.
+ */
+static MOZ_ALWAYS_INLINE js::gc::Cell *
+AsCell(JSObject *obj)
+{
+    js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(obj);
+    AssertGCThingHasType(cell, JSTRACE_OBJECT);
+    return cell;
+}
+
+static MOZ_ALWAYS_INLINE js::gc::Cell *
+AsCell(JSFunction *fun)
+{
+    js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(fun);
+    AssertGCThingHasType(cell, JSTRACE_OBJECT);
+    return cell;
+}
 
-namespace JS {
+static MOZ_ALWAYS_INLINE js::gc::Cell *
+AsCell(JSString *str)
+{
+    js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(str);
+    AssertGCThingHasType(cell, JSTRACE_STRING);
+    return cell;
+}
+
+static MOZ_ALWAYS_INLINE js::gc::Cell *
+AsCell(JSFlatString *flat)
+{
+    js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(flat);
+    AssertGCThingHasType(cell, JSTRACE_STRING);
+    return cell;
+}
+
+static MOZ_ALWAYS_INLINE js::gc::Cell *
+AsCell(JSScript *script)
+{
+    js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(script);
+    AssertGCThingHasType(cell, JSTRACE_SCRIPT);
+    return cell;
+}
+
 namespace shadow {
 
 struct ArenaHeader
 {
     JS::Zone *zone;
 };
 
 struct Zone
@@ -160,26 +211,16 @@ static MOZ_ALWAYS_INLINE JS::shadow::Are
 GetGCThingArena(void *thing)
 {
     uintptr_t addr = uintptr_t(thing);
     addr &= ~js::gc::ArenaMask;
     return reinterpret_cast<JS::shadow::ArenaHeader *>(addr);
 }
 
 MOZ_ALWAYS_INLINE bool
-IsInsideNursery(const JS::shadow::Runtime *runtime, const void *p)
-{
-#ifdef JSGC_GENERATIONAL
-    return uintptr_t(p) >= runtime->gcNurseryStart_ && uintptr_t(p) < runtime->gcNurseryEnd_;
-#else
-    return false;
-#endif
-}
-
-MOZ_ALWAYS_INLINE bool
 IsInsideNursery(const js::gc::Cell *cell)
 {
 #ifdef JSGC_GENERATIONAL
     if (!cell)
         return false;
     uintptr_t addr = uintptr_t(cell);
     addr &= ~js::gc::ChunkMask;
     addr |= js::gc::ChunkLocationOffset;
@@ -215,18 +256,17 @@ static MOZ_ALWAYS_INLINE bool
 GCThingIsMarkedGray(void *thing)
 {
 #ifdef JSGC_GENERATIONAL
     /*
      * GC things residing in the nursery cannot be gray: they have no mark bits.
      * All live objects in the nursery are moved to tenured at the beginning of
      * each GC slice, so the gray marker never sees nursery things.
      */
-    JS::shadow::Runtime *rt = js::gc::GetGCThingRuntime(thing);
-    if (js::gc::IsInsideNursery(rt, thing))
+    if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
         return false;
 #endif
     uintptr_t *word, mask;
     js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
     return *word & mask;
 }
 
 static MOZ_ALWAYS_INLINE bool
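
The JS::AsCell overloads added above exist because public headers cannot see the GC class hierarchy; they reinterpret-cast a known GC-thing pointer to gc::Cell * and, in debug builds, call AssertGCThingHasType (defined in jsgc.cpp later in this patch) to check that the thing really traces with the expected kind. The Id.h hunk that follows is the first caller. A small usage sketch, with a hypothetical wrapper name, of how embedder-visible code combines AsCell with the fast nursery check:

    // Hypothetical helper; real call sites (e.g. OBJECT_TO_JSID below) do this inline.
    static bool ObjectIsInNursery(JSObject *obj)
    {
        // AsCell asserts, in debug builds only, that obj traces as JSTRACE_OBJECT.
        return js::gc::IsInsideNursery(JS::AsCell(obj));
    }
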
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -112,17 +112,17 @@ JSID_TO_OBJECT(jsid id)
 }
 
 static MOZ_ALWAYS_INLINE jsid
 OBJECT_TO_JSID(JSObject *obj)
 {
     jsid id;
     MOZ_ASSERT(obj != nullptr);
     MOZ_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0);
-    JS_ASSERT(!js::gc::IsInsideNursery(js::gc::GetGCThingRuntime(obj), obj));
+    JS_ASSERT(!js::gc::IsInsideNursery(JS::AsCell(obj)));
     JSID_BITS(id) = ((size_t)obj | JSID_TYPE_OBJECT);
     return id;
 }
 
 static MOZ_ALWAYS_INLINE bool
 JSID_IS_GCTHING(jsid id)
 {
     return JSID_IS_STRING(id) || JSID_IS_OBJECT(id);
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -1084,18 +1084,17 @@ Cell::isAligned() const
 {
     return Arena::isAligned(address(), arenaHeader()->getThingSize());
 }
 
 bool
 Cell::isTenured() const
 {
 #ifdef JSGC_GENERATIONAL
-    JS::shadow::Runtime *rt = js::gc::GetGCThingRuntime(this);
-    return !IsInsideNursery(rt, this);
+    return !IsInsideNursery(this);
 #endif
     return true;
 }
 #endif
 
 inline uintptr_t
 Cell::address() const
 {
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -167,17 +167,17 @@ CheckMarkedThing(JSTracer *trc, T **thin
         MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
         MOZ_CRASH();
     }
 #endif
     JS_ASSERT(*thingp);
 
 #ifdef DEBUG
     /* This function uses data that's not available in the nursery. */
-    if (IsInsideNursery(trc->runtime(), thing))
+    if (IsInsideNursery(thing))
         return;
 
     /*
      * Permanent atoms are not associated with this runtime, but will be ignored
      * during marking.
      */
     if (ThingIsPermanentAtom(thing))
         return;
@@ -230,17 +230,17 @@ MarkInternal(JSTracer *trc, T **thingp)
 
     if (!trc->callback) {
         /*
          * We may mark a Nursery thing outside the context of the
          * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
          * not needed in this case because we perform a minor collection before
          * each incremental slice.
          */
-        if (IsInsideNursery(trc->runtime(), thing))
+        if (IsInsideNursery(thing))
             return;
 
         /*
          * Don't mark permanent atoms, as they may be associated with another
          * runtime. Note that PushMarkStack() also checks this, but the tests
          * and maybeAlive write below should only be done on the main thread.
          */
         if (ThingIsPermanentAtom(thing))
@@ -352,19 +352,20 @@ namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
 #ifdef JSGC_GENERATIONAL
-    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery;
-    if (nursery.isInside(*thingp))
+    if (IsInsideNursery(*thingp)) {
+        Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery;
         return nursery.getForwardedPointer(thingp);
+    }
 #endif
     Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
     return (*thingp)->isMarked();
 }
 
 template <typename T>
@@ -378,19 +379,19 @@ IsAboutToBeFinalized(T **thingp)
     JSRuntime *rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
     if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
         return false;
 
 #ifdef JSGC_GENERATIONAL
     Nursery &nursery = rt->gc.nursery;
-    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
+    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
     if (rt->isHeapMinorCollecting()) {
-        if (nursery.isInside(thing))
+        if (IsInsideNursery(thing))
             return !nursery.getForwardedPointer(thingp);
         return false;
     }
 #endif
 
     if (!thing->tenuredZone()->isGCSweeping())
         return false;
 
@@ -407,17 +408,17 @@ IsAboutToBeFinalized(T **thingp)
 }
 
 template <typename T>
 T *
 UpdateIfRelocated(JSRuntime *rt, T **thingp)
 {
     JS_ASSERT(thingp);
 #ifdef JSGC_GENERATIONAL
-    if (*thingp && rt->isHeapMinorCollecting() && rt->gc.nursery.isInside(*thingp))
+    if (*thingp && rt->isHeapMinorCollecting() && IsInsideNursery(*thingp))
         rt->gc.nursery.getForwardedPointer(thingp);
 #endif
     return *thingp;
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, BarrieredBase<type*> *thing, const char *name)                          \
@@ -781,17 +782,17 @@ static bool
 ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
 {
     if (!IS_GC_MARKING_TRACER(trc))
         return true;
 
     uint32_t color = AsGCMarker(trc)->getMarkColor();
     JS_ASSERT(color == BLACK || color == GRAY);
 
-    if (IsInsideNursery(trc->runtime(), cell)) {
+    if (IsInsideNursery(cell)) {
         JS_ASSERT(color == BLACK);
         return false;
     }
 
     JS::Zone *zone = cell->tenuredZone();
     if (color == BLACK) {
         /*
          * Having black->gray edges violates our promise to the cycle
@@ -871,121 +872,121 @@ gc::IsCellAboutToBeFinalized(Cell **thin
 #define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
     JS_ASSERT((thing)->zone()->isGCMarking() ||                         \
               (rt)->isAtomsZone((thing)->zone()));
 
 static void
 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 /*
  * PushMarkStack for BaseShape unpacks its children directly onto the mark
  * stack. For a pre-barrier between incremental slices, this may result in
  * objects in the nursery getting pushed onto the mark stack. It is safe to
  * ignore these objects because they will be marked by the matching
  * post-barrier during the minor GC at the start of each incremental slice.
  */
 static void
 MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
 {
-    JSRuntime *rt = gcmarker->runtime();
+    DebugOnly<JSRuntime *> rt = gcmarker->runtime();
     JS_COMPARTMENT_ASSERT(rt, thing);
-    JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(rt, thing));
+    JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(thing));
 
-    if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
+    if (!IsInsideNursery(thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushType(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSScript *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     /*
      * We mark scripts directly rather than pushing on the stack as they can
      * refer to other scripts only indirectly (like via nested functions) and
      * we cannot get to deep recursion.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     /*
      * We mark lazy scripts directly rather than pushing on the stack as they
      * only refer to normal scripts and to strings, and cannot recurse.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, Shape *shape);
 
 static void
 PushMarkStack(GCMarker *gcmarker, Shape *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     /* We mark shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanShape(gcmarker, thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, jit::JitCode *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushJitCode(thing);
 }
 
 static inline void
 ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
 
 static void
 PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
-    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(thing));
 
     /* We mark base shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanBaseShape(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, Shape *shape)
@@ -1757,17 +1758,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
          * If we run out of stack, we take a more drastic measure: require that
          * we GC again before the next CC.
          */
         trc->runtime()->gc.grayBitsValid = false;
         return;
     }
 
     UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
-    if (!IsInsideNursery(trc->runtime(), thing)) {
+    if (!IsInsideNursery(static_cast<Cell *>(thing))) {
         if (!JS::GCThingIsMarkedGray(thing))
             return;
 
         UnmarkGrayGCThing(thing);
         tracer->unmarkedAny = true;
     }
 
     /*
@@ -1804,17 +1805,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
 JS_FRIEND_API(bool)
 JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
 {
     JS_ASSERT(kind != JSTRACE_SHAPE);
 
     JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
 
     bool unmarkedArg = false;
-    if (!IsInsideNursery(rt, thing)) {
+    if (!IsInsideNursery(static_cast<Cell *>(thing))) {
         if (!JS::GCThingIsMarkedGray(thing))
             return false;
 
         UnmarkGrayGCThing(thing);
         unmarkedArg = true;
     }
 
     UnmarkGrayTracer trc(rt);
--- a/js/src/gc/Nursery-inl.h
+++ b/js/src/gc/Nursery-inl.h
@@ -67,17 +67,17 @@ class RelocationOverlay
 } /* namespace gc */
 } /* namespace js */
 
 template <typename T>
 MOZ_ALWAYS_INLINE bool
 js::Nursery::getForwardedPointer(T **ref)
 {
     JS_ASSERT(ref);
-    JS_ASSERT(isInside(*ref));
+    JS_ASSERT(isInside((void *)*ref));
     const gc::RelocationOverlay *overlay = reinterpret_cast<const gc::RelocationOverlay *>(*ref);
     if (!overlay->isForwarded())
         return false;
     /* This static cast from Cell* restricts T to valid (GC thing) types. */
     *ref = static_cast<T *>(overlay->forwardingAddress());
     return true;
 }
 
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -43,29 +43,26 @@ using namespace mozilla;
  * Print timing information for minor GCs that take longer than this time in microseconds.
  */
 static int64_t GCReportThreshold = INT64_MAX;
 #endif
 
 bool
 js::Nursery::init()
 {
-    JS_ASSERT(start() == 0);
-
     if (!hugeSlots.init())
         return false;
 
     void *heap = runtime()->gc.pageAllocator.mapAlignedPages(NurserySize, Alignment);
     if (!heap)
         return false;
 
-    JSRuntime *rt = runtime();
-    rt->gcNurseryStart_ = uintptr_t(heap);
+    heapStart_ = uintptr_t(heap);
     currentStart_ = start();
-    rt->gcNurseryEnd_ = chunk(LastNurseryChunk).end();
+    heapEnd_ = chunk(LastNurseryChunk).end();
     numActiveChunks_ = 1;
     JS_POISON(heap, JS_FRESH_NURSERY_PATTERN, NurserySize);
     setCurrentChunk(0);
     updateDecommittedRegion();
 
 #ifdef PROFILE_NURSERY
     char *env = getenv("JS_MINORGC_TIME");
     if (env)
@@ -190,17 +187,17 @@ js::Nursery::allocate(size_t size)
 
 /* Internally, this function is used to allocate elements as well as slots. */
 HeapSlot *
 js::Nursery::allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
 {
     JS_ASSERT(obj);
     JS_ASSERT(nslots > 0);
 
-    if (!isInside(obj))
+    if (!IsInsideNursery(obj))
         return cx->pod_malloc<HeapSlot>(nslots);
 
     if (nslots > MaxNurserySlots)
         return allocateHugeSlots(cx, nslots);
 
     size_t size = sizeof(HeapSlot) * nslots;
     HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
     if (slots)
@@ -218,17 +215,17 @@ js::Nursery::allocateElements(JSContext 
 
 HeapSlot *
 js::Nursery::reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
                              uint32_t oldCount, uint32_t newCount)
 {
     size_t oldSize = oldCount * sizeof(HeapSlot);
     size_t newSize = newCount * sizeof(HeapSlot);
 
-    if (!isInside(obj))
+    if (!IsInsideNursery(obj))
         return static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
 
     if (!isInside(oldSlots)) {
         HeapSlot *newSlots = static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
         if (oldSlots != newSlots) {
             hugeSlots.remove(oldSlots);
             /* If this put fails, we will only leak the slots. */
             (void)hugeSlots.put(newSlots);
@@ -270,17 +267,17 @@ js::Nursery::allocateHugeSlots(JSContext
     /* If this put fails, we will only leak the slots. */
     (void)hugeSlots.put(slots);
     return slots;
 }
 
 void
 js::Nursery::notifyInitialSlots(Cell *cell, HeapSlot *slots)
 {
-    if (isInside(cell) && !isInside(slots)) {
+    if (IsInsideNursery(cell) && !isInside(slots)) {
         /* If this put fails, we will only leak the slots. */
         (void)hugeSlots.put(slots);
     }
 }
 
 namespace js {
 namespace gc {
 
@@ -362,17 +359,17 @@ class MinorCollectionTracer : public JST
 
 static AllocKind
 GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
 {
     if (obj->is<ArrayObject>()) {
         JS_ASSERT(obj->numFixedSlots() == 0);
 
         /* Use minimal size object if we are just going to copy the pointer. */
-        if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
+        if (!rt->gc.nursery.isInside(obj->getElementsHeader()))
             return FINALIZE_OBJECT0_BACKGROUND;
 
         size_t nelements = obj->getDenseCapacity();
         return GetBackgroundAllocKind(GetGCArrayKind(nelements));
     }
 
     if (obj->is<JSFunction>())
         return obj->as<JSFunction>().getAllocKind();
@@ -529,17 +526,17 @@ js::Nursery::markSlots(MinorCollectionTr
 
 MOZ_ALWAYS_INLINE void
 js::Nursery::markSlot(MinorCollectionTracer *trc, HeapSlot *slotp)
 {
     if (!slotp->isObject())
         return;
 
     JSObject *obj = &slotp->toObject();
-    if (!isInside(obj))
+    if (!IsInsideNursery(obj))
         return;
 
     if (getForwardedPointer(&obj)) {
         slotp->unsafeGet()->setObject(*obj);
         return;
     }
 
     JSObject *tenured = static_cast<JSObject*>(moveToTenured(trc, obj));
@@ -682,17 +679,17 @@ js::Nursery::moveElementsToTenured(JSObj
     return nslots * sizeof(HeapSlot);
 }
 
 static bool
 ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
 {
     Cell *cell = static_cast<Cell *>(*thingp);
     Nursery &nursery = *trc->nursery;
-    return !nursery.isInside(thingp) && nursery.isInside(cell) &&
+    return !nursery.isInside(thingp) && IsInsideNursery(cell) &&
            !nursery.getForwardedPointer(thingp);
 }
 
 /* static */ void
 js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
     if (ShouldMoveToTenured(trc, thingp))
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -56,35 +56,41 @@ class Nursery
     static const int LastNurseryChunk = NumNurseryChunks - 1;
     static const size_t Alignment = gc::ChunkSize;
     static const size_t ChunkShift = gc::ChunkShift;
     static const size_t NurserySize = gc::ChunkSize * NumNurseryChunks;
 
     explicit Nursery(JSRuntime *rt)
       : runtime_(rt),
         position_(0),
+        heapStart_(0),
+        heapEnd_(0),
         currentStart_(0),
         currentEnd_(0),
         currentChunk_(0),
         numActiveChunks_(0)
     {}
     ~Nursery();
 
     bool init();
 
     void enable();
     void disable();
     bool isEnabled() const { return numActiveChunks_ != 0; }
 
     /* Return true if no allocations have been made since the last collection. */
     bool isEmpty() const;
 
-    template <typename T>
-    MOZ_ALWAYS_INLINE bool isInside(const T *p) const {
-        return gc::IsInsideNursery((JS::shadow::Runtime *)runtime_, p);
+    /*
+     * Check whether an arbitrary pointer is within the nursery. This is
+     * slower than IsInsideNursery(Cell*), but works on all types of pointers.
+     */
+    MOZ_ALWAYS_INLINE bool isInside(gc::Cell *cellp) const MOZ_DELETE;
+    MOZ_ALWAYS_INLINE bool isInside(const void *p) const {
+        return uintptr_t(p) >= heapStart_ && uintptr_t(p) < heapEnd_;
     }
 
     /*
      * Allocate and return a pointer to a new GC object with its |slots|
      * pointer pre-filled. Returns nullptr if the Nursery is full.
      */
     JSObject *allocateObject(JSContext *cx, size_t size, size_t numDynamic);
 
@@ -137,23 +143,21 @@ class Nursery
         size_t total = 0;
         for (HugeSlotsSet::Range r = hugeSlots.all(); !r.empty(); r.popFront())
             total += mallocSizeOf(r.front());
         total += hugeSlots.sizeOfExcludingThis(mallocSizeOf);
         return total;
     }
 
     MOZ_ALWAYS_INLINE uintptr_t start() const {
-        JS_ASSERT(runtime_);
-        return ((JS::shadow::Runtime *)runtime_)->gcNurseryStart_;
+        return heapStart_;
     }
 
     MOZ_ALWAYS_INLINE uintptr_t heapEnd() const {
-        JS_ASSERT(runtime_);
-        return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
+        return heapEnd_;
     }
 
 #ifdef JS_GC_ZEAL
     /*
      * In debug and zeal builds, these bytes indicate the state of an unused
      * segment of nursery-allocated memory.
      */
     void enterZealMode() {
@@ -178,16 +182,20 @@ class Nursery
      * inline the isInsideNursery check into embedder code. Use the start()
      * and heapEnd() functions to access these values.
      */
     JSRuntime *runtime_;
 
     /* Pointer to the first unallocated byte in the nursery. */
     uintptr_t position_;
 
+    /* Pointer to first and last address of the total nursery allocation. */
+    uintptr_t heapStart_;
+    uintptr_t heapEnd_;
+
     /* Pointer to the logical start of the Nursery. */
     uintptr_t currentStart_;
 
     /* Pointer to the last byte of space in the current chunk. */
     uintptr_t currentEnd_;
 
     /* The index of the chunk that is currently being allocated from. */
     int currentChunk_;
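
Two details of the Nursery.h hunk above are worth spelling out. First, the nursery bounds move from the shadow runtime (gcNurseryStart_/gcNurseryEnd_) onto the Nursery itself as heapStart_/heapEnd_, so isInside(), start() and heapEnd() no longer need to cast runtime_. Second, the gc::Cell * overload of isInside is declared MOZ_DELETE (Mozilla's macro for a C++11 deleted function), so any attempt to range-check a Cell fails to compile and the caller is steered toward gc::IsInsideNursery(Cell *); the const void * overload stays for genuinely untyped pointers such as slot and element buffers. A standalone model of that overload trick, with stand-in types:

    #include <cstdint>

    struct Cell {};                        // stand-in for js::gc::Cell

    class NurseryModel
    {
        uintptr_t heapStart_;
        uintptr_t heapEnd_;

      public:
        NurseryModel(uintptr_t start, uintptr_t end)
          : heapStart_(start), heapEnd_(end) {}

        // Deleted: Cells must use the chunk-based IsInsideNursery instead.
        bool isInside(Cell *cell) const = delete;

        // Kept: arbitrary pointers (slots, elements) still get the range check.
        bool isInside(const void *p) const {
            return uintptr_t(p) >= heapStart_ && uintptr_t(p) < heapEnd_;
        }
    };
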
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -21,17 +21,17 @@ using mozilla::ReentrancyGuard;
 
 /*** Edges ***/
 
 void
 StoreBuffer::SlotsEdge::mark(JSTracer *trc)
 {
     JSObject *obj = object();
 
-    if (trc->runtime()->gc.nursery.isInside(obj))
+    if (IsInsideNursery(obj))
         return;
 
     if (!obj->isNative()) {
         const Class *clasp = obj->getClass();
         if (clasp)
             clasp->trace(trc, obj);
         return;
     }
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -239,17 +239,17 @@ class StoreBuffer
     {
         Cell **edge;
 
         explicit CellPtrEdge(Cell **v) : edge(v) {}
         bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
         bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
 
         bool maybeInRememberedSet(const Nursery &nursery) const {
-            JS_ASSERT(nursery.isInside(*edge));
+            JS_ASSERT(IsInsideNursery(*edge));
             return !nursery.isInside(edge);
         }
 
         void mark(JSTracer *trc);
 
         CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
         CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
@@ -260,20 +260,20 @@ class StoreBuffer
     struct ValueEdge
     {
         JS::Value *edge;
 
         explicit ValueEdge(JS::Value *v) : edge(v) {}
         bool operator==(const ValueEdge &other) const { return edge == other.edge; }
         bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
 
-        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }
+        Cell *deref() const { return edge->isGCThing() ? static_cast<Cell *>(edge->toGCThing()) : nullptr; }
 
         bool maybeInRememberedSet(const Nursery &nursery) const {
-            JS_ASSERT(nursery.isInside(deref()));
+            JS_ASSERT(IsInsideNursery(deref()));
             return !nursery.isInside(edge);
         }
 
         void mark(JSTracer *trc);
 
         ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
         ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
@@ -308,18 +308,18 @@ class StoreBuffer
                    start_ == other.start_ &&
                    count_ == other.count_;
         }
 
         bool operator!=(const SlotsEdge &other) const {
             return !(*this == other);
         }
 
-        bool maybeInRememberedSet(const Nursery &nursery) const {
-            return !nursery.isInside(object());
+        bool maybeInRememberedSet(const Nursery &) const {
+            return !IsInsideNursery(JS::AsCell(object()));
         }
 
         void mark(JSTracer *trc);
 
         typedef struct {
             typedef SlotsEdge Lookup;
             static HashNumber hash(const Lookup &l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
             static bool match(const SlotsEdge &k, const Lookup &l) { return k == l; }
@@ -332,17 +332,17 @@ class StoreBuffer
 
         explicit WholeCellEdges(Cell *cell) : edge(cell) {
             JS_ASSERT(edge->isTenured());
         }
 
         bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
         bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }
 
-        bool maybeInRememberedSet(const Nursery &nursery) const { return true; }
+        bool maybeInRememberedSet(const Nursery &) const { return true; }
 
         static bool supportsDeduplication() { return true; }
         void *deduplicationKey() const { return (void *)edge; }
 
         void mark(JSTracer *trc);
 
         typedef PointerEdgeHasher<WholeCellEdges> Hasher;
     };
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -115,17 +115,17 @@ struct VerifyPreTracer : JSTracer
  * This function builds up the heap snapshot by adding edges to the current
  * node.
  */
 static void
 AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     VerifyPreTracer *trc = (VerifyPreTracer *)jstrc;
 
-    JS_ASSERT(!IsInsideNursery(trc->runtime(), *(uintptr_t **)thingp));
+    JS_ASSERT(!IsInsideNursery(*reinterpret_cast<Cell **>(thingp)));
 
     trc->edgeptr += sizeof(EdgeValue);
     if (trc->edgeptr >= trc->term) {
         trc->edgeptr = trc->term;
         return;
     }
 
     VerifyNode *node = trc->curnode;
@@ -428,17 +428,17 @@ PostVerifierCollectStoreBufferEdges(JSTr
     VerifyPostTracer *trc = (VerifyPostTracer *)jstrc;
 
     /* The nursery only stores objects. */
     if (kind != JSTRACE_OBJECT)
         return;
 
     /* The store buffer may store extra, non-cross-generational edges. */
     JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
-    if (trc->runtime()->gc.nursery.isInside(thingp) || !trc->runtime()->gc.nursery.isInside(dst))
+    if (trc->runtime()->gc.nursery.isInside(thingp) || !IsInsideNursery(dst))
         return;
 
     /*
      * Values will be unpacked to the stack before getting here. However, the
      * only things that enter this callback are marked by the store buffer. The
      * store buffer ensures that the real tracing location is set correctly.
      */
     void **loc = trc->tracingLocation(thingp);
@@ -466,17 +466,17 @@ PostVerifierVisitEdge(JSTracer *jstrc, v
 
     /* The nursery only stores objects. */
     if (kind != JSTRACE_OBJECT)
         return;
 
     /* Filter out non cross-generational edges. */
     JS_ASSERT(!trc->runtime()->gc.nursery.isInside(thingp));
     JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
-    if (!trc->runtime()->gc.nursery.isInside(dst))
+    if (!IsInsideNursery(dst))
         return;
 
     /*
      * Values will be unpacked to the stack before getting here. However, the
      * only things that enter this callback are marked by the JS_TraceChildren
      * below. Since ObjectImpl::markChildren handles this, the real trace
      * location will be set correctly in these cases.
      */
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -3397,17 +3397,17 @@ IsCacheableGetPropCall(JSContext *cx, JS
     if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>())
         return false;
 
     JSFunction *func = &shape->getterObject()->as<JSFunction>();
 
 #ifdef JSGC_GENERATIONAL
     // Information from get prop call ICs may be used directly from Ion code,
     // and should not be nursery allocated.
-    if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
+    if (IsInsideNursery(holder) || IsInsideNursery(func))
         return false;
 #endif
 
     if (func->isNative()) {
         *isScripted = false;
         return true;
     }
 
@@ -3516,17 +3516,17 @@ IsCacheableSetPropCall(JSContext *cx, JS
     if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>())
         return false;
 
     JSFunction *func = &shape->setterObject()->as<JSFunction>();
 
 #ifdef JSGC_GENERATIONAL
     // Information from set prop call ICs may be used directly from Ion code,
     // and should not be nursery allocated.
-    if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
+    if (IsInsideNursery(holder) || IsInsideNursery(func))
         return false;
 #endif
 
     if (func->isNative()) {
         *isScripted = false;
         return true;
     }
 
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -1941,18 +1941,17 @@ CodeGenerator::visitPostWriteBarrierO(LP
     OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
     Register temp = ToTempRegisterOrInvalid(lir->temp());
 
     if (lir->object()->isConstant()) {
 #ifdef DEBUG
-        const Nursery &nursery = GetIonContext()->runtime->gcNursery();
-        JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
+        JS_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
 #endif
     } else {
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->value()), temp, ool->entry());
 
@@ -1968,18 +1967,17 @@ CodeGenerator::visitPostWriteBarrierV(LP
     OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
     Register temp = ToTempRegisterOrInvalid(lir->temp());
 
     if (lir->object()->isConstant()) {
 #ifdef DEBUG
-        const Nursery &nursery = GetIonContext()->runtime->gcNursery();
-        JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
+        JS_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
 #endif
     } else {
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
     masm.branchValueIsNurseryObject(Assembler::Equal, value, temp, ool->entry());
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -145,17 +145,17 @@ CompileRuntime::positiveInfinityValue()
 {
     return runtime()->positiveInfinityValue;
 }
 
 #ifdef DEBUG
 bool
 CompileRuntime::isInsideNursery(gc::Cell *cell)
 {
-    return UninlinedIsInsideNursery(runtime(), cell);
+    return UninlinedIsInsideNursery(cell);
 }
 #endif
 
 const DOMCallbacks *
 CompileRuntime::DOMcallbacks()
 {
     return GetDOMCallbacks(runtime());
 }
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -7435,17 +7435,22 @@ IonBuilder::getTypedArrayLength(MDefinit
 MInstruction *
 IonBuilder::getTypedArrayElements(MDefinition *obj)
 {
     if (obj->isConstant() && obj->toConstant()->value().isObject()) {
         TypedArrayObject *tarr = &obj->toConstant()->value().toObject().as<TypedArrayObject>();
         void *data = tarr->viewData();
         // Bug 979449 - Optimistically embed the elements and use TI to
         //              invalidate if we move them.
-        if (!gc::IsInsideNursery(tarr->runtimeFromMainThread(), data)) {
+#ifdef JSGC_GENERATIONAL
+        bool isTenured = !tarr->runtimeFromMainThread()->gc.nursery.isInside(data);
+#else
+        bool isTenured = true;
+#endif
+        if (isTenured) {
             // The 'data' pointer can change in rare circumstances
             // (ArrayBufferObject::changeContents).
             types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
             if (!tarrType->unknownProperties()) {
                 tarrType->watchStateChangeForTypedArrayBuffer(constraints());
 
                 obj->setImplicitlyUsedUnchecked();
                 return MConstantElements::New(alloc(), data);
@@ -7742,18 +7747,20 @@ IonBuilder::setElemTryTypedStatic(bool *
     if (!object->resultTypeSet())
         return true;
     JSObject *tarrObj = object->resultTypeSet()->getSingleton();
     if (!tarrObj)
         return true;
 
     TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
 
-    if (gc::IsInsideNursery(tarr->runtimeFromMainThread(), tarr->viewData()))
-        return true;
+#ifdef JSGC_GENERATIONAL
+    if (tarr->runtimeFromMainThread()->gc.nursery.isInside(tarr->viewData()))
+        return true;
+#endif
 
     ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();
 
     MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
     if (!ptr)
         return true;
 
     // Emit StoreTypedArrayElementStatic.
--- a/js/src/jit/IonMacroAssembler.cpp
+++ b/js/src/jit/IonMacroAssembler.cpp
@@ -243,27 +243,27 @@ template void MacroAssembler::guardType(
 template void MacroAssembler::guardType(const ValueOperand &value, types::Type type,
                                         Register scratch, Label *miss);
 
 void
 MacroAssembler::branchNurseryPtr(Condition cond, const Address &ptr1, ImmMaybeNurseryPtr ptr2,
                                  Label *label)
 {
 #ifdef JSGC_GENERATIONAL
-    if (ptr2.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr2.value))
+    if (ptr2.value && gc::IsInsideNursery(ptr2.value))
         embedsNurseryPointers_ = true;
 #endif
     branchPtr(cond, ptr1, ptr2, label);
 }
 
 void
 MacroAssembler::moveNurseryPtr(ImmMaybeNurseryPtr ptr, Register reg)
 {
 #ifdef JSGC_GENERATIONAL
-    if (ptr.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr.value))
+    if (ptr.value && gc::IsInsideNursery(ptr.value))
         embedsNurseryPointers_ = true;
 #endif
     movePtr(ptr, reg);
 }
 
 template<typename S, typename T>
 static void
 StoreToTypedFloatArray(MacroAssembler &masm, int arrayType, const S &value, const T &dest)
--- a/js/src/jit/MCallOptimize.cpp
+++ b/js/src/jit/MCallOptimize.cpp
@@ -1914,38 +1914,37 @@ IonBuilder::inlineAssertFloat32(CallInfo
 
 IonBuilder::InliningStatus
 IonBuilder::inlineBoundFunction(CallInfo &nativeCallInfo, JSFunction *target)
 {
      if (!target->getBoundFunctionTarget()->is<JSFunction>())
          return InliningStatus_NotInlined;
 
     JSFunction *scriptedTarget = &(target->getBoundFunctionTarget()->as<JSFunction>());
-    JSRuntime *runtime = scriptedTarget->runtimeFromMainThread();
 
     // Don't optimize if we're constructing and the callee is not a
     // constructor, so that CallKnown does not have to handle this case
     // (it should always throw).
     if (nativeCallInfo.constructing() && !scriptedTarget->isInterpretedConstructor() &&
         !scriptedTarget->isNativeConstructor())
     {
         return InliningStatus_NotInlined;
     }
 
-    if (gc::IsInsideNursery(runtime, scriptedTarget))
+    if (gc::IsInsideNursery(scriptedTarget))
         return InliningStatus_NotInlined;
 
     for (size_t i = 0; i < target->getBoundFunctionArgumentCount(); i++) {
         const Value val = target->getBoundFunctionArgument(i);
-        if (val.isObject() && gc::IsInsideNursery(runtime, &val.toObject()))
+        if (val.isObject() && gc::IsInsideNursery(&val.toObject()))
             return InliningStatus_NotInlined;
     }
 
     const Value thisVal = target->getBoundFunctionThis();
-    if (thisVal.isObject() && gc::IsInsideNursery(runtime, &thisVal.toObject()))
+    if (thisVal.isObject() && gc::IsInsideNursery(&thisVal.toObject()))
         return InliningStatus_NotInlined;
 
     size_t argc = target->getBoundFunctionArgumentCount() + nativeCallInfo.argc();
     if (argc > ARGS_LENGTH_MAX)
         return InliningStatus_NotInlined;
 
     nativeCallInfo.thisArg()->setImplicitlyUsedUnchecked();
 
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -698,17 +698,17 @@ FilterArgumentsOrEval(JSContext *cx, JSS
     return !StringHasPattern(chars, str->length(), arguments, mozilla::ArrayLength(arguments)) &&
         !StringHasPattern(chars, str->length(), eval, mozilla::ArrayLength(eval));
 }
 
 #ifdef JSGC_GENERATIONAL
 void
 PostWriteBarrier(JSRuntime *rt, JSObject *obj)
 {
-    JS_ASSERT(!IsInsideNursery(rt, obj));
+    JS_ASSERT(!IsInsideNursery(obj));
     rt->gc.storeBuffer.putWholeCellFromMainThread(obj);
 }
 
 void
 PostGlobalWriteBarrier(JSRuntime *rt, JSObject *obj)
 {
     JS_ASSERT(obj->is<GlobalObject>());
     if (!obj->compartment()->globalWriteBarriered) {
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -485,17 +485,17 @@ MacroAssemblerARM::ma_mov(ImmGCPtr ptr, 
     // before to recover the pointer, and not after.
     writeDataRelocation(ptr);
     RelocStyle rs;
     if (hasMOVWT())
         rs = L_MOVWT;
     else
         rs = L_LDR;
 
-    ma_movPatchable(Imm32(ptr.value), dest, Always, rs);
+    ma_movPatchable(Imm32(uintptr_t(ptr.value)), dest, Always, rs);
 }
 
     // Shifts (just a move with a shifting op2)
 void
 MacroAssemblerARM::ma_lsl(Imm32 shift, Register src, Register dst)
 {
     as_mov(dst, lsl(src, shift.value));
 }
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -176,19 +176,19 @@ struct PatchedImmPtr {
     explicit PatchedImmPtr(const void *value)
       : value(const_cast<void*>(value))
     { }
 };
 
 // Used for immediates which require relocation.
 struct ImmGCPtr
 {
-    uintptr_t value;
+    const gc::Cell *value;
 
-    explicit ImmGCPtr(const gc::Cell *ptr) : value(reinterpret_cast<uintptr_t>(ptr))
+    explicit ImmGCPtr(const gc::Cell *ptr) : value(ptr)
     {
         JS_ASSERT(!IsPoisonedPtr(ptr));
         JS_ASSERT_IF(ptr, ptr->isTenured());
 
         // asm.js shouldn't be creating GC things
         JS_ASSERT(!IsCompilingAsmJS());
     }
 
@@ -196,17 +196,17 @@ struct ImmGCPtr
     ImmGCPtr() : value(0) {}
 };
 
 // Used for immediates which require relocation and may be traced during minor GC.
 struct ImmMaybeNurseryPtr : public ImmGCPtr
 {
     explicit ImmMaybeNurseryPtr(gc::Cell *ptr)
     {
-        this->value = reinterpret_cast<uintptr_t>(ptr);
+        this->value = ptr;
         JS_ASSERT(!IsPoisonedPtr(ptr));
 
         // asm.js shouldn't be creating GC things
         JS_ASSERT(!IsCompilingAsmJS());
     }
 };
 
 // Pointer to be embedded as an immediate that is loaded/stored from by an
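
ImmGCPtr::value changes from uintptr_t to const gc::Cell * so that GC-aware code, such as branchNurseryPtr and moveNurseryPtr in IonMacroAssembler.cpp above, can feed it straight to gc::IsInsideNursery; the price is that the per-architecture assemblers (the x64 and x86 hunks below, and the ARM ma_mov above) must cast back to an integer where the immediate is actually encoded. A standalone model of that split, with stand-in types:

    #include <cstdint>

    struct Cell {};                        // stand-in for js::gc::Cell

    struct ImmGCPtrModel
    {
        const Cell *value;                 // was: uintptr_t value
        explicit ImmGCPtrModel(const Cell *ptr) : value(ptr) {}
    };

    static void encodeImmediate(uintptr_t bits) { (void)bits; }

    static void emitGCPtr(const ImmGCPtrModel &imm)
    {
        // Type-aware checks use the pointer directly; only the encoder narrows
        // it back to raw bits, mirroring the uintptr_t(ptr.value) casts below.
        encodeImmediate(uintptr_t(imm.value));
    }
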
--- a/js/src/jit/x64/Assembler-x64.h
+++ b/js/src/jit/x64/Assembler-x64.h
@@ -322,17 +322,17 @@ class Assembler : public AssemblerX86Sha
             // Otherwise use movabs.
             masm.movq_i64r(word.value, dest.code());
         }
     }
     void movq(ImmPtr imm, Register dest) {
         movq(ImmWord(uintptr_t(imm.value)), dest);
     }
     void movq(ImmGCPtr ptr, Register dest) {
-        masm.movq_i64r(ptr.value, dest.code());
+        masm.movq_i64r(uintptr_t(ptr.value), dest.code());
         writeDataRelocation(ptr);
     }
     void movq(const Operand &src, Register dest) {
         switch (src.kind()) {
           case Operand::REG:
             masm.movq_rr(src.reg(), dest.code());
             break;
           case Operand::MEM_REG_DISP:
--- a/js/src/jit/x86/Assembler-x86.h
+++ b/js/src/jit/x86/Assembler-x86.h
@@ -181,17 +181,17 @@ class Assembler : public AssemblerX86Sha
 
     // Copy the assembly code to the given buffer, and perform any pending
     // relocations relying on the target address.
     void executableCopy(uint8_t *buffer);
 
     // Actual assembly emitting functions.
 
     void push(ImmGCPtr ptr) {
-        push(Imm32(ptr.value));
+        push(Imm32(uintptr_t(ptr.value)));
         writeDataRelocation(ptr);
     }
     void push(const ImmWord imm) {
         push(Imm32(imm.value));
     }
     void push(const ImmPtr imm) {
         push(ImmWord(uintptr_t(imm.value)));
     }
@@ -214,31 +214,31 @@ class Assembler : public AssemblerX86Sha
         movl(Imm32(word.value), dest);
         return masm.currentOffset();
     }
     CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
         return movWithPatch(ImmWord(uintptr_t(imm.value)), dest);
     }
 
     void movl(ImmGCPtr ptr, Register dest) {
-        masm.movl_i32r(ptr.value, dest.code());
+        masm.movl_i32r(uintptr_t(ptr.value), dest.code());
         writeDataRelocation(ptr);
     }
     void movl(ImmGCPtr ptr, const Operand &dest) {
         switch (dest.kind()) {
           case Operand::REG:
-            masm.movl_i32r(ptr.value, dest.reg());
+            masm.movl_i32r(uintptr_t(ptr.value), dest.reg());
             writeDataRelocation(ptr);
             break;
           case Operand::MEM_REG_DISP:
-            masm.movl_i32m(ptr.value, dest.disp(), dest.base());
+            masm.movl_i32m(uintptr_t(ptr.value), dest.disp(), dest.base());
             writeDataRelocation(ptr);
             break;
           case Operand::MEM_SCALE:
-            masm.movl_i32m(ptr.value, dest.disp(), dest.base(), dest.index(), dest.scale());
+            masm.movl_i32m(uintptr_t(ptr.value), dest.disp(), dest.base(), dest.index(), dest.scale());
             writeDataRelocation(ptr);
             break;
           default:
             MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
         }
     }
     void movl(ImmWord imm, Register dest) {
         masm.movl_i32r(imm.value, dest.code());
@@ -310,34 +310,34 @@ class Assembler : public AssemblerX86Sha
 
     void cmpl(const Register src, ImmWord ptr) {
         masm.cmpl_ir(ptr.value, src.code());
     }
     void cmpl(const Register src, ImmPtr imm) {
         cmpl(src, ImmWord(uintptr_t(imm.value)));
     }
     void cmpl(const Register src, ImmGCPtr ptr) {
-        masm.cmpl_ir(ptr.value, src.code());
+        masm.cmpl_ir(uintptr_t(ptr.value), src.code());
         writeDataRelocation(ptr);
     }
     void cmpl(Register lhs, Register rhs) {
         masm.cmpl_rr(rhs.code(), lhs.code());
     }
     void cmpl(const Operand &op, ImmGCPtr imm) {
         switch (op.kind()) {
           case Operand::REG:
-            masm.cmpl_ir_force32(imm.value, op.reg());
+            masm.cmpl_ir_force32(uintptr_t(imm.value), op.reg());
             writeDataRelocation(imm);
             break;
           case Operand::MEM_REG_DISP:
-            masm.cmpl_im_force32(imm.value, op.disp(), op.base());
+            masm.cmpl_im_force32(uintptr_t(imm.value), op.disp(), op.base());
             writeDataRelocation(imm);
             break;
           case Operand::MEM_ADDRESS32:
-            masm.cmpl_im(imm.value, op.address());
+            masm.cmpl_im(uintptr_t(imm.value), op.address());
             writeDataRelocation(imm);
             break;
           default:
             MOZ_ASSUME_UNREACHABLE("unexpected operand kind");
         }
     }
     void cmpl(AsmJSAbsoluteAddress lhs, Register rhs) {
         masm.cmpl_rm_force32(rhs.code(), (void*)-1);
--- a/js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
+++ b/js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
@@ -21,19 +21,19 @@ struct AutoIgnoreRootingHazards {
 };
 volatile int AutoIgnoreRootingHazards::depth = 0;
 
 BEGIN_TEST(testGCStoreBufferRemoval)
 {
     // Sanity check - objects start in the nursery and then become tenured.
     JS_GC(cx->runtime());
     JS::RootedObject obj(cx, NurseryObject());
-    CHECK(js::gc::IsInsideNursery(rt, obj.get()));
+    CHECK(js::gc::IsInsideNursery(obj.get()));
     JS_GC(cx->runtime());
-    CHECK(!js::gc::IsInsideNursery(rt, obj.get()));
+    CHECK(!js::gc::IsInsideNursery(obj.get()));
     JS::RootedObject tenuredObject(cx, obj);
 
     // Hide the horrors herein from the static rooting analysis.
     AutoIgnoreRootingHazards ignore;
 
     // Test removal of store buffer entries added by RelocatablePtr<T>.
     {
         JSObject *badObject = reinterpret_cast<JSObject*>(1);
--- a/js/src/jsapi-tests/testIsInsideNursery.cpp
+++ b/js/src/jsapi-tests/testIsInsideNursery.cpp
@@ -2,36 +2,33 @@
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jsapi-tests/tests.h"
 
+#ifdef JSGC_GENERATIONAL
+
 BEGIN_TEST(testIsInsideNursery)
 {
     /* Non-GC things are never inside the nursery. */
-    CHECK(!js::gc::IsInsideNursery(rt, rt));
-    CHECK(!js::gc::IsInsideNursery(rt, nullptr));
-    CHECK(!js::gc::IsInsideNursery(nullptr));
+    CHECK(!rt->gc.nursery.isInside(rt));
+    CHECK(!rt->gc.nursery.isInside((void *)nullptr));
 
     JS_GC(rt);
 
     JS::RootedObject object(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
 
-#ifdef JSGC_GENERATIONAL
     /* Objects are initially allocated in the nursery. */
-    CHECK(js::gc::IsInsideNursery(rt, object));
     CHECK(js::gc::IsInsideNursery(object));
-#else
-    CHECK(!js::gc::IsInsideNursery(rt, object));
-    CHECK(!js::gc::IsInsideNursery(object));
-#endif
 
     JS_GC(rt);
 
-    CHECK(!js::gc::IsInsideNursery(rt, object));
+    /* And are tenured if still live after a GC. */
     CHECK(!js::gc::IsInsideNursery(object));
 
     return true;
 }
 END_TEST(testIsInsideNursery)
+
+#endif
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -220,22 +220,21 @@ class WrapperMapRef : public BufferableR
 void
 JSCompartment::checkWrapperMapAfterMovingGC()
 {
     /*
      * Assert that the postbarriers have worked and that nothing is left in
      * wrapperMap that points into the nursery, and that the hash table entries
      * are discoverable.
      */
-    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey key = e.front().key();
-        JS_ASSERT(!IsInsideNursery(rt, key.debugger));
-        JS_ASSERT(!IsInsideNursery(rt, key.wrapped));
-        JS_ASSERT(!IsInsideNursery(rt, e.front().value().get().toGCThing()));
+        JS_ASSERT(!IsInsideNursery(key.debugger));
+        JS_ASSERT(!IsInsideNursery(key.wrapped));
+        JS_ASSERT(!IsInsideNursery(static_cast<Cell *>(e.front().value().get().toGCThing())));
 
         WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
         JS_ASSERT(ptr.found() && &*ptr == &e.front());
     }
 }
 #endif
 
 #endif
@@ -248,20 +247,19 @@ JSCompartment::putWrapper(JSContext *cx,
     JS_ASSERT(!IsPoisonedPtr(wrapped.debugger));
     JS_ASSERT(!IsPoisonedPtr(wrapper.toGCThing()));
     JS_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
     JS_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
     bool success = crossCompartmentWrappers.put(wrapped, wrapper);
 
 #ifdef JSGC_GENERATIONAL
     /* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
-    Nursery &nursery = cx->nursery();
-    JS_ASSERT(!nursery.isInside(wrapper.toGCThing()));
+    JS_ASSERT(!IsInsideNursery(static_cast<gc::Cell *>(wrapper.toGCThing())));
 
-    if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
+    if (success && (IsInsideNursery(wrapped.wrapped) || IsInsideNursery(wrapped.debugger))) {
         WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
         cx->runtime()->gc.storeBuffer.putGeneric(ref);
     }
 #endif
 
     return success;
 }
 
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -653,21 +653,20 @@ js::GCThingTraceKind(void *thing)
 {
     JS_ASSERT(thing);
     return gc::GetGCThingTraceKind(thing);
 }
 
 JS_FRIEND_API(void)
 js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
 {
-    JSRuntime *rt = zone->runtimeFromMainThread();
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             gc::Cell *thing = e.front().key().wrapped;
-            if (!IsInsideNursery(rt, thing) && thing->isMarked(gc::GRAY))
+            if (!IsInsideNursery(thing) && thing->isMarked(gc::GRAY))
                 callback(closure, thing);
         }
     }
 }
 
 JS_FRIEND_API(JSObject *)
 js::GetWeakmapKeyDelegate(JSObject *key)
 {
@@ -780,29 +779,29 @@ DumpHeapVisitCell(JSRuntime *rt, void *d
     JS_GetTraceThingInfo(cellDesc, sizeof(cellDesc), dtrc, thing, traceKind, true);
     fprintf(dtrc->output, "%p %c %s\n", thing, MarkDescriptor(thing), cellDesc);
     JS_TraceChildren(dtrc, thing, traceKind);
 }
 
 static void
 DumpHeapVisitChild(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
-    if (gc::IsInsideNursery(trc->runtime(), *thingp))
+    if (gc::IsInsideNursery((js::gc::Cell *)*thingp))
         return;
 
     DumpHeapTracer *dtrc = static_cast<DumpHeapTracer *>(trc);
     char buffer[1024];
     fprintf(dtrc->output, "> %p %c %s\n", *thingp, MarkDescriptor(*thingp),
             dtrc->getTracingEdgeName(buffer, sizeof(buffer)));
 }
 
 static void
 DumpHeapVisitRoot(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
-    if (gc::IsInsideNursery(trc->runtime(), *thingp))
+    if (gc::IsInsideNursery((js::gc::Cell *)*thingp))
         return;
 
     DumpHeapTracer *dtrc = static_cast<DumpHeapTracer *>(trc);
     char buffer[1024];
     fprintf(dtrc->output, "%p %c %s\n", *thingp, MarkDescriptor(*thingp),
             dtrc->getTracingEdgeName(buffer, sizeof(buffer)));
 }
 
@@ -1223,22 +1222,22 @@ js::IsInRequest(JSContext *cx)
 
 #ifdef JSGC_GENERATIONAL
 JS_FRIEND_API(void)
 JS_StoreObjectPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSObject *key, void *data),
                                   JSObject *key, void *data)
 {
     JSRuntime *rt = cx->runtime();
-    if (IsInsideNursery(rt, key))
+    if (IsInsideNursery(key))
         rt->gc.storeBuffer.putCallback(callback, key, data);
 }
 
 extern JS_FRIEND_API(void)
 JS_StoreStringPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSString *key, void *data),
                                   JSString *key, void *data)
 {
     JSRuntime *rt = cx->runtime();
-    if (IsInsideNursery(rt, key))
+    if (IsInsideNursery(key))
         rt->gc.storeBuffer.putCallback(callback, key, data);
 }
 #endif /* JSGC_GENERATIONAL */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -5544,19 +5544,19 @@ AutoSuppressGC::AutoSuppressGC(JSCompart
 
 AutoSuppressGC::AutoSuppressGC(JSRuntime *rt)
   : suppressGC_(rt->mainThread.suppressGC)
 {
     suppressGC_++;
 }
 
 bool
-js::UninlinedIsInsideNursery(JSRuntime *rt, const void *thing)
-{
-    return IsInsideNursery(rt, thing);
+js::UninlinedIsInsideNursery(const gc::Cell *cell)
+{
+    return IsInsideNursery(cell);
 }
 
 #ifdef DEBUG
 AutoDisableProxyCheck::AutoDisableProxyCheck(JSRuntime *rt
                                              MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : count(rt->gc.disableStrictProxyCheckingCount)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
@@ -5565,16 +5565,28 @@ AutoDisableProxyCheck::AutoDisableProxyC
 
 JS_FRIEND_API(void)
 JS::AssertGCThingMustBeTenured(JSObject *obj)
 {
     JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
               obj->isTenured());
 }
 
+JS_FRIEND_API(void)
+js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
+{
+#ifdef DEBUG
+    JS_ASSERT(cell);
+    if (IsInsideNursery(cell))
+        JS_ASSERT(kind == JSTRACE_OBJECT);
+    else
+        JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
+#endif
+}
+
 JS_FRIEND_API(size_t)
 JS::GetGCNumber()
 {
     JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
     if (!rt)
         return 0;
     return rt->gc.number;
 }
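AssertGCThingHasType encodes the invariant the rest of this patch relies on: only objects are ever allocated in the nursery, so a nursery cell must be traced as JSTRACE_OBJECT, while a tenured cell's trace kind follows from its alloc kind. A minimal sketch of the kind of downcast site the helper is meant to guard; AsObjectChecked is an illustrative name, not part of this patch.

// Checked downcast from an untyped GC pointer to JSObject.
static JSObject *
AsObjectChecked(void *thing)
{
    js::gc::Cell *cell = static_cast<js::gc::Cell *>(thing);
    // Debug-only: a nursery cell must be an object, and a tenured cell's alloc
    // kind must map to JSTRACE_OBJECT.
    js::gc::AssertGCThingHasType(cell, JSTRACE_OBJECT);
    return reinterpret_cast<JSObject *>(cell);
}
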
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1275,13 +1275,13 @@ struct AutoDisableProxyCheck
 };
 #endif
 
 void
 PurgeJITCaches(JS::Zone *zone);
 
 // This is the same as IsInsideNursery, but not inlined.
 bool
-UninlinedIsInsideNursery(JSRuntime *rt, const void *thing);
+UninlinedIsInsideNursery(const gc::Cell *cell);
 
 } /* namespace js */
 
 #endif /* jsgc_h */
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -51,17 +51,17 @@ ThreadSafeContext::allocator()
 
 template <typename T>
 inline bool
 ThreadSafeContext::isThreadLocal(T thing) const
 {
     if (!isForkJoinContext())
         return true;
 
-    if (!IsInsideNursery(runtime_, thing) &&
+    if (!IsInsideNursery(thing) &&
         allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
     {
         // GC should be suppressed in preparation for mutating thread local
         // objects, as we don't want to trip any barriers.
         JS_ASSERT(!thing->zoneFromAnyThread()->needsBarrier());
         JS_ASSERT(!thing->runtimeFromAnyThread()->needsBarrier());
 
         return true;
@@ -92,17 +92,17 @@ ShouldNurseryAllocate(const Nursery &nur
 #endif
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     JS_ASSERT(thing);
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
-    if (IsInsideNursery(cell->runtimeFromAnyThread(), cell))
+    if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
 #endif
     return MapAllocToTraceKind(cell->tenuredGetAllocKind());
 }
 
 static inline void
 GCPoke(JSRuntime *rt)
 {
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -2025,17 +2025,17 @@ TypeCompartment::TypeCompartment()
 
 TypeObject *
 TypeCompartment::newTypeObject(ExclusiveContext *cx, const Class *clasp, Handle<TaggedProto> proto,
                                TypeObjectFlags initialFlags)
 {
     JS_ASSERT_IF(proto.isObject(), cx->isInsideCurrentCompartment(proto.toObject()));
 
     if (cx->isJSContext()) {
-        if (proto.isObject() && IsInsideNursery(cx->asJSContext()->runtime(), proto.toObject()))
+        if (proto.isObject() && IsInsideNursery(proto.toObject()))
             initialFlags |= OBJECT_FLAG_NURSERY_PROTO;
     }
 
     TypeObject *object = js::NewTypeObject(cx);
     if (!object)
         return nullptr;
     new(object) TypeObject(clasp, proto, initialFlags);
 
@@ -2749,17 +2749,17 @@ TypeCompartment::newTypedObject(JSContex
 // TypeObject
 /////////////////////////////////////////////////////////////////////
 
 void
 TypeObject::setProto(JSContext *cx, TaggedProto proto)
 {
     JS_ASSERT(singleton());
 
-    if (proto.isObject() && IsInsideNursery(cx->runtime(), proto.toObject()))
+    if (proto.isObject() && IsInsideNursery(proto.toObject()))
         addFlags(OBJECT_FLAG_NURSERY_PROTO);
 
     setProtoUnchecked(proto);
 }
 
 static inline void
 UpdatePropertyType(ExclusiveContext *cx, HeapTypeSet *types, JSObject *obj, Shape *shape,
                    bool indexed)
@@ -3911,17 +3911,17 @@ ExclusiveContext::getNewType(const Class
     TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot, initialFlags);
     if (!type)
         return nullptr;
 
     if (!newTypeObjects.add(p, TypeObjectWithNewScriptEntry(type, fun)))
         return nullptr;
 
 #ifdef JSGC_GENERATIONAL
-    if (proto.isObject() && hasNursery() && nursery().isInside(proto.toObject())) {
+    if (proto.isObject() && hasNursery() && IsInsideNursery(proto.toObject())) {
         asJSContext()->runtime()->gc.storeBuffer.putGeneric(
             NewTypeObjectsSetRef(&newTypeObjects, clasp, proto.toObject(), fun));
     }
 #endif
 
     if (proto.isObject()) {
         RootedObject obj(this, proto.toObject());
 
@@ -3962,22 +3962,21 @@ ExclusiveContext::getNewType(const Class
 void
 JSCompartment::checkNewTypeObjectTableAfterMovingGC()
 {
     /*
      * Assert that the postbarriers have worked and that nothing is left in
      * newTypeObjects that points into the nursery, and that the hash table
      * entries are discoverable.
      */
-    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
     for (TypeObjectWithNewScriptSet::Enum e(newTypeObjects); !e.empty(); e.popFront()) {
         TypeObjectWithNewScriptEntry entry = e.front();
-        JS_ASSERT(!IsInsideNursery(rt, entry.newFunction));
+        JS_ASSERT(!IsInsideNursery(entry.newFunction));
         TaggedProto proto = entry.object->proto();
-        JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(rt, proto.toObject()));
+        JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(proto.toObject()));
         TypeObjectWithNewScriptEntry::Lookup
             lookup(entry.object->clasp(), proto, entry.newFunction);
         TypeObjectWithNewScriptSet::Ptr ptr = newTypeObjects.lookup(lookup);
         JS_ASSERT(ptr.found() && &*ptr == &e.front());
     }
 }
 #endif
 
@@ -4568,17 +4567,17 @@ TypeObject::setAddendum(TypeObjectAddend
 }
 
 bool
 TypeObject::addTypedObjectAddendum(JSContext *cx, Handle<TypeDescr*> descr)
 {
     // Type descriptors are always pre-tenured. This is both because
     // we expect them to live a long time and so that they can be
     // safely accessed during ion compilation.
-    JS_ASSERT(!IsInsideNursery(cx->runtime(), descr));
+    JS_ASSERT(!IsInsideNursery(descr));
     JS_ASSERT(descr);
 
     if (flags() & OBJECT_FLAG_ADDENDUM_CLEARED)
         return true;
 
     JS_ASSERT(!unknownProperties());
 
     if (addendum) {
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -16,16 +16,17 @@
  * is reference counted and the slot vector is malloc'ed.
  */
 
 #include "mozilla/MemoryReporting.h"
 
 #include "gc/Barrier.h"
 #include "gc/Marking.h"
 #include "js/GCAPI.h"
+#include "js/HeapAPI.h"
 #include "vm/ObjectImpl.h"
 #include "vm/Shape.h"
 #include "vm/Xdr.h"
 
 namespace JS {
 struct ObjectsExtraSizes;
 }
 
@@ -676,22 +677,21 @@ class JSObject : public js::ObjectImpl
 
     void initDenseElementsUnbarriered(uint32_t dstStart, const js::Value *src, uint32_t count) {
         /*
          * For use by parallel threads, which since they cannot see nursery
          * things do not require a barrier.
          */
         JS_ASSERT(dstStart + count <= getDenseCapacity());
 #if defined(DEBUG) && defined(JSGC_GENERATIONAL)
-        JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromAnyThread());
-        JS_ASSERT(!js::gc::IsInsideNursery(rt, this));
+        JS_ASSERT(!js::gc::IsInsideNursery(this));
         for (uint32_t index = 0; index < count; ++index) {
             const JS::Value& value = src[index];
             if (value.isMarkable())
-                JS_ASSERT(!js::gc::IsInsideNursery(rt, value.toGCThing()));
+                JS_ASSERT(!js::gc::IsInsideNursery(static_cast<js::gc::Cell *>(value.toGCThing())));
         }
 #endif
         memcpy(&elements[dstStart], src, count * sizeof(js::HeapSlot));
     }
 
     void moveDenseElements(uint32_t dstStart, uint32_t srcStart, uint32_t count) {
         JS_ASSERT(dstStart + count <= getDenseCapacity());
         JS_ASSERT(srcStart + count <= getDenseInitializedLength());
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -350,18 +350,17 @@ JSObject::getDenseOrTypedArrayElement(ui
     if (is<js::TypedArrayObject>())
         return as<js::TypedArrayObject>().getElement(idx);
     return getDenseElement(idx);
 }
 
 /* static */ inline bool
 JSObject::setSingletonType(js::ExclusiveContext *cx, js::HandleObject obj)
 {
-    JS_ASSERT_IF(cx->isJSContext(),
-                 !IsInsideNursery(cx->asJSContext()->runtime(), obj.get()));
+    JS_ASSERT_IF(cx->isJSContext(), !IsInsideNursery(obj));
 
     js::types::TypeObject *type = cx->getSingletonType(obj->getClass(), obj->getTaggedProto());
     if (!type)
         return false;
 
     obj->type_ = type;
     return true;
 }
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -170,34 +170,28 @@ typedef void (*OffThreadCompileCallback)
 namespace shadow {
 
 struct Runtime
 {
     /* Restrict zone access during Minor GC. */
     bool needsBarrier_;
 
 #ifdef JSGC_GENERATIONAL
-    /* Allow inlining of Nursery::isInside. */
-    uintptr_t gcNurseryStart_;
-    uintptr_t gcNurseryEnd_;
-
   private:
     js::gc::StoreBuffer *gcStoreBufferPtr_;
 #endif
 
   public:
     Runtime(
 #ifdef JSGC_GENERATIONAL
         js::gc::StoreBuffer *storeBuffer
 #endif
     )
       : needsBarrier_(false)
 #ifdef JSGC_GENERATIONAL
-      , gcNurseryStart_(0)
-      , gcNurseryEnd_(0)
       , gcStoreBufferPtr_(storeBuffer)
 #endif
     {}
 
     bool needsBarrier() const {
         return needsBarrier_;
     }
 
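gcNurseryStart_ and gcNurseryEnd_ can be dropped because the fast check no longer compares an address against the nursery's bounds through the runtime; it reads a location word stored in the chunk that contains the cell. A sketch of that style of check, assuming the ChunkMask, ChunkLocationOffset and ChunkLocationNursery constants from js/public/HeapAPI.h; the exact trailer layout and field width are not part of this hunk and should be treated as assumptions.

// Sketch only: mask the cell's address down to its containing chunk, then read
// the chunk's recorded location to see whether the chunk belongs to the nursery.
static MOZ_ALWAYS_INLINE bool
IsInsideNurserySketch(const js::gc::Cell *cell)
{
    if (!cell)
        return false;
    uintptr_t addr = uintptr_t(cell);
    addr &= ~js::gc::ChunkMask;             // start of the containing chunk
    addr |= js::gc::ChunkLocationOffset;    // offset of the location word
    return *reinterpret_cast<uintptr_t *>(addr) == js::gc::ChunkLocationNursery;
}
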
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -355,17 +355,17 @@ WeakMapPostWriteBarrier(JSRuntime *rt, O
      * the HashMap base class and then to the unbarriered version.
      */
     ObjectValueMap::Base *baseHashMap = static_cast<ObjectValueMap::Base *>(weakMap);
 
     typedef HashMap<JSObject *, Value> UnbarrieredMap;
     UnbarrieredMap *unbarrieredMap = reinterpret_cast<UnbarrieredMap *>(baseHashMap);
 
     typedef HashKeyRef<UnbarrieredMap, JSObject *> Ref;
-    if (key && IsInsideNursery(rt, key))
+    if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(Ref((unbarrieredMap), key));
 #endif
 }
 
 MOZ_ALWAYS_INLINE bool
 SetWeakMapEntryInternal(JSContext *cx, Handle<WeakMapObject*> mapObj,
                         HandleObject key, HandleValue value)
 {
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -188,17 +188,17 @@ bool Wrapper::finalizeInBackground(Value
 
     /*
      * Make the 'background-finalized-ness' of the wrapper the same as the
      * wrapped object, to allow transplanting between them.
      *
      * If the wrapped object is in the nursery then we know it doesn't have a
      * finalizer, and so background finalization is ok.
      */
-    if (IsInsideNursery(priv.toObject().runtimeFromMainThread(), &priv.toObject()))
+    if (IsInsideNursery(&priv.toObject()))
         return true;
     return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
 }
 
 #define PIERCE(cx, wrapper, pre, op, post)                      \
     JS_BEGIN_MACRO                                              \
         bool ok;                                                \
         {                                                       \
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -651,17 +651,17 @@ ArrayBufferObject::create(JSContext *cx,
     gc::AllocKind allocKind = GetGCObjectKind(nslots);
 
     Rooted<ArrayBufferObject*> obj(cx, NewBuiltinClassInstance<ArrayBufferObject>(cx, allocKind, newKind));
     if (!obj)
         return nullptr;
 
     JS_ASSERT(obj->getClass() == &class_);
 
-    JS_ASSERT(!gc::IsInsideNursery(cx->runtime(), obj));
+    JS_ASSERT(!gc::IsInsideNursery(obj));
 
     if (data) {
         obj->initialize(nbytes, data, OwnsData);
         if (mapped)
             obj->setIsMappedArrayBuffer();
     } else {
         void *data = obj->fixedData(reservedSlots);
         memset(data, 0, nbytes);
--- a/js/src/vm/ArrayBufferObject.h
+++ b/js/src/vm/ArrayBufferObject.h
@@ -270,17 +270,17 @@ bool
 ToClampedIndex(JSContext *cx, HandleValue v, uint32_t length, uint32_t *out);
 
 inline void
 PostBarrierTypedArrayObject(JSObject *obj)
 {
 #ifdef JSGC_GENERATIONAL
     JS_ASSERT(obj);
     JSRuntime *rt = obj->runtimeFromMainThread();
-    if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
+    if (!rt->isHeapBusy() && !IsInsideNursery(JS::AsCell(obj)))
         rt->gc.storeBuffer.putWholeCellFromMainThread(obj);
 #endif
 }
 
 inline void
 InitArrayBufferViewDataPointer(ArrayBufferViewObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
 {
     /*
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -463,26 +463,28 @@ JSRuntime::~JSRuntime()
 #ifdef JS_THREADSAFE
     js::TlsPerThreadData.set(nullptr);
 #endif
 }
 
 void
 NewObjectCache::clearNurseryObjects(JSRuntime *rt)
 {
+#ifdef JSGC_GENERATIONAL
     for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
         Entry &e = entries[i];
         JSObject *obj = reinterpret_cast<JSObject *>(&e.templateObject);
-        if (IsInsideNursery(rt, e.key) ||
-            IsInsideNursery(rt, obj->slots) ||
-            IsInsideNursery(rt, obj->elements))
+        if (IsInsideNursery(e.key) ||
+            rt->gc.nursery.isInside(obj->slots) ||
+            rt->gc.nursery.isInside(obj->elements))
         {
             PodZero(&e);
         }
     }
+#endif
 }
 
 void
 JSRuntime::resetJitStackLimit()
 {
     AutoLockForInterrupt lock(this);
     mainThread.setJitStackLimit(mainThread.nativeStackLimit[js::StackForUntrustedScript]);
 
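The split in clearNurseryObjects is deliberate: the cache key is a GC cell, so the per-cell IsInsideNursery applies, while obj->slots and obj->elements are raw slot/element buffers rather than cells, so only the nursery itself can say whether those pointers fall inside its space. A sketch restating the two kinds of test used in the loop above; ReferencesNursery is an illustrative helper name.

static bool
ReferencesNursery(JSRuntime *rt, js::gc::Cell *key, void *slots, void *elements)
{
    return IsInsideNursery(key) ||              // GC cell: chunk-location fast path
           rt->gc.nursery.isInside(slots) ||    // raw buffer: ask the nursery
           rt->gc.nursery.isInside(elements);
}
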
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1678,17 +1678,17 @@ DebugScopes::proxiedScopesPostWriteBarri
      * the HashMap base class and then to the unbarriered version.
      */
     ObjectWeakMap::Base *baseHashMap = static_cast<ObjectWeakMap::Base *>(map);
 
     typedef HashMap<JSObject *, JSObject *> UnbarrieredMap;
     UnbarrieredMap *unbarrieredMap = reinterpret_cast<UnbarrieredMap *>(baseHashMap);
 
     typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
-    if (key && IsInsideNursery(rt, key))
+    if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
 #endif
 }
 
 #ifdef JSGC_GENERATIONAL
 class DebugScopes::MissingScopesRef : public gc::BufferableRef
 {
     MissingScopeMap *map;
@@ -1709,33 +1709,33 @@ class DebugScopes::MissingScopesRef : pu
 };
 #endif
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
                                            const ScopeIterKey &key)
 {
 #ifdef JSGC_GENERATIONAL
-    if (key.enclosingScope() && IsInsideNursery(rt, key.enclosingScope()))
+    if (key.enclosingScope() && IsInsideNursery(key.enclosingScope()))
         rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
 #endif
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeObject *key)
 {
 #ifdef JSGC_GENERATIONAL
     // As above.  Otherwise, barriers could fire during GC when moving the
     // value.
     typedef HashMap<ScopeObject *,
                     ScopeIterKey,
                     DefaultHasher<ScopeObject *>,
                     RuntimeAllocPolicy> UnbarrieredLiveScopeMap;
     typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
-    if (key && IsInsideNursery(rt, key))
+    if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
 #endif
 }
 
 DebugScopes::DebugScopes(JSContext *cx)
  : proxiedScopes(cx),
    missingScopes(cx->runtime()),
    liveScopes(cx->runtime())
@@ -1815,30 +1815,29 @@ DebugScopes::sweep(JSRuntime *rt)
 void
 DebugScopes::checkHashTablesAfterMovingGC(JSRuntime *runtime)
 {
     /*
      * This is called at the end of StoreBuffer::mark() to check that our
      * postbarriers have worked and that no hashtable keys (or values) are left
      * pointing into the nursery.
      */
-    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtime);
     for (ObjectWeakMap::Range r = proxiedScopes.all(); !r.empty(); r.popFront()) {
-        JS_ASSERT(!IsInsideNursery(rt, r.front().key().get()));
-        JS_ASSERT(!IsInsideNursery(rt, r.front().value().get()));
+        JS_ASSERT(!IsInsideNursery(r.front().key().get()));
+        JS_ASSERT(!IsInsideNursery(r.front().value().get()));
     }
     for (MissingScopeMap::Range r = missingScopes.all(); !r.empty(); r.popFront()) {
-        JS_ASSERT(!IsInsideNursery(rt, r.front().key().cur()));
-        JS_ASSERT(!IsInsideNursery(rt, r.front().key().staticScope()));
-        JS_ASSERT(!IsInsideNursery(rt, r.front().value().get()));
+        JS_ASSERT(!IsInsideNursery(r.front().key().cur()));
+        JS_ASSERT(!IsInsideNursery(r.front().key().staticScope()));
+        JS_ASSERT(!IsInsideNursery(r.front().value().get()));
     }
     for (LiveScopeMap::Range r = liveScopes.all(); !r.empty(); r.popFront()) {
-        JS_ASSERT(!IsInsideNursery(rt, r.front().key()));
-        JS_ASSERT(!IsInsideNursery(rt, r.front().value().cur_.get()));
-        JS_ASSERT(!IsInsideNursery(rt, r.front().value().staticScope_.get()));
+        JS_ASSERT(!IsInsideNursery(r.front().key()));
+        JS_ASSERT(!IsInsideNursery(r.front().value().cur_.get()));
+        JS_ASSERT(!IsInsideNursery(r.front().value().staticScope_.get()));
     }
 }
 #endif
 
 /*
  * Unfortunately, GetDebugScopeForFrame needs to work even outside debug mode
  * (in particular, JS_GetFrameScopeChain does not require debug mode). Since
  * DebugScopes::onPop* are only called in debug mode, this means we cannot
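The post-write barriers in this file all follow one pattern: if a hash table key is currently in the nursery, record a generic store buffer entry that re-traces the key and re-keys the table entry once the minor GC has moved it, which is exactly what checkHashTablesAfterMovingGC then verifies. A sketch of that shared pattern for a hypothetical table; MyMap and MyMapPostWriteBarrier are illustrative names, while gc::HashKeyRef and putGeneric are the existing APIs used above.

// Hypothetical table keyed by JSObject pointers.
typedef HashMap<JSObject *, uint32_t> MyMap;

static void
MyMapPostWriteBarrier(JSRuntime *rt, MyMap *map, JSObject *key)
{
#ifdef JSGC_GENERATIONAL
    // Only nursery keys can move during a minor GC, so only they need an entry.
    typedef gc::HashKeyRef<MyMap, JSObject *> Ref;
    if (key && IsInsideNursery(key))
        rt->gc.storeBuffer.putGeneric(Ref(map, key));
#endif
}
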
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1662,25 +1662,24 @@ JSCompartment::checkInitialShapesTableAf
     if (!initialShapes.initialized())
         return;
 
     /*
      * Assert that the postbarriers have worked and that nothing is left in
      * initialShapes that points into the nursery, and that the hash table
      * entries are discoverable.
      */
-    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
     for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
         InitialShapeEntry entry = e.front();
         TaggedProto proto = entry.proto;
         Shape *shape = entry.shape.get();
 
-        JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(rt, proto.toObject()));
-        JS_ASSERT(!IsInsideNursery(rt, shape->getObjectParent()));
-        JS_ASSERT(!IsInsideNursery(rt, shape->getObjectMetadata()));
+        JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(proto.toObject()));
+        JS_ASSERT_IF(shape->getObjectParent(), !IsInsideNursery(shape->getObjectParent()));
+        JS_ASSERT_IF(shape->getObjectMetadata(), !IsInsideNursery(shape->getObjectMetadata()));
 
         InitialShapeEntry::Lookup lookup(shape->getObjectClass(),
                                          proto,
                                          shape->getObjectParent(),
                                          shape->getObjectMetadata(),
                                          shape->numFixedSlots(),
                                          shape->getObjectFlags());
         InitialShapeSet::Ptr ptr = initialShapes.lookup(lookup);
@@ -1725,19 +1724,19 @@ EmptyShape::getInitialShape(ExclusiveCon
     new (shape) EmptyShape(nbase, nfixed);
 
     Lookup lookup(clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
     if (!p.add(cx, table, lookup, InitialShapeEntry(shape, protoRoot)))
         return nullptr;
 
 #ifdef JSGC_GENERATIONAL
     if (cx->hasNursery()) {
-        if ((protoRoot.isObject() && cx->nursery().isInside(protoRoot.toObject())) ||
-            cx->nursery().isInside(parentRoot.get()) ||
-            cx->nursery().isInside(metadataRoot.get()))
+        if ((protoRoot.isObject() && IsInsideNursery(protoRoot.toObject())) ||
+            IsInsideNursery(parentRoot.get()) ||
+            IsInsideNursery(metadataRoot.get()))
         {
             InitialShapeSetRef ref(
                 &table, clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
             cx->asJSContext()->runtime()->gc.storeBuffer.putGeneric(ref);
         }
     }
 #endif
 
--- a/xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
+++ b/xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
@@ -49,23 +49,22 @@ RunTest(JSRuntime* rt, JSContext* cx, Ar
   JS_AddExtraGCRootsTracer(rt, TraceArray<ArrayT>, array);
 
   /*
    * Create the array and fill it with new JS objects. With GGC these will be
    * allocated in the nursery.
    */
   RootedValue value(cx);
   const char* property = "foo";
-  JS::shadow::Runtime* srt = reinterpret_cast<JS::shadow::Runtime*>(rt);
   for (size_t i = 0; i < ElementCount; ++i) {
     RootedObject obj(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
 #ifdef JSGC_GENERATIONAL
-    ASSERT_TRUE(js::gc::IsInsideNursery(srt, obj));
+    ASSERT_TRUE(js::gc::IsInsideNursery(AsCell(obj)));
 #else
-    ASSERT_FALSE(js::gc::IsInsideNursery(srt, obj));
+    ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
 #endif
     value = Int32Value(i);
     ASSERT_TRUE(JS_SetProperty(cx, obj, property, value));
     array->AppendElement(obj);
   }
 
   /*
    * If postbarriers are not working, we will crash here when we try to mark
@@ -73,17 +72,17 @@ RunTest(JSRuntime* rt, JSContext* cx, Ar
    */
   JS_GC(rt);
 
   /*
    * Sanity check that our array contains what we expect.
    */
   for (size_t i = 0; i < ElementCount; ++i) {
     RootedObject obj(cx, array->ElementAt(i));
-    ASSERT_FALSE(js::gc::IsInsideNursery(srt, obj));
+    ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
     ASSERT_TRUE(JS_GetProperty(cx, obj, property, &value));
     ASSERT_TRUE(value.isInt32());
     ASSERT_EQ(static_cast<int32_t>(i), value.toInt32());
   }
 
   JS_RemoveExtraGCRootsTracer(rt, TraceArray<ArrayT>, array);
 }