Bug 898886 - Improve threadsafe assertions when accessing runtimes and zones, r=billm.
author      Brian Hackett <bhackett1024@gmail.com>
date        Mon, 05 Aug 2013 08:48:34 -0600
changeset   141424 f836042326f9df3abfbee44169fc878ef87c950d
parent      141423 c2dc96c6efa5d2af7b9ffc5d74f9b5f4a9e4ba0b
child       141425 f24d9d59a689bb6be0f3dfe3e38a26e2bad3c09a
push id     25059
push user   ryanvm@gmail.com
push date   Tue, 06 Aug 2013 03:20:31 +0000
treeherder  autoland@a0dd80f800e2
reviewers   billm
bugs        898886
milestone   25.0a1
Bug 898886 - Improve threadsafe assertions when accessing runtimes and zones, r=billm.
js/src/gc/Barrier-inl.h
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/Iteration.cpp
js/src/gc/Marking.cpp
js/src/gc/StoreBuffer.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/ion/AsmJS.cpp
js/src/ion/BaselineCompiler.cpp
js/src/ion/BaselineIC.cpp
js/src/ion/BaselineJIT.cpp
js/src/ion/CodeGenerator.cpp
js/src/ion/CompilerRoot.h
js/src/ion/Ion.cpp
js/src/ion/Ion.h
js/src/ion/IonMacroAssembler.cpp
js/src/ion/Lowering.cpp
js/src/ion/MIRGenerator.h
js/src/ion/arm/Bailouts-arm.cpp
js/src/ion/arm/CodeGenerator-arm.cpp
js/src/ion/arm/MacroAssembler-arm.cpp
js/src/ion/shared/CodeGenerator-shared.cpp
js/src/ion/x64/CodeGenerator-x64.cpp
js/src/ion/x86/Bailouts-x86.cpp
js/src/ion/x86/CodeGenerator-x86.cpp
js/src/ion/x86/MacroAssembler-x86.h
js/src/jsapi.cpp
js/src/jscntxt.cpp
js/src/jscntxtinlines.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jsinfer.cpp
js/src/jsinferinlines.h
js/src/jsiter.cpp
js/src/jsmemorymetrics.cpp
js/src/jsobjinlines.h
js/src/jsscriptinlines.h
js/src/jsworkers.cpp
js/src/jswrapper.cpp
js/src/vm/Debugger.cpp
js/src/vm/ForkJoin.cpp
js/src/vm/ForkJoin.h
js/src/vm/ObjectImpl-inl.h
js/src/vm/ObjectImpl.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/ScopeObject.cpp
js/src/vm/Shape-inl.h
js/src/vm/Shape.cpp
js/src/vm/String-inl.h
js/src/vm/ThreadPool.cpp
js/src/vm/TypedArrayObject.cpp
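
The pattern running through every hunk below: the unchecked Cell::runtime() accessor and the public Zone::rt field are replaced by runtimeFromMainThread(), which asserts that the calling thread is allowed to touch the runtime, and runtimeFromAnyThread(), which performs no check and is reserved for carefully audited cross-thread callers. Both rest on two new predicates declared in gc/Heap.h (their definitions are not in the hunks shown here; vm/Runtime.cpp and vm/ForkJoin.cpp in the file list above are the likely homes):

    // New thread-access predicates, replacing the old
    // InSequentialOrExclusiveParallelSection() check.
    extern bool CurrentThreadCanAccessRuntime(JSRuntime *rt);
    extern bool CurrentThreadCanAccessZone(JS::Zone *zone);
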
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -74,27 +74,27 @@ EncapsulatedValue::operator=(const Encap
     value = v.get();
     return *this;
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
-    if (value.isMarkable() && runtime(value)->needsBarrier())
+    if (value.isMarkable() && runtimeFromAnyThread(value)->needsBarrier())
         writeBarrierPre(ZoneOfValue(value), value);
 #endif
 }
 
 inline void
 EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
     if (zone->needsBarrier()) {
-        JS_ASSERT_IF(value.isMarkable(), runtime(value)->needsBarrier());
+        JS_ASSERT_IF(value.isMarkable(), runtimeFromMainThread(value)->needsBarrier());
         Value tmp(value);
         js::gc::MarkValueUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == value);
     }
 #endif
 }
 
 inline void
@@ -175,32 +175,32 @@ HeapValue::operator=(const HeapValue &v)
 }
 
 inline void
 HeapValue::set(Zone *zone, const Value &v)
 {
 #ifdef DEBUG
     if (value.isMarkable()) {
         JS_ASSERT(ZoneOfValue(value) == zone ||
-                  ZoneOfValue(value) == zone->rt->atomsCompartment->zone());
+                  ZoneOfValue(value) == zone->runtimeFromMainThread()->atomsCompartment->zone());
     }
 #endif
 
     pre(zone);
     JS_ASSERT(!IsPoisonedValue(v));
     value = v;
-    post(zone->rt);
+    post(zone->runtimeFromAnyThread());
 }
 
 inline void
 HeapValue::writeBarrierPost(const Value &value, Value *addr)
 {
 #ifdef JSGC_GENERATIONAL
     if (value.isMarkable())
-        runtime(value)->gcStoreBuffer.putValue(addr);
+        runtimeFromMainThread(value)->gcStoreBuffer.putValue(addr);
 #endif
 }
 
 inline void
 HeapValue::writeBarrierPost(JSRuntime *rt, const Value &value, Value *addr)
 {
 #ifdef JSGC_GENERATIONAL
     if (value.isMarkable())
@@ -243,29 +243,29 @@ RelocatableValue::RelocatableValue(const
     if (v.value.isMarkable())
         post();
 }
 
 inline
 RelocatableValue::~RelocatableValue()
 {
     if (value.isMarkable())
-        relocate(runtime(value));
+        relocate(runtimeFromMainThread(value));
 }
 
 inline RelocatableValue &
 RelocatableValue::operator=(const Value &v)
 {
     pre();
     JS_ASSERT(!IsPoisonedValue(v));
     if (v.isMarkable()) {
         value = v;
         post();
     } else if (value.isMarkable()) {
-        JSRuntime *rt = runtime(value);
+        JSRuntime *rt = runtimeFromMainThread(value);
         value = v;
         relocate(rt);
     } else {
         value = v;
     }
     return *this;
 }
 
@@ -273,31 +273,31 @@ inline RelocatableValue &
 RelocatableValue::operator=(const RelocatableValue &v)
 {
     pre();
     JS_ASSERT(!IsPoisonedValue(v.value));
     if (v.value.isMarkable()) {
         value = v.value;
         post();
     } else if (value.isMarkable()) {
-        JSRuntime *rt = runtime(value);
+        JSRuntime *rt = runtimeFromMainThread(value);
         value = v.value;
         relocate(rt);
     } else {
         value = v.value;
     }
     return *this;
 }
 
 inline void
 RelocatableValue::post()
 {
 #ifdef JSGC_GENERATIONAL
     JS_ASSERT(value.isMarkable());
-    runtime(value)->gcStoreBuffer.putRelocatableValue(&value);
+    runtimeFromMainThread(value)->gcStoreBuffer.putRelocatableValue(&value);
 #endif
 }
 
 inline void
 RelocatableValue::relocate(JSRuntime *rt)
 {
 #ifdef JSGC_GENERATIONAL
     rt->gcStoreBuffer.removeRelocatableValue(&value);
@@ -357,24 +357,24 @@ HeapSlot::set(Zone *zone, JSObject *obj,
 {
     JS_ASSERT_IF(kind == Slot, &obj->getSlotRef(slot) == this);
     JS_ASSERT_IF(kind == Element, &obj->getDenseElement(slot) == (const Value *)this);
     JS_ASSERT(obj->zone() == zone);
 
     pre(zone);
     JS_ASSERT(!IsPoisonedValue(v));
     value = v;
-    post(zone->rt, obj, kind, slot, v);
+    post(zone->runtimeFromAnyThread(), obj, kind, slot, v);
 }
 
 inline void
 HeapSlot::writeBarrierPost(JSObject *obj, Kind kind, uint32_t slot, Value target)
 {
 #ifdef JSGC_GENERATIONAL
-    writeBarrierPost(obj->runtime(), obj, kind, slot, target);
+    writeBarrierPost(obj->runtimeFromAnyThread(), obj, kind, slot, target);
 #endif
 }
 
 inline void
 HeapSlot::writeBarrierPost(JSRuntime *rt, JSObject *obj, Kind kind, uint32_t slot, Value target)
 {
 #ifdef JSGC_GENERATIONAL
     JS_ASSERT_IF(kind == Slot, obj->getSlotAddressUnchecked(slot)->get() == target);
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -261,41 +261,41 @@ class RelocatablePtr : public Encapsulat
     }
     RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
         if (this->value)
             post();
     }
 
     ~RelocatablePtr() {
         if (this->value)
-            relocate(this->value->runtime());
+            relocate(this->value->runtimeFromMainThread());
     }
 
     RelocatablePtr<T> &operator=(T *v) {
         this->pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v));
         if (v) {
             this->value = v;
             post();
         } else if (this->value) {
-            JSRuntime *rt = this->value->runtime();
+            JSRuntime *rt = this->value->runtimeFromMainThread();
             this->value = v;
             relocate(rt);
         }
         return *this;
     }
 
     RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
         this->pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v.value));
         if (v.value) {
             this->value = v.value;
             post();
         } else if (this->value) {
-            JSRuntime *rt = this->value->runtime();
+            JSRuntime *rt = this->value->runtimeFromMainThread();
             this->value = v;
             relocate(rt);
         }
         return *this;
     }
 
   protected:
     inline void post();
@@ -414,19 +414,23 @@ class EncapsulatedValue : public ValueOp
 
     static inline void writeBarrierPre(const Value &v);
     static inline void writeBarrierPre(Zone *zone, const Value &v);
 
   protected:
     inline void pre();
     inline void pre(Zone *zone);
 
-    static inline JSRuntime *runtime(const Value &v) {
+    static inline JSRuntime *runtimeFromMainThread(const Value &v) {
         JS_ASSERT(v.isMarkable());
-        return static_cast<js::gc::Cell *>(v.toGCThing())->runtime();
+        return static_cast<js::gc::Cell *>(v.toGCThing())->runtimeFromMainThread();
+    }
+    static inline JSRuntime *runtimeFromAnyThread(const Value &v) {
+        JS_ASSERT(v.isMarkable());
+        return static_cast<js::gc::Cell *>(v.toGCThing())->runtimeFromAnyThread();
     }
 
   private:
     friend class ValueOperations<EncapsulatedValue>;
     const Value * extract() const { return &value; }
 };
 
 class HeapValue : public EncapsulatedValue
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -23,18 +23,20 @@
 struct JSCompartment;
 
 extern "C" {
 struct JSRuntime;
 }
 
 namespace js {
 
-// Defined in vm/ForkJoin.cpp
-extern bool InSequentialOrExclusiveParallelSection();
+// Whether the current thread is permitted access to any part of the specified
+// runtime or zone.
+extern bool CurrentThreadCanAccessRuntime(JSRuntime *rt);
+extern bool CurrentThreadCanAccessZone(JS::Zone *zone);
 
 class FreeOp;
 
 namespace gc {
 
 struct Arena;
 struct ArenaHeader;
 struct Chunk;
@@ -92,20 +94,24 @@ struct Cell
 {
   public:
     inline ArenaHeader *arenaHeader() const;
     inline AllocKind tenuredGetAllocKind() const;
     MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
 
-    inline JSRuntime *runtime() const;
+    inline JSRuntime *runtimeFromMainThread() const;
     inline JS::Zone *tenuredZone() const;
     inline bool tenuredIsInsideZone(JS::Zone *zone) const;
 
+    // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
+    // thread can easily lead to races. Use this method very carefully.
+    inline JSRuntime *runtimeFromAnyThread() const;
+
 #ifdef DEBUG
     inline bool isAligned() const;
     inline bool isTenured() const;
 #endif
 
   protected:
     inline uintptr_t address() const;
     inline Chunk *chunk() const;
@@ -945,19 +951,26 @@ Cell::arenaHeader() const
 {
     JS_ASSERT(isTenured());
     uintptr_t addr = address();
     addr &= ~ArenaMask;
     return reinterpret_cast<ArenaHeader *>(addr);
 }
 
 inline JSRuntime *
-Cell::runtime() const
+Cell::runtimeFromMainThread() const
 {
-    JS_ASSERT(InSequentialOrExclusiveParallelSection());
+    JSRuntime *rt = chunk()->info.runtime;
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
+    return rt;
+}
+
+inline JSRuntime *
+Cell::runtimeFromAnyThread() const
+{
     return chunk()->info.runtime;
 }
 
 AllocKind
 Cell::tenuredGetAllocKind() const
 {
     return arenaHeader()->getAllocKind();
 }
@@ -985,19 +998,20 @@ Cell::unmark(uint32_t color) const
     JS_ASSERT(color != BLACK);
     AssertValidColor(this, color);
     chunk()->bitmap.unmark(this, color);
 }
 
 JS::Zone *
 Cell::tenuredZone() const
 {
-    JS_ASSERT(InSequentialOrExclusiveParallelSection());
+    JS::Zone *zone = arenaHeader()->zone;
+    JS_ASSERT(CurrentThreadCanAccessZone(zone));
     JS_ASSERT(isTenured());
-    return arenaHeader()->zone;
+    return zone;
 }
 
 bool
 Cell::tenuredIsInsideZone(JS::Zone *zone) const
 {
     JS_ASSERT(isTenured());
     return zone == arenaHeader()->zone;
 }
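
A hedged sketch of the call-site discipline the new Cell accessors enforce. The helper functions here are hypothetical illustrations, not part of the patch; gcBytes and needsBarrier() are existing JSRuntime members used elsewhere in this diff:

    // Hypothetical main-thread caller: runtimeFromMainThread() asserts
    // CurrentThreadCanAccessRuntime(rt) in debug builds.
    size_t
    GCBytesFromMainThread(js::gc::Cell *cell)
    {
        return cell->runtimeFromMainThread()->gcBytes;
    }

    // Hypothetical cross-thread caller: no assertion, so the caller must
    // reason about races itself, as the comment in gc/Heap.h warns. The
    // pre-barrier check in Barrier-inl.h above is the motivating example.
    bool
    NeedsBarrier(js::gc::Cell *cell)
    {
        return cell->runtimeFromAnyThread()->needsBarrier();
    }
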
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -81,17 +81,17 @@ js::IterateScripts(JSRuntime *rt, JSComp
                 scriptCallback(rt, data, i.get<JSScript>());
         }
     }
 }
 
 void
 js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
 {
-    AutoPrepareForTracing prep(zone->rt);
+    AutoPrepareForTracing prep(zone->runtimeFromMainThread());
 
     for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
         for (CellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
             JSObject *obj = i.get<JSObject>();
             if (obj->isMarked(GRAY))
                 cellCallback(data, obj);
         }
     }
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -121,25 +121,25 @@ CheckMarkedThing(JSTracer *trc, T *thing
     JS_ASSERT(trc);
     JS_ASSERT(thing);
 
     /* This function uses data that's not available in the nursery. */
     if (IsInsideNursery(trc->runtime, thing))
         return;
 
     JS_ASSERT(thing->zone());
-    JS_ASSERT(thing->zone()->rt == trc->runtime);
+    JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime);
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
 
     DebugOnly<JSRuntime *> rt = trc->runtime;
 
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
                  !thing->zone()->scheduledForDestruction);
 
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     JS_ASSERT_IF(thing->zone()->requireGCTracer(),
                  IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
 
     JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
 
@@ -265,17 +265,17 @@ namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
 #ifdef JSGC_GENERATIONAL
-    Nursery &nursery = (*thingp)->runtime()->gcNursery;
+    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
     if (nursery.isInside(*thingp))
         return nursery.getForwardedPointer(thingp);
 #endif
     Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
     return (*thingp)->isMarked();
 }
@@ -283,17 +283,17 @@ IsMarked(T **thingp)
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
 
 #ifdef JSGC_GENERATIONAL
-    Nursery &nursery = (*thingp)->runtime()->gcNursery;
+    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
     if (nursery.isInside(*thingp))
         return !nursery.getForwardedPointer(thingp);
 #endif
     if (!(*thingp)->tenuredZone()->isGCSweeping())
         return false;
 
     /*
      * We should return false for things that have been allocated during
@@ -770,93 +770,93 @@ MaybePushMarkStackBetweenSlices(GCMarker
     if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushType(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSScript *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     /*
      * We mark scripts directly rather than pushing on the stack as they can
      * refer to other scripts only indirectly (like via nested functions) and
      * we cannot get to deep recursion.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     /*
      * We mark lazy scripts directly rather than pushing on the stack as they
      * only refer to normal scripts and to strings, and cannot recurse.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, Shape *shape);
 
 static void
 PushMarkStack(GCMarker *gcmarker, Shape *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     /* We mark shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanShape(gcmarker, thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, ion::IonCode *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushIonCode(thing);
 }
 
 static inline void
 ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
 
 static void
 PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
-    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
+    JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
 
     /* We mark base shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanBaseShape(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, Shape *shape)
@@ -1684,12 +1684,12 @@ JS::UnmarkGrayGCThingRecursively(void *t
 {
     JS_ASSERT(kind != JSTRACE_SHAPE);
 
     if (!JS::GCThingIsMarkedGray(thing))
         return;
 
     UnmarkGrayGCThing(thing);
 
-    JSRuntime *rt = static_cast<Cell *>(thing)->runtime();
+    JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
     UnmarkGrayTracer trc(rt);
     JS_TraceChildren(&trc, thing, kind);
 }
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -312,43 +312,43 @@ StoreBuffer::inParallelSection() const
 {
     return InParallelSection();
 }
 
 JS_PUBLIC_API(void)
 JS::HeapCellPostBarrier(js::gc::Cell **cellp)
 {
     JS_ASSERT(*cellp);
-    JSRuntime *runtime = (*cellp)->runtime();
+    JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
     runtime->gcStoreBuffer.putRelocatableCell(cellp);
 }
 
 JS_PUBLIC_API(void)
 JS::HeapCellRelocate(js::gc::Cell **cellp)
 {
     /* Called with old contents of *pp before overwriting. */
     JS_ASSERT(*cellp);
-    JSRuntime *runtime = (*cellp)->runtime();
+    JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
     runtime->gcStoreBuffer.removeRelocatableCell(cellp);
 }
 
 JS_PUBLIC_API(void)
 JS::HeapValuePostBarrier(JS::Value *valuep)
 {
     JS_ASSERT(JSVAL_IS_TRACEABLE(*valuep));
-    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtime();
+    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
     runtime->gcStoreBuffer.putRelocatableValue(valuep);
 }
 
 JS_PUBLIC_API(void)
 JS::HeapValueRelocate(JS::Value *valuep)
 {
     /* Called with old contents of *valuep before overwriting. */
     JS_ASSERT(JSVAL_IS_TRACEABLE(*valuep));
-    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtime();
+    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
     runtime->gcStoreBuffer.removeRelocatableValue(valuep);
 }
 
 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotEdge>;
 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
 template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::ValueEdge>;
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -17,17 +17,17 @@
 #include "vm/Runtime.h"
 
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 JS::Zone::Zone(JSRuntime *rt)
-  : rt(rt),
+  : runtime_(rt),
     allocator(this),
     hold(false),
     ionUsingBarriers_(false),
     active(false),
     gcScheduled(false),
     gcState(NoGC),
     gcPreserveCode(false),
     gcBytes(0),
@@ -45,18 +45,18 @@ JS::Zone::Zone(JSRuntime *rt)
     JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
               static_cast<JS::shadow::Zone *>(this));
 
     setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
 }
 
 Zone::~Zone()
 {
-    if (this == rt->systemZone)
-        rt->systemZone = NULL;
+    if (this == runtimeFromMainThread()->systemZone)
+        runtimeFromMainThread()->systemZone = NULL;
 }
 
 bool
 Zone::init(JSContext *cx)
 {
     types.init(cx);
     return true;
 }
@@ -88,17 +88,17 @@ Zone::markTypes(JSTracer *trc)
         JSScript *script = i.get<JSScript>();
         MarkScriptRoot(trc, &script, "mark_types_script");
         JS_ASSERT(script == i.get<JSScript>());
     }
 
     for (size_t thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
         ArenaHeader *aheader = allocator.arenas.getFirstArena(static_cast<AllocKind>(thingKind));
         if (aheader)
-            rt->gcMarker.pushArenaList(aheader);
+            trc->runtime->gcMarker.pushArenaList(aheader);
     }
 
     for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
         types::TypeObject *type = i.get<types::TypeObject>();
         MarkTypeObjectRoot(trc, &type, "mark_types_scan");
         JS_ASSERT(type == i.get<types::TypeObject>());
     }
 }
@@ -132,36 +132,36 @@ Zone::sweep(FreeOp *fop, bool releaseTyp
     /*
      * Periodically release observed types for all scripts. This is safe to
      * do when there are no frames for the zone on the stack.
      */
     if (active)
         releaseTypes = false;
 
     if (!isPreservingCode()) {
-        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
+        gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
         types.sweep(fop, releaseTypes);
     }
 
-    if (!rt->debuggerList.isEmpty())
+    if (!fop->runtime()->debuggerList.isEmpty())
         sweepBreakpoints(fop);
 
     active = false;
 }
 
 void
 Zone::sweepBreakpoints(FreeOp *fop)
 {
     /*
      * Sweep all compartments in a zone at the same time, since there is no way
      * to iterate over the scripts belonging to a single compartment in a zone.
      */
 
-    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
-    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
+    gcstats::AutoPhase ap1(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES);
+    gcstats::AutoPhase ap2(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
 
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         if (!script->hasAnyBreakpointsOrStepMode())
             continue;
         bool scriptGone = IsScriptAboutToBeFinalized(&script);
         JS_ASSERT(script == i.get<JSScript>());
         for (unsigned i = 0; i < script->length; i++) {
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -99,35 +99,51 @@ namespace JS {
  * example, if the conservative scanner marks a string in an otherwise dead
  * zone.)
  */
 
 struct Zone : private JS::shadow::Zone,
               public js::gc::GraphNodeBase<JS::Zone>,
               public js::MallocProvider<JS::Zone>
 {
-    JSRuntime                    *rt;
+  private:
+    JSRuntime                    *runtime_;
+
+    friend bool js::CurrentThreadCanAccessZone(Zone *zone);
+
+  public:
     js::Allocator                allocator;
 
     js::CompartmentVector        compartments;
 
     bool                         hold;
 
   private:
     bool                         ionUsingBarriers_;
 
   public:
     bool                         active;  // GC flag, whether there are active frames
 
+    JSRuntime *runtimeFromMainThread() const {
+        JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
+        return runtime_;
+    }
+
+    // Note: Unrestricted access to the zone's runtime from an arbitrary
+    // thread can easily lead to races. Use this method very carefully.
+    JSRuntime *runtimeFromAnyThread() const {
+        return runtime_;
+    }
+
     bool needsBarrier() const {
         return needsBarrier_;
     }
 
     bool compileBarriers(bool needsBarrier) const {
-        return needsBarrier || rt->gcZeal() == js::gc::ZealVerifierPreValue;
+        return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
     }
 
     bool compileBarriers() const {
         return compileBarriers(needsBarrier());
     }
 
     enum ShouldUpdateIon {
         DontUpdateIon,
@@ -137,17 +153,17 @@ struct Zone : private JS::shadow::Zone,
     void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);
 
     static size_t OffsetOfNeedsBarrier() {
         return offsetof(Zone, needsBarrier_);
     }
 
     js::GCMarker *barrierTracer() {
         JS_ASSERT(needsBarrier_);
-        return &rt->gcMarker;
+        return &runtimeFromMainThread()->gcMarker;
     }
 
   public:
     enum CompartmentGCState {
         NoGC,
         Mark,
         MarkGray,
         Sweep,
@@ -156,41 +172,41 @@ struct Zone : private JS::shadow::Zone,
 
   private:
     bool                         gcScheduled;
     CompartmentGCState           gcState;
     bool                         gcPreserveCode;
 
   public:
     bool isCollecting() const {
-        if (rt->isHeapCollecting())
+        if (runtimeFromMainThread()->isHeapCollecting())
             return gcState != NoGC;
         else
             return needsBarrier();
     }
 
     bool isPreservingCode() const {
         return gcPreserveCode;
     }
 
     /*
      * If this returns true, all object tracing must be done with a GC marking
      * tracer.
      */
     bool requireGCTracer() const {
-        return rt->isHeapMajorCollecting() && gcState != NoGC;
+        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState != NoGC;
     }
 
     void setGCState(CompartmentGCState state) {
-        JS_ASSERT(rt->isHeapBusy());
+        JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
         gcState = state;
     }
 
     void scheduleGC() {
-        JS_ASSERT(!rt->isHeapBusy());
+        JS_ASSERT(!runtimeFromMainThread()->isHeapBusy());
 
         /* Note: zones cannot be collected while in use by other threads. */
         if (!usedByExclusiveThread)
             gcScheduled = true;
     }
 
     void unscheduleGC() {
         gcScheduled = false;
@@ -204,17 +220,17 @@ struct Zone : private JS::shadow::Zone,
         gcPreserveCode = preserving;
     }
 
     bool wasGCStarted() const {
         return gcState != NoGC;
     }
 
     bool isGCMarking() {
-        if (rt->isHeapCollecting())
+        if (runtimeFromMainThread()->isHeapCollecting())
             return gcState == Mark || gcState == MarkGray;
         else
             return needsBarrier();
     }
 
     bool isGCMarkingBlack() {
         return gcState == Mark;
     }
@@ -287,17 +303,17 @@ struct Zone : private JS::shadow::Zone,
 
     bool isTooMuchMalloc() const {
         return gcMallocBytes <= 0;
      }
 
     void onTooMuchMalloc();
 
     void *onOutOfMemory(void *p, size_t nbytes) {
-        return rt->onOutOfMemory(p, nbytes);
+        return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
     }
     void reportAllocationOverflow() {
         js_ReportAllocationOverflow(NULL);
     }
 
     void markTypes(JSTracer *trc);
 
     js::types::TypeZone types;
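
Zone mirrors the Cell pattern: the public rt field becomes a private runtime_, reachable only through the same pair of accessors, with CurrentThreadCanAccessZone befriended so it can still read the field directly. A minimal hypothetical caller, assuming main-thread access:

    // Code that previously read |zone->rt| now states its threading
    // assumption explicitly and gets a debug assertion for free.
    void
    MaybeScheduleZoneGC(JS::Zone *zone)
    {
        if (!zone->runtimeFromMainThread()->isHeapBusy())
            zone->scheduleGC();
    }
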
--- a/js/src/ion/AsmJS.cpp
+++ b/js/src/ion/AsmJS.cpp
@@ -1758,17 +1758,17 @@ class FunctionCompiler
         return varInitializers_.append(init);
     }
 
     bool prepareToEmitMIR(const VarTypeVector &argTypes)
     {
         JS_ASSERT(locals_.count() == argTypes.length() + varInitializers_.length());
 
         alloc_  = lifo_.new_<TempAllocator>(&lifo_);
-        ionContext_.construct(m_.cx()->compartment(), alloc_);
+        ionContext_.construct(m_.cx()->runtime(), m_.cx()->compartment(), alloc_);
 
         graph_  = lifo_.new_<MIRGraph>(alloc_);
         info_   = lifo_.new_<CompileInfo>(locals_.count(), SequentialExecution);
         mirGen_ = lifo_.new_<MIRGenerator>(cx()->compartment(), alloc_, graph_, info_);
 
         if (!newBlock(/* pred = */ NULL, &curBlock_, fn_))
             return false;
 
@@ -4838,17 +4838,17 @@ GenerateCodeForFinishedJob(ModuleCompile
     if (!task)
         return false;
 
     ModuleCompiler::Func &func = *reinterpret_cast<ModuleCompiler::Func *>(task->func);
     func.accumulateCompileTime(task->compileTime);
 
     {
         // Perform code generation on the main thread.
-        IonContext ionContext(m.cx()->compartment(), &task->mir->temp());
+        IonContext ionContext(m.cx()->runtime(), m.cx()->compartment(), &task->mir->temp());
         if (!GenerateCode(m, func, *task->mir, *task->lir))
             return false;
     }
 
     group.compiledJobs++;
 
     // Clear the LifoAlloc for use by another worker.
     TempAllocator &tempAlloc = task->mir->temp();
@@ -5147,17 +5147,17 @@ static const RegisterSet AllRegsExceptSP
                 FloatRegisterSet(FloatRegisters::AllMask));
 static const RegisterSet NonVolatileRegs =
     RegisterSet(GeneralRegisterSet(Registers::NonVolatileMask),
                 FloatRegisterSet(FloatRegisters::NonVolatileMask));
 
 static void
 LoadAsmJSActivationIntoRegister(MacroAssembler &masm, Register reg)
 {
-    masm.movePtr(ImmWord(GetIonContext()->compartment->rt), reg);
+    masm.movePtr(ImmWord(GetIonContext()->runtime), reg);
     size_t offset = offsetof(JSRuntime, mainThread) +
                     PerThreadData::offsetOfAsmJSActivationStackReadOnly();
     masm.loadPtr(Address(reg, offset), reg);
 }
 
 static void
 LoadJSContextFromActivation(MacroAssembler &masm, Register activation, Register dest)
 {
@@ -6199,17 +6199,17 @@ GenerateStubs(ModuleCompiler &m)
 }
 
 static bool
 FinishModule(ModuleCompiler &m,
              ScopedJSDeletePtr<AsmJSModule> *module,
              ScopedJSFreePtr<char> *compilationTimeReport)
 {
     TempAllocator alloc(&m.cx()->tempLifoAlloc());
-    IonContext ionContext(m.cx()->compartment(), &alloc);
+    IonContext ionContext(m.cx()->runtime(), m.cx()->compartment(), &alloc);
 
     if (!GenerateStubs(m))
         return false;
 
     return m.staticallyLink(module, compilationTimeReport);
 }
 
 static bool
--- a/js/src/ion/BaselineCompiler.cpp
+++ b/js/src/ion/BaselineCompiler.cpp
@@ -431,17 +431,17 @@ typedef bool (*InterruptCheckFn)(JSConte
 static const VMFunction InterruptCheckInfo = FunctionInfo<InterruptCheckFn>(InterruptCheck);
 
 bool
 BaselineCompiler::emitInterruptCheck()
 {
     frame.syncStack(0);
 
     Label done;
-    void *interrupt = (void *)&cx->compartment()->rt->interrupt;
+    void *interrupt = (void *)&cx->runtime()->interrupt;
     masm.branch32(Assembler::Equal, AbsoluteAddress(interrupt), Imm32(0), &done);
 
     prepareVMCall();
     if (!callVM(InterruptCheckInfo))
         return false;
 
     masm.bind(&done);
     return true;
--- a/js/src/ion/BaselineIC.cpp
+++ b/js/src/ion/BaselineIC.cpp
@@ -3773,17 +3773,17 @@ ICGetElem_String::Compiler::generateStub
     masm.loadPtr(charsAddr, scratchReg);
     masm.load16ZeroExtend(BaseIndex(scratchReg, key, TimesTwo, 0), scratchReg);
 
     // Check if char code >= UNIT_STATIC_LIMIT.
     masm.branch32(Assembler::AboveOrEqual, scratchReg, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                   &failure);
 
     // Load static string.
-    masm.movePtr(ImmWord(&cx->compartment()->rt->staticStrings.unitStaticTable), str);
+    masm.movePtr(ImmWord(&cx->runtime()->staticStrings.unitStaticTable), str);
     masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str);
 
     // Return.
     masm.tagValue(JSVAL_TYPE_STRING, str, R0);
     EmitReturnFromIC(masm);
 
     // Failure case - jump to next stub
     masm.bind(&failure);
--- a/js/src/ion/BaselineJIT.cpp
+++ b/js/src/ion/BaselineJIT.cpp
@@ -704,17 +704,17 @@ BaselineScript::pcForReturnAddress(JSScr
 
 void
 BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
 {
     JS_ASSERT(script->baselineScript() == this);
 
     SrcNoteLineScanner scanner(script->notes(), script->lineno);
 
-    IonContext ictx(script->compartment(), NULL);
+    IonContext ictx(script->runtimeFromMainThread(), script->compartment(), NULL);
     AutoFlushCache afc("DebugTraps");
 
     for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
         PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
 
         CompactBufferReader reader(pcMappingReader(i));
         jsbytecode *curPC = script->code + entry.pcOffset;
         uint32_t nativeOffset = entry.nativeOffset;
@@ -897,17 +897,17 @@ MarkActiveBaselineScripts(JSContext *cx,
     }
 }
 
 void
 ion::MarkActiveBaselineScripts(Zone *zone)
 {
     // First check if there is a JitActivation on the stack, so that there
     // must be a valid IonContext.
-    JitActivationIterator iter(zone->rt);
+    JitActivationIterator iter(zone->runtimeFromMainThread());
     if (iter.done())
         return;
 
     // If baseline is disabled, there are no baseline scripts on the stack.
     JSContext *cx = GetIonContext()->cx;
     if (!ion::IsBaselineEnabled(cx))
         return;
 
--- a/js/src/ion/CodeGenerator.cpp
+++ b/js/src/ion/CodeGenerator.cpp
@@ -619,17 +619,17 @@ CodeGenerator::visitIntToString(LIntToSt
     OutOfLineCode *ool = oolCallVM(IntToStringInfo, lir, (ArgList(), input),
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
     masm.branch32(Assembler::AboveOrEqual, input, Imm32(StaticStrings::INT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.intStaticTable), output);
+    masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
     masm.loadPtr(BaseIndex(output, input, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 typedef JSString *(*DoubleToStringFn)(ThreadSafeContext *, double);
 typedef ParallelResult (*DoubleToStringParFn)(ForkJoinSlice *, double, MutableHandleString);
@@ -648,17 +648,17 @@ CodeGenerator::visitDoubleToString(LDoub
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
     masm.convertDoubleToInt32(input, temp, ool->entry(), true);
     masm.branch32(Assembler::AboveOrEqual, temp, Imm32(StaticStrings::INT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.intStaticTable), output);
+    masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
     masm.loadPtr(BaseIndex(output, temp, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 typedef JSObject *(*CloneRegExpObjectFn)(JSContext *, JSObject *, JSObject *);
 static const VMFunction CloneRegExpObjectInfo =
@@ -1336,17 +1336,17 @@ CodeGenerator::visitOutOfLineCallPostWri
         objreg = regs.takeAny();
         masm.movePtr(ImmGCPtr(&obj->toConstant()->toObject()), objreg);
     } else {
         objreg = ToRegister(obj);
         regs.takeUnchecked(objreg);
     }
 
     Register runtimereg = regs.takeAny();
-    masm.mov(ImmWord(gen->compartment->rt), runtimereg);
+    masm.mov(ImmWord(GetIonContext()->runtime), runtimereg);
 
     masm.setupUnalignedABICall(2, regs.takeAny());
     masm.passABIArg(runtimereg);
     masm.passABIArg(objreg);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
 
     restoreLive(ool->lir());
 
@@ -1358,17 +1358,17 @@ CodeGenerator::visitOutOfLineCallPostWri
 bool
 CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
 {
 #ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
-    Nursery &nursery = gen->compartment->rt->gcNursery;
+    Nursery &nursery = GetIonContext()->runtime->gcNursery;
 
     if (lir->object()->isConstant()) {
         JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
     } else {
         Label tenured;
         Register objreg = ToRegister(lir->object());
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.heapEnd()), ool->rejoin());
@@ -1390,17 +1390,17 @@ CodeGenerator::visitPostWriteBarrierV(LP
 #ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
     ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
     masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
 
-    Nursery &nursery = gen->compartment->rt->gcNursery;
+    Nursery &nursery = GetIonContext()->runtime->gcNursery;
 
     if (lir->object()->isConstant()) {
         JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
     } else {
         Label tenured;
         Register objreg = ToRegister(lir->object());
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.heapEnd()), ool->rejoin());
@@ -2269,17 +2269,17 @@ CodeGenerator::visitCheckOverRecursed(LC
     // This is a weak check, justified by Ion using the C stack: we must always
     // be some distance away from the actual limit, since if the limit is
     // crossed, an error must be thrown, which requires more frames.
     //
     // It must always be possible to trespass past the stack limit.
     // Ion may legally place frames very close to the limit. Calling additional
     // C functions may then violate the limit without any checking.
 
-    JSRuntime *rt = gen->compartment->rt;
+    JSRuntime *rt = GetIonContext()->runtime;
 
     // Since Ion frames exist on the C stack, the stack limit may be
     // dynamically set by JS_SetThreadStackLimit() and JS_SetNativeStackQuota().
     uintptr_t *limitAddr = &rt->mainThread.ionStackLimit;
 
     CheckOverRecursedFailure *ool = new CheckOverRecursedFailure(lir);
     if (!addOutOfLineCode(ool))
         return false;
@@ -2767,17 +2767,17 @@ CodeGenerator::visitNewArrayCallVM(LNewA
 bool
 CodeGenerator::visitNewSlots(LNewSlots *lir)
 {
     Register temp1 = ToRegister(lir->temp1());
     Register temp2 = ToRegister(lir->temp2());
     Register temp3 = ToRegister(lir->temp3());
     Register output = ToRegister(lir->output());
 
-    masm.mov(ImmWord(gen->compartment->rt), temp1);
+    masm.mov(ImmWord(GetIonContext()->runtime), temp1);
     masm.mov(Imm32(lir->mir()->nslots()), temp2);
 
     masm.setupUnalignedABICall(2, temp3);
     masm.passABIArg(temp1);
     masm.passABIArg(temp2);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, NewSlots));
 
     masm.testPtr(output, output);
@@ -4331,17 +4331,17 @@ CodeGenerator::visitFromCharCode(LFromCh
     OutOfLineCode *ool = oolCallVM(StringFromCharCodeInfo, lir, (ArgList(), code), StoreRegisterTo(output));
     if (!ool)
         return false;
 
     // OOL path if code >= UNIT_STATIC_LIMIT.
     masm.branch32(Assembler::AboveOrEqual, code, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.unitStaticTable), output);
+    masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.unitStaticTable), output);
     masm.loadPtr(BaseIndex(output, code, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
 CodeGenerator::visitInitializedLength(LInitializedLength *lir)
@@ -5038,17 +5038,17 @@ CodeGenerator::visitIteratorStart(LItera
     const Register temp1 = ToRegister(lir->temp1());
     const Register temp2 = ToRegister(lir->temp2());
     const Register niTemp = ToRegister(lir->temp3()); // Holds the NativeIterator object.
 
     // Iterators other than for-in should use LCallIteratorStart.
     JS_ASSERT(flags == JSITER_ENUMERATE);
 
     // Fetch the most recent iterator and ensure it's not NULL.
-    masm.loadPtr(AbsoluteAddress(&gen->compartment->rt->nativeIterCache.last), output);
+    masm.loadPtr(AbsoluteAddress(&GetIonContext()->runtime->nativeIterCache.last), output);
     masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
 
     // Load NativeIterator.
     masm.loadObjPrivate(output, JSObject::ITER_CLASS_NFIXED_SLOTS, niTemp);
 
     // Ensure the |active| and |unreusable| bits are not set.
     masm.branchTest32(Assembler::NonZero, Address(niTemp, offsetof(NativeIterator, flags)),
                       Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE), ool->entry());
@@ -6241,17 +6241,17 @@ CodeGenerator::visitTypeOfV(LTypeOfV *li
     const ValueOperand value = ToValue(lir, LTypeOfV::Input);
     Register output = ToRegister(lir->output());
     Register tag = masm.splitTagForTest(value);
 
     OutOfLineTypeOfV *ool = new OutOfLineTypeOfV(lir);
     if (!addOutOfLineCode(ool))
         return false;
 
-    JSRuntime *rt = gen->compartment->rt;
+    JSRuntime *rt = GetIonContext()->runtime;
 
     // Jump to the OOL path if the value is an object. Objects are complicated
     // since they may have a typeof hook.
     masm.branchTestObject(Assembler::Equal, tag, ool->entry());
 
     Label done;
 
     Label notNumber;
@@ -7196,17 +7196,17 @@ CodeGenerator::visitAsmJSVoidReturn(LAsm
     if (current->mir() != *gen->graph().poBegin())
         masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitAsmJSCheckOverRecursed(LAsmJSCheckOverRecursed *lir)
 {
-    uintptr_t *limitAddr = &gen->compartment->rt->mainThread.nativeStackLimit;
+    uintptr_t *limitAddr = &GetIonContext()->runtime->mainThread.nativeStackLimit;
     masm.branchPtr(Assembler::AboveOrEqual,
                    AbsoluteAddress(limitAddr),
                    StackPointer,
                    lir->mir()->onError());
     return true;
 }
 
 } // namespace ion
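
In the code generators the substitution is mechanical: wherever a runtime address was previously baked into the instruction stream by reading it through the compartment, it is now taken from the IonContext, which carries the runtime explicitly. Schematically, using a real masm call from the hunks above:

    // Before: masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.intStaticTable), output);
    // After:
    masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
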
--- a/js/src/ion/CompilerRoot.h
+++ b/js/src/ion/CompilerRoot.h
@@ -24,17 +24,17 @@ namespace ion {
 template <typename T>
 class CompilerRoot : public CompilerRootNode
 {
   public:
     CompilerRoot(T ptr)
       : CompilerRootNode(NULL)
     {
         if (ptr) {
-            JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->compartment->rt, ptr));
+            JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->runtime, ptr));
             setRoot(ptr);
         }
     }
 
   public:
     // Sets the pointer and inserts into root list. The pointer becomes read-only.
     void setRoot(T root) {
         CompilerRootNode *&rootList = GetIonContext()->temp->rootList();
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -118,18 +118,18 @@ IonContext::IonContext(JSContext *cx, Te
     compartment(cx->compartment()),
     temp(temp),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
 
-IonContext::IonContext(JSCompartment *comp, TempAllocator *temp)
-  : runtime(comp->rt),
+IonContext::IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp)
+  : runtime(rt),
     cx(NULL),
     compartment(comp),
     temp(temp),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
@@ -545,17 +545,17 @@ IonCode::readBarrier(IonCode *code)
         MarkIonCodeUnbarriered(zone->barrierTracer(), &code, "ioncode read barrier");
 #endif
 }
 
 void
 IonCode::writeBarrierPre(IonCode *code)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!code || !code->runtime()->needsBarrier())
+    if (!code || !code->runtimeFromMainThread()->needsBarrier())
         return;
 
     Zone *zone = code->zone();
     if (zone->needsBarrier())
         MarkIonCodeUnbarriered(zone->barrierTracer(), &code, "ioncode write barrier");
 #endif
 }
 
@@ -893,18 +893,19 @@ IonScript::purgeCaches(Zone *zone)
     // Don't reset any ICs if we're invalidated, otherwise, repointing the
     // inline jump could overwrite an invalidation marker. These ICs can
     // no longer run, however, the IC slow paths may be active on the stack.
     // ICs therefore are required to check for invalidation before patching,
     // to ensure the same invariant.
     if (invalidated())
         return;
 
-    IonContext ictx(zone->rt);
-    AutoFlushCache afc("purgeCaches", zone->rt->ionRuntime());
+    JSRuntime *rt = zone->runtimeFromMainThread();
+    IonContext ictx(rt);
+    AutoFlushCache afc("purgeCaches", rt->ionRuntime());
     for (size_t i = 0; i < numCaches(); i++)
         getCache(i).reset();
 }
 
 void
 IonScript::destroyCaches()
 {
     for (size_t i = 0; i < numCaches(); i++)
@@ -932,21 +933,22 @@ IonScript::detachDependentAsmJSModules(F
     }
     fop->delete_(dependentAsmJSModules);
     dependentAsmJSModules = NULL;
 }
 
 void
 ion::ToggleBarriers(JS::Zone *zone, bool needs)
 {
-    IonContext ictx(zone->rt);
-    if (!zone->rt->hasIonRuntime())
+    JSRuntime *rt = zone->runtimeFromMainThread();
+    IonContext ictx(rt);
+    if (!rt->hasIonRuntime())
         return;
 
-    AutoFlushCache afc("ToggleBarriers", zone->rt->ionRuntime());
+    AutoFlushCache afc("ToggleBarriers", rt->ionRuntime());
     for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         if (script->hasIonScript())
             script->ionScript()->toggleBarriers(needs);
         if (script->hasBaselineScript())
             script->baselineScript()->toggleBarriers(needs);
     }
 
@@ -2110,18 +2112,18 @@ ion::InvalidateAll(FreeOp *fop, Zone *zo
         if (!comp->ionCompartment())
             continue;
         CancelOffThreadIonCompile(comp, NULL);
         FinishAllOffThreadCompilations(comp->ionCompartment());
     }
 
     for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter) {
         if (iter.activation()->compartment()->zone() == zone) {
-            IonContext ictx(zone->rt);
-            AutoFlushCache afc("InvalidateAll", zone->rt->ionRuntime());
+            IonContext ictx(fop->runtime());
+            AutoFlushCache afc("InvalidateAll", fop->runtime()->ionRuntime());
             IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
             InvalidateActivation(fop, iter.jitTop(), true);
         }
     }
 }
 
 
 void
--- a/js/src/ion/Ion.h
+++ b/js/src/ion/Ion.h
@@ -241,17 +241,17 @@ enum AbortReason {
 // of the Ion compiler. It points to a temporary allocator and the active
 // JSContext, either of which may be NULL, and the active compartment, which
 // will not be NULL.
 
 class IonContext
 {
   public:
     IonContext(JSContext *cx, TempAllocator *temp);
-    IonContext(JSCompartment *comp, TempAllocator *temp);
+    IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp);
     IonContext(JSRuntime *rt);
     ~IonContext();
 
     JSRuntime *runtime;
     JSContext *cx;
     JSCompartment *compartment;
     TempAllocator *temp;
     int getNextAssemblerId() {
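
Since IonContexts are also created for off-main-thread compilation, the two-argument constructor that derived the runtime from comp->rt is gone; callers now pass the runtime explicitly, as the AsmJS.cpp and BaselineJIT.cpp hunks above show. Schematically, with |cx| and |tempAlloc| standing in for whatever the call site has at hand:

    // Before: IonContext ictx(cx->compartment(), &tempAlloc);
    // After:
    IonContext ictx(cx->runtime(), cx->compartment(), &tempAlloc);
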
--- a/js/src/ion/IonMacroAssembler.cpp
+++ b/js/src/ion/IonMacroAssembler.cpp
@@ -473,28 +473,28 @@ MacroAssembler::newGCThing(const Registe
     // Inlined equivalent of js::gc::NewGCThing() without failure case handling.
 
     int thingSize = int(gc::Arena::thingSize(allocKind));
 
     Zone *zone = GetIonContext()->compartment->zone();
 
 #ifdef JS_GC_ZEAL
     // Don't execute the inline path if gcZeal is active.
-    movePtr(ImmWord(zone->rt), result);
+    movePtr(ImmWord(GetIonContext()->runtime), result);
     loadPtr(Address(result, offsetof(JSRuntime, gcZeal_)), result);
     branch32(Assembler::NotEqual, result, Imm32(0), fail);
 #endif
 
     // Don't execute the inline path if the compartment has an object metadata callback,
     // as the metadata to use for the object may vary between executions of the op.
     if (GetIonContext()->compartment->objectMetadataCallback)
         jump(fail);
 
 #ifdef JSGC_GENERATIONAL
-    Nursery &nursery = zone->rt->gcNursery;
+    Nursery &nursery = GetIonContext()->runtime->gcNursery;
     if (nursery.isEnabled() && allocKind <= gc::FINALIZE_OBJECT_LAST) {
         // Inline Nursery::allocate. No explicit check for nursery.isEnabled()
         // is needed, as the comparison with the nursery's end will always fail
         // in such cases.
         loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
         addPtr(Imm32(thingSize), result);
         branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), result, fail);
         storePtr(result, AbsoluteAddress(nursery.addressOfPosition()));
@@ -701,17 +701,17 @@ MacroAssembler::compareStrings(JSOp op, 
 }
 
 void
 MacroAssembler::checkInterruptFlagsPar(const Register &tempReg,
                                             Label *fail)
 {
     JSCompartment *compartment = GetIonContext()->compartment;
 
-    void *interrupt = (void*)&compartment->rt->interrupt;
+    void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
     movePtr(ImmWord(interrupt), tempReg);
     load32(Address(tempReg, 0), tempReg);
     branchTest32(Assembler::NonZero, tempReg, tempReg, fail);
 }
 
 void
 MacroAssembler::maybeRemoveOsrFrame(Register scratch)
 {
--- a/js/src/ion/Lowering.cpp
+++ b/js/src/ion/Lowering.cpp
@@ -2717,17 +2717,17 @@ LIRGenerator::visitCallInstanceOf(MCallI
 bool
 LIRGenerator::visitFunctionBoundary(MFunctionBoundary *ins)
 {
     LFunctionBoundary *lir = new LFunctionBoundary(temp());
     if (!add(lir, ins))
         return false;
     // If slow assertions are enabled, then this node will result in a callVM
     // out to a C++ function for the assertions, so we will need a safepoint.
-    return !gen->compartment->rt->spsProfiler.slowAssertionsEnabled() ||
+    return !GetIonContext()->runtime->spsProfiler.slowAssertionsEnabled() ||
            assignSafepoint(lir, ins);
 }
 
 bool
 LIRGenerator::visitIsCallable(MIsCallable *ins)
 {
     JS_ASSERT(ins->object()->type() == MIRType_Object);
     JS_ASSERT(ins->type() == MIRType_Boolean);
--- a/js/src/ion/MIRGenerator.h
+++ b/js/src/ion/MIRGenerator.h
@@ -55,17 +55,17 @@ class MIRGenerator
     }
     bool ensureBallast() {
         return temp().ensureBallast();
     }
     IonCompartment *ionCompartment() const {
         return compartment->ionCompartment();
     }
     IonRuntime *ionRuntime() const {
-        return compartment->rt->ionRuntime();
+        return GetIonContext()->runtime->ionRuntime();
     }
     CompileInfo &info() {
         return *info_;
     }
 
     template <typename T>
     T * allocate(size_t count = 1) {
         return reinterpret_cast<T *>(temp().allocate(sizeof(T) * count));
@@ -76,17 +76,17 @@ class MIRGenerator
     bool abort(const char *message, ...);
     bool abortFmt(const char *message, va_list ap);
 
     bool errored() const {
         return error_;
     }
 
     bool instrumentedProfiling() {
-        return compartment->rt->spsProfiler.enabled();
+        return GetIonContext()->runtime->spsProfiler.enabled();
     }
 
     // Whether the main thread is trying to cancel this build.
     bool shouldCancel(const char *why) {
         return cancelBuild_;
     }
     void cancel() {
         cancelBuild_ = 1;
--- a/js/src/ion/arm/Bailouts-arm.cpp
+++ b/js/src/ion/arm/Bailouts-arm.cpp
@@ -127,17 +127,17 @@ IonBailoutIterator::IonBailoutIterator(c
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
     JitActivation *activation = activations.activation()->asJit();
-    JSRuntime *rt = activation->compartment()->rt;
+    JSRuntime *rt = activation->compartment()->runtimeFromMainThread();
     IonCode *code = rt->ionRuntime()->getBailoutTable(bailout->frameClass());
     uintptr_t tableOffset = bailout->tableOffset();
     uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());
 
     JS_ASSERT(tableOffset >= tableStart &&
               tableOffset < tableStart + code->instructionsSize());
     JS_ASSERT((tableOffset - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
 
--- a/js/src/ion/arm/CodeGenerator-arm.cpp
+++ b/js/src/ion/arm/CodeGenerator-arm.cpp
@@ -1717,17 +1717,17 @@ static const VMFunction InterruptCheckIn
 
 bool
 CodeGeneratorARM::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
-    void *interrupt = (void*)&gen->compartment->rt->interrupt;
+    void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
     masm.load32(AbsoluteAddress(interrupt), lr);
     masm.ma_cmp(lr, Imm32(0));
     masm.ma_b(ool->entry(), Assembler::NonZero);
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
--- a/js/src/ion/arm/MacroAssembler-arm.cpp
+++ b/js/src/ion/arm/MacroAssembler-arm.cpp
@@ -2960,17 +2960,17 @@ MacroAssemblerARMCompat::storeTypeTag(Im
     ma_add(base, Imm32(NUNBOX32_TYPE_OFFSET), base);
     ma_mov(tag, ScratchRegister);
     ma_str(ScratchRegister, DTRAddr(base, DtrRegImmShift(index, LSL, shift)));
     ma_sub(base, Imm32(NUNBOX32_TYPE_OFFSET), base);
 }
 
 void
 MacroAssemblerARMCompat::linkExitFrame() {
-    uint8_t *dest = ((uint8_t*)GetIonContext()->compartment->rt) + offsetof(JSRuntime, mainThread.ionTop);
+    uint8_t *dest = ((uint8_t*)GetIonContext()->runtime) + offsetof(JSRuntime, mainThread.ionTop);
     movePtr(ImmWord(dest), ScratchRegister);
     ma_str(StackPointer, Operand(ScratchRegister, 0));
 }
 
 void
 MacroAssemblerARMCompat::linkParallelExitFrame(const Register &pt)
 {
     ma_str(StackPointer, Operand(pt, offsetof(PerThreadData, ionTop)));
--- a/js/src/ion/shared/CodeGenerator-shared.cpp
+++ b/js/src/ion/shared/CodeGenerator-shared.cpp
@@ -41,17 +41,17 @@ CodeGeneratorShared::CodeGeneratorShared
     gen(gen),
     graph(*graph),
     current(NULL),
     deoptTable_(NULL),
 #ifdef DEBUG
     pushedArgs_(0),
 #endif
     lastOsiPointOffset_(0),
-    sps_(&gen->compartment->rt->spsProfiler, &lastPC_),
+    sps_(&GetIonContext()->runtime->spsProfiler, &lastPC_),
     osrEntryOffset_(0),
     skipArgCheckEntryOffset_(0),
     frameDepth_(graph->localSlotCount() * sizeof(STACK_SLOT_SIZE) +
                 graph->argumentSlotCount() * sizeof(Value))
 {
     if (!gen->compilingAsmJS())
         masm.setInstrumentation(&sps_);
 
--- a/js/src/ion/x64/CodeGenerator-x64.cpp
+++ b/js/src/ion/x64/CodeGenerator-x64.cpp
@@ -288,17 +288,17 @@ static const VMFunction InterruptCheckIn
 
 bool
 CodeGeneratorX64::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
-    void *interrupt = (void*)&gen->compartment->rt->interrupt;
+    void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
     masm.movq(ImmWord(interrupt), ScratchReg);
     masm.cmpl(Operand(ScratchReg, 0), Imm32(0));
     masm.j(Assembler::NonZero, ool->entry());
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
--- a/js/src/ion/x86/Bailouts-x86.cpp
+++ b/js/src/ion/x86/Bailouts-x86.cpp
@@ -80,17 +80,17 @@ IonBailoutIterator::IonBailoutIterator(c
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
     JitActivation *activation = activations.activation()->asJit();
-    JSRuntime *rt = activation->compartment()->rt;
+    JSRuntime *rt = activation->compartment()->runtimeFromMainThread();
     IonCode *code = rt->ionRuntime()->getBailoutTable(bailout->frameClass());
     uintptr_t tableOffset = bailout->tableOffset();
     uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());
 
     JS_ASSERT(tableOffset >= tableStart &&
               tableOffset < tableStart + code->instructionsSize());
     JS_ASSERT((tableOffset - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
 
--- a/js/src/ion/x86/CodeGenerator-x86.cpp
+++ b/js/src/ion/x86/CodeGenerator-x86.cpp
@@ -270,17 +270,17 @@ static const VMFunction InterruptCheckIn
 
 bool
 CodeGeneratorX86::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
-    void *interrupt = (void*)&gen->compartment->rt->interrupt;
+    void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
     masm.cmpl(Operand(interrupt), Imm32(0));
     masm.j(Assembler::NonZero, ool->entry());
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
 CodeGeneratorX86::visitCompareB(LCompareB *lir)
--- a/js/src/ion/x86/MacroAssembler-x86.h
+++ b/js/src/ion/x86/MacroAssembler-x86.h
@@ -945,18 +945,18 @@ class MacroAssemblerX86 : public MacroAs
     void makeFrameDescriptor(Register frameSizeReg, FrameType type) {
         shll(Imm32(FRAMESIZE_SHIFT), frameSizeReg);
         orl(Imm32(type), frameSizeReg);
     }
 
     // Save an exit frame (which must be aligned to the stack pointer) to
     // PerThreadData::ionTop of the main thread.
     void linkExitFrame() {
-        JSCompartment *compartment = GetIonContext()->compartment;
-        movl(StackPointer, Operand(&compartment->rt->mainThread.ionTop));
+        JSRuntime *runtime = GetIonContext()->runtime;
+        movl(StackPointer, Operand(&runtime->mainThread.ionTop));
     }
 
     void callWithExitFrame(IonCode *target, Register dynStack) {
         addPtr(Imm32(framePushed()), dynStack);
         makeFrameDescriptor(dynStack, IonFrame_OptimizedJS);
         Push(dynStack);
         call(target);
     }
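
Both linkExitFrame variants above store the stack pointer into PerThreadData::ionTop at an address computed from the runtime base plus a field offset; the change is only where that base pointer comes from (the IonContext's runtime rather than compartment->rt). A hedged sketch of the offsetof arithmetic, with simplified stand-in types rather than the real JSRuntime:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    struct PerThreadData { uintptr_t ionTop; };
    struct JSRuntime     { PerThreadData mainThread; };

    int main() {
        JSRuntime rt = {};
        // Equivalent of: (uint8_t*)runtime + offsetof(JSRuntime, mainThread.ionTop)
        uint8_t *dest = reinterpret_cast<uint8_t *>(&rt) +
                        offsetof(JSRuntime, mainThread) +
                        offsetof(PerThreadData, ionTop);
        uintptr_t fakeStackPointer = 0x7fff0000;
        // This is the store that ma_str/movl performs in the assembly above.
        *reinterpret_cast<uintptr_t *>(dest) = fakeStackPointer;
        assert(rt.mainThread.ionTop == fakeStackPointer);
        return 0;
    }
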
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -788,34 +788,35 @@ JS_SetRuntimePrivate(JSRuntime *rt, void
     rt->data = data;
 }
 
 #ifdef JS_THREADSAFE
 static void
 StartRequest(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime();
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     if (rt->requestDepth) {
         rt->requestDepth++;
     } else {
         /* Indicate that a request is running. */
         rt->requestDepth = 1;
 
         if (rt->activityCallback)
             rt->activityCallback(rt->activityCallbackArg, true);
     }
 }
 
 static void
 StopRequest(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime();
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
+
     JS_ASSERT(rt->requestDepth != 0);
     if (rt->requestDepth != 1) {
         rt->requestDepth--;
     } else {
         rt->conservativeGC.updateForRequestEnd();
         rt->requestDepth = 0;
 
         if (rt->activityCallback)
@@ -842,17 +843,17 @@ JS_EndRequest(JSContext *cx)
     StopRequest(cx);
 #endif
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsInRequest(JSRuntime *rt)
 {
 #ifdef JS_THREADSAFE
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
     return rt->requestDepth != 0;
 #else
     return false;
 #endif
 }
 
 JS_PUBLIC_API(void)
 JS_SetContextCallback(JSRuntime *rt, JSContextCallback cxCallback, void *data)
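
StartRequest and StopRequest above maintain a simple nesting counter: the activity callback fires only on the outermost 0-to-1 and 1-to-0 transitions. A toy model of that counting, with invented names standing in for the JSAPI:

    #include <cassert>
    #include <cstdio>

    struct Runtime {
        unsigned requestDepth = 0;
        void (*activityCallback)(bool active) = nullptr;
    };

    void beginRequest(Runtime &rt) {
        if (rt.requestDepth++ == 0 && rt.activityCallback)
            rt.activityCallback(true);    // became active
    }

    void endRequest(Runtime &rt) {
        assert(rt.requestDepth != 0);
        if (--rt.requestDepth == 0 && rt.activityCallback)
            rt.activityCallback(false);   // became idle
    }

    int main() {
        Runtime rt;
        rt.activityCallback = [](bool active) {
            std::printf("runtime %s\n", active ? "active" : "idle");
        };
        beginRequest(rt);   // prints "runtime active"
        beginRequest(rt);   // nested: no callback
        endRequest(rt);     // nested: no callback
        endRequest(rt);     // prints "runtime idle"
        return 0;
    }
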
@@ -3044,17 +3045,17 @@ JS_PUBLIC_API(JSBool)
 JS_IsNative(JSObject *obj)
 {
     return obj->isNative();
 }
 
 JS_PUBLIC_API(JSRuntime *)
 JS_GetObjectRuntime(JSObject *obj)
 {
-    return obj->compartment()->rt;
+    return obj->compartment()->runtimeFromMainThread();
 }
 
 JS_PUBLIC_API(JSBool)
 JS_FreezeObject(JSContext *cx, JSObject *objArg)
 {
     RootedObject obj(cx, objArg);
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
@@ -6594,17 +6595,20 @@ JS_SetRuntimeThread(JSRuntime *rt)
 #ifdef JS_THREADSAFE
     rt->setOwnerThread();
 #endif
 }
 
 extern JS_NEVER_INLINE JS_PUBLIC_API(void)
 JS_AbortIfWrongThread(JSRuntime *rt)
 {
-    rt->abortIfWrongThread();
+    if (!CurrentThreadCanAccessRuntime(rt))
+        MOZ_CRASH();
+    if (!js::TlsPerThreadData.get()->associatedWith(rt))
+        MOZ_CRASH();
 }
 
 #ifdef JS_GC_ZEAL
 JS_PUBLIC_API(void)
 JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency)
 {
     SetGCZeal(cx->runtime(), zeal, frequency);
 }
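
The patch replaces the member assertion assertValidThread() with the free function CurrentThreadCanAccessRuntime(rt) throughout. Its actual definition lives in vm/Runtime.h and is not shown in these hunks; a plausible model, under the assumption that the check boils down to comparing the caller against a recorded owner thread, looks like this:

    #include <cassert>
    #include <thread>

    struct Runtime {
        std::thread::id ownerThread = std::this_thread::get_id();
    };

    static bool currentThreadCanAccessRuntime(const Runtime *rt) {
        return std::this_thread::get_id() == rt->ownerThread;
    }

    int main() {
        Runtime rt;
        assert(currentThreadCanAccessRuntime(&rt));   // owner thread: OK
        std::thread other([&rt] {
            assert(!currentThreadCanAccessRuntime(&rt));  // foreign thread: fails
        });
        other.join();
        return 0;
    }
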
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -1299,26 +1299,26 @@ JSContext::findVersion() const
 }
 
 #if defined JS_THREADSAFE && defined DEBUG
 
 JS::AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext *cx)
     : cx(cx)
 {
     JS_ASSERT(cx->runtime()->requestDepth || cx->runtime()->isHeapBusy());
-    cx->runtime()->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
     cx->runtime()->checkRequestDepth++;
 }
 
 JS::AutoCheckRequestDepth::AutoCheckRequestDepth(ContextFriendFields *cxArg)
     : cx(static_cast<ThreadSafeContext *>(cxArg)->maybeJSContext())
 {
     if (cx) {
         JS_ASSERT(cx->runtime()->requestDepth || cx->runtime()->isHeapBusy());
-        cx->runtime()->assertValidThread();
+        JS_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
         cx->runtime()->checkRequestDepth++;
     }
 }
 
 JS::AutoCheckRequestDepth::~AutoCheckRequestDepth()
 {
     if (cx) {
         JS_ASSERT(cx->runtime()->checkRequestDepth != 0);
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -49,24 +49,24 @@ class CompartmentChecker
 
     static void fail(JS::Zone *z1, JS::Zone *z2) {
         printf("*** Zone mismatch %p vs. %p\n", (void *) z1, (void *) z2);
         MOZ_CRASH();
     }
 
     /* Note: should only be used when neither c1 nor c2 may be the default compartment. */
     static void check(JSCompartment *c1, JSCompartment *c2) {
-        JS_ASSERT(c1 != c1->rt->atomsCompartment);
-        JS_ASSERT(c2 != c2->rt->atomsCompartment);
+        JS_ASSERT(c1 != c1->runtimeFromMainThread()->atomsCompartment);
+        JS_ASSERT(c2 != c2->runtimeFromMainThread()->atomsCompartment);
         if (c1 != c2)
             fail(c1, c2);
     }
 
     void check(JSCompartment *c) {
-        if (c && c != compartment->rt->atomsCompartment) {
+        if (c && c != compartment->runtimeFromMainThread()->atomsCompartment) {
             if (!compartment)
                 compartment = c;
             else if (c != compartment)
                 fail(compartment, c);
         }
     }
 
     void checkZone(JS::Zone *z) {
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -31,65 +31,65 @@
 #include "gc/Barrier-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 using mozilla::DebugOnly;
 
 JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options = JS::CompartmentOptions())
-  : zone_(zone),
-    options_(options),
-    rt(zone->rt),
+  : options_(options),
+    zone_(zone),
+    runtime_(zone->runtimeFromMainThread()),
     principals(NULL),
     isSystem(false),
     marked(true),
 #ifdef DEBUG
     firedOnNewGlobalObject(false),
 #endif
     global_(NULL),
     enterCompartmentDepth(0),
     lastCodeRelease(0),
     analysisLifoAlloc(ANALYSIS_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     data(NULL),
     objectMetadataCallback(NULL),
     lastAnimationTime(0),
-    regExps(rt),
+    regExps(runtime_),
     propertyTree(thisForCtor()),
     gcIncomingGrayPointers(NULL),
     gcLiveArrayBuffers(NULL),
     gcWeakMapList(NULL),
-    debugModeBits(rt->debugMode ? DebugFromC : 0),
+    debugModeBits(runtime_->debugMode ? DebugFromC : 0),
     rngState(0),
     watchpointMap(NULL),
     scriptCountsMap(NULL),
     debugScriptMap(NULL),
     debugScopes(NULL),
     enumerators(NULL),
     compartmentStats(NULL)
 #ifdef JS_ION
     , ionCompartment_(NULL)
 #endif
 {
-    rt->numCompartments++;
+    runtime_->numCompartments++;
 }
 
 JSCompartment::~JSCompartment()
 {
 #ifdef JS_ION
     js_delete(ionCompartment_);
 #endif
 
     js_delete(watchpointMap);
     js_delete(scriptCountsMap);
     js_delete(debugScriptMap);
     js_delete(debugScopes);
     js_free(enumerators);
 
-    rt->numCompartments--;
+    runtime_->numCompartments--;
 }
 
 bool
 JSCompartment::init(JSContext *cx)
 {
     /*
      * As a hack, we clear our timezone cache every time we create a new
      * compartment. This ensures that the cache is always relatively fresh, but
@@ -191,16 +191,18 @@ JSCompartment::putWrapper(const CrossCom
     JS_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
     JS_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
     return crossCompartmentWrappers.put(wrapped, wrapper);
 }
 
 bool
 JSCompartment::wrap(JSContext *cx, MutableHandleValue vp, HandleObject existingArg)
 {
+    JSRuntime *rt = runtimeFromMainThread();
+
     JS_ASSERT(cx->compartment() == this);
     JS_ASSERT(this != rt->atomsCompartment);
     JS_ASSERT_IF(existingArg, existingArg->compartment() == cx->compartment());
     JS_ASSERT_IF(existingArg, vp.isObject());
     JS_ASSERT_IF(existingArg, IsDeadProxyObject(existingArg));
 
     unsigned flags = 0;
 
@@ -516,16 +518,18 @@ JSCompartment::mark(JSTracer *trc)
 void
 JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
 {
     JS_ASSERT(!activeAnalysis);
 
     /* This function includes itself in PHASE_SWEEP_TABLES. */
     sweepCrossCompartmentWrappers();
 
+    JSRuntime *rt = runtimeFromMainThread();
+
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
 
         /* Remove dead references held weakly by the compartment. */
 
         sweepBaseShapeTable();
         sweepInitialShapeTable();
         sweepNewTypeObjectTable(newTypeObjects);
@@ -577,16 +581,18 @@ JSCompartment::sweep(FreeOp *fop, bool r
 /*
  * Remove dead wrappers from the table. We must sweep all compartments, since
  * string entries in the crossCompartmentWrappers table are not marked during
  * markCrossCompartmentWrappers.
  */
 void
 JSCompartment::sweepCrossCompartmentWrappers()
 {
+    JSRuntime *rt = runtimeFromMainThread();
+
     gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
     gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);
 
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey key = e.front().key;
         bool keyDying = IsCellAboutToBeFinalized(&key.wrapped);
         bool valDying = IsValueAboutToBeFinalized(e.front().value.unsafeGet());
@@ -604,17 +610,17 @@ void
 JSCompartment::purge()
 {
     dtoaCache.purge();
 }
 
 bool
 JSCompartment::hasScriptsOnStack()
 {
-    for (ActivationIterator iter(rt); !iter.done(); ++iter) {
+    for (ActivationIterator iter(runtimeFromMainThread()); !iter.done(); ++iter) {
         if (iter.activation()->compartment() == this)
             return true;
     }
 
     return false;
 }
 
 static bool
@@ -724,16 +730,18 @@ JSCompartment::setDebugModeFromC(JSConte
             DebugScopes::onCompartmentLeaveDebugMode(this);
     }
     return true;
 }
 
 void
 JSCompartment::updateForDebugMode(FreeOp *fop, AutoDebugModeGC &dmgc)
 {
+    JSRuntime *rt = runtimeFromMainThread();
+
     for (ContextIter acx(rt); !acx.done(); acx.next()) {
         if (acx->compartment() == this)
             acx->updateJITEnabled();
     }
 
 #ifdef JS_ION
     JS_ASSERT_IF(debugMode(), !hasScriptsOnStack());
 
@@ -784,17 +792,17 @@ JSCompartment::addDebuggee(JSContext *cx
     return true;
 }
 
 void
 JSCompartment::removeDebuggee(FreeOp *fop,
                               js::GlobalObject *global,
                               js::GlobalObjectSet::Enum *debuggeesEnum)
 {
-    AutoDebugModeGC dmgc(rt);
+    AutoDebugModeGC dmgc(fop->runtime());
     return removeDebuggee(fop, global, dmgc, debuggeesEnum);
 }
 
 void
 JSCompartment::removeDebuggee(FreeOp *fop,
                               js::GlobalObject *global,
                               AutoDebugModeGC &dmgc,
                               js::GlobalObjectSet::Enum *debuggeesEnum)
@@ -823,17 +831,17 @@ JSCompartment::clearBreakpointsIn(FreeOp
         if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
             script->clearBreakpointsIn(fop, dbg, handler);
     }
 }
 
 void
 JSCompartment::clearTraps(FreeOp *fop)
 {
-    MinorGC(rt, JS::gcreason::EVICT_NURSERY);
+    MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);
     for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
             script->clearTraps(fop);
     }
 }
 
 void
@@ -858,10 +866,10 @@ JSCompartment::sizeOfIncludingThis(mozil
 #else
     *baselineStubsOptimized = 0;
 #endif
 }
 
 void
 JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
 {
-    zone()->allocator.arenas.adoptArenas(rt, &workerAllocator->arenas);
+    zone()->allocator.arenas.adoptArenas(runtimeFromMainThread(), &workerAllocator->arenas);
 }
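
The constructor's initializer list at the top of this file was reordered to match the new field order in jscompartment.h below: C++ always initializes non-static members in declaration order, regardless of how the initializer list is written, and compilers warn (e.g. -Wreorder) when the two disagree. A minimal demonstration of that rule, with toy types rather than the real classes:

    #include <cassert>

    struct Zone { int runtime = 42; };

    struct Compartment {
        Zone *zone_;       // declared first, so initialized first
        int   runtime_;    // safe to initialize from zone_ below
        explicit Compartment(Zone *z) : zone_(z), runtime_(zone_->runtime) {}
    };

    int main() {
        Zone z;
        Compartment c(&z);
        assert(c.runtime_ == 42);
        return 0;
    }
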
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -117,20 +117,23 @@ namespace js {
 class AutoDebugModeGC;
 class ArrayBufferObject;
 class DebugScopes;
 class WeakMapBase;
 }
 
 struct JSCompartment
 {
-    JS::Zone                     *zone_;
     JS::CompartmentOptions       options_;
 
-    JSRuntime                    *rt;
+  private:
+    JS::Zone                     *zone_;
+    JSRuntime                    *runtime_;
+
+  public:
     JSPrincipals                 *principals;
     bool                         isSystem;
     bool                         marked;
 
 #ifdef DEBUG
     bool                         firedOnNewGlobalObject;
 #endif
 
@@ -149,16 +152,27 @@ struct JSCompartment
     void leave() { enterCompartmentDepth--; }
     bool hasBeenEntered() { return !!enterCompartmentDepth; }
 
     JS::Zone *zone() { return zone_; }
     const JS::Zone *zone() const { return zone_; }
     JS::CompartmentOptions &options() { return options_; }
     const JS::CompartmentOptions &options() const { return options_; }
 
+    JSRuntime *runtimeFromMainThread() {
+        JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
+        return runtime_;
+    }
+
+    // Note: Unrestricted access to the compartment's runtime from an
+    // arbitrary thread can easily lead to races. Use this method carefully.
+    JSRuntime *runtimeFromAnyThread() const {
+        return runtime_;
+    }
+
     /*
      * Nb: global_ might be NULL, if (a) it's the atoms compartment, or (b) the
      * compartment's global has been collected.  The latter can happen if e.g.
      * a string in a compartment is rooted but no object is, and thus the global
      * isn't rooted, and thus the global can be finalized while the compartment
      * lives on.
      *
      * In contrast, JSObject::global() is infallible because marking a JSObject
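
The new accessors split the old raw rt field into a checked main-thread getter and an unchecked escape hatch. A model of that split, with simplified types standing in for JSCompartment and JSRuntime:

    #include <cassert>
    #include <thread>

    struct Runtime { std::thread::id owner = std::this_thread::get_id(); };

    class Compartment {
        Runtime *runtime_;
      public:
        explicit Compartment(Runtime *rt) : runtime_(rt) {}
        Runtime *runtimeFromMainThread() {
            assert(std::this_thread::get_id() == runtime_->owner);
            return runtime_;
        }
        Runtime *runtimeFromAnyThread() const { return runtime_; }  // use carefully
    };

    int main() {
        Runtime rt;
        Compartment comp(&rt);
        assert(comp.runtimeFromMainThread() == &rt);  // on the owner thread
        std::thread helper([&comp] {
            // Off the main thread only the unchecked accessor is legal.
            assert(comp.runtimeFromAnyThread() != nullptr);
        });
        helper.join();
        return 0;
    }
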
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -213,22 +213,22 @@ JS_FRIEND_API(void)
 JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals)
 {
     // Short circuit if there's no change.
     if (principals == compartment->principals)
         return;
 
     // Any compartment with the trusted principals -- and there can be
     // multiple -- is a system compartment.
-    JSPrincipals *trusted = compartment->rt->trustedPrincipals();
+    JSPrincipals *trusted = compartment->runtimeFromMainThread()->trustedPrincipals();
     bool isSystem = principals && principals == trusted;
 
     // Clear out the old principals, if any.
     if (compartment->principals) {
-        JS_DropPrincipals(compartment->rt, compartment->principals);
+        JS_DropPrincipals(compartment->runtimeFromMainThread(), compartment->principals);
         compartment->principals = NULL;
         // We'd like to assert that our new principals is always same-origin
         // with the old one, but JSPrincipals doesn't give us a way to do that.
         // But we can at least assert that we're not switching between system
         // and non-system.
         JS_ASSERT(compartment->isSystem == isSystem);
     }
 
@@ -332,17 +332,17 @@ JS_FRIEND_API(bool)
 js::IsSystemZone(Zone *zone)
 {
     return zone->isSystem;
 }
 
 JS_FRIEND_API(bool)
 js::IsAtomsCompartment(JSCompartment *comp)
 {
-    return comp == comp->rt->atomsCompartment;
+    return comp == comp->runtimeFromAnyThread()->atomsCompartment;
 }
 
 JS_FRIEND_API(bool)
 js::IsFunctionObject(JSObject *obj)
 {
     return obj->is<JSFunction>();
 }
 
@@ -915,17 +915,17 @@ JS::IsIncrementalBarrierNeeded(JSContext
 }
 
 JS_FRIEND_API(void)
 JS::IncrementalObjectBarrier(JSObject *obj)
 {
     if (!obj)
         return;
 
-    JS_ASSERT(!obj->zone()->rt->isHeapMajorCollecting());
+    JS_ASSERT(!obj->zone()->runtimeFromMainThread()->isHeapMajorCollecting());
 
     AutoMarkInDeadZone amn(obj->zone());
 
     JSObject::writeBarrierPre(obj);
 }
 
 JS_FRIEND_API(void)
 JS::IncrementalReferenceBarrier(void *ptr, JSGCTraceKind kind)
@@ -933,17 +933,17 @@ JS::IncrementalReferenceBarrier(void *pt
     if (!ptr)
         return;
 
     gc::Cell *cell = static_cast<gc::Cell *>(ptr);
     Zone *zone = kind == JSTRACE_OBJECT
                  ? static_cast<JSObject *>(cell)->zone()
                  : cell->tenuredZone();
 
-    JS_ASSERT(!zone->rt->isHeapMajorCollecting());
+    JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
 
     AutoMarkInDeadZone amn(zone);
 
     if (kind == JSTRACE_OBJECT)
         JSObject::writeBarrierPre(static_cast<JSObject*>(cell));
     else if (kind == JSTRACE_STRING)
         JSString::writeBarrierPre(static_cast<JSString*>(cell));
     else if (kind == JSTRACE_SCRIPT)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -254,17 +254,17 @@ ArenaHeader::checkSynchronizedWithFreeLi
      */
     JS_ASSERT(allocated());
 
     /*
     * We can be called from the background finalization thread, in which
     * case the free list in the zone can mutate at any moment. We cannot do
     * any checks in this case.
      */
-    if (IsBackgroundFinalized(getAllocKind()) && zone->rt->gcHelperThread.onBackgroundThread())
+    if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gcHelperThread.onBackgroundThread())
         return;
 
     FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     if (firstSpan.isEmpty())
         return;
     const FreeSpan *list = zone->allocator.arenas.getFreeList(getAllocKind());
     if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
         return;
@@ -664,17 +664,17 @@ Chunk::init(JSRuntime *rt)
     }
 
     /* The rest of the info fields are initialized in PickChunk. */
 }
 
 static inline Chunk **
 GetAvailableChunkList(Zone *zone)
 {
-    JSRuntime *rt = zone->rt;
+    JSRuntime *rt = zone->runtimeFromAnyThread();
     return zone->isSystem
            ? &rt->gcSystemAvailableChunkListHead
            : &rt->gcUserAvailableChunkListHead;
 }
 
 inline void
 Chunk::addToAvailableList(Zone *zone)
 {
@@ -763,17 +763,17 @@ Chunk::fetchNextFreeArena(JSRuntime *rt)
     return aheader;
 }
 
 ArenaHeader *
 Chunk::allocateArena(Zone *zone, AllocKind thingKind)
 {
     JS_ASSERT(hasAvailableArenas());
 
-    JSRuntime *rt = zone->rt;
+    JSRuntime *rt = zone->runtimeFromAnyThread();
     if (!rt->isHeapMinorCollecting() && rt->gcBytes >= rt->gcMaxBytes)
         return NULL;
 
     ArenaHeader *aheader = JS_LIKELY(info.numArenasFreeCommitted > 0)
                            ? fetchNextFreeArena(rt)
                            : fetchNextDecommittedArena();
     aheader->init(zone, thingKind);
     if (JS_UNLIKELY(!hasAvailableArenas()))
@@ -799,17 +799,17 @@ Chunk::addArenaToFreeList(JSRuntime *rt,
 }
 
 void
 Chunk::releaseArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
     JS_ASSERT(!aheader->hasDelayedMarking);
     Zone *zone = aheader->zone;
-    JSRuntime *rt = zone->rt;
+    JSRuntime *rt = zone->runtimeFromAnyThread();
     AutoLockGC maybeLock;
     if (rt->gcHelperThread.sweeping())
         maybeLock.lock(rt);
 
     JS_ASSERT(rt->gcBytes >= ArenaSize);
     JS_ASSERT(zone->gcBytes >= ArenaSize);
     if (rt->gcHelperThread.sweeping())
         zone->reduceGCTriggerBytes(zone->gcHeapGrowthFactor * ArenaSize);
@@ -831,17 +831,17 @@ Chunk::releaseArena(ArenaHeader *aheader
         rt->gcChunkPool.put(this);
     }
 }
 
 /* The caller must hold the GC lock. */
 static Chunk *
 PickChunk(Zone *zone)
 {
-    JSRuntime *rt = zone->rt;
+    JSRuntime *rt = zone->runtimeFromAnyThread();
     Chunk **listHeadp = GetAvailableChunkList(zone);
     Chunk *chunk = *listHeadp;
     if (chunk)
         return chunk;
 
     chunk = rt->gcChunkPool.get(rt);
     if (!chunk)
         return NULL;
@@ -1105,32 +1105,33 @@ js_RemoveRoot(JSRuntime *rt, void *rp)
 
 typedef RootedValueMap::Range RootRange;
 typedef RootedValueMap::Entry RootEntry;
 typedef RootedValueMap::Enum RootEnum;
 
 static size_t
 ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
 {
-    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->rt->gcAllocationThreshold);
+    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gcAllocationThreshold);
     float trigger = float(base) * zone->gcHeapGrowthFactor;
     return size_t(Min(float(maxBytes), trigger));
 }
 
 void
 Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
 {
     /*
      * The heap growth factor depends on the heap size after a GC and the GC frequency.
      * For low frequency GCs (more than 1sec between GCs) we let the heap grow to 150%.
      * For high frequency GCs we let the heap grow depending on the heap size:
      *   lastBytes < highFrequencyLowLimit: 300%
      *   lastBytes > highFrequencyHighLimit: 150%
      *   otherwise: linear interpolation between 150% and 300% based on lastBytes
      */
+    JSRuntime *rt = runtimeFromMainThread();
 
     if (!rt->gcDynamicHeapGrowth) {
         gcHeapGrowthFactor = 3.0;
     } else if (lastBytes < 1 * 1024 * 1024) {
         gcHeapGrowthFactor = rt->gcLowFrequencyHeapGrowth;
     } else {
         JS_ASSERT(rt->gcHighFrequencyHighLimitBytes > rt->gcHighFrequencyLowLimitBytes);
         uint64_t now = PRMJ_Now();
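
The comment above pins the heuristic down numerically; the sketch below works one case through. The interpolation form and the 100MB/500MB limits are illustrative assumptions consistent with the comment, not the exact jsgc.cpp arithmetic:

    #include <cstdio>

    static double growthFactor(bool highFrequency, double lastBytes,
                               double lowLimit, double highLimit) {
        if (!highFrequency)
            return 1.5;                    // low-frequency GC: 150%
        if (lastBytes <= lowLimit)
            return 3.0;                    // small heap: 300%
        if (lastBytes >= highLimit)
            return 1.5;                    // large heap: 150%
        double t = (lastBytes - lowLimit) / (highLimit - lowLimit);
        return 3.0 + t * (1.5 - 3.0);      // linear interpolation between 300% and 150%
    }

    int main() {
        // Assumed limits of 100MB and 500MB, for illustration only.
        std::printf("%.2f\n", growthFactor(true, 300e6, 100e6, 500e6));  // 2.25
        return 0;
    }
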
@@ -1156,17 +1157,17 @@ Zone::setGCLastBytes(size_t lastBytes, J
     gcTriggerBytes = ComputeTriggerBytes(this, lastBytes, rt->gcMaxBytes, gckind);
 }
 
 void
 Zone::reduceGCTriggerBytes(size_t amount)
 {
     JS_ASSERT(amount > 0);
     JS_ASSERT(gcTriggerBytes >= amount);
-    if (gcTriggerBytes - amount < rt->gcAllocationThreshold * gcHeapGrowthFactor)
+    if (gcTriggerBytes - amount < runtimeFromAnyThread()->gcAllocationThreshold * gcHeapGrowthFactor)
         return;
     gcTriggerBytes -= amount;
 }
 
 Allocator::Allocator(Zone *zone)
   : zone_(zone)
 {}
 
@@ -1211,17 +1212,17 @@ ArenaLists::allocateFromArenaInline(Zone
 #ifdef JS_THREADSAFE
     volatile uintptr_t *bfs = &backgroundFinalizeState[thingKind];
     if (*bfs != BFS_DONE) {
         /*
         * We cannot search the arena list for free things while background
         * finalization is running, since it can modify the head or cursor at
         * any moment. So we always allocate a new arena in that case.
          */
-        maybeLock.lock(zone->rt);
+        maybeLock.lock(zone->runtimeFromMainThread());
         if (*bfs == BFS_RUN) {
             JS_ASSERT(!*al->cursor);
             chunk = PickChunk(zone);
             if (!chunk) {
                 /*
                 * Let the caller wait for the background allocation to
                  * finish and restart the allocation attempt.
                  */
@@ -1251,27 +1252,27 @@ ArenaLists::allocateFromArenaInline(Zone
              * Move the free span stored in the arena to the free list and
              * allocate from it.
              */
             freeLists[thingKind] = aheader->getFirstFreeSpan();
             aheader->setAsFullyUsed();
             if (JS_UNLIKELY(zone->wasGCStarted())) {
                 if (zone->needsBarrier()) {
                     aheader->allocatedDuringIncremental = true;
-                    zone->rt->gcMarker.delayMarkingArena(aheader);
+                    zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader);
                 } else if (zone->isGCSweeping()) {
-                    PushArenaAllocatedDuringSweep(zone->rt, aheader);
+                    PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
                 }
             }
             return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
         }
 
         /* Make sure we hold the GC lock before we call PickChunk. */
         if (!maybeLock.locked())
-            maybeLock.lock(zone->rt);
+            maybeLock.lock(zone->runtimeFromAnyThread());
         chunk = PickChunk(zone);
         if (!chunk)
             return NULL;
     }
 
     /*
     * While we still hold the GC lock, get an arena from some chunk, mark it
     * as full as its single free span is moved to the free list, and insert
@@ -1284,19 +1285,19 @@ ArenaLists::allocateFromArenaInline(Zone
     JS_ASSERT(!*al->cursor);
     ArenaHeader *aheader = chunk->allocateArena(zone, thingKind);
     if (!aheader)
         return NULL;
 
     if (JS_UNLIKELY(zone->wasGCStarted())) {
         if (zone->needsBarrier()) {
             aheader->allocatedDuringIncremental = true;
-            zone->rt->gcMarker.delayMarkingArena(aheader);
+            zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader);
         } else if (zone->isGCSweeping()) {
-            PushArenaAllocatedDuringSweep(zone->rt, aheader);
+            PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
         }
     }
     aheader->next = al->head;
     if (!al->head) {
         JS_ASSERT(al->cursor == &al->head);
         al->cursor = &aheader->next;
     }
     al->head = aheader;
@@ -1501,24 +1502,23 @@ RunLastDitchGC(JSContext *cx, JS::Zone *
     return NULL;
 }
 
 template <AllowGC allowGC>
 /* static */ void *
 ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind)
 {
     JS_ASSERT(cx->allocator()->arenas.freeLists[thingKind].isEmpty());
+    JS_ASSERT(!cx->isHeapBusy());
 
     Zone *zone = cx->allocator()->zone_;
-    JSRuntime *rt = zone->rt;
-    JS_ASSERT(!rt->isHeapBusy());
-
-    bool runGC = rt->gcIncrementalState != NO_INCREMENTAL &&
-                 zone->gcBytes > zone->gcTriggerBytes &&
-                 cx->allowGC() && allowGC;
+
+    bool runGC = cx->allowGC() && allowGC &&
+                 cx->asJSContext()->runtime()->gcIncrementalState != NO_INCREMENTAL &&
+                 zone->gcBytes > zone->gcTriggerBytes;
 
     for (;;) {
         if (JS_UNLIKELY(runGC)) {
             if (void *thing = RunLastDitchGC(cx->asJSContext(), zone, thingKind))
                 return thing;
         }
 
         /*
@@ -1529,25 +1529,25 @@ ArenaLists::refillFreeList(ThreadSafeCon
          * to fail but at this point it may have already stopped. To avoid
          * this race we always try to allocate twice.
          *
          * If we're in a fork join, we simply try it once and return whatever
          * value we get.
          */
         for (bool secondAttempt = false; ; secondAttempt = true) {
             void *thing = cx->allocator()->arenas.allocateFromArenaInline(zone, thingKind);
-            if (JS_LIKELY(!!thing) || cx->isForkJoinSlice())
+            if (JS_LIKELY(!!thing) || !cx->isJSContext())
                 return thing;
             if (secondAttempt)
                 break;
 
-            rt->gcHelperThread.waitBackgroundSweepEnd();
+            cx->asJSContext()->runtime()->gcHelperThread.waitBackgroundSweepEnd();
         }
 
-        if (!allowGC)
+        if (!cx->allowGC() || !allowGC)
             return NULL;
 
         /*
          * We failed to allocate. Run the GC if we haven't done it already.
          * Otherwise report OOM.
          */
         if (runGC)
             break;
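
The loop above encodes the "allocate twice" rule from the comment: one attempt before waiting for background sweeping and exactly one after. A compact sketch of that retry shape, with the allocation itself faked out:

    #include <cstdio>

    static void *tryAllocate(int attempt) {
        // Pretend the free lists are only refilled once sweeping is done.
        return attempt == 1 ? reinterpret_cast<void *>(0x1000) : nullptr;
    }

    static void waitBackgroundSweepEnd() { /* would block on the GC helper thread */ }

    static void *allocateWithRetry() {
        for (bool secondAttempt = false; ; secondAttempt = true) {
            if (void *thing = tryAllocate(secondAttempt ? 1 : 0))
                return thing;
            if (secondAttempt)
                return nullptr;           // both attempts failed: caller may GC
            waitBackgroundSweepEnd();     // sweeping may have freed arenas
        }
    }

    int main() {
        std::printf("%p\n", allocateWithRetry());
        return 0;
    }
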
@@ -1924,17 +1924,17 @@ void
 js::TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason)
 {
     /* Wait till end of parallel section to trigger GC. */
     if (InParallelSection()) {
         ForkJoinSlice::Current()->requestGC(reason);
         return;
     }
 
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     if (rt->isHeapBusy())
         return;
 
     JS::PrepareForFullGC(rt);
     TriggerOperationCallback(rt, reason);
 }
 
@@ -1945,18 +1945,17 @@ js::TriggerZoneGC(Zone *zone, JS::gcreas
     * If parallel threads are running, wait until they
     * have stopped before triggering the GC.
      */
     if (InParallelSection()) {
         ForkJoinSlice::Current()->requestZoneGC(zone, reason);
         return;
     }
 
-    JSRuntime *rt = zone->rt;
-    rt->assertValidThread();
+    JSRuntime *rt = zone->runtimeFromMainThread();
 
     if (rt->isHeapBusy())
         return;
 
     if (rt->gcZeal() == ZealAllocValue) {
         TriggerGC(rt, reason);
         return;
     }
@@ -1970,17 +1969,17 @@ js::TriggerZoneGC(Zone *zone, JS::gcreas
     PrepareZoneForGC(zone);
     TriggerOperationCallback(rt, reason);
 }
 
 void
 js::MaybeGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime();
-    rt->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
         JS::PrepareForFullGC(rt);
         GC(rt, GC_NORMAL, JS::gcreason::MAYBEGC);
         return;
     }
 
     if (rt->gcIsNeeded) {
@@ -2511,17 +2510,17 @@ ReleaseObservedTypes(JSRuntime *rt)
  * |keepAtleastOne| is false. If some objects remain in the zone so that it
  * cannot be deleted, then we set |keepAtleastOne| to true, which prohibits
  * SweepCompartments from deleting every compartment. Instead, it preserves an
  * arbitrary compartment in the zone.
  */
 static void
 SweepCompartments(FreeOp *fop, Zone *zone, bool keepAtleastOne, bool lastGC)
 {
-    JSRuntime *rt = zone->rt;
+    JSRuntime *rt = zone->runtimeFromMainThread();
     JSDestroyCompartmentCallback callback = rt->destroyCompartmentCallback;
 
     JSCompartment **read = zone->compartments.begin();
     JSCompartment **end = zone->compartments.end();
     JSCompartment **write = read;
     bool foundOne = false;
     while (read < end) {
         JSCompartment *comp = *read++;
@@ -2598,20 +2597,21 @@ PurgeRuntime(JSRuntime *rt)
         activeCompilations |= iter->activeCompilations;
     if (!activeCompilations)
         rt->parseMapPool.purgeAll();
 }
 
 static bool
 ShouldPreserveJITCode(JSCompartment *comp, int64_t currentTime)
 {
-    if (comp->rt->gcShouldCleanUpEverything || !comp->zone()->types.inferenceEnabled)
+    JSRuntime *rt = comp->runtimeFromMainThread();
+    if (rt->gcShouldCleanUpEverything || !comp->zone()->types.inferenceEnabled)
         return false;
 
-    if (comp->rt->alwaysPreserveCode)
+    if (rt->alwaysPreserveCode)
         return true;
     if (comp->lastAnimationTime + PRMJ_USEC_PER_SEC >= currentTime &&
         comp->lastCodeRelease + (PRMJ_USEC_PER_SEC * 300) >= currentTime)
     {
         return true;
     }
 
     comp->lastCodeRelease = currentTime;
@@ -3295,16 +3295,17 @@ JSCompartment::findOutgoingEdges(Compone
 
 void
 Zone::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
 {
     /*
      * Any compartment may have a pointer to an atom in the atoms
      * compartment, and these aren't in the cross compartment map.
      */
+    JSRuntime *rt = runtimeFromMainThread();
     if (rt->atomsCompartment->zone()->isGCMarking())
         finder.addEdgeTo(rt->atomsCompartment->zone());
 
     for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next())
         comp->findOutgoingEdges(finder);
 }
 
 static void
@@ -5066,17 +5067,17 @@ AutoMaybeTouchDeadZones::AutoMaybeTouchD
     markCount(runtime->gcObjectsMarkedInDeadZones),
     inIncremental(JS::IsIncrementalGCInProgress(runtime)),
     manipulatingDeadZones(runtime->gcManipulatingDeadZones)
 {
     runtime->gcManipulatingDeadZones = true;
 }
 
 AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj)
-  : runtime(obj->compartment()->rt),
+  : runtime(obj->compartment()->runtimeFromMainThread()),
     markCount(runtime->gcObjectsMarkedInDeadZones),
     inIncremental(JS::IsIncrementalGCInProgress(runtime)),
     manipulatingDeadZones(runtime->gcManipulatingDeadZones)
 {
     runtime->gcManipulatingDeadZones = true;
 }
 
 AutoMaybeTouchDeadZones::~AutoMaybeTouchDeadZones()
@@ -5091,17 +5092,17 @@ AutoMaybeTouchDeadZones::~AutoMaybeTouch
 
 AutoSuppressGC::AutoSuppressGC(JSContext *cx)
   : suppressGC_(cx->runtime()->mainThread.suppressGC)
 {
     suppressGC_++;
 }
 
 AutoSuppressGC::AutoSuppressGC(JSCompartment *comp)
-  : suppressGC_(comp->rt->mainThread.suppressGC)
+  : suppressGC_(comp->runtimeFromMainThread()->mainThread.suppressGC)
 {
     suppressGC_++;
 }
 
 bool
 js::UninlinedIsInsideNursery(JSRuntime *rt, const void *thing)
 {
     return IsInsideNursery(rt, thing);
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -27,18 +27,19 @@ class Shape;
  * for more details.
  */
 struct AutoMarkInDeadZone
 {
     AutoMarkInDeadZone(JS::Zone *zone)
       : zone(zone),
         scheduled(zone->scheduledForDestruction)
     {
-        if (zone->rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
-            zone->rt->gcObjectsMarkedInDeadZones++;
+        JSRuntime *rt = zone->runtimeFromMainThread();
+        if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
+            rt->gcObjectsMarkedInDeadZones++;
             zone->scheduledForDestruction = false;
         }
     }
 
     ~AutoMarkInDeadZone() {
         zone->scheduledForDestruction = scheduled;
     }
 
@@ -85,17 +86,17 @@ IsInsideNursery(JSRuntime *rt, const voi
 }
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     JS_ASSERT(thing);
     const Cell *cell = static_cast<const Cell *>(thing);
 #ifdef JSGC_GENERATIONAL
-    if (IsInsideNursery(cell->runtime(), cell))
+    if (IsInsideNursery(cell->runtimeFromMainThread(), cell))
         return JSTRACE_OBJECT;
 #endif
     return MapAllocToTraceKind(cell->tenuredGetAllocKind());
 }
 
 static inline void
 GCPoke(JSRuntime *rt)
 {
@@ -232,22 +233,22 @@ class CellIterImpl
         thing += thingSize;
     }
 };
 
 class CellIterUnderGC : public CellIterImpl
 {
   public:
     CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
-        JS_ASSERT(zone->rt->isHeapBusy());
+        JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
         init(zone, kind);
     }
 
     CellIterUnderGC(ArenaHeader *aheader) {
-        JS_ASSERT(aheader->zone->rt->isHeapBusy());
+        JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
         init(aheader);
     }
 };
 
 class CellIter : public CellIterImpl
 {
     ArenaLists *lists;
     AllocKind kind;
@@ -263,26 +264,26 @@ class CellIter : public CellIterImpl
          * We have a single-threaded runtime, so there's no need to protect
          * against other threads iterating or allocating. However, we do have
          * background finalization; we have to wait for this to finish if it's
          * currently active.
          */
         if (IsBackgroundFinalized(kind) &&
             zone->allocator.arenas.needBackgroundFinalizeWait(kind))
         {
-            gc::FinishBackgroundFinalize(zone->rt);
+            gc::FinishBackgroundFinalize(zone->runtimeFromMainThread());
         }
         if (lists->isSynchronizedFreeList(kind)) {
             lists = NULL;
         } else {
-            JS_ASSERT(!zone->rt->isHeapBusy());
+            JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
             lists->copyFreeListToArena(kind);
         }
 #ifdef DEBUG
-        counter = &zone->rt->noGCOrAllocationCheck;
+        counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck;
         ++*counter;
 #endif
         init(zone, kind);
     }
 
     ~CellIter() {
 #ifdef DEBUG
         JS_ASSERT(*counter > 0);
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -6540,17 +6540,18 @@ TypeCompartment::sweepCompilerOutputs(Fr
         fop->delete_(pendingRecompiles);
         pendingRecompiles = NULL;
     }
 }
 
 void
 JSCompartment::sweepNewTypeObjectTable(TypeObjectSet &table)
 {
-    gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
+    gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
+                          gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
 
     JS_ASSERT(zone()->isGCSweeping());
     if (table.initialized()) {
         for (TypeObjectSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObject *type = e.front();
             if (IsTypeObjectAboutToBeFinalized(&type))
                 e.removeFront();
             else if (type != e.front())
@@ -6851,17 +6852,17 @@ TypeZone::~TypeZone()
 {
 }
 
 void
 TypeZone::sweep(FreeOp *fop, bool releaseTypes)
 {
     JS_ASSERT(zone()->isGCSweeping());
 
-    JSRuntime *rt = zone()->rt;
+    JSRuntime *rt = fop->runtime();
 
     /*
      * Clear the analysis pool, but don't release its data yet. While
     * sweeping types, any live data will be allocated into the pool.
      */
     LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
     oldAlloc.steal(&typeLifoAlloc);
 
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -1565,17 +1565,17 @@ TypeObject::getProperty(unsigned i)
     }
     return propertySet[i];
 }
 
 inline void
 TypeObject::writeBarrierPre(TypeObject *type)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!type || !type->runtime()->needsBarrier())
+    if (!type || !type->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = type->zone();
     if (zone->needsBarrier()) {
         TypeObject *tmp = type;
         MarkTypeObjectUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == type);
     }
@@ -1594,17 +1594,17 @@ TypeObject::readBarrier(TypeObject *type
     }
 #endif
 }
 
 inline void
 TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!newScript || !newScript->fun->runtime()->needsBarrier())
+    if (!newScript || !newScript->fun->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = newScript->fun->zone();
     if (zone->needsBarrier()) {
         MarkObject(zone->barrierTracer(), &newScript->fun, "write barrier");
         MarkShape(zone->barrierTracer(), &newScript->shape, "write barrier");
     }
 #endif
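
The writeBarrierPre bodies touched throughout this patch (TypeObject, TypeNewScript, JSScript, LazyScript, ObjectImpl) all share one shape: bail out unless the zone is in incremental marking, then mark the value being overwritten so the collector cannot lose it. A minimal model of that shape, with a vector standing in for the barrier tracer's mark stack:

    #include <cassert>
    #include <vector>

    struct Cell { bool marked = false; };

    struct Zone {
        bool needsBarrier_ = false;
        std::vector<Cell *> markStack;    // stands in for the barrier tracer
        bool needsBarrier() const { return needsBarrier_; }
    };

    static void writeBarrierPre(Zone &zone, Cell *oldValue) {
        if (!oldValue || !zone.needsBarrier())
            return;
        zone.markStack.push_back(oldValue);   // keep the old target alive
    }

    static void setField(Zone &zone, Cell *&field, Cell *newValue) {
        writeBarrierPre(zone, field);         // barrier fires on the *old* value
        field = newValue;
    }

    int main() {
        Zone zone;
        Cell a, b;
        Cell *field = &a;
        zone.needsBarrier_ = true;            // incremental marking active
        setField(zone, field, &b);
        assert(zone.markStack.size() == 1 && zone.markStack[0] == &a);
        return 0;
    }
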
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -824,20 +824,18 @@ PropertyIteratorObject::trace(JSTracer *
 {
     if (NativeIterator *ni = obj->as<PropertyIteratorObject>().getNativeIterator())
         ni->mark(trc);
 }
 
 void
 PropertyIteratorObject::finalize(FreeOp *fop, JSObject *obj)
 {
-    if (NativeIterator *ni = obj->as<PropertyIteratorObject>().getNativeIterator()) {
-        obj->as<PropertyIteratorObject>().setNativeIterator(NULL);
+    if (NativeIterator *ni = obj->as<PropertyIteratorObject>().getNativeIterator())
         fop->free_(ni);
-    }
 }
 
 Class PropertyIteratorObject::class_ = {
     "Iterator",
     JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator) |
     JSCLASS_HAS_PRIVATE |
     JSCLASS_BACKGROUND_FINALIZE,
--- a/js/src/jsmemorymetrics.cpp
+++ b/js/src/jsmemorymetrics.cpp
@@ -226,21 +226,21 @@ StatsCellCallback(JSRuntime *rt, void *d
         CompartmentStats *cStats = GetCompartmentStats(shape->compartment());
         size_t propTableSize, kidsSize;
         shape->sizeOfExcludingThis(rtStats->mallocSizeOf_, &propTableSize, &kidsSize);
         if (shape->inDictionary()) {
             cStats->gcHeapShapesDict += thingSize;
             cStats->shapesExtraDictTables += propTableSize;
             JS_ASSERT(kidsSize == 0);
         } else {
-            if (shape->base()->getObjectParent() == shape->compartment()->maybeGlobal()) {
+            JSObject *parent = shape->base()->getObjectParent();
+            if (parent && parent->is<GlobalObject>())
                 cStats->gcHeapShapesTreeGlobalParented += thingSize;
-            } else {
+            else
                 cStats->gcHeapShapesTreeNonGlobalParented += thingSize;
-            }
             cStats->shapesExtraTreeTables += propTableSize;
             cStats->shapesExtraTreeShapeKids += kidsSize;
         }
         break;
       }
 
       case JSTRACE_BASE_SHAPE: {
         BaseShape *base = static_cast<BaseShape *>(thing);
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -101,17 +101,17 @@ inline void
 JSObject::finalize(js::FreeOp *fop)
 {
     js::Probes::finalizeObject(this);
 
 #ifdef DEBUG
     JS_ASSERT(isTenured());
     if (!IsBackgroundFinalized(tenuredGetAllocKind())) {
         /* Assert we're on the main thread. */
-        fop->runtime()->assertValidThread();
+        JS_ASSERT(CurrentThreadCanAccessRuntime(fop->runtime()));
     }
 #endif
     js::Class *clasp = getClass();
     if (clasp->finalize)
         clasp->finalize(fop, this);
 
     finish(fop);
 }
@@ -273,17 +273,17 @@ JSObject::copyDenseElements(uint32_t dst
     for (uint32_t i = 0; i < count; ++i)
         elements[dstStart + i].set(zone, this, js::HeapSlot::Element, dstStart + i, src[i]);
 }
 
 inline void
 JSObject::initDenseElements(uint32_t dstStart, const js::Value *src, uint32_t count)
 {
     JS_ASSERT(dstStart + count <= getDenseCapacity());
-    JSRuntime *rt = runtime();
+    JSRuntime *rt = runtimeFromMainThread();
     for (uint32_t i = 0; i < count; ++i)
         elements[dstStart + i].init(rt, this, js::HeapSlot::Element, dstStart + i, src[i]);
 }
 
 inline void
 JSObject::moveDenseElements(uint32_t dstStart, uint32_t srcStart, uint32_t count)
 {
     JS_ASSERT(dstStart + count <= getDenseCapacity());
@@ -311,17 +311,17 @@ JSObject::moveDenseElements(uint32_t dst
         } else {
             js::HeapSlot *dst = elements + dstStart + count - 1;
             js::HeapSlot *src = elements + srcStart + count - 1;
             for (uint32_t i = 0; i < count; i++, dst--, src--)
                 dst->set(zone, this, js::HeapSlot::Element, dst - elements, *src);
         }
     } else {
         memmove(elements + dstStart, elements + srcStart, count * sizeof(js::HeapSlot));
-        DenseRangeWriteBarrierPost(runtime(), this, dstStart, count);
+        DenseRangeWriteBarrierPost(runtimeFromMainThread(), this, dstStart, count);
     }
 }
 
 inline void
 JSObject::moveDenseElementsUnbarriered(uint32_t dstStart, uint32_t srcStart, uint32_t count)
 {
     JS_ASSERT(!zone()->needsBarrier());
 
@@ -347,17 +347,17 @@ JSObject::ensureDenseInitializedLength(j
      * for a write.
      */
     JS_ASSERT(index + extra <= getDenseCapacity());
     uint32_t &initlen = getElementsHeader()->initializedLength;
     if (initlen < index)
         markDenseElementsNotPacked(cx);
 
     if (initlen < index + extra) {
-        JSRuntime *rt = runtime();
+        JSRuntime *rt = runtimeFromAnyThread();
         size_t offset = initlen;
         for (js::HeapSlot *sp = elements + initlen;
              sp != elements + (index + extra);
              sp++, offset++)
             sp->init(rt, this, js::HeapSlot::Element, offset, js::MagicValue(JS_ELEMENTS_HOLE));
         initlen = index + extra;
     }
 }
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -106,39 +106,39 @@ JSScript::global() const
      */
     return *compartment()->maybeGlobal();
 }
 
 inline void
 JSScript::writeBarrierPre(JSScript *script)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!script || !script->runtime()->needsBarrier())
+    if (!script || !script->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = script->zone();
     if (zone->needsBarrier()) {
-        JS_ASSERT(!zone->rt->isHeapMajorCollecting());
+        JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
         JSScript *tmp = script;
         MarkScriptUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == script);
     }
 #endif
 }
 
 /* static */ inline void
 js::LazyScript::writeBarrierPre(js::LazyScript *lazy)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!lazy)
+    if (!lazy || !lazy->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = lazy->zone();
     if (zone->needsBarrier()) {
-        JS_ASSERT(!zone->rt->isHeapMajorCollecting());
+        JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
         js::LazyScript *tmp = lazy;
         MarkLazyScriptUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == lazy);
     }
 #endif
 }
 
 inline JSPrincipals *
--- a/js/src/jsworkers.cpp
+++ b/js/src/jsworkers.cpp
@@ -94,43 +94,45 @@ js::StartOffThreadIonCompile(JSContext *
  * Move an IonBuilder for which compilation has either finished, failed, or
  * been cancelled into the Ion compartment's finished compilations list.
 * All off-thread compilations that are started must eventually be finished.
  */
 static void
 FinishOffThreadIonCompile(ion::IonBuilder *builder)
 {
     JSCompartment *compartment = builder->script()->compartment();
-    JS_ASSERT(compartment->rt->workerThreadState);
-    JS_ASSERT(compartment->rt->workerThreadState->isLocked());
+    JS_ASSERT(compartment->runtimeFromAnyThread()->workerThreadState);
+    JS_ASSERT(compartment->runtimeFromAnyThread()->workerThreadState->isLocked());
 
     compartment->ionCompartment()->finishedOffThreadCompilations().append(builder);
 }
 
 static inline bool
 CompiledScriptMatches(JSCompartment *compartment, JSScript *script, JSScript *target)
 {
     if (script)
         return target == script;
     return target->compartment() == compartment;
 }
 
 void
 js::CancelOffThreadIonCompile(JSCompartment *compartment, JSScript *script)
 {
-    if (!compartment->rt->workerThreadState)
+    JSRuntime *rt = compartment->runtimeFromMainThread();
+
+    if (!rt->workerThreadState)
         return;
 
-    WorkerThreadState &state = *compartment->rt->workerThreadState;
+    WorkerThreadState &state = *rt->workerThreadState;
 
     ion::IonCompartment *ion = compartment->ionCompartment();
     if (!ion)
         return;
 
-    AutoLockWorkerThreadState lock(compartment->rt);
+    AutoLockWorkerThreadState lock(rt);
 
     /* Cancel any pending entries for which processing hasn't started. */
     for (size_t i = 0; i < state.ionWorklist.length(); i++) {
         ion::IonBuilder *builder = state.ionWorklist[i];
         if (CompiledScriptMatches(compartment, script, builder->script())) {
             FinishOffThreadIonCompile(builder);
             state.ionWorklist[i--] = state.ionWorklist.back();
             state.ionWorklist.popBack();
@@ -470,17 +472,17 @@ WorkerThread::handleAsmJSWorkload(Worker
     JS_ASSERT(state.canStartAsmJSCompile());
     JS_ASSERT(idle());
 
     asmData = state.asmJSWorklist.popCopy();
     bool success = false;
 
     state.unlock();
     do {
-        ion::IonContext icx(asmData->mir->compartment, &asmData->mir->temp());
+        ion::IonContext icx(runtime, asmData->mir->compartment, &asmData->mir->temp());
 
         int64_t before = PRMJ_Now();
 
         if (!OptimizeMIR(asmData->mir))
             break;
 
         asmData->lir = GenerateLIR(asmData->mir);
         if (!asmData->lir)
@@ -518,17 +520,17 @@ WorkerThread::handleIonWorkload(WorkerTh
 
     ionBuilder = state.ionWorklist.popCopy();
 
     DebugOnly<ion::ExecutionMode> executionMode = ionBuilder->info().executionMode();
     JS_ASSERT(GetIonScript(ionBuilder->script(), executionMode) == ION_COMPILING_SCRIPT);
 
     state.unlock();
     {
-        ion::IonContext ictx(ionBuilder->script()->compartment(), &ionBuilder->temp());
+        ion::IonContext ictx(runtime, ionBuilder->script()->compartment(), &ionBuilder->temp());
         ionBuilder->setBackgroundCodegen(ion::CompileBackEnd(ionBuilder));
     }
     state.lock();
 
     FinishOffThreadIonCompile(ionBuilder);
     ionBuilder = NULL;
 
     // Notify the main thread in case it is waiting for the compilation to finish.
@@ -607,17 +609,17 @@ WorkerThread::threadLoop()
 AutoPauseWorkersForGC::AutoPauseWorkersForGC(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : runtime(rt), needsUnpause(false)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
 
     if (!runtime->workerThreadState)
         return;
 
-    runtime->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(runtime));
 
     WorkerThreadState &state = *runtime->workerThreadState;
     if (!state.numThreads)
         return;
 
     AutoLockWorkerThreadState lock(runtime);
 
     // Tolerate reentrant use of AutoPauseWorkersForGC.
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -161,17 +161,17 @@ bool CrossCompartmentWrapper::finalizeIn
 {
     if (!priv.isObject())
         return true;
 
     /*
      * Make the 'background-finalized-ness' of the wrapper the same as the
      * wrapped object, to allow transplanting between them.
      */
-    if (IsInsideNursery(priv.toObject().runtime(), &priv.toObject()))
+    if (IsInsideNursery(priv.toObject().runtimeFromMainThread(), &priv.toObject()))
         return false;
     return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
 }
 
 #define PIERCE(cx, wrapper, pre, op, post)                      \
     JS_BEGIN_MACRO                                              \
         bool ok;                                                \
         {                                                       \
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -373,17 +373,17 @@ Debugger::Debugger(JSContext *cx, JSObje
     JS_INIT_CLIST(&onNewGlobalObjectWatchersLink);
 }
 
 Debugger::~Debugger()
 {
     JS_ASSERT(debuggees.empty());
 
     /* This always happens in the GC thread, so no locking is required. */
-    JS_ASSERT(object->compartment()->rt->isHeapBusy());
+    JS_ASSERT(object->runtimeFromMainThread()->isHeapBusy());
 
     /*
      * Since the inactive state for this link is a singleton cycle, it's always
      * safe to apply JS_REMOVE_LINK to it, regardless of whether we're in the list or not.
      */
     JS_REMOVE_LINK(&onNewGlobalObjectWatchersLink);
 }
 
@@ -1638,17 +1638,20 @@ Debugger::detachAllDebuggersFromGlobal(F
 Debugger::findCompartmentEdges(Zone *zone, js::gc::ComponentFinder<Zone> &finder)
 {
     /*
      * For debugger cross compartment wrappers, add edges in the opposite
      * direction to those already added by JSCompartment::findOutgoingEdges.
     * This ensures that debuggers and their debuggees are finalized in the same
      * group.
      */
-    for (Debugger *dbg = zone->rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext()) {
+    for (Debugger *dbg = zone->runtimeFromMainThread()->debuggerList.getFirst();
+         dbg;
+         dbg = dbg->getNext())
+    {
         Zone *w = dbg->object->zone();
         if (w == zone || !w->isGCMarking())
             continue;
         if (dbg->scripts.hasKeyInZone(zone) ||
             dbg->sources.hasKeyInZone(zone) ||
             dbg->objects.hasKeyInZone(zone) ||
             dbg->environments.hasKeyInZone(zone))
         {
@@ -2209,17 +2212,17 @@ Debugger::addDebuggeeGlobal(JSContext *c
     return false;
 }
 
 void
 Debugger::removeDebuggeeGlobal(FreeOp *fop, GlobalObject *global,
                                GlobalObjectSet::Enum *compartmentEnum,
                                GlobalObjectSet::Enum *debugEnum)
 {
-    AutoDebugModeGC dmgc(global->compartment()->rt);
+    AutoDebugModeGC dmgc(fop->runtime());
     return removeDebuggeeGlobal(fop, global, dmgc, compartmentEnum, debugEnum);
 }
 
 void
 Debugger::removeDebuggeeGlobal(FreeOp *fop, GlobalObject *global,
                                AutoDebugModeGC &dmgc,
                                GlobalObjectSet::Enum *compartmentEnum,
                                GlobalObjectSet::Enum *debugEnum)
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -112,19 +112,19 @@ ParallelBailoutRecord::setCause(Parallel
 void
 ParallelBailoutRecord::addTrace(JSScript *script,
                                 jsbytecode *pc)
 {
     MOZ_ASSUME_UNREACHABLE("Not THREADSAFE build");
 }
 
 bool
-js::InSequentialOrExclusiveParallelSection()
+js::InExclusiveParallelSection()
 {
-    return true;
+    return false;
 }
 
 bool
 js::ParallelTestsShouldPass(JSContext *cx)
 {
     return false;
 }
 
@@ -1448,17 +1448,17 @@ ForkJoinShared::executePortion(PerThread
     ForkJoinSlice slice(perThread, threadId, numSlices_, allocator,
                         this, &records_[threadId]);
     AutoSetForkJoinSlice autoContext(&slice);
 
     Spew(SpewOps, "Up");
 
    // Make a new IonContext for the slice, which is required if we need to
    // re-enter the VM.
-    IonContext icx(cx_->compartment(), NULL);
+    IonContext icx(cx_->runtime(), cx_->compartment(), NULL);
 
     JS_ASSERT(slice.bailoutRecord->topScript == NULL);
 
     RootedObject fun(perThread, fun_);
     JS_ASSERT(fun->is<JSFunction>());
     RootedFunction callee(perThread, &fun->as<JSFunction>());
     if (!callee->nonLazyScript()->hasParallelIonScript()) {
         // Sometimes, particularly with GCZeal, the parallel ion
@@ -2136,19 +2136,19 @@ parallel::SpewBailoutIR(uint32_t bblockI
                         JSScript *script, jsbytecode *pc)
 {
     spewer.spewBailoutIR(bblockId, lirId, lir, mir, script, pc);
 }
 
 #endif // DEBUG
 
 bool
-js::InSequentialOrExclusiveParallelSection()
+js::InExclusiveParallelSection()
 {
-    return !InParallelSection() || ForkJoinSlice::Current()->hasAcquiredContext();
+    return InParallelSection() && ForkJoinSlice::Current()->hasAcquiredContext();
 }
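
The rename from InSequentialOrExclusiveParallelSection to InExclusiveParallelSection flips the predicate from a disjunction that was vacuously true in sequential code to the plain conjunction defined above. Its three cases, modeled with stand-in state instead of ForkJoinSlice:

    #include <cassert>

    struct SliceState {
        bool inParallelSection;
        bool hasAcquiredContext;
    };

    static bool inExclusiveParallelSection(const SliceState &s) {
        return s.inParallelSection && s.hasAcquiredContext;
    }

    int main() {
        assert(!inExclusiveParallelSection({false, false}));  // sequential code
        assert(!inExclusiveParallelSection({true,  false}));  // parallel, shared
        assert( inExclusiveParallelSection({true,  true }));  // parallel, exclusive
        return 0;
    }
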
 
 bool
 js::ParallelTestsShouldPass(JSContext *cx)
 {
     return ion::IsEnabled(cx) &&
            ion::IsBaselineEnabled(cx) &&
            !ion::js_IonOptions.eagerCompilation &&
--- a/js/src/vm/ForkJoin.h
+++ b/js/src/vm/ForkJoin.h
@@ -405,17 +405,17 @@ InParallelSection()
 #ifdef JS_THREADSAFE
     ForkJoinSlice *current = ForkJoinSlice::Current();
     return current != NULL;
 #else
     return false;
 #endif
 }
 
-bool InSequentialOrExclusiveParallelSection();
+bool InExclusiveParallelSection();
 
 bool ParallelTestsShouldPass(JSContext *cx);
 
 ///////////////////////////////////////////////////////////////////////////
 // Debug Spew
 
 namespace parallel {
 
--- a/js/src/vm/ObjectImpl-inl.h
+++ b/js/src/vm/ObjectImpl-inl.h
@@ -156,27 +156,27 @@ inline bool
 js::ObjectImpl::inDictionaryMode() const
 {
     return lastProperty()->inDictionary();
 }
 
 JS_ALWAYS_INLINE JS::Zone *
 js::ObjectImpl::zone() const
 {
-    JS_ASSERT(InSequentialOrExclusiveParallelSection());
+    JS_ASSERT(CurrentThreadCanAccessZone(shape_->zone()));
     return shape_->zone();
 }
 
 /* static */ inline void
 js::ObjectImpl::readBarrier(ObjectImpl *obj)
 {
 #ifdef JSGC_INCREMENTAL
     Zone *zone = obj->zone();
     if (zone->needsBarrier()) {
-        MOZ_ASSERT(!zone->rt->isHeapMajorCollecting());
+        MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
         JSObject *tmp = obj->asObjectPtr();
         MarkObjectUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == obj->asObjectPtr());
     }
 #endif
 }
 
 inline void
@@ -190,64 +190,64 @@ js::ObjectImpl::privateWriteBarrierPre(v
     }
 #endif
 }
 
 inline void
 js::ObjectImpl::privateWriteBarrierPost(void **pprivate)
 {
 #ifdef JSGC_GENERATIONAL
-    runtime()->gcStoreBuffer.putCell(reinterpret_cast<js::gc::Cell **>(pprivate));
+    runtimeFromAnyThread()->gcStoreBuffer.putCell(reinterpret_cast<js::gc::Cell **>(pprivate));
 #endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
 #ifdef JSGC_INCREMENTAL
     /*
      * This would normally be a null test, but TypeScript::global uses 0x1 as a
      * special value.
      */
-    if (IsNullTaggedPointer(obj) || !obj->runtime()->needsBarrier())
+    if (IsNullTaggedPointer(obj) || !obj->runtimeFromMainThread()->needsBarrier())
         return;
 
     Zone *zone = obj->zone();
     if (zone->needsBarrier()) {
-        MOZ_ASSERT(!zone->rt->isHeapMajorCollecting());
+        MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
         JSObject *tmp = obj->asObjectPtr();
         MarkObjectUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         MOZ_ASSERT(tmp == obj->asObjectPtr());
     }
 #endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *addr)
 {
 #ifdef JSGC_GENERATIONAL
     if (IsNullTaggedPointer(obj))
         return;
-    obj->runtime()->gcStoreBuffer.putCell((Cell **)addr);
+    obj->runtimeFromAnyThread()->gcStoreBuffer.putCell((Cell **)addr);
 #endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPostRelocate(ObjectImpl *obj, void *addr)
 {
 #ifdef JSGC_GENERATIONAL
-    obj->runtime()->gcStoreBuffer.putRelocatableCell((Cell **)addr);
+    obj->runtimeFromAnyThread()->gcStoreBuffer.putRelocatableCell((Cell **)addr);
 #endif
 }
 
 /* static */ inline void
 js::ObjectImpl::writeBarrierPostRemove(ObjectImpl *obj, void *addr)
 {
 #ifdef JSGC_GENERATIONAL
-    obj->runtime()->gcStoreBuffer.removeRelocatableCell((Cell **)addr);
+    obj->runtimeFromAnyThread()->gcStoreBuffer.removeRelocatableCell((Cell **)addr);
 #endif
 }
 
 inline void
 js::ObjectImpl::setPrivate(void *data)
 {
     void **pprivate = &privateRef(numFixedSlots());
     privateWriteBarrierPre(pprivate);
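The hunks above annotate each call site with the weakest runtime accessor it can justify: main-thread-only paths assert through runtimeFromMainThread(), while code that may run off the main thread, or that only touches state safe to use from any thread (the store buffer hooks), switches to runtimeFromAnyThread(). A toy model of the two accessors, assuming hypothetical ToyRuntime/ToyCell types (the real check compares NSPR thread pointers, not std::thread ids):

    #include <cassert>
    #include <thread>

    struct ToyRuntime {
        std::thread::id ownerThread;
    };

    struct ToyCell {
        ToyRuntime *rt;

        // Checked accessor: caller must be the runtime's owner thread.
        ToyRuntime *runtimeFromMainThread() const {
            assert(std::this_thread::get_id() == rt->ownerThread);
            return rt;
        }

        // Unchecked accessor: any thread, for thread-agnostic fields.
        ToyRuntime *runtimeFromAnyThread() const {
            return rt;
        }
    };

    int main() {
        ToyRuntime runtime{ std::this_thread::get_id() };
        ToyCell cell{ &runtime };
        assert(cell.runtimeFromMainThread() == &runtime); // we own the runtime
        assert(cell.runtimeFromAnyThread() == &runtime);  // always permitted
        return 0;
    }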
--- a/js/src/vm/ObjectImpl.cpp
+++ b/js/src/vm/ObjectImpl.cpp
@@ -259,28 +259,28 @@ js::ObjectImpl::initializeSlotRange(uint
 {
     /*
      * No bounds check, as this is used when the object's shape does not
      * reflect its allocated slots (updateSlotsForSpan).
      */
     HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
     getSlotRangeUnchecked(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
 
-    JSRuntime *rt = runtime();
+    JSRuntime *rt = runtimeFromAnyThread();
     uint32_t offset = start;
     for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
         sp->init(rt, this->asObjectPtr(), HeapSlot::Slot, offset++, UndefinedValue());
     for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
         sp->init(rt, this->asObjectPtr(), HeapSlot::Slot, offset++, UndefinedValue());
 }
 
 void
 js::ObjectImpl::initSlotRange(uint32_t start, const Value *vector, uint32_t length)
 {
-    JSRuntime *rt = runtime();
+    JSRuntime *rt = runtimeFromAnyThread();
     HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
     getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
     for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
         sp->init(rt, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
     for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
         sp->init(rt, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
 }
 
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -80,24 +80,24 @@ PerThreadData::init()
     return true;
 }
 
 void
 PerThreadData::addToThreadList()
 {
     // PerThreadData instances created or destroyed off the main thread do
     // not show up in the runtime's thread list.
-    runtime_->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
     runtime_->threadList.insertBack(this);
 }
 
 void
 PerThreadData::removeFromThreadList()
 {
-    runtime_->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
     removeFrom(runtime_->threadList);
 }
 
 JSRuntime::JSRuntime(JSUseHelperThreads useHelperThreads)
   : mainThread(this),
     interrupt(0),
 #ifdef JS_THREADSAFE
     operationCallbackLock(NULL),
@@ -388,18 +388,16 @@ JSRuntime::~JSRuntime()
 
 #ifdef JS_THREADSAFE
 # ifdef JS_ION
     if (workerThreadState)
         js_delete(workerThreadState);
 # endif
     sourceCompressorThread.finish();
 
-    clearOwnerThread();
-
     JS_ASSERT(!operationCallbackOwner);
     if (operationCallbackLock)
         PR_DestroyLock(operationCallbackLock);
 
     JS_ASSERT(!exclusiveAccessOwner);
     if (exclusiveAccessLock)
         PR_DestroyLock(exclusiveAccessLock);
 #endif
@@ -449,16 +447,20 @@ JSRuntime::~JSRuntime()
 
 #ifdef JSGC_GENERATIONAL
     gcStoreBuffer.disable();
     gcNursery.disable();
 #endif
 
     DebugOnly<size_t> oldCount = liveRuntimesCount--;
     JS_ASSERT(oldCount > 0);
+
+#ifdef JS_THREADSAFE
+    clearOwnerThread();
+#endif
 }
 
 #ifdef JS_THREADSAFE
 void
 JSRuntime::setOwnerThread()
 {
     JS_ASSERT(ownerThread_ == (void *)0xc1ea12);  /* "clear" */
     JS_ASSERT(requestDepth == 0);
@@ -471,49 +473,31 @@ JSRuntime::setOwnerThread()
 #ifdef XP_MACOSX
     asmJSMachExceptionHandler.setCurrentThread();
 #endif
 }
 
 void
 JSRuntime::clearOwnerThread()
 {
-    assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(this));
     JS_ASSERT(requestDepth == 0);
     ownerThread_ = (void *)0xc1ea12;  /* "clear" */
     js::TlsPerThreadData.set(NULL);
     nativeStackBase = 0;
 #if JS_STACK_GROWTH_DIRECTION > 0
     mainThread.nativeStackLimit = UINTPTR_MAX;
 #else
     mainThread.nativeStackLimit = 0;
 #endif
 #ifdef XP_MACOSX
     asmJSMachExceptionHandler.clearCurrentThread();
 #endif
 }
-
-JS_FRIEND_API(void)
-JSRuntime::abortIfWrongThread() const
-{
-    if (ownerThread_ != PR_GetCurrentThread())
-        MOZ_CRASH();
-    if (!js::TlsPerThreadData.get()->associatedWith(this))
-        MOZ_CRASH();
-}
-
-#ifdef DEBUG
-JS_FRIEND_API(void)
-JSRuntime::assertValidThread() const
-{
-    JS_ASSERT(ownerThread_ == PR_GetCurrentThread());
-    JS_ASSERT(js::TlsPerThreadData.get()->associatedWith(this));
-}
-#endif  /* DEBUG */
-#endif  /* JS_THREADSAFE */
+#endif /* JS_THREADSAFE */
 
 void
 NewObjectCache::clearNurseryObjects(JSRuntime *rt)
 {
     for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
         Entry &e = entries[i];
         JSObject *obj = reinterpret_cast<JSObject *>(&e.templateObject);
         if (IsInsideNursery(rt, e.key) ||
@@ -743,8 +727,41 @@ JSRuntime::onOutOfMemory(void *p, size_t
       p = js_realloc(p, nbytes);
     if (p)
         return p;
     if (cx)
         js_ReportOutOfMemory(cx);
     return NULL;
 }
 
+#ifdef JS_THREADSAFE
+
+bool
+js::CurrentThreadCanAccessRuntime(JSRuntime *rt)
+{
+    PerThreadData *pt = js::TlsPerThreadData.get();
+    JS_ASSERT(pt && pt->associatedWith(rt));
+    return rt->ownerThread_ == PR_GetCurrentThread() || InExclusiveParallelSection();
+}
+
+bool
+js::CurrentThreadCanAccessZone(Zone *zone)
+{
+    PerThreadData *pt = js::TlsPerThreadData.get();
+    JS_ASSERT(pt && pt->associatedWith(zone->runtime_));
+    return !InParallelSection() || InExclusiveParallelSection();
+}
+
+#else
+
+bool
+js::CurrentThreadCanAccessRuntime(JSRuntime *rt)
+{
+    return true;
+}
+
+bool
+js::CurrentThreadCanAccessZone(Zone *zone)
+{
+    return true;
+}
+
+#endif
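Two related changes land in this file: clearOwnerThread() moves to the very end of ~JSRuntime() so that CurrentThreadCanAccessRuntime() assertions stay valid throughout teardown, and the old abortIfWrongThread/assertValidThread pair is replaced by the two predicates defined above. A simplified model of the owner-thread handshake and its 0xc1ea12 ("clear") sentinel; ToyRuntime and FakeCurrentThread are hypothetical stand-ins (the real code uses PR_GetCurrentThread):

    #include <cassert>

    static void *const CLEARED = (void *)0xc1ea12;   // "clear" sentinel
    static void *FakeCurrentThread() { static int token; return &token; }

    struct ToyRuntime {
        void *ownerThread_ = CLEARED;

        void setOwnerThread() {
            assert(ownerThread_ == CLEARED);              // must be unowned
            ownerThread_ = FakeCurrentThread();
        }
        void clearOwnerThread() {
            assert(ownerThread_ == FakeCurrentThread());  // only the owner may clear
            ownerThread_ = CLEARED;
        }
        bool currentThreadCanAccess() const {
            return ownerThread_ == FakeCurrentThread();
        }
    };

    int main() {
        ToyRuntime rt;
        assert(!rt.currentThreadCanAccess());
        rt.setOwnerThread();
        assert(rt.currentThreadCanAccess());   // checks keep passing until...
        rt.clearOwnerThread();                 // ...ownership is cleared, last of all
        assert(!rt.currentThreadCanAccess());
        return 0;
    }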
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -795,29 +795,20 @@ struct JSRuntime : public JS::shadow::Ru
     JSVersion defaultVersion_;
 
     /* See comment for JS_AbortIfWrongThread in jsapi.h. */
 #ifdef JS_THREADSAFE
   public:
     void *ownerThread() const { return ownerThread_; }
     void clearOwnerThread();
     void setOwnerThread();
-    JS_FRIEND_API(void) abortIfWrongThread() const;
-#ifdef DEBUG
-    JS_FRIEND_API(void) assertValidThread() const;
-#else
-    void assertValidThread() const {}
-#endif
   private:
-    void                *ownerThread_;
+    void *ownerThread_;
+    friend bool js::CurrentThreadCanAccessRuntime(JSRuntime *rt);
   public:
-#else
-  public:
-    void abortIfWrongThread() const {}
-    void assertValidThread() const {}
 #endif
 
     /* Temporary arena pool used while compiling and decompiling. */
     static const size_t TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4 * 1024;
     js::LifoAlloc tempLifoAlloc;
 
     /*
      * Free LIFO blocks are transferred to this allocator before being freed on
@@ -1691,17 +1682,17 @@ PerThreadData::setIonStackLimit(uintptr_
 {
     JS_ASSERT(runtime_->currentThreadOwnsOperationCallbackLock());
     ionStackLimit = limit;
 }
 
 inline JSRuntime *
 PerThreadData::runtimeFromMainThread()
 {
-    runtime_->assertValidThread();
+    JS_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
     return runtime_;
 }
 
 /************************************************************************/
 
 static JS_ALWAYS_INLINE void
 MakeRangeGCSafe(Value *vec, size_t len)
 {
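With the exported assert methods gone, ownerThread_ no longer needs public accessors; the header instead befriends the free function that performs the check. A sketch of that encapsulation pattern under hypothetical names:

    #include <cassert>

    namespace toy {

    class Runtime;
    bool CurrentThreadCanAccessRuntime(Runtime *rt);

    class Runtime {
        void *ownerThread_;   // private: readable only by the befriended checker
        friend bool CurrentThreadCanAccessRuntime(Runtime *rt);
      public:
        explicit Runtime(void *owner) : ownerThread_(owner) {}
    };

    void *CurrentThreadId() { static int token; return &token; }  // stand-in

    bool CurrentThreadCanAccessRuntime(Runtime *rt) {
        return rt->ownerThread_ == CurrentThreadId();
    }

    } // namespace toy

    int main() {
        toy::Runtime owned(toy::CurrentThreadId());
        toy::Runtime other(nullptr);
        assert(toy::CurrentThreadCanAccessRuntime(&owned));
        assert(!toy::CurrentThreadCanAccessRuntime(&other));
        return 0;
    }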
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1685,17 +1685,17 @@ CanUseDebugScopeMaps(JSContext *cx)
 
 DebugScopes *
 DebugScopes::ensureCompartmentData(JSContext *cx)
 {
     JSCompartment *c = cx->compartment();
     if (c->debugScopes)
         return c->debugScopes;
 
-    c->debugScopes = c->rt->new_<DebugScopes>(cx);
+    c->debugScopes = cx->runtime()->new_<DebugScopes>(cx);
     if (c->debugScopes && c->debugScopes->init())
         return c->debugScopes;
 
     js_ReportOutOfMemory(cx);
     return NULL;
 }
 
 DebugScopeObject *
@@ -2021,17 +2021,17 @@ DebugScopes::hasLiveFrame(ScopeObject &s
          *  1. GC starts, a suspended generator is not live
          *  2. hasLiveFrame returns a StackFrame* to the (soon to be dead)
          *     suspended generator
         *  3. stack frame values (which will never be marked) are read from the
          *     StackFrame
          *  4. GC completes, live objects may now point to values that weren't
          *     marked and thus may point to swept GC things
          */
-        if (JSGenerator *gen = frame.maybeSuspendedGenerator(scope.compartment()->rt))
+        if (JSGenerator *gen = frame.maybeSuspendedGenerator(scope.compartment()->runtimeFromMainThread()))
             JSObject::readBarrier(gen->obj);
 
         return frame;
     }
     return NullFramePtr();
 }
 
 /*****************************************************************************/
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -69,64 +69,64 @@ BaseShape::BaseShape(JSCompartment *comp
     this->clasp = clasp;
     this->parent = parent;
     this->metadata = metadata;
     this->flags = objectFlags;
     this->rawGetter = rawGetter;
     this->rawSetter = rawSetter;
     if ((attrs & JSPROP_GETTER) && rawGetter) {
         this->flags |= HAS_GETTER_OBJECT;
-        GetterSetterWriteBarrierPost(runtime(), &this->getterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->getterObj);
     }
     if ((attrs & JSPROP_SETTER) && rawSetter) {
         this->flags |= HAS_SETTER_OBJECT;
-        GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->setterObj);
     }
     this->compartment_ = comp;
 }
 
 inline
 BaseShape::BaseShape(const StackBaseShape &base)
 {
     mozilla::PodZero(this);
     this->clasp = base.clasp;
     this->parent = base.parent;
     this->metadata = base.metadata;
     this->flags = base.flags;
     this->rawGetter = base.rawGetter;
     this->rawSetter = base.rawSetter;
     if ((base.flags & HAS_GETTER_OBJECT) && base.rawGetter)
-        GetterSetterWriteBarrierPost(runtime(), &this->getterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->getterObj);
     if ((base.flags & HAS_SETTER_OBJECT) && base.rawSetter)
-        GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->setterObj);
     this->compartment_ = base.compartment;
 }
 
 inline BaseShape &
 BaseShape::operator=(const BaseShape &other)
 {
     clasp = other.clasp;
     parent = other.parent;
     metadata = other.metadata;
     flags = other.flags;
     slotSpan_ = other.slotSpan_;
     if (flags & HAS_GETTER_OBJECT) {
         getterObj = other.getterObj;
-        GetterSetterWriteBarrierPost(runtime(), &getterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &getterObj);
     } else {
         if (rawGetter)
-            GetterSetterWriteBarrierPostRemove(runtime(), &getterObj);
+            GetterSetterWriteBarrierPostRemove(runtimeFromMainThread(), &getterObj);
         rawGetter = other.rawGetter;
     }
     if (flags & HAS_SETTER_OBJECT) {
         setterObj = other.setterObj;
-        GetterSetterWriteBarrierPost(runtime(), &setterObj);
+        GetterSetterWriteBarrierPost(runtimeFromMainThread(), &setterObj);
     } else {
         if (rawSetter)
-            GetterSetterWriteBarrierPostRemove(runtime(), &setterObj);
+            GetterSetterWriteBarrierPostRemove(runtimeFromMainThread(), &setterObj);
         rawSetter = other.rawSetter;
     }
     compartment_ = other.compartment_;
     return *this;
 }
 
 inline
 StackBaseShape::StackBaseShape(ExclusiveContext *cx, Class *clasp,
@@ -367,17 +367,17 @@ EmptyShape::EmptyShape(UnownedBaseShape 
     if (!getObjectClass()->isNative())
         flags |= NON_NATIVE;
 }
 
 inline void
 Shape::writeBarrierPre(Shape *shape)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!shape || !shape->runtime()->needsBarrier())
+    if (!shape || !shape->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = shape->zone();
     if (zone->needsBarrier()) {
         Shape *tmp = shape;
         MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == shape);
     }
@@ -405,17 +405,17 @@ Shape::markChildren(JSTracer *trc)
     if (parent)
         MarkShape(trc, &parent, "parent");
 }
 
 inline void
 BaseShape::writeBarrierPre(BaseShape *base)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!base || !base->runtime()->needsBarrier())
+    if (!base || !base->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = base->zone();
     if (zone->needsBarrier()) {
         BaseShape *tmp = base;
         MarkBaseShapeUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == base);
     }
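Every barrier touched here follows the same incremental pre-barrier shape: before a reference is overwritten, the old target is marked if its zone is in the middle of incremental marking, so the snapshot the collector started from never loses an edge. A self-contained sketch of the idiom with toy types (the real code calls MarkShapeUnbarriered and friends on the zone's barrier tracer):

    #include <cassert>

    struct ToyZone {
        bool needsBarrier = false; // true while incremental marking is active
        int marked = 0;            // counts cells marked by the barrier
    };

    struct ToyCell {
        ToyZone *zone;
    };

    // Pre-barrier: run before overwriting a reference to `cell`.
    static void WriteBarrierPre(ToyCell *cell) {
        if (!cell || !cell->zone->needsBarrier)
            return;                // cheap path when not marking
        cell->zone->marked++;      // stand-in for marking through the tracer
    }

    int main() {
        ToyZone zone;
        ToyCell cell{ &zone };
        WriteBarrierPre(&cell);    // marking inactive: no work
        assert(zone.marked == 0);
        zone.needsBarrier = true;  // incremental marking started
        WriteBarrierPre(&cell);
        assert(zone.marked == 1);
        return 0;
    }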
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1264,17 +1264,18 @@ BaseShape::getUnowned(ExclusiveContext *
         return NULL;
 
     return nbase;
 }
 
 void
 JSCompartment::sweepBaseShapeTable()
 {
-    gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
+    gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
+                          gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
 
     if (baseShapes.initialized()) {
         for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
             UnownedBaseShape *base = e.front();
             if (IsBaseShapeAboutToBeFinalized(&base))
                 e.removeFront();
         }
     }
@@ -1461,17 +1462,18 @@ JSCompartment::markAllInitialShapeTableE
             e.rekeyFront(e.front().getLookup(), moved);
         }
     }
 }
 
 void
 JSCompartment::sweepInitialShapeTable()
 {
-    gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
+    gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
+                          gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
 
     if (initialShapes.initialized()) {
         for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
             const InitialShapeEntry &entry = e.front();
             Shape *shape = entry.shape;
             JSObject *proto = entry.proto.raw();
             if (IsShapeAboutToBeFinalized(&shape) || (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto))) {
                 e.removeFront();
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -100,17 +100,17 @@ StringWriteBarrierPostRemove(js::ThreadS
 }
 
 } /* namespace js */
 
 inline void
 JSString::writeBarrierPre(JSString *str)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!str || !str->runtime()->needsBarrier())
+    if (!str || !str->runtimeFromAnyThread()->needsBarrier())
         return;
 
     JS::Zone *zone = str->zone();
     if (zone->needsBarrier()) {
         JSString *tmp = str;
         MarkStringUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == str);
     }
--- a/js/src/vm/ThreadPool.cpp
+++ b/js/src/vm/ThreadPool.cpp
@@ -282,31 +282,30 @@ ThreadPool::terminateWorkers()
         js_delete(worker);
     }
 }
 
 bool
 ThreadPool::submitOne(JSContext *cx, TaskExecutor *executor)
 {
     JS_ASSERT(numWorkers() > 0);
-
-    runtime_->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
 
     if (!lazyStartWorkers(cx))
         return false;
 
     // Find next worker in round-robin fashion.
     size_t id = JS_ATOMIC_INCREMENT(&nextId_) % numWorkers();
     return workers_[id]->submit(executor);
 }
 
 bool
 ThreadPool::submitAll(JSContext *cx, TaskExecutor *executor)
 {
-    runtime_->assertValidThread();
+    JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
 
     if (!lazyStartWorkers(cx))
         return false;
 
     for (size_t id = 0; id < numWorkers(); id++) {
         if (!workers_[id]->submit(executor))
             return false;
     }
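Only the thread-access assertions change in submitOne/submitAll; the scheduling itself stays a round-robin pick driven by an atomic counter. A minimal sketch of that idiom (ToyPool is hypothetical; the real code uses JS_ATOMIC_INCREMENT rather than std::atomic):

    #include <atomic>
    #include <cassert>
    #include <cstddef>

    struct ToyPool {
        std::atomic<std::size_t> nextId_;
        const std::size_t numWorkers_;

        explicit ToyPool(std::size_t n) : nextId_(0), numWorkers_(n) {}

        // Each call rotates to the next worker; the atomic increment keeps
        // the rotation consistent under concurrent submitters.
        std::size_t pickWorker() {
            return nextId_.fetch_add(1) % numWorkers_;
        }
    };

    int main() {
        ToyPool pool(3);
        assert(pool.pickWorker() == 0);
        assert(pool.pickWorker() == 1);
        assert(pool.pickWorker() == 2);
        assert(pool.pickWorker() == 0); // wraps around
        return 0;
    }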
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -279,17 +279,17 @@ ArrayBufferObject::allocateSlots(JSConte
     return true;
 }
 
 static inline void
 PostBarrierTypedArrayObject(JSObject *obj)
 {
 #ifdef JSGC_GENERATIONAL
     JS_ASSERT(obj);
-    JSRuntime *rt = obj->runtime();
+    JSRuntime *rt = obj->runtimeFromMainThread();
     if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
         rt->gcStoreBuffer.putWholeCell(obj);
 #endif
 }
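PostBarrierTypedArrayObject is a generational post-barrier: after a write it records the object as a whole cell in the store buffer, unless the heap is busy or the object already lives in the nursery (nursery objects get scanned regardless). A toy model of that filter with hypothetical types:

    #include <cassert>
    #include <vector>

    struct ToyObject { bool inNursery; };

    struct ToyRuntime {
        bool heapBusy = false;
        std::vector<ToyObject *> storeBuffer; // stand-in for gcStoreBuffer

        // Remember tenured objects that may now hold nursery pointers.
        void postBarrier(ToyObject *obj) {
            if (!heapBusy && !obj->inNursery)
                storeBuffer.push_back(obj);
        }
    };

    int main() {
        ToyRuntime rt;
        ToyObject tenured{ false }, young{ true };
        rt.postBarrier(&tenured);
        rt.postBarrier(&young);               // filtered: already in the nursery
        assert(rt.storeBuffer.size() == 1);
        rt.heapBusy = true;
        rt.postBarrier(&tenured);             // filtered: GC in progress
        assert(rt.storeBuffer.size() == 1);
        return 0;
    }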
 
 // The list of views must be stored somewhere in the ArrayBufferObject, but
 // the slots are already being used for the element storage and the private
 // field is used for a delegate object. The ObjectElements header has space
@@ -320,17 +320,17 @@ InitViewList(ArrayBufferObject *obj, Arr
 {
     reinterpret_cast<OldObjectRepresentationHack*>(obj->getElementsHeader())->views.init(viewsHead);
     PostBarrierTypedArrayObject(obj);
 }
 
 static EncapsulatedPtr<ArrayBufferViewObject> &
 GetViewListRef(ArrayBufferObject *obj)
 {
-    JS_ASSERT(obj->runtime()->isHeapBusy());
+    JS_ASSERT(obj->runtimeFromMainThread()->isHeapBusy());
     return reinterpret_cast<OldObjectRepresentationHack*>(obj->getElementsHeader())->views;
 }
 
 void
 ArrayBufferObject::changeContents(JSContext *maybecx, ObjectElements *newHeader)
 {
    // Grab our data before invalidating it.
    uint32_t byteLengthCopy = byteLength();