Bug 938950 - Don't provide full access to compartment/zone/runtime during Ion compilation, r=jandem. CLOBBER
author Brian Hackett <bhackett1024@gmail.com>
Tue, 19 Nov 2013 08:37:52 -0700
changeset 156390 4df4f9fff8c57d95f097ff01c9417f9afab8a52c
parent 156389 7c50dd0a345f81a534eebb77d88528bdcf7db79f
child 156391 5ce118a0b0b56c829fbd7e82d4da07ab9ac3105e
push id 25678
push user ryanvm@gmail.com
push date Wed, 20 Nov 2013 03:26:13 +0000
reviewers jandem
bugs 938950
milestone 28.0a1
Bug 938950 - Don't provide full access to compartment/zone/runtime during Ion compilation, r=jandem. CLOBBER
js/src/gc/Zone.h
js/src/jit/AsmJS.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/CompilerRoot.h
js/src/jit/Ion.cpp
js/src/jit/Ion.h
js/src/jit/IonAnalysis.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonBuilder.h
js/src/jit/IonMacroAssembler.cpp
js/src/jit/IonMacroAssembler.h
js/src/jit/IonSpewer.cpp
js/src/jit/JitCompartment.h
js/src/jit/Lowering.cpp
js/src/jit/MCallOptimize.cpp
js/src/jit/MIR.cpp
js/src/jit/MIR.h
js/src/jit/MIRGenerator.h
js/src/jit/MIRGraph.cpp
js/src/jit/arm/CodeGenerator-arm.cpp
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/shared/Assembler-shared.h
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/x64/CodeGenerator-x64.cpp
js/src/jit/x64/MacroAssembler-x64.h
js/src/jit/x86/CodeGenerator-x86.cpp
js/src/jit/x86/MacroAssembler-x86.h
js/src/jsapi.cpp
js/src/jsatominlines.h
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsworkers.cpp
js/src/moz.build
js/src/vm/ForkJoin.cpp
js/src/vm/Interpreter-inl.h
js/src/vm/Runtime.h
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -123,17 +123,17 @@ struct Zone : public JS::shadow::Zone,
 
     enum ShouldUpdateIon {
         DontUpdateIon,
         UpdateIon
     };
 
     void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);
 
-    const bool *AddressOfNeedsBarrier() const {
+    const bool *addressOfNeedsBarrier() const {
         return &needsBarrier_;
     }
 
   public:
     enum CompartmentGCState {
         NoGC,
         Mark,
         MarkGray,
--- a/js/src/jit/AsmJS.cpp
+++ b/js/src/jit/AsmJS.cpp
@@ -1929,17 +1929,17 @@ class FunctionCompiler
     {
         JS_ASSERT(locals_.count() == argTypes.length() + varInitializers_.length());
 
         alloc_  = lifo_.new_<TempAllocator>(&lifo_);
         ionContext_.construct(m_.cx(), alloc_);
 
         graph_  = lifo_.new_<MIRGraph>(alloc_);
         info_   = lifo_.new_<CompileInfo>(locals_.count(), SequentialExecution);
-        mirGen_ = lifo_.new_<MIRGenerator>(cx()->compartment(), alloc_, graph_, info_);
+        mirGen_ = lifo_.new_<MIRGenerator>(CompileCompartment::get(cx()->compartment()), alloc_, graph_, info_);
 
         if (!newBlock(/* pred = */ nullptr, &curBlock_, fn_))
             return false;
 
         curBlock_->add(MAsmJSCheckOverRecursed::New(alloc(), &m_.stackOverflowLabel()));
 
         for (ABIArgTypeIter i = argTypes; !i.done(); i++) {
             MAsmJSParameter *ins = MAsmJSParameter::New(alloc(), *i, i.mirType());
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -9209,17 +9209,17 @@ ICInstanceOf_Fallback::Compiler::generat
 //
 
 static bool
 DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, HandleValue val,
                  MutableHandleValue res)
 {
     FallbackICSpew(cx, stub, "TypeOf");
     JSType type = js::TypeOfValue(val);
-    RootedString string(cx, TypeName(type, cx->runtime()));
+    RootedString string(cx, TypeName(type, cx->runtime()->atomState));
 
     res.setString(string);
 
     JS_ASSERT(type != JSTYPE_NULL);
     if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
         // Create a new TypeOf stub.
         IonSpew(IonSpew_BaselineIC, "  Generating TypeOf stub for JSType (%d)", (int) type);
         ICTypeOf_Typed::Compiler compiler(cx, type, string);
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -744,17 +744,19 @@ BaselineScript::pcForReturnAddress(JSScr
 void
 BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
 {
     JS_ASSERT(script->baselineScript() == this);
 
     SrcNoteLineScanner scanner(script->notes(), script->lineno);
 
     JSRuntime *rt = script->runtimeFromMainThread();
-    IonContext ictx(rt, script->compartment(), nullptr);
+    IonContext ictx(CompileRuntime::get(rt),
+                    CompileCompartment::get(script->compartment()),
+                    nullptr);
     AutoFlushCache afc("DebugTraps", rt->jitRuntime());
 
     for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
         PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
 
         CompactBufferReader reader(pcMappingReader(i));
         jsbytecode *curPC = script->code + entry.pcOffset;
         uint32_t nativeOffset = entry.nativeOffset;
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -707,17 +707,17 @@ CodeGenerator::visitIntToString(LIntToSt
     OutOfLineCode *ool = oolCallVM(IntToStringInfo, lir, (ArgList(), input),
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
     masm.branch32(Assembler::AboveOrEqual, input, Imm32(StaticStrings::INT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
+    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().intStaticTable), output);
     masm.loadPtr(BaseIndex(output, input, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 typedef JSString *(*DoubleToStringFn)(ThreadSafeContext *, double);
 typedef JSString *(*DoubleToStringParFn)(ForkJoinSlice *, double);
@@ -736,17 +736,17 @@ CodeGenerator::visitDoubleToString(LDoub
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
     masm.convertDoubleToInt32(input, temp, ool->entry(), true);
     masm.branch32(Assembler::AboveOrEqual, temp, Imm32(StaticStrings::INT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
+    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().intStaticTable), output);
     masm.loadPtr(BaseIndex(output, temp, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 typedef JSObject *(*CloneRegExpObjectFn)(JSContext *, JSObject *, JSObject *);
 static const VMFunction CloneRegExpObjectInfo =
@@ -793,17 +793,16 @@ CodeGenerator::visitLambda(LLambda *lir)
     Register output = ToRegister(lir->output());
     const LambdaFunctionInfo &info = lir->mir()->info();
 
     OutOfLineCode *ool = oolCallVM(LambdaInfo, lir, (ArgList(), ImmGCPtr(info.fun), scopeChain),
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
-    JS_ASSERT(gen->compartment == info.fun->compartment());
     JS_ASSERT(!info.singletonType);
 
     masm.newGCThing(output, info.fun, ool->entry());
     masm.initGCThing(output, info.fun);
 
     emitLambdaInit(output, scopeChain, info);
 
     masm.bind(ool->rejoin());
@@ -1579,17 +1578,17 @@ CodeGenerator::visitOutOfLineCallPostWri
 bool
 CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
 {
 #ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
-    Nursery &nursery = GetIonContext()->runtime->gcNursery;
+    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
 
     if (lir->object()->isConstant()) {
         JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
     } else {
         Label tenured;
         Register objreg = ToRegister(lir->object());
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.heapEnd()), ool->rejoin());
@@ -1611,17 +1610,17 @@ CodeGenerator::visitPostWriteBarrierV(LP
 #ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
     ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
     masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
 
-    Nursery &nursery = GetIonContext()->runtime->gcNursery;
+    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
 
     if (lir->object()->isConstant()) {
         JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
     } else {
         Label tenured;
         Register objreg = ToRegister(lir->object());
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
         masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.heapEnd()), ool->rejoin());
@@ -1640,17 +1639,17 @@ CodeGenerator::visitPostWriteBarrierV(LP
 bool
 CodeGenerator::visitPostWriteBarrierAllSlots(LPostWriteBarrierAllSlots *lir)
 {
 #ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
     if (!addOutOfLineCode(ool))
         return false;
 
-    Nursery &nursery = GetIonContext()->runtime->gcNursery;
+    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
 
     if (lir->object()->isConstant()) {
         JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
         return true;
     }
 
     Register objreg = ToRegister(lir->object());
     masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), ool->entry());
@@ -2494,21 +2493,19 @@ CodeGenerator::visitCheckOverRecursed(LC
     // This is a weak check, justified by Ion using the C stack: we must always
     // be some distance away from the actual limit, since if the limit is
     // crossed, an error must be thrown, which requires more frames.
     //
     // It must always be possible to trespass past the stack limit.
     // Ion may legally place frames very close to the limit. Calling additional
     // C functions may then violate the limit without any checking.
 
-    JSRuntime *rt = GetIonContext()->runtime;
-
     // Since Ion frames exist on the C stack, the stack limit may be
     // dynamically set by JS_SetThreadStackLimit() and JS_SetNativeStackQuota().
-    uintptr_t *limitAddr = &rt->mainThread.ionStackLimit;
+    const void *limitAddr = GetIonContext()->runtime->addressOfIonStackLimit();
 
     CheckOverRecursedFailure *ool = new CheckOverRecursedFailure(lir);
     if (!addOutOfLineCode(ool))
         return false;
 
     // Conditional forward (unlikely) branch to failure.
     masm.branchPtr(Assembler::AboveOrEqual, AbsoluteAddress(limitAddr), StackPointer, ool->entry());
     masm.bind(ool->rejoin());
@@ -3861,17 +3858,17 @@ CodeGenerator::visitRandom(LRandom *ins)
 
 bool
 CodeGenerator::visitMathFunctionD(LMathFunctionD *ins)
 {
     Register temp = ToRegister(ins->temp());
     FloatRegister input = ToFloatRegister(ins->input());
     JS_ASSERT(ToFloatRegister(ins->output()) == ReturnFloatReg);
 
-    MathCache *mathCache = ins->mir()->cache();
+    const MathCache *mathCache = ins->mir()->cache();
 
     masm.setupUnalignedABICall(mathCache ? 2 : 1, temp);
     if (mathCache) {
         masm.movePtr(ImmPtr(mathCache), temp);
         masm.passABIArg(temp);
     }
     masm.passABIArg(input);
 
@@ -4415,17 +4412,17 @@ bool
 CodeGenerator::emitConcat(LInstruction *lir, Register lhs, Register rhs, Register output)
 {
     OutOfLineCode *ool = oolCallVM(ConcatStringsInfo, lir, (ArgList(), lhs, rhs),
                                    StoreRegisterTo(output));
     if (!ool)
         return false;
 
     ExecutionMode mode = gen->info().executionMode();
-    IonCode *stringConcatStub = gen->jitCompartment()->stringConcatStub(mode);
+    IonCode *stringConcatStub = gen->compartment->jitCompartment()->stringConcatStub(mode);
     masm.call(stringConcatStub);
     masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
@@ -4685,17 +4682,17 @@ CodeGenerator::visitFromCharCode(LFromCh
     OutOfLineCode *ool = oolCallVM(StringFromCharCodeInfo, lir, (ArgList(), code), StoreRegisterTo(output));
     if (!ool)
         return false;
 
     // OOL path if code >= UNIT_STATIC_LIMIT.
     masm.branch32(Assembler::AboveOrEqual, code, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                   ool->entry());
 
-    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.unitStaticTable), output);
+    masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().unitStaticTable), output);
     masm.loadPtr(BaseIndex(output, code, ScalePointer), output);
 
     masm.bind(ool->rejoin());
     return true;
 }
 
 typedef JSObject *(*StringSplitFn)(JSContext *, HandleTypeObject, HandleString, HandleString);
 static const VMFunction StringSplitInfo = FunctionInfo<StringSplitFn>(js::str_split_string);
@@ -5340,17 +5337,17 @@ CodeGenerator::visitIteratorStart(LItera
     const Register temp1 = ToRegister(lir->temp1());
     const Register temp2 = ToRegister(lir->temp2());
     const Register niTemp = ToRegister(lir->temp3()); // Holds the NativeIterator object.
 
     // Iterators other than for-in should use LCallIteratorStart.
     JS_ASSERT(flags == JSITER_ENUMERATE);
 
     // Fetch the most recent iterator and ensure it's not nullptr.
-    masm.loadPtr(AbsoluteAddress(&GetIonContext()->runtime->nativeIterCache.last), output);
+    masm.loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfLastCachedNativeIterator()), output);
     masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
 
     // Load NativeIterator.
     masm.loadObjPrivate(output, JSObject::ITER_CLASS_NFIXED_SLOTS, niTemp);
 
     // Ensure the |active| and |unreusable| bits are not set.
     masm.branchTest32(Assembler::NonZero, Address(niTemp, offsetof(NativeIterator, flags)),
                       Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE), ool->entry());
@@ -5401,17 +5398,17 @@ CodeGenerator::visitIteratorStart(LItera
 #endif // !JSGC_GENERATIONAL
     }
 
     // Mark iterator as active.
     masm.storePtr(obj, Address(niTemp, offsetof(NativeIterator, obj)));
     masm.or32(Imm32(JSITER_ACTIVE), Address(niTemp, offsetof(NativeIterator, flags)));
 
     // Chain onto the active iterator stack.
-    masm.loadPtr(AbsoluteAddress(&gen->compartment->enumerators), temp1);
+    masm.loadPtr(AbsoluteAddress(gen->compartment->addressOfEnumerators()), temp1);
 
     // ni->next = list
     masm.storePtr(temp1, Address(niTemp, NativeIterator::offsetOfNext()));
 
     // ni->prev = list->prev
     masm.loadPtr(Address(temp1, NativeIterator::offsetOfPrev()), temp2);
     masm.storePtr(temp2, Address(niTemp, NativeIterator::offsetOfPrev()));
 
@@ -6728,63 +6725,63 @@ class OutOfLineTypeOfV : public OutOfLin
 
 bool
 CodeGenerator::visitTypeOfV(LTypeOfV *lir)
 {
     const ValueOperand value = ToValue(lir, LTypeOfV::Input);
     Register output = ToRegister(lir->output());
     Register tag = masm.splitTagForTest(value);
 
-    JSRuntime *rt = GetIonContext()->runtime;
+    const JSAtomState &names = GetIonContext()->runtime->names();
     Label done;
 
     OutOfLineTypeOfV *ool = nullptr;
     if (lir->mir()->inputMaybeCallableOrEmulatesUndefined()) {
         // The input may be a callable object (result is "function") or may
         // emulate undefined (result is "undefined"). Use an OOL path.
         ool = new OutOfLineTypeOfV(lir);
         if (!addOutOfLineCode(ool))
             return false;
 
         masm.branchTestObject(Assembler::Equal, tag, ool->entry());
     } else {
         // Input is not callable and does not emulate undefined, so if
         // it's an object the result is always "object".
         Label notObject;
         masm.branchTestObject(Assembler::NotEqual, tag, &notObject);
-        masm.movePtr(ImmGCPtr(rt->atomState.object), output);
+        masm.movePtr(ImmGCPtr(names.object), output);
         masm.jump(&done);
         masm.bind(&notObject);
     }
 
     Label notNumber;
     masm.branchTestNumber(Assembler::NotEqual, tag, &notNumber);
-    masm.movePtr(ImmGCPtr(rt->atomState.number), output);
+    masm.movePtr(ImmGCPtr(names.number), output);
     masm.jump(&done);
     masm.bind(&notNumber);
 
     Label notUndefined;
     masm.branchTestUndefined(Assembler::NotEqual, tag, &notUndefined);
-    masm.movePtr(ImmGCPtr(rt->atomState.undefined), output);
+    masm.movePtr(ImmGCPtr(names.undefined), output);
     masm.jump(&done);
     masm.bind(&notUndefined);
 
     Label notNull;
     masm.branchTestNull(Assembler::NotEqual, tag, &notNull);
-    masm.movePtr(ImmGCPtr(rt->atomState.object), output);
+    masm.movePtr(ImmGCPtr(names.object), output);
     masm.jump(&done);
     masm.bind(&notNull);
 
     Label notBoolean;
     masm.branchTestBoolean(Assembler::NotEqual, tag, &notBoolean);
-    masm.movePtr(ImmGCPtr(rt->atomState.boolean), output);
+    masm.movePtr(ImmGCPtr(names.boolean), output);
     masm.jump(&done);
     masm.bind(&notBoolean);
 
-    masm.movePtr(ImmGCPtr(rt->atomState.string), output);
+    masm.movePtr(ImmGCPtr(names.string), output);
 
     masm.bind(&done);
     if (ool)
         masm.bind(ool->rejoin());
     return true;
 }
 
 bool
new file mode 100644
--- /dev/null
+++ b/js/src/jit/CompileWrappers.cpp
@@ -0,0 +1,230 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "jit/Ion.h"
+
+using namespace js;
+using namespace js::jit;
+
+JSRuntime *
+CompileRuntime::runtime()
+{
+    return reinterpret_cast<JSRuntime *>(this);
+}
+
+/* static */ CompileRuntime *
+CompileRuntime::get(JSRuntime *rt)
+{
+    return reinterpret_cast<CompileRuntime *>(rt);
+}
+
+bool
+CompileRuntime::onMainThread()
+{
+    return js::CurrentThreadCanAccessRuntime(runtime());
+}
+
+const void *
+CompileRuntime::addressOfIonTop()
+{
+    return &runtime()->mainThread.ionTop;
+}
+
+const void *
+CompileRuntime::addressOfIonStackLimit()
+{
+    return &runtime()->mainThread.ionStackLimit;
+}
+
+const void *
+CompileRuntime::addressOfJSContext()
+{
+    return &runtime()->mainThread.ionJSContext;
+}
+
+const void *
+CompileRuntime::addressOfActivation()
+{
+    return runtime()->mainThread.addressOfActivation();
+}
+
+const void *
+CompileRuntime::addressOfLastCachedNativeIterator()
+{
+    return &runtime()->nativeIterCache.last;
+}
+
+#ifdef JS_GC_ZEAL
+const void *
+CompileRuntime::addressOfGCZeal()
+{
+    return &runtime()->gcZeal_;
+}
+#endif
+
+const void *
+CompileRuntime::addressOfInterrupt()
+{
+    return &runtime()->interrupt;
+}
+
+const JitRuntime *
+CompileRuntime::jitRuntime()
+{
+    return runtime()->jitRuntime();
+}
+
+SPSProfiler &
+CompileRuntime::spsProfiler()
+{
+    return runtime()->spsProfiler;
+}
+
+bool
+CompileRuntime::signalHandlersInstalled()
+{
+    return runtime()->signalHandlersInstalled();
+}
+
+bool
+CompileRuntime::jitSupportsFloatingPoint()
+{
+    return runtime()->jitSupportsFloatingPoint;
+}
+
+bool
+CompileRuntime::hadOutOfMemory()
+{
+    return runtime()->hadOutOfMemory;
+}
+
+const JSAtomState &
+CompileRuntime::names()
+{
+    return runtime()->atomState;
+}
+
+const StaticStrings &
+CompileRuntime::staticStrings()
+{
+    return runtime()->staticStrings;
+}
+
+const Value &
+CompileRuntime::NaNValue()
+{
+    return runtime()->NaNValue;
+}
+
+const Value &
+CompileRuntime::positiveInfinityValue()
+{
+    return runtime()->positiveInfinityValue;
+}
+
+bool
+CompileRuntime::isInsideNursery(gc::Cell *cell)
+{
+    return UninlinedIsInsideNursery(runtime(), cell);
+}
+
+const DOMCallbacks *
+CompileRuntime::DOMcallbacks()
+{
+    return GetDOMCallbacks(runtime());
+}
+
+const MathCache *
+CompileRuntime::maybeGetMathCache()
+{
+    return runtime()->maybeGetMathCache();
+}
+
+#ifdef JSGC_GENERATIONAL
+const Nursery &
+CompileRuntime::gcNursery()
+{
+    return runtime()->gcNursery;
+}
+#endif
+
+Zone *
+CompileZone::zone()
+{
+    return reinterpret_cast<Zone *>(this);
+}
+
+/* static */ CompileZone *
+CompileZone::get(Zone *zone)
+{
+    return reinterpret_cast<CompileZone *>(zone);
+}
+
+const void *
+CompileZone::addressOfNeedsBarrier()
+{
+    return zone()->addressOfNeedsBarrier();
+}
+
+const void *
+CompileZone::addressOfFreeListFirst(gc::AllocKind allocKind)
+{
+    return &zone()->allocator.arenas.getFreeList(allocKind)->first;
+}
+
+const void *
+CompileZone::addressOfFreeListLast(gc::AllocKind allocKind)
+{
+    return &zone()->allocator.arenas.getFreeList(allocKind)->last;
+}
+
+JSCompartment *
+CompileCompartment::compartment()
+{
+    return reinterpret_cast<JSCompartment *>(this);
+}
+
+/* static */ CompileCompartment *
+CompileCompartment::get(JSCompartment *comp)
+{
+    return reinterpret_cast<CompileCompartment *>(comp);
+}
+
+CompileZone *
+CompileCompartment::zone()
+{
+    return CompileZone::get(compartment()->zone());
+}
+
+CompileRuntime *
+CompileCompartment::runtime()
+{
+    return CompileRuntime::get(compartment()->runtimeFromAnyThread());
+}
+
+const void *
+CompileCompartment::addressOfEnumerators()
+{
+    return &compartment()->enumerators;
+}
+
+const CallsiteCloneTable &
+CompileCompartment::callsiteClones()
+{
+    return compartment()->callsiteClones;
+}
+
+const JitCompartment *
+CompileCompartment::jitCompartment()
+{
+    return compartment()->jitCompartment();
+}
+
+bool
+CompileCompartment::hasObjectMetadataCallback()
+{
+    return compartment()->hasObjectMetadataCallback();
+}
new file mode 100644
--- /dev/null
+++ b/js/src/jit/CompileWrappers.h
@@ -0,0 +1,119 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jit_CompileWrappers_h
+#define jit_CompileWrappers_h
+
+#ifdef JS_ION
+
+#include "jscntxt.h"
+
+namespace js {
+namespace jit {
+
+class JitRuntime;
+
+// During Ion compilation we need access to various bits of the current
+// compartment, runtime and so forth. However, since compilation can run off
+// thread while the main thread is actively mutating the VM, this access needs
+// to be restricted. The classes below give the compiler an interface to access
+// all necessary information in a threadsafe fashion.
+
+class CompileRuntime
+{
+    JSRuntime *runtime();
+
+  public:
+    static CompileRuntime *get(JSRuntime *rt);
+
+    bool onMainThread();
+
+    // &mainThread.ionTop
+    const void *addressOfIonTop();
+
+    // rt->mainThread.ionStackLimit;
+    const void *addressOfIonStackLimit();
+
+    // &mainThread.ionJSContext
+    const void *addressOfJSContext();
+
+    // &mainThread.activation_
+    const void *addressOfActivation();
+
+    // &GetIonContext()->runtime->nativeIterCache.last
+    const void *addressOfLastCachedNativeIterator();
+
+#ifdef JS_GC_ZEAL
+    const void *addressOfGCZeal();
+#endif
+
+    const void *addressOfInterrupt();
+
+    const JitRuntime *jitRuntime();
+
+    // Compilation does not occur off thread when the SPS profiler is enabled.
+    SPSProfiler &spsProfiler();
+
+    bool signalHandlersInstalled();
+    bool jitSupportsFloatingPoint();
+    bool hadOutOfMemory();
+
+    const JSAtomState &names();
+    const StaticStrings &staticStrings();
+    const Value &NaNValue();
+    const Value &positiveInfinityValue();
+
+    bool isInsideNursery(gc::Cell *cell);
+
+    // DOM callbacks must be threadsafe (and will hopefully be removed soon).
+    const DOMCallbacks *DOMcallbacks();
+
+    const MathCache *maybeGetMathCache();
+
+#ifdef JSGC_GENERATIONAL
+    const Nursery &gcNursery();
+#endif
+};
+
+class CompileZone
+{
+    Zone *zone();
+
+  public:
+    static CompileZone *get(Zone *zone);
+
+    const void *addressOfNeedsBarrier();
+
+    // allocator.arenas.getFreeList(allocKind)
+    const void *addressOfFreeListFirst(gc::AllocKind allocKind);
+    const void *addressOfFreeListLast(gc::AllocKind allocKind);
+};
+
+class CompileCompartment
+{
+    JSCompartment *compartment();
+
+  public:
+    static CompileCompartment *get(JSCompartment *comp);
+
+    CompileZone *zone();
+    CompileRuntime *runtime();
+
+    const void *addressOfEnumerators();
+
+    const CallsiteCloneTable &callsiteClones();
+
+    const JitCompartment *jitCompartment();
+
+    bool hasObjectMetadataCallback();
+};
+
+} // namespace jit
+} // namespace js
+
+#endif // JS_ION
+
+#endif // jit_CompileWrappers_h
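
(Illustrative sketch only, not part of the patch: compiler-side code is expected to consult the read-only wrappers above instead of dereferencing JSRuntime/JSCompartment directly. Every accessor used below appears in CompileWrappers.h/.cpp in this changeset; the helper function name is hypothetical.)

    #include "jit/CompileWrappers.h"

    using namespace js;
    using namespace js::jit;

    // Sketch: how compilation code obtains threadsafe views of VM state.
    static void ExampleWrapperUse(JSRuntime *rt, JSCompartment *comp)
    {
        CompileRuntime *runtime = CompileRuntime::get(rt);
        CompileCompartment *compartment = CompileCompartment::get(comp);

        // Raw addresses that may be baked into jitcode (via AbsoluteAddress/ImmPtr).
        const void *interruptAddr = runtime->addressOfInterrupt();
        const void *enumeratorsAddr = compartment->addressOfEnumerators();

        // Read-only views of runtime state that are safe to inspect off thread.
        const JSAtomState &names = runtime->names();
        const JitRuntime *jrt = runtime->jitRuntime();

        (void) interruptAddr; (void) enumeratorsAddr; (void) names; (void) jrt;
    }
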
--- a/js/src/jit/CompilerRoot.h
+++ b/js/src/jit/CompilerRoot.h
@@ -24,17 +24,17 @@ namespace jit {
 template <typename T>
 class CompilerRoot : public CompilerRootNode
 {
   public:
     CompilerRoot(T ptr)
       : CompilerRootNode(nullptr)
     {
         if (ptr) {
-            JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->runtime, ptr));
+            JS_ASSERT(!GetIonContext()->runtime->isInsideNursery(ptr));
             setRoot(ptr);
         }
     }
 
   public:
     // Sets the pointer and inserts into root list. The pointer becomes read-only.
     void setRoot(T root) {
         CompilerRootNode *&rootList = GetIonContext()->temp->rootList();
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -98,53 +98,53 @@ jit::GetIonContext()
 
 IonContext *
 jit::MaybeGetIonContext()
 {
     return CurrentIonContext();
 }
 
 IonContext::IonContext(JSContext *cx, TempAllocator *temp)
-  : runtime(cx->runtime()),
-    cx(cx),
-    compartment(cx->compartment()),
+  : cx(cx),
     temp(temp),
+    runtime(CompileRuntime::get(cx->runtime())),
+    compartment(CompileCompartment::get(cx->compartment())),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
 
 IonContext::IonContext(ExclusiveContext *cx, TempAllocator *temp)
-  : runtime(cx->runtime_),
-    cx(nullptr),
+  : cx(nullptr),
+    temp(temp),
+    runtime(CompileRuntime::get(cx->runtime_)),
     compartment(nullptr),
-    temp(temp),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
 
-IonContext::IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp)
-  : runtime(rt),
-    cx(nullptr),
+IonContext::IonContext(CompileRuntime *rt, CompileCompartment *comp, TempAllocator *temp)
+  : cx(nullptr),
+    temp(temp),
+    runtime(rt),
     compartment(comp),
-    temp(temp),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
 
-IonContext::IonContext(JSRuntime *rt)
-  : runtime(rt),
-    cx(nullptr),
+IonContext::IonContext(CompileRuntime *rt)
+  : cx(nullptr),
+    temp(nullptr),
+    runtime(rt),
     compartment(nullptr),
-    temp(nullptr),
     prev_(CurrentIonContext()),
     assemblerCount_(0)
 {
     SetIonContext(this);
 }
 
 IonContext::~IonContext()
 {
@@ -584,24 +584,24 @@ JitCompartment::sweep(FreeOp *fop)
     if (stringConcatStub_ && !IsIonCodeMarked(stringConcatStub_.unsafeGet()))
         stringConcatStub_ = nullptr;
 
     if (parallelStringConcatStub_ && !IsIonCodeMarked(parallelStringConcatStub_.unsafeGet()))
         parallelStringConcatStub_ = nullptr;
 }
 
 IonCode *
-JitRuntime::getBailoutTable(const FrameSizeClass &frameClass)
+JitRuntime::getBailoutTable(const FrameSizeClass &frameClass) const
 {
     JS_ASSERT(frameClass != FrameSizeClass::None());
     return bailoutTables_[frameClass.classId()];
 }
 
 IonCode *
-JitRuntime::getVMWrapper(const VMFunction &f)
+JitRuntime::getVMWrapper(const VMFunction &f) const
 {
     JS_ASSERT(functionWrappers_);
     JS_ASSERT(functionWrappers_->initialized());
     JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
     JS_ASSERT(p);
 
     return p->value;
 }
@@ -1072,17 +1072,17 @@ IonScript::purgeCaches(Zone *zone)
     // inline jump could overwrite an invalidation marker. These ICs can
     // no longer run, however, the IC slow paths may be active on the stack.
     // ICs therefore are required to check for invalidation before patching,
     // to ensure the same invariant.
     if (invalidated())
         return;
 
     JSRuntime *rt = zone->runtimeFromMainThread();
-    IonContext ictx(rt);
+    IonContext ictx(CompileRuntime::get(rt));
     AutoFlushCache afc("purgeCaches", rt->jitRuntime());
     for (size_t i = 0; i < numCaches(); i++)
         getCacheFromIndex(i).reset();
 }
 
 void
 IonScript::destroyCaches()
 {
@@ -1129,17 +1129,17 @@ IonScript::destroyBackedges(JSRuntime *r
 
 void
 jit::ToggleBarriers(JS::Zone *zone, bool needs)
 {
     JSRuntime *rt = zone->runtimeFromMainThread();
     if (!rt->hasJitRuntime())
         return;
 
-    IonContext ictx(rt);
+    IonContext ictx(CompileRuntime::get(rt));
     AutoFlushCache afc("ToggleBarriers", rt->jitRuntime());
     for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         if (script->hasIonScript())
             script->ionScript()->toggleBarriers(needs);
         if (script->hasBaselineScript())
             script->baselineScript()->toggleBarriers(needs);
     }
@@ -1639,17 +1639,19 @@ IonCompile(JSContext *cx, JSScript *scri
 
     AutoFlushCache afc("IonCompile", cx->runtime()->jitRuntime());
 
     AutoTempAllocatorRooter root(cx, temp);
     types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(*temp);
     if (!constraints)
         return AbortReason_Alloc;
 
-    IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr, cx->compartment(), temp, graph, constraints,
+    IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr,
+                                                  CompileCompartment::get(cx->compartment()),
+                                                  temp, graph, constraints,
                                                   &inspector, info, baselineFrame);
     if (!builder)
         return AbortReason_Alloc;
 
     JS_ASSERT(!GetIonScript(builder->script(), executionMode));
     JS_ASSERT(CanIonCompile(builder->script(), executionMode));
 
     RootedScript builderScript(cx, builder->script());
@@ -2361,17 +2363,17 @@ jit::InvalidateAll(FreeOp *fop, Zone *zo
         if (!comp->jitCompartment())
             continue;
         CancelOffThreadIonCompile(comp, nullptr);
         FinishAllOffThreadCompilations(comp->jitCompartment());
     }
 
     for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter) {
         if (iter.activation()->compartment()->zone() == zone) {
-            IonContext ictx(fop->runtime());
+            IonContext ictx(CompileRuntime::get(fop->runtime()));
             AutoFlushCache afc("InvalidateAll", fop->runtime()->jitRuntime());
             IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
             InvalidateActivation(fop, iter.jitTop(), true);
         }
     }
 }
 
 
@@ -2607,17 +2609,17 @@ jit::UsesBeforeIonRecompile(JSScript *sc
     JS_ASSERT(loopDepth > 0);
     return minUses + loopDepth * 100;
 }
 
 void
 AutoFlushCache::updateTop(uintptr_t p, size_t len)
 {
     IonContext *ictx = MaybeGetIonContext();
-    JitRuntime *jrt = (ictx != nullptr) ? ictx->runtime->jitRuntime() : nullptr;
+    JitRuntime *jrt = (ictx != nullptr) ? const_cast<JitRuntime *>(ictx->runtime->jitRuntime()) : nullptr;
     if (!jrt || !jrt->flusher())
         JSC::ExecutableAllocator::cacheFlush((void*)p, len);
     else
         jrt->flusher()->update(p, len);
 }
 
 AutoFlushCache::AutoFlushCache(const char *nonce, JitRuntime *rt)
   : start_(0),
--- a/js/src/jit/Ion.h
+++ b/js/src/jit/Ion.h
@@ -10,16 +10,17 @@
 #ifdef JS_ION
 
 #include "mozilla/MemoryReporting.h"
 
 #include "jscntxt.h"
 #include "jscompartment.h"
 
 #include "jit/CompileInfo.h"
+#include "jit/CompileWrappers.h"
 
 namespace js {
 namespace jit {
 
 class TempAllocator;
 
 // Possible register allocators which may be used.
 enum IonRegisterAllocator {
@@ -271,24 +272,32 @@ enum AbortReason {
 // JSContext, either of which may be nullptr, and the active compartment, which
 // will not be nullptr.
 
 class IonContext
 {
   public:
     IonContext(JSContext *cx, TempAllocator *temp);
     IonContext(ExclusiveContext *cx, TempAllocator *temp);
-    IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp);
-    IonContext(JSRuntime *rt);
+    IonContext(CompileRuntime *rt, CompileCompartment *comp, TempAllocator *temp);
+    IonContext(CompileRuntime *rt);
     ~IonContext();
 
-    JSRuntime *runtime;
+    // Running context when executing on the main thread. Not available during
+    // compilation.
     JSContext *cx;
-    JSCompartment *compartment;
+
+    // Allocator for temporary memory during compilation.
     TempAllocator *temp;
+
+    // Wrappers with information about the current runtime/compartment for use
+    // during compilation.
+    CompileRuntime *runtime;
+    CompileCompartment *compartment;
+
     int getNextAssemblerId() {
         return assemblerCount_++;
     }
   private:
     IonContext *prev_;
     int assemblerCount_;
 };
 
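
(Minimal usage sketch, assuming a live script on the main thread: callers now enter an IonContext through the wrapper types rather than the raw runtime/compartment, mirroring the BaselineJIT.cpp change in this patch.)

    JSRuntime *rt = script->runtimeFromMainThread();
    IonContext ictx(CompileRuntime::get(rt),
                    CompileCompartment::get(script->compartment()),
                    /* temp = */ nullptr);
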
--- a/js/src/jit/IonAnalysis.cpp
+++ b/js/src/jit/IonAnalysis.cpp
@@ -2104,17 +2104,17 @@ jit::AnalyzeNewScriptProperties(JSContex
     CompileInfo info(script, fun,
                      /* osrPc = */ nullptr, /* constructing = */ false,
                      DefinitePropertiesAnalysis);
 
     AutoTempAllocatorRooter root(cx, &temp);
 
     types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(temp);
     BaselineInspector inspector(script);
-    IonBuilder builder(cx, cx->compartment(), &temp, &graph, constraints,
+    IonBuilder builder(cx, CompileCompartment::get(cx->compartment()), &temp, &graph, constraints,
                        &inspector, &info, /* baselineFrame = */ nullptr);
 
     if (!builder.build()) {
         if (builder.abortReason() == AbortReason_Alloc)
             return false;
         return true;
     }
 
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -32,17 +32,17 @@
 #include "jit/CompileInfo-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 using mozilla::DebugOnly;
 using mozilla::Maybe;
 
-IonBuilder::IonBuilder(JSContext *analysisContext, JSCompartment *comp, TempAllocator *temp, MIRGraph *graph,
+IonBuilder::IonBuilder(JSContext *analysisContext, CompileCompartment *comp, TempAllocator *temp, MIRGraph *graph,
                        types::CompilerConstraintList *constraints,
                        BaselineInspector *inspector, CompileInfo *info, BaselineFrame *baselineFrame,
                        size_t inliningDepth, uint32_t loopDepth)
   : MIRGenerator(comp, temp, graph, info),
     backgroundCodegen_(nullptr),
     analysisContext(analysisContext),
     baselineFrame_(baselineFrame),
     abortReason_(AbortReason_Disable),
@@ -4962,17 +4962,17 @@ IonBuilder::jsop_call(uint32_t argc, boo
 
     // If any call targets need to be cloned, look for existing clones to use.
     // Keep track of the originals as we need to case on them for poly inline.
     bool hasClones = false;
     ObjectVector targets(alloc());
     for (uint32_t i = 0; i < originals.length(); i++) {
         JSFunction *fun = &originals[i]->as<JSFunction>();
         if (fun->hasScript() && fun->nonLazyScript()->shouldCloneAtCallsite) {
-            if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, fun, script(), pc)) {
+            if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), fun, script(), pc)) {
                 fun = clone;
                 hasClones = true;
             }
         }
         if (!targets.append(fun))
             return false;
     }
 
@@ -5023,17 +5023,17 @@ IonBuilder::testShouldDOMCall(types::Typ
 {
     if (!func->isNative() || !func->jitInfo())
         return false;
 
     // If all the DOM objects flowing through are legal with this
     // property, we can bake in a call to the bottom half of the DOM
     // accessor
     DOMInstanceClassMatchesProto instanceChecker =
-        GetDOMCallbacks(compartment->runtimeFromAnyThread())->instanceClassMatchesProto;
+        compartment->runtime()->DOMcallbacks()->instanceClassMatchesProto;
 
     const JSJitInfo *jinfo = func->jitInfo();
     if (jinfo->type != opType)
         return false;
 
     for (unsigned i = 0; i < inTypes->getObjectCount(); i++) {
         types::TypeObjectKey *curType = inTypes->getObject(i);
         if (!curType)
@@ -5935,28 +5935,28 @@ IonBuilder::maybeInsertResume()
 
 static bool
 ClassHasEffectlessLookup(const Class *clasp)
 {
     return clasp->isNative() && !clasp->ops.lookupGeneric;
 }
 
 static bool
-ClassHasResolveHook(JSCompartment *comp, const Class *clasp, PropertyName *name)
+ClassHasResolveHook(CompileCompartment *comp, const Class *clasp, PropertyName *name)
 {
     if (clasp->resolve == JS_ResolveStub)
         return false;
 
     if (clasp->resolve == (JSResolveOp)str_resolve) {
         // str_resolve only resolves integers, not names.
         return false;
     }
 
     if (clasp->resolve == (JSResolveOp)fun_resolve)
-        return FunctionHasResolveHook(comp->runtimeFromAnyThread(), name);
+        return FunctionHasResolveHook(comp->runtime()->names(), name);
 
     return true;
 }
 
 JSObject *
 IonBuilder::testSingletonProperty(JSObject *obj, PropertyName *name)
 {
     // We would like to completely no-op property/global accesses which can
@@ -6195,19 +6195,19 @@ IonBuilder::getStaticName(JSObject *stat
 
     *psucceeded = true;
 
     if (staticObject->is<GlobalObject>()) {
         // Optimize undefined, NaN, and Infinity.
         if (name == names().undefined)
             return pushConstant(UndefinedValue());
         if (name == names().NaN)
-            return pushConstant(compartment->runtimeFromAnyThread()->NaNValue);
+            return pushConstant(compartment->runtime()->NaNValue());
         if (name == names().Infinity)
-            return pushConstant(compartment->runtimeFromAnyThread()->positiveInfinityValue);
+            return pushConstant(compartment->runtime()->positiveInfinityValue());
     }
 
     types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);
     if (analysisContext)
         staticType->ensureTrackedProperty(analysisContext, NameToId(name));
 
     if (staticType->unknownProperties()) {
         *psucceeded = false;
--- a/js/src/jit/IonBuilder.h
+++ b/js/src/jit/IonBuilder.h
@@ -200,17 +200,17 @@ class IonBuilder : public MIRGenerator
         static CFGState CondSwitch(IonBuilder *builder, jsbytecode *exitpc, jsbytecode *defaultTarget);
         static CFGState Label(jsbytecode *exitpc);
         static CFGState Try(jsbytecode *exitpc, MBasicBlock *successor);
     };
 
     static int CmpSuccessors(const void *a, const void *b);
 
   public:
-    IonBuilder(JSContext *analysisContext, JSCompartment *comp, TempAllocator *temp, MIRGraph *graph,
+    IonBuilder(JSContext *analysisContext, CompileCompartment *comp, TempAllocator *temp, MIRGraph *graph,
                types::CompilerConstraintList *constraints,
                BaselineInspector *inspector, CompileInfo *info, BaselineFrame *baselineFrame,
                size_t inliningDepth = 0, uint32_t loopDepth = 0);
 
     bool build();
     bool buildInline(IonBuilder *callerBuilder, MResumePoint *callerResumePoint,
                      CallInfo &callInfo);
 
@@ -729,17 +729,17 @@ class IonBuilder : public MIRGenerator
     types::CompilerConstraintList *constraints() {
         return constraints_;
     }
 
     bool isInlineBuilder() const {
         return callerBuilder_ != nullptr;
     }
 
-    JSAtomState &names() { return compartment->runtimeFromAnyThread()->atomState; }
+    const JSAtomState &names() { return compartment->runtime()->names(); }
 
   private:
     bool init();
 
     JSContext *analysisContext;
     BaselineFrame *baselineFrame_;
     AbortReason abortReason_;
     TypeRepresentationSetHash *reprSetHash_;
--- a/js/src/jit/IonMacroAssembler.cpp
+++ b/js/src/jit/IonMacroAssembler.cpp
@@ -631,59 +631,57 @@ MacroAssembler::clampDoubleToUint8(Float
 void
 MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Label *fail,
                            gc::InitialHeap initialHeap /* = gc::DefaultHeap */)
 {
     // Inlined equivalent of js::gc::NewGCThing() without failure case handling.
 
     int thingSize = int(gc::Arena::thingSize(allocKind));
 
-    Zone *zone = GetIonContext()->compartment->zone();
-
 #ifdef JS_GC_ZEAL
     // Don't execute the inline path if gcZeal is active.
     branch32(Assembler::NotEqual,
-             AbsoluteAddress(&GetIonContext()->runtime->gcZeal_), Imm32(0),
+             AbsoluteAddress(GetIonContext()->runtime->addressOfGCZeal()), Imm32(0),
              fail);
 #endif
 
     // Don't execute the inline path if the compartment has an object metadata callback,
     // as the metadata to use for the object may vary between executions of the op.
     if (GetIonContext()->compartment->hasObjectMetadataCallback())
         jump(fail);
 
 #ifdef JSGC_GENERATIONAL
-    Nursery &nursery = GetIonContext()->runtime->gcNursery;
+    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
     if (nursery.isEnabled() &&
         allocKind <= gc::FINALIZE_OBJECT_LAST &&
         initialHeap != gc::TenuredHeap)
     {
         // Inline Nursery::allocate. No explicit check for nursery.isEnabled()
         // is needed, as the comparison with the nursery's end will always fail
         // in such cases.
         loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
         addPtr(Imm32(thingSize), result);
         branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), result, fail);
         storePtr(result, AbsoluteAddress(nursery.addressOfPosition()));
         subPtr(Imm32(thingSize), result);
         return;
     }
 #endif // JSGC_GENERATIONAL
 
+    CompileZone *zone = GetIonContext()->compartment->zone();
+
     // Inline FreeSpan::allocate.
     // There is always exactly one FreeSpan per allocKind per JSCompartment.
     // If a FreeSpan is replaced, its members are updated in the freeLists table,
     // which the code below always re-reads.
-    gc::FreeSpan *list = const_cast<gc::FreeSpan *>
-                         (zone->allocator.arenas.getFreeList(allocKind));
-    loadPtr(AbsoluteAddress(&list->first), result);
-    branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result, fail);
+    loadPtr(AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)), result);
+    branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(zone->addressOfFreeListLast(allocKind)), result, fail);
 
     addPtr(Imm32(thingSize), result);
-    storePtr(result, AbsoluteAddress(&list->first));
+    storePtr(result, AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)));
     subPtr(Imm32(thingSize), result);
 }
 
 void
 MacroAssembler::newGCThing(const Register &result, JSObject *templateObject, Label *fail)
 {
     gc::AllocKind allocKind = templateObject->tenuredGetAllocKind();
     JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
@@ -865,17 +863,17 @@ MacroAssembler::compareStrings(JSOp op, 
 
     bind(&done);
 }
 
 void
 MacroAssembler::checkInterruptFlagsPar(const Register &tempReg,
                                             Label *fail)
 {
-    movePtr(ImmPtr(&GetIonContext()->runtime->interrupt), tempReg);
+    movePtr(ImmPtr(GetIonContext()->runtime->addressOfInterrupt()), tempReg);
     load32(Address(tempReg, 0), tempReg);
     branchTest32(Assembler::NonZero, tempReg, tempReg, fail);
 }
 
 static void
 ReportOverRecursed(JSContext *cx)
 {
     js_ReportOverRecursed(cx);
--- a/js/src/jit/IonMacroAssembler.h
+++ b/js/src/jit/IonMacroAssembler.h
@@ -368,20 +368,20 @@ class MacroAssembler : public MacroAssem
     }
 
     void loadStringLength(Register str, Register dest) {
         loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), dest);
         rshiftPtr(Imm32(JSString::LENGTH_SHIFT), dest);
     }
 
     void loadJSContext(const Register &dest) {
-        loadPtr(AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionJSContext), dest);
+        loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfJSContext()), dest);
     }
     void loadJitActivation(const Register &dest) {
-        loadPtr(AbsoluteAddress(GetIonContext()->runtime->mainThread.addressOfActivation()), dest);
+        loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfActivation()), dest);
     }
 
     template<typename T>
     void loadTypedOrValue(const T &src, TypedOrValueRegister dest) {
         if (dest.hasValue())
             loadValue(src, dest.valueReg());
         else
             loadUnboxedValue(src, dest.type(), dest.typedReg());
@@ -622,18 +622,18 @@ class MacroAssembler : public MacroAssem
         if (key.isRegister())
             branch32(cond, length, key.reg(), label);
         else
             branch32(cond, length, Imm32(key.constant()), label);
     }
 
     void branchTestNeedsBarrier(Condition cond, const Register &scratch, Label *label) {
         JS_ASSERT(cond == Zero || cond == NonZero);
-        JS::Zone *zone = GetIonContext()->compartment->zone();
-        movePtr(ImmPtr(zone->AddressOfNeedsBarrier()), scratch);
+        CompileZone *zone = GetIonContext()->compartment->zone();
+        movePtr(ImmPtr(zone->addressOfNeedsBarrier()), scratch);
         Address needsBarrierAddr(scratch, 0);
         branchTest32(cond, needsBarrierAddr, Imm32(0x1), label);
     }
 
     template <typename T>
     void callPreBarrier(const T &address, MIRType type) {
         JS_ASSERT(type == MIRType_Value ||
                   type == MIRType_String ||
@@ -642,17 +642,17 @@ class MacroAssembler : public MacroAssem
         Label done;
 
         if (type == MIRType_Value)
             branchTestGCThing(Assembler::NotEqual, address, &done);
 
         Push(PreBarrierReg);
         computeEffectiveAddress(address, PreBarrierReg);
 
-        JitRuntime *rt = GetIonContext()->runtime->jitRuntime();
+        const JitRuntime *rt = GetIonContext()->runtime->jitRuntime();
         IonCode *preBarrier = (type == MIRType_Shape)
                               ? rt->shapePreBarrier()
                               : rt->valuePreBarrier();
 
         call(preBarrier);
         Pop(PreBarrierReg);
 
         bind(&done);
--- a/js/src/jit/IonSpewer.cpp
+++ b/js/src/jit/IonSpewer.cpp
@@ -76,17 +76,17 @@ jit::EnableIonDebugLogging()
 {
     EnableChannel(IonSpew_Logs);
     ionspewer.init();
 }
 
 void
 jit::IonSpewNewFunction(MIRGraph *graph, HandleScript func)
 {
-    if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime)) {
+    if (GetIonContext()->runtime->onMainThread()) {
         ionspewer.beginFunction(graph, func);
         return;
     }
 
     if (!IonSpewEnabled(IonSpew_Logs))
         return;
 
     // Ionspewer isn't threads-safe. Therefore logging is disabled for
@@ -97,31 +97,31 @@ jit::IonSpewNewFunction(MIRGraph *graph,
     } else {
         IonSpew(IonSpew_Logs, "Can't log asm.js compilation. (Compiled on background thread.)");
     }
 }
 
 void
 jit::IonSpewPass(const char *pass)
 {
-    if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
+    if (GetIonContext()->runtime->onMainThread())
         ionspewer.spewPass(pass);
 }
 
 void
 jit::IonSpewPass(const char *pass, LinearScanAllocator *ra)
 {
-    if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
+    if (GetIonContext()->runtime->onMainThread())
         ionspewer.spewPass(pass, ra);
 }
 
 void
 jit::IonSpewEndFunction()
 {
-    if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
+    if (GetIonContext()->runtime->onMainThread())
         ionspewer.endFunction();
 }
 
 
 IonSpewer::~IonSpewer()
 {
     if (!inited_)
         return;
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -270,32 +270,32 @@ class JitRuntime
     };
 
     void ensureIonCodeProtected(JSRuntime *rt);
     void ensureIonCodeAccessible(JSRuntime *rt);
     void patchIonBackedges(JSRuntime *rt, BackedgeTarget target);
 
     bool handleAccessViolation(JSRuntime *rt, void *faultingAddress);
 
-    IonCode *getVMWrapper(const VMFunction &f);
+    IonCode *getVMWrapper(const VMFunction &f) const;
     IonCode *debugTrapHandler(JSContext *cx);
 
     IonCode *getGenericBailoutHandler() const {
         return bailoutHandler_;
     }
 
     IonCode *getExceptionTail() const {
         return exceptionTail_;
     }
 
     IonCode *getBailoutTail() const {
         return bailoutTail_;
     }
 
-    IonCode *getBailoutTable(const FrameSizeClass &frameClass);
+    IonCode *getBailoutTable(const FrameSizeClass &frameClass) const;
 
     IonCode *getArgumentsRectifier(ExecutionMode mode) const {
         switch (mode) {
           case SequentialExecution: return argumentsRectifier_;
           case ParallelExecution:   return parallelArgumentsRectifier_;
           default:                  MOZ_ASSUME_UNREACHABLE("No such execution mode");
         }
     }
@@ -419,17 +419,17 @@ class JitCompartment
 
     void mark(JSTracer *trc, JSCompartment *compartment);
     void sweep(FreeOp *fop);
 
     JSC::ExecutableAllocator *execAlloc() {
         return rt->execAlloc_;
     }
 
-    IonCode *stringConcatStub(ExecutionMode mode) {
+    IonCode *stringConcatStub(ExecutionMode mode) const {
         switch (mode) {
           case SequentialExecution: return stringConcatStub_;
           case ParallelExecution:   return parallelStringConcatStub_;
           default:                  MOZ_ASSUME_UNREACHABLE("No such execution mode");
         }
     }
 
     OptimizedICStubSpace *optimizedStubSpace() {
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -3204,17 +3204,17 @@ LIRGenerator::visitCallInstanceOf(MCallI
 bool
 LIRGenerator::visitFunctionBoundary(MFunctionBoundary *ins)
 {
     LFunctionBoundary *lir = new LFunctionBoundary(temp());
     if (!add(lir, ins))
         return false;
     // If slow assertions are enabled, then this node will result in a callVM
     // out to a C++ function for the assertions, so we will need a safepoint.
-    return !GetIonContext()->runtime->spsProfiler.slowAssertionsEnabled() ||
+    return !GetIonContext()->runtime->spsProfiler().slowAssertionsEnabled() ||
            assignSafepoint(lir, ins);
 }
 
 bool
 LIRGenerator::visitIsCallable(MIsCallable *ins)
 {
     JS_ASSERT(ins->object()->type() == MIRType_Object);
     JS_ASSERT(ins->type() == MIRType_Boolean);
--- a/js/src/jit/MCallOptimize.cpp
+++ b/js/src/jit/MCallOptimize.cpp
@@ -183,17 +183,17 @@ IonBuilder::inlineMathFunction(CallInfo 
     if (callInfo.argc() != 1)
         return InliningStatus_NotInlined;
 
     if (getInlineReturnType() != MIRType_Double)
         return InliningStatus_NotInlined;
     if (!IsNumberType(callInfo.getArg(0)->type()))
         return InliningStatus_NotInlined;
 
-    MathCache *cache = compartment->runtimeFromAnyThread()->maybeGetMathCache();
+    const MathCache *cache = compartment->runtime()->maybeGetMathCache();
     if (!cache)
         return InliningStatus_NotInlined;
 
     callInfo.unwrapArgs();
 
     MMathFunction *ins = MMathFunction::New(alloc(), callInfo.getArg(0), function, cache);
     current->add(ins);
     current->push(ins);
@@ -1261,17 +1261,17 @@ IonBuilder::inlineNewParallelArray(CallI
         return InliningStatus_NotInlined;
 
     types::TemporaryTypeSet *ctorTypes = callInfo.getArg(0)->resultTypeSet();
     JSObject *targetObj = ctorTypes ? ctorTypes->getSingleton() : nullptr;
     JSFunction *target = nullptr;
     if (targetObj && targetObj->is<JSFunction>())
         target = &targetObj->as<JSFunction>();
     if (target && target->isInterpreted() && target->nonLazyScript()->shouldCloneAtCallsite) {
-        if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, target, script(), pc))
+        if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), target, script(), pc))
             target = clone;
     }
     MDefinition *ctor = makeCallsiteClone(
         target,
         callInfo.getArg(0)->toPassArg()->getArgument());
 
     // Discard the function.
     return inlineParallelArrayTail(callInfo, target, ctor,
@@ -1286,17 +1286,17 @@ IonBuilder::inlineParallelArray(CallInfo
         return InliningStatus_NotInlined;
 
     uint32_t argc = callInfo.argc();
     JSFunction *target = ParallelArrayObject::maybeGetConstructor(&script()->global(), argc);
     if (!target)
         return InliningStatus_NotInlined;
 
     JS_ASSERT(target->nonLazyScript()->shouldCloneAtCallsite);
-    if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, target, script(), pc))
+    if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), target, script(), pc))
         target = clone;
 
     MConstant *ctor = MConstant::New(alloc(), ObjectValue(*target));
     current->add(ctor);
 
     return inlineParallelArrayTail(callInfo, target, ctor, nullptr, 0,
                                    ParallelArrayObject::construct);
 }
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -1961,18 +1961,17 @@ MTypeOf::foldsTo(TempAllocator &alloc, b
             type = JSTYPE_OBJECT;
             break;
         }
         // FALL THROUGH
       default:
         return this;
     }
 
-    JSRuntime *rt = GetIonContext()->runtime;
-    return MConstant::New(alloc, StringValue(TypeName(type, rt)));
+    return MConstant::New(alloc, StringValue(TypeName(type, GetIonContext()->runtime->names())));
 }
 
 void
 MTypeOf::infer()
 {
     JS_ASSERT(inputMaybeCallableOrEmulatesUndefined());
 
     if (!MaybeEmulatesUndefined(input()) && !MaybeCallable(input()))
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -3796,39 +3796,39 @@ class MMathFunction
         Trunc,
         Cbrt,
         Floor,
         Round
     };
 
   private:
     Function function_;
-    MathCache *cache_;
-
-    MMathFunction(MDefinition *input, Function function, MathCache *cache)
+    const MathCache *cache_;
+
+    MMathFunction(MDefinition *input, Function function, const MathCache *cache)
       : MUnaryInstruction(input), function_(function), cache_(cache)
     {
         setResultType(MIRType_Double);
         setPolicyType(MIRType_Double);
         setMovable();
     }
 
   public:
     INSTRUCTION_HEADER(MathFunction)
 
     // A nullptr cache means this function will neither access nor update the cache.
     static MMathFunction *New(TempAllocator &alloc, MDefinition *input, Function function,
-                              MathCache *cache)
+                              const MathCache *cache)
     {
         return new(alloc) MMathFunction(input, function, cache);
     }
     Function function() const {
         return function_;
     }
-    MathCache *cache() const {
+    const MathCache *cache() const {
         return cache_;
     }
     TypePolicy *typePolicy() {
         return this;
     }
     bool congruentTo(MDefinition *ins) const {
         if (!ins->isMathFunction())
             return false;
--- a/js/src/jit/MIRGenerator.h
+++ b/js/src/jit/MIRGenerator.h
@@ -28,31 +28,28 @@ namespace jit {
 
 class MBasicBlock;
 class MIRGraph;
 class MStart;
 
 class MIRGenerator
 {
   public:
-    MIRGenerator(JSCompartment *compartment, TempAllocator *alloc, MIRGraph *graph, CompileInfo *info);
+    MIRGenerator(CompileCompartment *compartment, TempAllocator *alloc, MIRGraph *graph, CompileInfo *info);
 
     TempAllocator &alloc() {
         return *alloc_;
     }
     MIRGraph &graph() {
         return *graph_;
     }
     bool ensureBallast() {
         return alloc().ensureBallast();
     }
-    JitCompartment *jitCompartment() const {
-        return compartment->jitCompartment();
-    }
-    JitRuntime *jitRuntime() const {
+    const JitRuntime *jitRuntime() const {
         return GetIonContext()->runtime->jitRuntime();
     }
     CompileInfo &info() {
         return *info_;
     }
 
     template <typename T>
     T * allocate(size_t count = 1) {
@@ -64,17 +61,17 @@ class MIRGenerator
     bool abort(const char *message, ...);
     bool abortFmt(const char *message, va_list ap);
 
     bool errored() const {
         return error_;
     }
 
     bool instrumentedProfiling() {
-        return GetIonContext()->runtime->spsProfiler.enabled();
+        return GetIonContext()->runtime->spsProfiler().enabled();
     }
 
     // Whether the main thread is trying to cancel this build.
     bool shouldCancel(const char *why) {
         return cancelBuild_;
     }
     void cancel() {
         cancelBuild_ = 1;
@@ -121,17 +118,17 @@ class MIRGenerator
     bool noteGlobalAccess(unsigned offset, unsigned globalDataOffset) {
         return asmJSGlobalAccesses_.append(AsmJSGlobalAccess(offset, globalDataOffset));
     }
     const Vector<AsmJSGlobalAccess, 0, IonAllocPolicy> &globalAccesses() const {
         return asmJSGlobalAccesses_;
     }
 
   public:
-    JSCompartment *compartment;
+    CompileCompartment *compartment;
 
   protected:
     CompileInfo *info_;
     TempAllocator *alloc_;
     JSFunction *fun_;
     uint32_t nslots_;
     MIRGraph *graph_;
     bool error_;
--- a/js/src/jit/MIRGraph.cpp
+++ b/js/src/jit/MIRGraph.cpp
@@ -11,17 +11,17 @@
 #include "jit/Ion.h"
 #include "jit/IonSpewer.h"
 #include "jit/MIR.h"
 #include "jit/MIRGenerator.h"
 
 using namespace js;
 using namespace js::jit;
 
-MIRGenerator::MIRGenerator(JSCompartment *compartment,
+MIRGenerator::MIRGenerator(CompileCompartment *compartment,
                            TempAllocator *alloc, MIRGraph *graph, CompileInfo *info)
   : compartment(compartment),
     info_(info),
     alloc_(alloc),
     graph_(graph),
     error_(false),
     cancelBuild_(0),
     maxAsmJSStackArgBytes_(0),
--- a/js/src/jit/arm/CodeGenerator-arm.cpp
+++ b/js/src/jit/arm/CodeGenerator-arm.cpp
@@ -1867,17 +1867,17 @@ CodeGeneratorARM::visitImplicitThis(LImp
 
 bool
 CodeGeneratorARM::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
-    void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
+    void *interrupt = (void*)GetIonContext()->runtime->addressOfInterrupt();
     masm.load32(AbsoluteAddress(interrupt), lr);
     masm.ma_cmp(lr, Imm32(0));
     masm.ma_b(ool->entry(), Assembler::NonZero);
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -3362,17 +3362,17 @@ MacroAssemblerARMCompat::storeTypeTag(Im
     ma_add(base, Imm32(NUNBOX32_TYPE_OFFSET), base);
     ma_mov(tag, ScratchRegister);
     ma_str(ScratchRegister, DTRAddr(base, DtrRegImmShift(index, LSL, shift)));
     ma_sub(base, Imm32(NUNBOX32_TYPE_OFFSET), base);
 }
 
 void
 MacroAssemblerARMCompat::linkExitFrame() {
-    uint8_t *dest = (uint8_t*)&GetIonContext()->runtime->mainThread.ionTop;
+    uint8_t *dest = (uint8_t*)GetIonContext()->runtime->addressOfIonTop();
     movePtr(ImmPtr(dest), ScratchRegister);
     ma_str(StackPointer, Operand(ScratchRegister, 0));
 }
 
 void
 MacroAssemblerARMCompat::linkParallelExitFrame(const Register &pt)
 {
     ma_str(StackPointer, Operand(pt, offsetof(PerThreadData, ionTop)));
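
The interrupt check and exit-frame linking above no longer take the address of JSRuntime fields inline; they ask the CompileRuntime wrapper for those addresses. A minimal sketch of what the accessors would have to return, assuming the wrapper is a plain cast over the runtime (the real definitions are in jit/CompileWrappers.cpp, which this section does not include):

    // Sketch only; bodies assumed to return the same addresses the removed
    // '-' lines computed directly.
    JSRuntime *
    CompileRuntime::runtime()
    {
        return reinterpret_cast<JSRuntime *>(this);   // assumed layout
    }

    const void *
    CompileRuntime::addressOfInterrupt()
    {
        return &runtime()->interrupt;                 // was &runtime->interrupt
    }

    const void *
    CompileRuntime::addressOfIonTop()
    {
        return &runtime()->mainThread.ionTop;         // was &runtime->mainThread.ionTop
    }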
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -351,22 +351,18 @@ class Label : public LabelBase
 {
   public:
     Label()
     { }
     Label(const Label &label) : LabelBase(label)
     { }
     ~Label()
     {
-#ifdef DEBUG
-        // Note: the condition is a hack to silence this assert when OOM testing,
-        // see bug 756614.
-        if (MaybeGetIonContext() && !OffThreadIonCompilationEnabled(GetIonContext()->runtime))
-            JS_ASSERT_IF(!GetIonContext()->runtime->hadOutOfMemory, !used());
-#endif
+        if (MaybeGetIonContext())
+            JS_ASSERT_IF(!GetIonContext()->runtime->hadOutOfMemory(), !used());
     }
 };
 
 // Label's destructor asserts that if it has been used it has also been bound.
 // In the case of long-lived labels, however, failed compilation (e.g. OOM) will
 // trigger this failure innocuously. This Label silences the assertion.
 class NonAssertingLabel : public Label
 {
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -41,17 +41,17 @@ CodeGeneratorShared::CodeGeneratorShared
     gen(gen),
     graph(*graph),
     current(nullptr),
     deoptTable_(nullptr),
 #ifdef DEBUG
     pushedArgs_(0),
 #endif
     lastOsiPointOffset_(0),
-    sps_(&GetIonContext()->runtime->spsProfiler, &lastPC_),
+    sps_(&GetIonContext()->runtime->spsProfiler(), &lastPC_),
     osrEntryOffset_(0),
     skipArgCheckEntryOffset_(0),
     frameDepth_(graph->localSlotCount() * sizeof(STACK_SLOT_SIZE) +
                 graph->argumentSlotCount() * sizeof(Value))
 {
     if (!gen->compilingAsmJS())
         masm.setInstrumentation(&sps_);
 
@@ -618,17 +618,17 @@ CodeGeneratorShared::resetOsiPointRegs(L
 bool
 CodeGeneratorShared::callVM(const VMFunction &fun, LInstruction *ins, const Register *dynStack)
 {
     // Different execution modes have different sets of VM functions.
     JS_ASSERT(fun.executionMode == gen->info().executionMode());
 
     // If we're calling a function with an out parameter type of double, make
     // sure we have an FPU.
-    JS_ASSERT_IF(fun.outParam == Type_Double, GetIonContext()->runtime->jitSupportsFloatingPoint);
+    JS_ASSERT_IF(fun.outParam == Type_Double, GetIonContext()->runtime->jitSupportsFloatingPoint());
 
 #ifdef DEBUG
     if (ins->mirRaw()) {
         JS_ASSERT(ins->mirRaw()->isInstruction());
         MInstruction *mir = ins->mirRaw()->toInstruction();
         JS_ASSERT_IF(mir->isEffectful(), mir->resumePoint());
     }
 #endif
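
The remaining runtime reads in this file, and in the MIRGenerator.h and Assembler-shared.h hunks above, follow the same pattern: the direct field access becomes an accessor on the wrapper. A sketch of those accessors, again with bodies assumed to forward to the fields the old code read:

    // Sketch only; each accessor exposes the JSRuntime field named in the
    // corresponding removed line.
    SPSProfiler &
    CompileRuntime::spsProfiler()
    {
        return runtime()->spsProfiler;
    }

    bool
    CompileRuntime::jitSupportsFloatingPoint()
    {
        return runtime()->jitSupportsFloatingPoint;
    }

    bool
    CompileRuntime::hadOutOfMemory()
    {
        return runtime()->hadOutOfMemory;
    }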
--- a/js/src/jit/x64/CodeGenerator-x64.cpp
+++ b/js/src/jit/x64/CodeGenerator-x64.cpp
@@ -276,17 +276,17 @@ CodeGeneratorX64::visitImplicitThis(LImp
 bool
 CodeGeneratorX64::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
     masm.branch32(Assembler::NotEqual,
-                  AbsoluteAddress(&GetIonContext()->runtime->interrupt), Imm32(0),
+                  AbsoluteAddress(GetIonContext()->runtime->addressOfInterrupt()), Imm32(0),
                   ool->entry());
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
 CodeGeneratorX64::visitCompareB(LCompareB *lir)
 {
--- a/js/src/jit/x64/MacroAssembler-x64.h
+++ b/js/src/jit/x64/MacroAssembler-x64.h
@@ -1223,17 +1223,17 @@ class MacroAssemblerX64 : public MacroAs
         shlq(Imm32(FRAMESIZE_SHIFT), frameSizeReg);
         orq(Imm32(type), frameSizeReg);
     }
 
     // Save an exit frame (which must be aligned to the stack pointer) to
     // ThreadData::ionTop of the main thread.
     void linkExitFrame() {
         storePtr(StackPointer,
-                 AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionTop));
+                 AbsoluteAddress(GetIonContext()->runtime->addressOfIonTop()));
     }
 
     void callWithExitFrame(IonCode *target, Register dynStack) {
         addPtr(Imm32(framePushed()), dynStack);
         makeFrameDescriptor(dynStack, IonFrame_OptimizedJS);
         Push(dynStack);
         call(target);
     }
--- a/js/src/jit/x86/CodeGenerator-x86.cpp
+++ b/js/src/jit/x86/CodeGenerator-x86.cpp
@@ -266,17 +266,17 @@ CodeGeneratorX86::visitImplicitThis(LImp
 
 bool
 CodeGeneratorX86::visitInterruptCheck(LInterruptCheck *lir)
 {
     OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
     if (!ool)
         return false;
 
-    masm.cmpl(Operand(AbsoluteAddress(&GetIonContext()->runtime->interrupt)), Imm32(0));
+    masm.cmpl(Operand(AbsoluteAddress(GetIonContext()->runtime->addressOfInterrupt())), Imm32(0));
     masm.j(Assembler::NonZero, ool->entry());
     masm.bind(ool->rejoin());
     return true;
 }
 
 bool
 CodeGeneratorX86::visitCompareB(LCompareB *lir)
 {
--- a/js/src/jit/x86/MacroAssembler-x86.h
+++ b/js/src/jit/x86/MacroAssembler-x86.h
@@ -1067,17 +1067,17 @@ class MacroAssemblerX86 : public MacroAs
     void makeFrameDescriptor(Register frameSizeReg, FrameType type) {
         shll(Imm32(FRAMESIZE_SHIFT), frameSizeReg);
         orl(Imm32(type), frameSizeReg);
     }
 
     // Save an exit frame (which must be aligned to the stack pointer) to
     // ThreadData::ionTop of the main thread.
     void linkExitFrame() {
-        movl(StackPointer, Operand(AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionTop)));
+        movl(StackPointer, Operand(AbsoluteAddress(GetIonContext()->runtime->addressOfIonTop())));
     }
 
     void callWithExitFrame(IonCode *target, Register dynStack) {
         addPtr(Imm32(framePushed()), dynStack);
         makeFrameDescriptor(dynStack, IonFrame_OptimizedJS);
         Push(dynStack);
         call(target);
     }
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -1169,17 +1169,17 @@ typedef struct JSStdName {
     ClassInitializerOp init;
     size_t      atomOffset;     /* offset of atom pointer in JSAtomState */
     const Class *clasp;
 } JSStdName;
 
 static Handle<PropertyName*>
 StdNameToPropertyName(JSContext *cx, const JSStdName *stdn)
 {
-    return OFFSET_TO_NAME(cx->runtime(), stdn->atomOffset);
+    return AtomStateOffsetToName(cx->runtime()->atomState, stdn->atomOffset);
 }
 
 /*
  * Table of class initializers and their atom offsets in rt->atomState.
  * If you add a "standard" class, remember to update this table.
  */
 static const JSStdName standard_class_atoms[] = {
     {js_InitFunctionClass,              EAGER_CLASS_ATOM(Function), &JSFunction::class_},
@@ -1329,17 +1329,17 @@ JS_ResolveStandardClass(JSContext *cx, H
                                         JS_PropertyStub, JS_StrictPropertyStub,
                                         JSPROP_PERMANENT | JSPROP_READONLY);
     }
 
     /* Try for class constructors/prototypes named by well-known atoms. */
     stdnm = nullptr;
     for (i = 0; standard_class_atoms[i].init; i++) {
         JS_ASSERT(standard_class_atoms[i].clasp);
-        atom = OFFSET_TO_NAME(rt, standard_class_atoms[i].atomOffset);
+        atom = AtomStateOffsetToName(rt->atomState, standard_class_atoms[i].atomOffset);
         if (idstr == atom) {
             stdnm = &standard_class_atoms[i];
             break;
         }
     }
 
     if (!stdnm) {
         /* Try less frequently used top-level functions and constants. */
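
Both conversions above use the new AtomStateOffsetToName helper defined in vm/Runtime.h at the end of this patch; it needs only a JSAtomState reference, so the same lookup works whether the caller holds a JSRuntime or just the compile-time wrapper. A hypothetical pair of call sites (the names() accessor is the one used in the MIR.cpp hunk earlier):

    // Hypothetical illustration; both resolve the same FixedHeapPtr slot.
    HandlePropertyName viaRuntime =
        AtomStateOffsetToName(rt->atomState, NAME_OFFSET(prototype));
    HandlePropertyName viaWrapper =
        AtomStateOffsetToName(GetIonContext()->runtime->names(), NAME_OFFSET(prototype));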
--- a/js/src/jsatominlines.h
+++ b/js/src/jsatominlines.h
@@ -158,24 +158,24 @@ AtomHasher::match(const AtomStateEntry &
     if (lookup.atom)
         return lookup.atom == key;
     if (key->length() != lookup.length)
         return false;
     return mozilla::PodEqual(key->chars(), lookup.chars, lookup.length);
 }
 
 inline Handle<PropertyName*>
-TypeName(JSType type, JSRuntime *rt)
+TypeName(JSType type, const JSAtomState &names)
 {
     JS_ASSERT(type < JSTYPE_LIMIT);
     JS_STATIC_ASSERT(offsetof(JSAtomState, undefined) +
                      JSTYPE_LIMIT * sizeof(FixedHeapPtr<PropertyName>) <=
                      sizeof(JSAtomState));
     JS_STATIC_ASSERT(JSTYPE_VOID == 0);
-    return (&rt->atomState.undefined)[type];
+    return (&names.undefined)[type];
 }
 
 inline Handle<PropertyName*>
 ClassName(JSProtoKey key, JSAtomState &atomState)
 {
     JS_ASSERT(key < JSProto_LIMIT);
     JS_STATIC_ASSERT(offsetof(JSAtomState, Null) +
                      JSProto_LIMIT * sizeof(FixedHeapPtr<PropertyName>) <=
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -105,48 +105,43 @@ JSCompartment::sweepCallsiteClones()
             JSFunction *fun = e.front().value;
             if (!IsScriptMarked(&key.script) || !IsObjectMarked(&fun))
                 e.removeFront();
         }
     }
 }
 
 JSFunction *
-js::ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
+js::ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
                                     JSScript *script, jsbytecode *pc)
 {
-    JS_ASSERT(comp->zone()->types.inferenceEnabled);
     JS_ASSERT(fun->nonLazyScript()->shouldCloneAtCallsite);
     JS_ASSERT(!fun->nonLazyScript()->enclosingStaticScope());
     JS_ASSERT(types::UseNewTypeForClone(fun));
 
     /*
      * If we start allocating function objects in the nursery, then the callsite
      * clone table will need a postbarrier.
      */
     JS_ASSERT(fun->isTenured());
 
-    typedef CallsiteCloneKey Key;
-    typedef CallsiteCloneTable Table;
-
-    Table &table = comp->callsiteClones;
     if (!table.initialized())
         return nullptr;
 
-    Table::Ptr p = table.lookup(Key(fun, script, pc - script->code));
+    CallsiteCloneTable::Ptr p = table.lookup(CallsiteCloneKey(fun, script, pc - script->code));
     if (p)
         return p->value;
 
     return nullptr;
 }
 
 JSFunction *
 js::CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun, HandleScript script, jsbytecode *pc)
 {
-    if (JSFunction *clone = ExistingCloneFunctionAtCallsite(cx->compartment(), fun, script, pc))
+    if (JSFunction *clone = ExistingCloneFunctionAtCallsite(cx->compartment()->callsiteClones, fun, script, pc))
         return clone;
 
     RootedObject parent(cx, fun->environment());
     JSFunction *clone = CloneFunctionObject(cx, fun, parent);
     if (!clone)
         return nullptr;
 
     /*
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -60,17 +60,17 @@ struct CallsiteCloneKey {
 };
 
 typedef HashMap<CallsiteCloneKey,
                 ReadBarriered<JSFunction>,
                 CallsiteCloneKey,
                 SystemAllocPolicy> CallsiteCloneTable;
 
 JSFunction *
-ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
+ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
                                 JSScript *script, jsbytecode *pc);
 
 JSFunction *CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun,
                                     HandleScript script, jsbytecode *pc);
 
 typedef HashSet<JSObject *> ObjectSet;
 typedef HashSet<Shape *> ShapeSet;
 
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -175,17 +175,17 @@ fun_enumerate(JSContext *cx, HandleObjec
         return false;
 
     id = NameToId(cx->names().name);
     if (!JSObject::hasProperty(cx, obj, id, &found, 0))
         return false;
 
     for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
         const uint16_t offset = poisonPillProps[i];
-        id = NameToId(OFFSET_TO_NAME(cx->runtime(), offset));
+        id = NameToId(AtomStateOffsetToName(cx->runtime()->atomState, offset));
         if (!JSObject::hasProperty(cx, obj, id, &found, 0))
             return false;
     }
 
     return true;
 }
 
 static JSObject *
@@ -243,25 +243,25 @@ ResolveInterpretedFunctionPrototype(JSCo
             return nullptr;
         }
     }
 
     return proto;
 }
 
 bool
-js::FunctionHasResolveHook(JSRuntime *rt, PropertyName *name)
+js::FunctionHasResolveHook(const JSAtomState &atomState, PropertyName *name)
 {
-    if (name == rt->atomState.prototype || name == rt->atomState.length || name == rt->atomState.name)
+    if (name == atomState.prototype || name == atomState.length || name == atomState.name)
         return true;
 
     for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
         const uint16_t offset = poisonPillProps[i];
 
-        if (name == OFFSET_TO_NAME(rt, offset))
+        if (name == AtomStateOffsetToName(atomState, offset))
             return true;
     }
 
     return false;
 }
 
 bool
 js::fun_resolve(JSContext *cx, HandleObject obj, HandleId id, unsigned flags,
@@ -314,17 +314,17 @@ js::fun_resolve(JSContext *cx, HandleObj
         }
         objp.set(fun);
         return true;
     }
 
     for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
         const uint16_t offset = poisonPillProps[i];
 
-        if (JSID_IS_ATOM(id, OFFSET_TO_NAME(cx->runtime(), offset))) {
+        if (JSID_IS_ATOM(id, AtomStateOffsetToName(cx->runtime()->atomState, offset))) {
             JS_ASSERT(!IsInternalFunctionObject(fun));
 
             PropertyOp getter;
             StrictPropertyOp setter;
             unsigned attrs = JSPROP_PERMANENT | JSPROP_SHARED;
             if (fun->isInterpretedLazy() && !fun->getOrCreateScript(cx))
                 return false;
             if (fun->isInterpreted() ? fun->strict() : fun->isBoundFunction()) {
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -18,16 +18,18 @@
 namespace js {
 class FunctionExtended;
 
 typedef JSNative           Native;
 typedef JSParallelNative   ParallelNative;
 typedef JSThreadSafeNative ThreadSafeNative;
 }
 
+struct JSAtomState;
+
 class JSFunction : public JSObject
 {
   public:
     static const js::Class class_;
 
     enum Flags {
         INTERPRETED      = 0x0001,  /* function has a JSScript and environment. */
         NATIVE_CTOR      = 0x0002,  /* native that can be called as a constructor */
@@ -485,17 +487,17 @@ NewFunctionWithProto(ExclusiveContext *c
 
 extern JSFunction *
 DefineFunction(JSContext *cx, HandleObject obj, HandleId id, JSNative native,
                unsigned nargs, unsigned flags,
                gc::AllocKind allocKind = JSFunction::FinalizeKind,
                NewObjectKind newKind = GenericObject);
 
 bool
-FunctionHasResolveHook(JSRuntime *rt, PropertyName *name);
+FunctionHasResolveHook(const JSAtomState &atomState, PropertyName *name);
 
 extern bool
 fun_resolve(JSContext *cx, HandleObject obj, HandleId id,
             unsigned flags, MutableHandleObject objp);
 
 // ES6 9.2.5 IsConstructor
 bool IsConstructor(const Value &v);
 
--- a/js/src/jsworkers.cpp
+++ b/js/src/jsworkers.cpp
@@ -631,17 +631,17 @@ WorkerThread::handleAsmJSWorkload(Worker
     JS_ASSERT(state.canStartAsmJSCompile());
     JS_ASSERT(idle());
 
     asmData = state.asmJSWorklist.popCopy();
     bool success = false;
 
     state.unlock();
     do {
-        jit::IonContext icx(runtime, asmData->mir->compartment, &asmData->mir->alloc());
+        jit::IonContext icx(jit::CompileRuntime::get(runtime), asmData->mir->compartment, &asmData->mir->alloc());
 
         int64_t before = PRMJ_Now();
 
         if (!OptimizeMIR(asmData->mir))
             break;
 
         asmData->lir = GenerateLIR(asmData->mir);
         if (!asmData->lir)
@@ -686,17 +686,19 @@ WorkerThread::handleIonWorkload(WorkerTh
     AutoTraceLog logger(TraceLogging::getLogger(TraceLogging::ION_BACKGROUND_COMPILER),
                         TraceLogging::ION_COMPILE_START,
                         TraceLogging::ION_COMPILE_STOP,
                         ionBuilder->script());
 #endif
 
     state.unlock();
     {
-        jit::IonContext ictx(runtime, ionBuilder->script()->compartment(), &ionBuilder->alloc());
+        jit::IonContext ictx(jit::CompileRuntime::get(runtime),
+                             jit::CompileCompartment::get(ionBuilder->script()->compartment()),
+                             &ionBuilder->alloc());
         ionBuilder->setBackgroundCodegen(jit::CompileBackEnd(ionBuilder));
     }
     state.lock();
 
     FinishOffThreadIonCompile(ionBuilder);
     ionBuilder = nullptr;
 
     // Notify the main thread in case it is waiting for the compilation to finish.
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -224,16 +224,17 @@ if CONFIG['ENABLE_ION']:
         'jit/BaselineFrameInfo.cpp',
         'jit/BaselineIC.cpp',
         'jit/BaselineInspector.cpp',
         'jit/BaselineJIT.cpp',
         'jit/BitSet.cpp',
         'jit/BytecodeAnalysis.cpp',
         'jit/C1Spewer.cpp',
         'jit/CodeGenerator.cpp',
+        'jit/CompileWrappers.cpp',
         'jit/EdgeCaseAnalysis.cpp',
         'jit/EffectiveAddressAnalysis.cpp',
         'jit/Ion.cpp',
         'jit/IonAnalysis.cpp',
         'jit/IonBuilder.cpp',
         'jit/IonCaches.cpp',
         'jit/IonFrames.cpp',
         'jit/IonMacroAssembler.cpp',
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -1477,17 +1477,19 @@ ForkJoinShared::executePortion(PerThread
     ForkJoinSlice slice(perThread, threadId, numSlices_, allocator,
                         this, &records_[threadId]);
     AutoSetForkJoinSlice autoContext(&slice);
 
     Spew(SpewOps, "Up");
 
     // Make a new IonContext for the slice, which is needed if we need to
     // re-enter the VM.
-    IonContext icx(cx_->runtime(), cx_->compartment(), nullptr);
+    IonContext icx(CompileRuntime::get(cx_->runtime()),
+                   CompileCompartment::get(cx_->compartment()),
+                   nullptr);
 
     JS_ASSERT(slice.bailoutRecord->topScript == nullptr);
 
     RootedObject fun(perThread, fun_);
     JS_ASSERT(fun->is<JSFunction>());
     RootedFunction callee(perThread, &fun->as<JSFunction>());
     if (!callee->nonLazyScript()->hasParallelIonScript()) {
         // Sometimes, particularly with GCZeal, the parallel ion
--- a/js/src/vm/Interpreter-inl.h
+++ b/js/src/vm/Interpreter-inl.h
@@ -454,24 +454,24 @@ GetElementOperation(JSContext *cx, JSOp 
         return false;
     return GetObjectElementOperation(cx, op, obj, isObject, rref, res);
 }
 
 static JS_ALWAYS_INLINE JSString *
 TypeOfOperation(const Value &v, JSRuntime *rt)
 {
     JSType type = js::TypeOfValue(v);
-    return TypeName(type, rt);
+    return TypeName(type, rt->atomState);
 }
 
 static inline JSString *
 TypeOfObjectOperation(JSObject *obj, JSRuntime *rt)
 {
     JSType type = js::TypeOfObject(obj);
-    return TypeName(type, rt);
+    return TypeName(type, rt->atomState);
 }
 
 static JS_ALWAYS_INLINE bool
 InitElemOperation(JSContext *cx, HandleObject obj, HandleValue idval, HandleValue val)
 {
     JS_ASSERT(!val.isMagic(JS_ELEMENTS_HOLE));
 
     RootedId id(cx);
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -461,20 +461,25 @@ struct JSAtomState
 #define PROPERTYNAME_FIELD(idpart, id, text) js::FixedHeapPtr<js::PropertyName> id;
     FOR_EACH_COMMON_PROPERTYNAME(PROPERTYNAME_FIELD)
 #undef PROPERTYNAME_FIELD
 #define PROPERTYNAME_FIELD(name, code, init) js::FixedHeapPtr<js::PropertyName> name;
     JS_FOR_EACH_PROTOTYPE(PROPERTYNAME_FIELD)
 #undef PROPERTYNAME_FIELD
 };
 
+namespace js {
+
 #define NAME_OFFSET(name)       offsetof(JSAtomState, name)
-#define OFFSET_TO_NAME(rt,off)  (*(js::FixedHeapPtr<js::PropertyName>*)((char*)&(rt)->atomState + (off)))
 
-namespace js {
+inline HandlePropertyName
+AtomStateOffsetToName(const JSAtomState &atomState, size_t offset)
+{
+    return *(js::FixedHeapPtr<js::PropertyName>*)((char*)&atomState + offset);
+}
 
 /*
  * Encapsulates portions of the runtime/context that are tied to a
  * single active thread.  Normally, as most JS is single-threaded,
  * there is only one instance of this struct, embedded in the
  * JSRuntime as the field |mainThread|.  During Parallel JS sections,
  * however, there will be one instance per worker thread.
  */