Bug 785905 - Build Ion MIR graph off thread, r=jandem.
author Brian Hackett <bhackett1024@gmail.com>
Mon, 16 Dec 2013 10:53:02 -0800
changeset 176727 dbeea0e93b56b586792fe8265779249d22018b2a
parent 176726 3df0585a23a1622bf21bce5a2e9f9adc8ce2ccb6
child 176728 b63e9c3e67ac4fefd4e4e95ed323dc60f2aead5a
push id 3343
push user ffxbld
push date Mon, 17 Mar 2014 21:55:32 +0000
treeherder mozilla-beta@2f7d3415f79f
reviewers jandem
bugs 785905
milestone 29.0a1
Bug 785905 - Build Ion MIR graph off thread, r=jandem.
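Context for the diff below: the patch routes mutations of script and IC stub state that an off-thread Ion MIR builder may read through a runtime compilation lock (AutoLockForCompilation), and threads a JSContext into the mutating APIs (setBaselineScript, addNewStub, unlink, the note*/set* flag setters) so they can take that lock. The following is a minimal, self-contained C++ analogy of that pattern, not SpiderMonkey code: std::mutex stands in for the compilation lock, and all names here are illustrative only.

#include <mutex>
#include <vector>

struct Stub { int kind; };

// Illustrative stand-in for an IC fallback stub whose chain a helper thread
// may inspect while the main thread keeps attaching new stubs.
class FallbackStub {
    std::vector<Stub*> chain_;
  public:
    // Main-thread writer: mirrors the new addNewStub(cx, stub) signature by
    // taking the (stand-in) compilation lock around the mutation.
    void addNewStub(std::mutex &compilationLock, Stub *stub) {
        std::lock_guard<std::mutex> guard(compilationLock);
        chain_.push_back(stub);
    }

    // Off-thread reader (the MIR builder in the real patch) takes the same
    // lock, so it never observes a half-updated chain.
    size_t numStubs(std::mutex &compilationLock) const {
        std::lock_guard<std::mutex> guard(compilationLock);
        return chain_.size();
    }
};

int main() {
    std::mutex compilationLock;
    FallbackStub fallback;
    Stub s{1};
    fallback.addNewStub(compilationLock, &s);
    return fallback.numStubs(compilationLock) == 1 ? 0 : 1;
}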
js/src/frontend/BytecodeEmitter.cpp
js/src/gc/Marking.cpp
js/src/gc/RootMarking.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/BaselineInspector.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/CodeGenerator.cpp
js/src/jit/CompileInfo.h
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/Ion.cpp
js/src/jit/IonAnalysis.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonBuilder.h
js/src/jit/IonCaches.cpp
js/src/jit/IonMacroAssembler.h
js/src/jit/MIR.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/shared/CodeGenerator-x86-shared.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsinferinlines.h
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jsproxy.cpp
js/src/jsproxy.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsscriptinlines.h
js/src/jsworkers.cpp
js/src/jswrapper.cpp
js/src/vm/GlobalObject.cpp
js/src/vm/GlobalObject.h
js/src/vm/Interpreter.cpp
js/src/vm/ObjectImpl.cpp
js/src/vm/ObjectImpl.h
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/SPSProfiler.cpp
js/src/vm/SelfHosting.cpp
js/src/vm/TypedArrayObject.cpp
js/src/vm/TypedArrayObject.h
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -2799,17 +2799,23 @@ frontend::EmitFunctionScript(ExclusiveCo
     if (runOnce) {
         bce->script->setTreatAsRunOnce();
         JS_ASSERT(!bce->script->hasRunOnce());
     }
 
     /* Initialize fun->script() so that the debugger has a valid fun->script(). */
     RootedFunction fun(cx, bce->script->function());
     JS_ASSERT(fun->isInterpreted());
-    fun->setScript(bce->script);
+
+    if (fun->isInterpretedLazy()) {
+        AutoLockForCompilation lock(cx);
+        fun->setUnlazifiedScript(bce->script);
+    } else {
+        fun->setScript(bce->script);
+    }
 
     bce->tellDebuggerAboutCompiledScript(cx);
 
     return true;
 }
 
 static bool
 MaybeEmitVarDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn,
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1124,66 +1124,54 @@ ScanTypeObject(GCMarker *gcmarker, types
 {
     unsigned count = type->getPropertyCount();
     for (unsigned i = 0; i < count; i++) {
         types::Property *prop = type->getProperty(i);
         if (prop && JSID_IS_STRING(prop->id))
             PushMarkStack(gcmarker, JSID_TO_STRING(prop->id));
     }
 
-    if (TaggedProto(type->proto).isObject())
-        PushMarkStack(gcmarker, type->proto);
+    if (type->proto().isObject())
+        PushMarkStack(gcmarker, type->proto().toObject());
 
     if (type->singleton && !type->lazy())
         PushMarkStack(gcmarker, type->singleton);
 
-    if (type->addendum) {
-        switch (type->addendum->kind) {
-          case types::TypeObjectAddendum::NewScript:
-            PushMarkStack(gcmarker, type->newScript()->fun);
-            PushMarkStack(gcmarker, type->newScript()->templateObject);
-            break;
-
-          case types::TypeObjectAddendum::TypedObject:
-            PushMarkStack(gcmarker, type->typedObject()->typeRepr->ownerObject());
-            break;
-        }
+    if (type->hasNewScript()) {
+        PushMarkStack(gcmarker, type->newScript()->fun);
+        PushMarkStack(gcmarker, type->newScript()->templateObject);
+    } else if (type->hasTypedObject()) {
+        PushMarkStack(gcmarker, type->typedObject()->typeRepr->ownerObject());
     }
 
     if (type->interpretedFunction)
         PushMarkStack(gcmarker, type->interpretedFunction);
 }
 
 static void
 gc::MarkChildren(JSTracer *trc, types::TypeObject *type)
 {
     unsigned count = type->getPropertyCount();
     for (unsigned i = 0; i < count; i++) {
         types::Property *prop = type->getProperty(i);
         if (prop)
             MarkId(trc, &prop->id, "type_prop");
     }
 
-    if (TaggedProto(type->proto).isObject())
-        MarkObject(trc, &type->proto, "type_proto");
+    if (type->proto().isObject())
+        MarkObject(trc, &type->protoRaw(), "type_proto");
 
     if (type->singleton && !type->lazy())
         MarkObject(trc, &type->singleton, "type_singleton");
 
-    if (type->addendum) {
-        switch (type->addendum->kind) {
-          case types::TypeObjectAddendum::NewScript:
-            MarkObject(trc, &type->newScript()->fun, "type_new_function");
-            MarkObject(trc, &type->newScript()->templateObject, "type_new_template");
-            break;
-
-          case types::TypeObjectAddendum::TypedObject:
-            type->typedObject()->typeRepr->mark(trc);
-            break;
-        }
+    if (type->hasNewScript()) {
+        MarkObject(trc, &type->newScript()->fun, "type_new_function");
+        MarkObject(trc, &type->newScript()->templateObject, "type_new_template");
+    } else if (type->hasTypedObject()) {
+        type->typedObject()->typeRepr->mark(trc);
     }
 
     if (type->interpretedFunction)
         MarkObject(trc, &type->interpretedFunction, "type_function");
 }
 
 static void
 gc::MarkChildren(JSTracer *trc, jit::IonCode *code)
@@ -1436,17 +1424,17 @@ GCMarker::processMarkStackTop(SliceBudge
 
         types::TypeObject *type = obj->typeFromGC();
         PushMarkStack(this, type);
 
         Shape *shape = obj->lastProperty();
         PushMarkStack(this, shape);
 
         /* Call the trace hook if necessary. */
-        const Class *clasp = type->clasp;
+        const Class *clasp = type->clasp();
         if (clasp->trace) {
             JS_ASSERT_IF(runtime->gcMode() == JSGC_MODE_INCREMENTAL &&
                          runtime->gcIncrementalEnabled,
                          clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
             clasp->trace(this, obj);
         }
 
         if (!shape->isNative())
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -42,17 +42,17 @@ typedef RootedValueMap::Enum RootEnum;
 #ifdef JSGC_USE_EXACT_ROOTING
 static inline void
 MarkExactStackRoot(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
 {
     void **addr = (void **)rooter->address();
     if (IsNullTaggedPointer(*addr))
         return;
 
-    if (kind == THING_ROOT_OBJECT && *addr == Proxy::LazyProto)
+    if (kind == THING_ROOT_OBJECT && *addr == TaggedProto::LazyProto)
         return;
 
     switch (kind) {
       case THING_ROOT_OBJECT:      MarkObjectRoot(trc, (JSObject **)addr, "exact-object"); break;
       case THING_ROOT_STRING:      MarkStringRoot(trc, (JSString **)addr, "exact-string"); break;
       case THING_ROOT_SCRIPT:      MarkScriptRoot(trc, (JSScript **)addr, "exact-script"); break;
       case THING_ROOT_SHAPE:       MarkShapeRoot(trc, (Shape **)addr, "exact-shape"); break;
       case THING_ROOT_BASE_SHAPE:  MarkBaseShapeRoot(trc, (BaseShape **)addr, "exact-baseshape"); break;
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -181,20 +181,18 @@ BaselineCompiler::compile()
                                                          pcEntries.length(),
                                                          bytecodeTypeMapEntries);
     if (!baselineScript)
         return Method_Error;
 
     baselineScript->setMethod(code);
     baselineScript->setTemplateScope(templateScope);
 
-    script->setBaselineScript(baselineScript);
-
     IonSpew(IonSpew_BaselineScripts, "Created BaselineScript %p (raw %p) for %s:%d",
-            (void *) script->baselineScript(), (void *) code->raw(),
+            (void *) baselineScript, (void *) code->raw(),
             script->filename(), script->lineno());
 
 #ifdef JS_ION_PERF
     writePerfSpewerBaselineProfile(script, code);
 #endif
 
     JS_ASSERT(pcMappingIndexEntries.length() > 0);
     baselineScript->copyPCMappingIndexEntries(&pcMappingIndexEntries[0]);
@@ -249,16 +247,18 @@ BaselineCompiler::compile()
         // The last entry in the last index found, and is used to avoid binary
         // searches for the sought entry when queries are in linear order.
         bytecodeMap[script->nTypeSets()] = 0;
     }
 
     if (script->compartment()->debugMode())
         baselineScript->setDebugMode();
 
+    script->setBaselineScript(cx, baselineScript);
+
     return Method_Compiled;
 }
 
 bool
 BaselineCompiler::emitPrologue()
 {
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -125,22 +125,26 @@ ICStubIterator::operator++()
     if (!unlinked_)
         previousStub_ = currentStub_;
     currentStub_ = currentStub_->next();
     unlinked_ = false;
     return *this;
 }
 
 void
-ICStubIterator::unlink(Zone *zone)
+ICStubIterator::unlink(JSContext *cx)
 {
     JS_ASSERT(currentStub_->next() != nullptr);
     JS_ASSERT(currentStub_ != fallbackStub_);
     JS_ASSERT(!unlinked_);
-    fallbackStub_->unlinkStub(zone, previousStub_, currentStub_);
+
+    {
+        AutoLockForCompilation lock(cx);
+        fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);
+    }
 
     // Mark the current iterator position as unlinked, so operator++ works properly.
     unlinked_ = true;
 }
 
 
 void
 ICStub::markCode(JSTracer *trc, const char *name)
@@ -480,17 +484,17 @@ ICFallbackStub::unlinkStub(Zone *zone, I
 #endif
 }
 
 void
 ICFallbackStub::unlinkStubsWithKind(JSContext *cx, ICStub::Kind kind)
 {
     for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
         if (iter->kind() == kind)
-            iter.unlink(cx->zone());
+            iter.unlink(cx);
     }
 }
 
 void
 ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
 {
     if (zone->needsBarrier()) {
         // We are removing edges from monitored stubs to gcthings (IonCode).
@@ -1056,17 +1060,17 @@ DoProfilerFallback(JSContext *cx, Baseli
     IonSpew(IonSpew_BaselineIC, "  Generating Profiler_PushFunction stub for %s:%d",
             script->filename(), script->lineno());
 
     // Create a new optimized stub.
     ICProfiler_PushFunction::Compiler compiler(cx, string, script);
     ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
     if (!optStub)
         return false;
-    stub->addNewStub(optStub);
+    stub->addNewStub(cx, optStub);
 
     return true;
 }
 
 typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *);
 static const VMFunction DoProfilerFallbackInfo =
     FunctionInfo<DoProfilerFallbackFn>(DoProfilerFallback);
 
@@ -1141,18 +1145,20 @@ ICTypeMonitor_Fallback::addMonitorStubFo
                 if (existingStub->containsType(type))
                     return true;
             }
         }
 
         ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
         ICStub *stub = existingStub ? compiler.updateStub()
                                     : compiler.getStub(compiler.getStubSpace(script));
-        if (!stub)
-            return false;
+        if (!stub) {
+            js_ReportOutOfMemory(cx);
+            return false;
+        }
 
         IonSpew(IonSpew_BaselineIC, "  %s TypeMonitor stub %p for primitive type %d",
                 existingStub ? "Modified existing" : "Created new", stub, type);
 
         if (!existingStub) {
             JS_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
             addOptimizedMonitorStub(stub);
         }
@@ -1166,18 +1172,20 @@ ICTypeMonitor_Fallback::addMonitorStubFo
                 iter->toTypeMonitor_SingleObject()->object() == obj)
             {
                 return true;
             }
         }
 
         ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
         ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
-        if (!stub)
-            return false;
+        if (!stub) {
+            js_ReportOutOfMemory(cx);
+            return false;
+        }
 
         IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for singleton %p",
                 stub, obj.get());
 
         addOptimizedMonitorStub(stub);
 
     } else {
         RootedTypeObject type(cx, val.toObject().type());
@@ -1188,18 +1196,20 @@ ICTypeMonitor_Fallback::addMonitorStubFo
                 iter->toTypeMonitor_TypeObject()->type() == type)
             {
                 return true;
             }
         }
 
         ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
         ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
-        if (!stub)
-            return false;
+        if (!stub) {
+            js_ReportOutOfMemory(cx);
+            return false;
+        }
 
         IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for TypeObject %p",
                 stub, type.get());
 
         addOptimizedMonitorStub(stub);
     }
 
     bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);
@@ -1780,17 +1790,17 @@ DoCompareFallback(JSContext *cx, Baselin
     // Try to generate new stubs.
     if (lhs.isInt32() && rhs.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32) stub", js_CodeName[op]);
         ICCompare_Int32::Compiler compiler(cx, op);
         ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
         if (!int32Stub)
             return false;
 
-        stub->addNewStub(int32Stub);
+        stub->addNewStub(cx, int32Stub);
         return true;
     }
 
     if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
         return true;
 
     if (lhs.isNumber() && rhs.isNumber()) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Number, Number) stub", js_CodeName[op]);
@@ -1798,80 +1808,80 @@ DoCompareFallback(JSContext *cx, Baselin
         // Unlink int32 stubs, it's faster to always use the double stub.
         stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);
 
         ICCompare_Double::Compiler compiler(cx, op);
         ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
         if (!doubleStub)
             return false;
 
-        stub->addNewStub(doubleStub);
+        stub->addNewStub(cx, doubleStub);
         return true;
     }
 
     if ((lhs.isNumber() && rhs.isUndefined()) ||
         (lhs.isUndefined() && rhs.isNumber()))
     {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                     rhs.isUndefined() ? "Number" : "Undefined",
                     rhs.isUndefined() ? "Undefined" : "Number");
         ICCompare_NumberWithUndefined::Compiler compiler(cx, op, lhs.isUndefined());
         ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
         if (!doubleStub)
             return false;
 
-        stub->addNewStub(doubleStub);
+        stub->addNewStub(cx, doubleStub);
         return true;
     }
 
     if (lhs.isBoolean() && rhs.isBoolean()) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Boolean, Boolean) stub", js_CodeName[op]);
         ICCompare_Boolean::Compiler compiler(cx, op);
         ICStub *booleanStub = compiler.getStub(compiler.getStubSpace(script));
         if (!booleanStub)
             return false;
 
-        stub->addNewStub(booleanStub);
+        stub->addNewStub(cx, booleanStub);
         return true;
     }
 
     if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                     rhs.isInt32() ? "Boolean" : "Int32",
                     rhs.isInt32() ? "Int32" : "Boolean");
         ICCompare_Int32WithBoolean::Compiler compiler(cx, op, lhs.isInt32());
         ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
         if (!optStub)
             return false;
 
-        stub->addNewStub(optStub);
+        stub->addNewStub(cx, optStub);
         return true;
     }
 
     if (IsEqualityOp(op)) {
         if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
             IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
             ICCompare_String::Compiler compiler(cx, op);
             ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
             if (!stringStub)
                 return false;
 
-            stub->addNewStub(stringStub);
+            stub->addNewStub(cx, stringStub);
             return true;
         }
 
         if (lhs.isObject() && rhs.isObject()) {
             JS_ASSERT(!stub->hasStub(ICStub::Compare_Object));
             IonSpew(IonSpew_BaselineIC, "  Generating %s(Object, Object) stub", js_CodeName[op]);
             ICCompare_Object::Compiler compiler(cx, op);
             ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
             if (!objectStub)
                 return false;
 
-            stub->addNewStub(objectStub);
+            stub->addNewStub(cx, objectStub);
             return true;
         }
 
         if ((lhs.isObject() || lhs.isNull() || lhs.isUndefined()) &&
             (rhs.isObject() || rhs.isNull() || rhs.isUndefined()) &&
             !stub->hasStub(ICStub::Compare_ObjectWithUndefined))
         {
             IonSpew(IonSpew_BaselineIC, "  Generating %s(Obj/Null/Undef, Obj/Null/Undef) stub",
@@ -1879,17 +1889,17 @@ DoCompareFallback(JSContext *cx, Baselin
             bool lhsIsUndefined = lhs.isNull() || lhs.isUndefined();
             bool compareWithNull = lhs.isNull() || rhs.isNull();
             ICCompare_ObjectWithUndefined::Compiler compiler(cx, op,
                                                              lhsIsUndefined, compareWithNull);
             ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
             if (!objectStub)
                 return false;
 
-            stub->addNewStub(objectStub);
+            stub->addNewStub(cx, objectStub);
             return true;
         }
     }
 
     return true;
 }
 
 typedef bool (*DoCompareFallbackFn)(JSContext *, BaselineFrame *, ICCompare_Fallback *,
@@ -2085,17 +2095,17 @@ ICCompare_ObjectWithUndefined::Compiler:
         // obj !== undefined for all objects.
         masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
         EmitReturnFromIC(masm);
     } else {
         // obj != undefined only where !obj->getClass()->emulatesUndefined()
         Label emulatesUndefined;
         Register obj = masm.extractObject(objectOperand, ExtractTemp0);
         masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
-        masm.loadPtr(Address(obj, offsetof(types::TypeObject, clasp)), obj);
+        masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
         masm.branchTest32(Assembler::NonZero,
                           Address(obj, Class::offsetOfFlags()),
                           Imm32(JSCLASS_EMULATES_UNDEFINED),
                           &emulatesUndefined);
         masm.moveValue(BooleanValue(op == JSOP_NE), R0);
         EmitReturnFromIC(masm);
         masm.bind(&emulatesUndefined);
         masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
@@ -2191,60 +2201,60 @@ DoToBoolFallback(JSContext *cx, Baseline
     // Try to generate new stubs.
     if (arg.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Int32) stub.");
         ICToBool_Int32::Compiler compiler(cx);
         ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
         if (!int32Stub)
             return false;
 
-        stub->addNewStub(int32Stub);
+        stub->addNewStub(cx, int32Stub);
         return true;
     }
 
     if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
         IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Double) stub.");
         ICToBool_Double::Compiler compiler(cx);
         ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
         if (!doubleStub)
             return false;
 
-        stub->addNewStub(doubleStub);
+        stub->addNewStub(cx, doubleStub);
         return true;
     }
 
     if (arg.isString()) {
         IonSpew(IonSpew_BaselineIC, "  Generating ToBool(String) stub");
         ICToBool_String::Compiler compiler(cx);
         ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
         if (!stringStub)
             return false;
 
-        stub->addNewStub(stringStub);
+        stub->addNewStub(cx, stringStub);
         return true;
     }
 
     if (arg.isNull() || arg.isUndefined()) {
         ICToBool_NullUndefined::Compiler compiler(cx);
         ICStub *nilStub = compiler.getStub(compiler.getStubSpace(script));
         if (!nilStub)
             return false;
 
-        stub->addNewStub(nilStub);
+        stub->addNewStub(cx, nilStub);
         return true;
     }
 
     if (arg.isObject()) {
         IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Object) stub.");
         ICToBool_Object::Compiler compiler(cx);
         ICStub *objStub = compiler.getStub(compiler.getStubSpace(script));
         if (!objStub)
             return false;
 
-        stub->addNewStub(objStub);
+        stub->addNewStub(cx, objStub);
         return true;
     }
 
     return true;
 }
 
 typedef bool (*pf)(JSContext *, BaselineFrame *, ICToBool_Fallback *, HandleValue,
                    MutableHandleValue);
@@ -2526,69 +2536,69 @@ DoBinaryArithFallback(JSContext *cx, Bas
             return false;
         break;
       }
       default:
         MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
     }
 
     if (ret.isDouble())
-        stub->setSawDoubleResult();
+        stub->setSawDoubleResult(cx);
 
     // Check to see if a new stub should be generated.
     if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
-        stub->noteUnoptimizableOperands();
+        stub->noteUnoptimizableOperands(cx);
         return true;
     }
 
     // Handle string concat.
     if (op == JSOP_ADD) {
         if (lhs.isString() && rhs.isString()) {
             IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
             JS_ASSERT(ret.isString());
             ICBinaryArith_StringConcat::Compiler compiler(cx);
             ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
             if (!strcatStub)
                 return false;
-            stub->addNewStub(strcatStub);
+            stub->addNewStub(cx, strcatStub);
             return true;
         }
 
         if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
             IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                     lhs.isString() ? "String" : "Object",
                     lhs.isString() ? "Object" : "String");
             JS_ASSERT(ret.isString());
             ICBinaryArith_StringObjectConcat::Compiler compiler(cx, lhs.isString());
             ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
             if (!strcatStub)
                 return false;
-            stub->addNewStub(strcatStub);
+            stub->addNewStub(cx, strcatStub);
             return true;
         }
     }
 
     if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
          (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
         (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
          op == JSOP_BITXOR))
     {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                 lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
         ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, lhs.isBoolean(), rhs.isBoolean());
         ICStub *arithStub = compiler.getStub(compiler.getStubSpace(script));
         if (!arithStub)
             return false;
-        stub->addNewStub(arithStub);
+        stub->addNewStub(cx, arithStub);
         return true;
     }
 
     // Handle only int32 or double.
     if (!lhs.isNumber() || !rhs.isNumber()) {
-        stub->noteUnoptimizableOperands();
+        stub->noteUnoptimizableOperands(cx);
         return true;
     }
 
     JS_ASSERT(ret.isNumber());
 
     if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
         if (!cx->runtime()->jitSupportsFloatingPoint)
             return true;
@@ -2602,17 +2612,17 @@ DoBinaryArithFallback(JSContext *cx, Bas
             // Unlink int32 stubs, it's faster to always use the double stub.
             stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
             IonSpew(IonSpew_BaselineIC, "  Generating %s(Double, Double) stub", js_CodeName[op]);
 
             ICBinaryArith_Double::Compiler compiler(cx, op);
             ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
             if (!doubleStub)
                 return false;
-            stub->addNewStub(doubleStub);
+            stub->addNewStub(cx, doubleStub);
             return true;
           }
           default:
             break;
         }
     }
 
     if (lhs.isInt32() && rhs.isInt32()) {
@@ -2620,17 +2630,17 @@ DoBinaryArithFallback(JSContext *cx, Bas
         if (allowDouble)
             stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub", js_CodeName[op],
                 allowDouble ? " => Double" : "");
         ICBinaryArith_Int32::Compiler compilerInt32(cx, op, allowDouble);
         ICStub *int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(script));
         if (!int32Stub)
             return false;
-        stub->addNewStub(int32Stub);
+        stub->addNewStub(cx, int32Stub);
         return true;
     }
 
     // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
     if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
         ret.isInt32())
     {
         switch(op) {
@@ -2639,25 +2649,25 @@ DoBinaryArithFallback(JSContext *cx, Bas
           case JSOP_BITAND: {
             IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                         lhs.isDouble() ? "Double" : "Int32",
                         lhs.isDouble() ? "Int32" : "Double");
             ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, lhs.isDouble());
             ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
             if (!optStub)
                 return false;
-            stub->addNewStub(optStub);
+            stub->addNewStub(cx, optStub);
             return true;
           }
           default:
             break;
         }
     }
 
-    stub->noteUnoptimizableOperands();
+    stub->noteUnoptimizableOperands(cx);
     return true;
 }
 #if defined(_MSC_VER)
 # pragma optimize("", on)
 #endif
 
 typedef bool (*DoBinaryArithFallbackFn)(JSContext *, BaselineFrame *, ICBinaryArith_Fallback *,
                                         HandleValue, HandleValue, MutableHandleValue);
@@ -3042,31 +3052,31 @@ DoUnaryArithFallback(JSContext *cx, Base
     }
 
     if (val.isInt32() && res.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32 => Int32) stub", js_CodeName[op]);
         ICUnaryArith_Int32::Compiler compiler(cx, op);
         ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
         if (!int32Stub)
             return false;
-        stub->addNewStub(int32Stub);
+        stub->addNewStub(cx, int32Stub);
         return true;
     }
 
     if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) {
         IonSpew(IonSpew_BaselineIC, "  Generating %s(Number => Number) stub", js_CodeName[op]);
 
         // Unlink int32 stubs, the double stub handles both cases and TI specializes for both.
         stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);
 
         ICUnaryArith_Double::Compiler compiler(cx, op);
         ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
         if (!doubleStub)
             return false;
-        stub->addNewStub(doubleStub);
+        stub->addNewStub(cx, doubleStub);
         return true;
     }
 
     return true;
 }
 #if defined(_MSC_VER)
 # pragma optimize("", on)
 #endif
@@ -3624,42 +3634,42 @@ RemoveExistingGetElemNativeStubs(JSConte
                 ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();
 
                 if (holder != protoStub->holder())
                     continue;
 
                 // If the holder matches, but the holder's lastProperty doesn't match, then
                 // this stub is invalid anyway.  Unlink it.
                 if (holder->lastProperty() != protoStub->holderShape()) {
-                    iter.unlink(cx->zone());
+                    iter.unlink(cx);
                     continue;
                 }
             } else {
                 JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
                           iter->isGetElem_NativePrototypeCallScripted());
 
                 ICGetElemNativePrototypeCallStub *protoStub =
                     reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);
 
                 if (holder != protoStub->holder())
                     continue;
 
                 // If the holder matches, but the holder's lastProperty doesn't match, then
                 // this stub is invalid anyway.  Unlink it.
                 if (holder->lastProperty() != protoStub->holderShape()) {
-                    iter.unlink(cx->zone());
+                    iter.unlink(cx);
                     continue;
                 }
             }
         }
 
         // If the new stub needs atomization, and the old stub doesn't atomize, then
         // remove the old stub.
         if (needsAtomize && !getElemNativeStub->needsAtomize()) {
-            iter.unlink(cx->zone());
+            iter.unlink(cx);
             continue;
         }
 
         // Should never get here, because this means a matching stub exists, and if
         // a matching stub exists, this procedure should never have been called.
         MOZ_ASSUME_UNREACHABLE("Procedure should never have been called.");
     }
 }
@@ -3740,17 +3750,17 @@ static bool TryAttachNativeGetElemStub(J
         ICGetElemNativeStub::AccessType acctype = isFixedSlot ? ICGetElemNativeStub::FixedSlot
                                                               : ICGetElemNativeStub::DynamicSlot;
         ICGetElemNativeCompiler compiler(cx, kind, isCallElem, monitorStub, obj, holder, propName,
                                          acctype, needsAtomize, offset);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     bool getterIsScripted = false;
     if (IsCacheableGetPropCall(cx, obj, holder, shape, &getterIsScripted, /*isDOMProxy=*/false)) {
         RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
 
 #if JS_HAS_NO_SUCH_METHOD
@@ -3792,17 +3802,17 @@ static bool TryAttachNativeGetElemStub(J
                                                            ? ICGetElemNativeStub::ScriptedGetter
                                                            : ICGetElemNativeStub::NativeGetter;
         ICGetElemNativeCompiler compiler(cx, kind, monitorStub, obj, holder, propName, acctype,
                                          needsAtomize, getter, script->pcToOffset(pc), isCallElem);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     return true;
 }
 
 static bool
 TypedArrayRequiresFloatingPoint(TypedArrayObject *tarr)
@@ -3826,17 +3836,17 @@ TryAttachGetElemStub(JSContext *cx, Hand
         // NoSuchMethod handling doesn't apply to string targets.
 
         IonSpew(IonSpew_BaselineIC, "  Generating GetElem(String[Int32]) stub");
         ICGetElem_String::Compiler compiler(cx);
         ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
         if (!stringStub)
             return false;
 
-        stub->addNewStub(stringStub);
+        stub->addNewStub(cx, stringStub);
         return true;
     }
 
     if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS) && rhs.isInt32() &&
         !ArgumentsGetElemStubExists(stub, ICGetElem_Arguments::Magic))
     {
         // Any script with a CALLPROP on arguments (arguments.foo())
         // should not have optimized arguments.
@@ -3844,17 +3854,17 @@ TryAttachGetElemStub(JSContext *cx, Hand
 
         IonSpew(IonSpew_BaselineIC, "  Generating GetElem(MagicArgs[Int32]) stub");
         ICGetElem_Arguments::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                                ICGetElem_Arguments::Magic, false);
         ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
         if (!argsStub)
             return false;
 
-        stub->addNewStub(argsStub);
+        stub->addNewStub(cx, argsStub);
         return true;
     }
 
     // Otherwise, GetElem is only optimized on objects.
     if (!lhs.isObject())
         return true;
     RootedObject obj(cx, &lhs.toObject());
 
@@ -3866,32 +3876,32 @@ TryAttachGetElemStub(JSContext *cx, Hand
         if (!ArgumentsGetElemStubExists(stub, which)) {
             IonSpew(IonSpew_BaselineIC, "  Generating GetElem(ArgsObj[Int32]) stub");
             ICGetElem_Arguments::Compiler compiler(
                 cx, stub->fallbackMonitorStub()->firstMonitorStub(), which, isCallElem);
             ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
             if (!argsStub)
                 return false;
 
-            stub->addNewStub(argsStub);
+            stub->addNewStub(cx, argsStub);
             return true;
         }
     }
 
     if (obj->isNative()) {
         // Check for NativeObject[int] dense accesses.
         if (rhs.isInt32() && rhs.toInt32() >= 0) {
             IonSpew(IonSpew_BaselineIC, "  Generating GetElem(Native[Int32] dense) stub");
             ICGetElem_Dense::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                                obj->lastProperty(), isCallElem);
             ICStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
             if (!denseStub)
                 return false;
 
-            stub->addNewStub(denseStub);
+            stub->addNewStub(cx, denseStub);
             return true;
         }
 
         // Check for NativeObject[id] shape-optimizable accesses.
         if (rhs.isString()) {
             if (!TryAttachNativeGetElemStub(cx, script, pc, stub, obj, rhs))
                 return false;
         }
@@ -3915,31 +3925,31 @@ TryAttachGetElemStub(JSContext *cx, Hand
         }
 
         IonSpew(IonSpew_BaselineIC, "  Generating GetElem(TypedArray[Int32]) stub");
         ICGetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type());
         ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
         if (!typedArrayStub)
             return false;
 
-        stub->addNewStub(typedArrayStub);
+        stub->addNewStub(cx, typedArrayStub);
         return true;
     }
 
     // GetElem operations on non-native objects other than typed arrays cannot
     // be cached by either Baseline or Ion. Indicate this in the cache so that
     // Ion does not generate a cache for this op.
     if (!obj->isNative() && !obj->is<TypedArrayObject>())
-        stub->noteNonNativeAccess();
+        stub->noteNonNativeAccess(cx);
 
     // GetElem operations which could access negative indexes generally can't
     // be optimized without the potential for bailouts, as we can't statically
     // determine that an object has no properties on such indexes.
     if (rhs.isNumber() && rhs.toNumber() < 0)
-        stub->noteNegativeIndex();
+        stub->noteNegativeIndex(cx);
 
     return true;
 }
 
 static bool
 DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub, HandleValue lhs,
                   HandleValue rhs, MutableHandleValue res)
 {
@@ -4839,17 +4849,17 @@ RemoveExistingTypedArraySetElemStub(JSCo
             continue;
 
         if (obj->lastProperty() != iter->toSetElem_TypedArray()->shape())
             continue;
 
         // TypedArraySetElem stubs are only removed using this procedure if
         // being replaced with one that expects out of bounds index.
         JS_ASSERT(!iter->toSetElem_TypedArray()->expectOutOfBounds());
-        iter.unlink(cx->zone());
+        iter.unlink(cx);
         return true;
     }
     return false;
 }
 
 static bool
 CanOptimizeDenseSetElem(JSContext *cx, HandleObject obj, uint32_t index,
                         HandleShape oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
@@ -4996,31 +5006,31 @@ DoSetElemFallback(JSContext *cx, Baselin
                         obj->lastProperty(), type.get(), protoDepth);
                 ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
                 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                 if (!denseStub)
                     return false;
                 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                     return false;
 
-                stub->addNewStub(denseStub);
+                stub->addNewStub(cx, denseStub);
             } else if (!addingCase &&
                        !DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
             {
                 IonSpew(IonSpew_BaselineIC,
                         "  Generating SetElem_Dense stub (shape=%p, type=%p)",
                         obj->lastProperty(), type.get());
                 ICSetElem_Dense::Compiler compiler(cx, shape, type);
                 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                 if (!denseStub)
                     return false;
                 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                     return false;
 
-                stub->addNewStub(denseStub);
+                stub->addNewStub(cx, denseStub);
             }
         }
 
         return true;
     }
 
     if (obj->is<TypedArrayObject>() && index.isNumber() && rhs.isNumber()) {
         Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
@@ -5043,17 +5053,17 @@ DoSetElemFallback(JSContext *cx, Baselin
                     "  Generating SetElem_TypedArray stub (shape=%p, type=%u, oob=%s)",
                     tarr->lastProperty(), tarr->type(), expectOutOfBounds ? "yes" : "no");
             ICSetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type(),
                                                     expectOutOfBounds);
             ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
             if (!typedArrayStub)
                 return false;
 
-            stub->addNewStub(typedArrayStub);
+            stub->addNewStub(cx, typedArrayStub);
             return true;
         }
     }
 
     return true;
 }
 
 typedef bool (*DoSetElemFallbackFn)(JSContext *, BaselineFrame *, ICSetElem_Fallback *, Value *,
@@ -5093,23 +5103,23 @@ ICSetElem_Fallback::Compiler::generateSt
 
     masm.push(BaselineStubReg);
     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
 
     return tailCallVM(DoSetElemFallbackInfo, masm);
 }
 
 void
-BaselineScript::noteArrayWriteHole(uint32_t pcOffset)
+BaselineScript::noteArrayWriteHole(JSContext *cx, uint32_t pcOffset)
 {
     ICEntry &entry = icEntryFromPCOffset(pcOffset);
     ICFallbackStub *stub = entry.fallbackStub();
 
     if (stub->isSetElem_Fallback())
-        stub->toSetElem_Fallback()->noteArrayWriteHole();
+        stub->toSetElem_Fallback()->noteArrayWriteHole(cx);
 }
 
 //
 // SetElem_Dense
 //
 
 bool
 ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
@@ -5621,17 +5631,17 @@ TryAttachGlobalNameStub(JSContext *cx, H
 
     ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
     IonSpew(IonSpew_BaselineIC, "  Generating GetName(GlobalName) stub");
     ICGetName_Global::Compiler compiler(cx, monitorStub, global->lastProperty(), slot);
     ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
     if (!newStub)
         return false;
 
-    stub->addNewStub(newStub);
+    stub->addNewStub(cx, newStub);
     return true;
 }
 
 static bool
 TryAttachScopeNameStub(JSContext *cx, HandleScript script, ICGetName_Fallback *stub,
                        HandleObject initialScopeChain, HandlePropertyName name)
 {
     AutoShapeVector shapes(cx);
@@ -5711,17 +5721,17 @@ TryAttachScopeNameStub(JSContext *cx, Ha
       }
       default:
         return true;
     }
 
     if (!newStub)
         return false;
 
-    stub->addNewStub(newStub);
+    stub->addNewStub(cx, newStub);
     return true;
 }
 
 static bool
 DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub,
                   HandleObject scopeChain, MutableHandleValue res)
 {
     RootedScript script(cx, frame->script());
@@ -5918,17 +5928,17 @@ DoGetIntrinsicFallback(JSContext *cx, Ba
     types::TypeScript::Monitor(cx, script, pc, res);
 
     IonSpew(IonSpew_BaselineIC, "  Generating GetIntrinsic optimized stub");
     ICGetIntrinsic_Constant::Compiler compiler(cx, res);
     ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
     if (!newStub)
         return false;
 
-    stub->addNewStub(newStub);
+    stub->addNewStub(cx, newStub);
     return true;
 }
 
 typedef bool (*DoGetIntrinsicFallbackFn)(JSContext *, BaselineFrame *, ICGetIntrinsic_Fallback *,
                                          MutableHandleValue);
 static const VMFunction DoGetIntrinsicFallbackInfo =
     FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback);
 
@@ -5966,74 +5976,74 @@ TryAttachLengthStub(JSContext *cx, Handl
         JS_ASSERT(res.isInt32());
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(String.length) stub");
         ICGetProp_StringLength::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
         *attached = true;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && res.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(MagicArgs.length) stub");
         ICGetProp_ArgumentsLength::Compiler compiler(cx, ICGetProp_ArgumentsLength::Magic);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
         *attached = true;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     if (!val.isObject())
         return true;
 
     RootedObject obj(cx, &val.toObject());
 
     if (obj->is<ArrayObject>() && res.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(Array.length) stub");
         ICGetProp_ArrayLength::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
         *attached = true;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
     if (obj->is<TypedArrayObject>()) {
         JS_ASSERT(res.isInt32());
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(TypedArray.length) stub");
         ICGetProp_TypedArrayLength::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
         *attached = true;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     if (obj->is<ArgumentsObject>() && res.isInt32()) {
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(ArgsObj.length %s) stub",
                 obj->is<StrictArgumentsObject>() ? "Strict" : "Normal");
         ICGetProp_ArgumentsLength::Which which = ICGetProp_ArgumentsLength::Normal;
         if (obj->is<StrictArgumentsObject>())
             which = ICGetProp_ArgumentsLength::Strict;
         ICGetProp_ArgumentsLength::Compiler compiler(cx, which);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
         *attached = true;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     return true;
 }
 
 static bool
 UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
@@ -6106,17 +6116,17 @@ TryAttachNativeGetPropStub(JSContext *cx
                     isDOMProxy ? "DOMProxy" : "Native",
                     (obj == holder) ? "direct" : "prototype");
         ICGetPropNativeCompiler compiler(cx, kind, isCallProp, monitorStub, obj, holder,
                                          name, isFixedSlot, offset);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     bool isScripted = false;
     bool cacheableCall = IsCacheableGetPropCall(cx, obj, holder, shape, &isScripted, isDOMProxy);
 
     // Try handling scripted getters.
@@ -6137,17 +6147,17 @@ TryAttachNativeGetPropStub(JSContext *cx
                     callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());
 
         ICGetProp_CallScripted::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                   script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     // Try handling JSNative getters.
     if (cacheableCall && !isScripted) {
 #if JS_HAS_NO_SUCH_METHOD
         // It's unlikely that a getter function will be used to generate functions for calling
@@ -6184,17 +6194,17 @@ TryAttachNativeGetPropStub(JSContext *cx
             newStub = compiler.getStub(compiler.getStubSpace(script));
         } else {
             ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                     script->pcToOffset(pc));
             newStub = compiler.getStub(compiler.getStubSpace(script));
         }
         if (!newStub)
             return false;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead.
     if (isDOMProxy && domProxyShadowsResult == Shadows) {
         JS_ASSERT(obj == holder);
 #if JS_HAS_NO_SUCH_METHOD
@@ -6204,17 +6214,17 @@ TryAttachNativeGetPropStub(JSContext *cx
 
         IonSpew(IonSpew_BaselineIC, "  Generating GetProp(DOMProxyProxy) stub");
         Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
         ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name,
                                                       script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     return true;
 }
 
 static bool
@@ -6258,17 +6268,17 @@ TryAttachPrimitiveGetPropStub(JSContext 
 
     IonSpew(IonSpew_BaselineIC, "  Generating GetProp_Primitive stub");
     ICGetProp_Primitive::Compiler compiler(cx, monitorStub, primitiveType, proto,
                                            isFixedSlot, offset);
     ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
     if (!newStub)
         return false;
 
-    stub->addNewStub(newStub);
+    stub->addNewStub(cx, newStub);
     *attached = true;
     return true;
 }
 
 static bool
 DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub,
                   MutableHandleValue val, MutableHandleValue res)
 {
@@ -6344,17 +6354,17 @@ DoGetPropFallback(JSContext *cx, Baselin
     if (val.isString() || val.isNumber() || val.isBoolean()) {
         if (!TryAttachPrimitiveGetPropStub(cx, script, pc, stub, name, val, res, &attached))
             return false;
         if (attached)
             return true;
     }
 
     JS_ASSERT(!attached);
-    stub->noteUnoptimizableAccess();
+    stub->noteUnoptimizableAccess(cx);
 
     return true;
 }
 
 typedef bool (*DoGetPropFallbackFn)(JSContext *, BaselineFrame *, ICGetProp_Fallback *,
                                     MutableHandleValue, MutableHandleValue);
 static const VMFunction DoGetPropFallbackInfo =
     FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, PopValues(1));
@@ -7089,23 +7099,23 @@ ICGetProp_ArgumentsLength::Compiler::gen
     EmitReturnFromIC(masm);
 
     masm.bind(&failure);
     EmitStubGuardFailure(masm);
     return true;
 }
 
 void
-BaselineScript::noteAccessedGetter(uint32_t pcOffset)
+BaselineScript::noteAccessedGetter(JSContext *cx, uint32_t pcOffset)
 {
     ICEntry &entry = icEntryFromPCOffset(pcOffset);
     ICFallbackStub *stub = entry.fallbackStub();
 
     if (stub->isGetProp_Fallback())
-        stub->toGetProp_Fallback()->noteAccessedGetter();
+        stub->toGetProp_Fallback()->noteAccessedGetter(cx);
 }
 
 //
 // SetProp_Fallback
 //
 
 // Attach an optimized stub for a SETPROP/SETGNAME/SETNAME op.
 static bool
@@ -7136,17 +7146,17 @@ TryAttachSetPropStub(JSContext *cx, Hand
         IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.ADD) stub");
         ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, chainDepth, isFixedSlot, offset);
         ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
         if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     if (IsCacheableSetPropWriteSlot(obj, oldShape, holder, shape)) {
         bool isFixedSlot;
         uint32_t offset;
         GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);
@@ -7154,17 +7164,17 @@ TryAttachSetPropStub(JSContext *cx, Hand
         IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.PROP) stub");
         ICSetProp_Native::Compiler compiler(cx, obj, isFixedSlot, offset);
         ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
         if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     bool isScripted = false;
     bool cacheableCall = IsCacheableSetPropCall(cx, obj, holder, shape, &isScripted);
 
     // Try handling scripted setters.
@@ -7176,17 +7186,17 @@ TryAttachSetPropStub(JSContext *cx, Hand
         IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/ScriptedSetter %s:%d) stub",
                     callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());
 
         ICSetProp_CallScripted::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     // Try handling JSNative setters.
     if (cacheableCall && !isScripted) {
         RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
         JS_ASSERT(obj != holder);
@@ -7195,17 +7205,17 @@ TryAttachSetPropStub(JSContext *cx, Hand
         IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/NativeSetter %p) stub",
                     callee->native());
 
         ICSetProp_CallNative::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         *attached = true;
         return true;
     }
 
     return true;
 }
 
 static bool
@@ -7266,17 +7276,17 @@ DoSetPropFallback(JSContext *cx, Baselin
          &attached))
     {
         return false;
     }
     if (attached)
         return true;
 
     JS_ASSERT(!attached);
-    stub->noteUnoptimizableAccess();
+    stub->noteUnoptimizableAccess(cx);
 
     return true;
 }
 
 typedef bool (*DoSetPropFallbackFn)(JSContext *, BaselineFrame *, ICSetProp_Fallback *,
                                     HandleValue, HandleValue, MutableHandleValue);
 static const VMFunction DoSetPropFallbackInfo =
     FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, PopValues(2));
@@ -7770,34 +7780,34 @@ TryAttachFunApplyStub(JSContext *cx, ICC
             IonSpew(IonSpew_BaselineIC, "  Generating Call_ScriptedApplyArguments stub");
 
             ICCall_ScriptedApplyArguments::Compiler compiler(
                 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
             if (!newStub)
                 return false;
 
-            stub->addNewStub(newStub);
+            stub->addNewStub(cx, newStub);
             return true;
         }
 
         // TODO: handle FUNAPPLY for native targets.
     }
 
     if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
         if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
             IonSpew(IonSpew_BaselineIC, "  Generating Call_ScriptedApplyArray stub");
 
             ICCall_ScriptedApplyArray::Compiler compiler(
                 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
             if (!newStub)
                 return false;
 
-            stub->addNewStub(newStub);
+            stub->addNewStub(cx, newStub);
             return true;
         }
     }
     return true;
 }
 
 static bool
 GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
@@ -7928,17 +7938,17 @@ TryAttachCallStub(JSContext *cx, ICCall_
             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
             if (!newStub)
                 return false;
 
             // Before adding new stub, unlink all previous Call_Scripted.
             stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);
 
             // Add new generalized stub.
-            stub->addNewStub(newStub);
+            stub->addNewStub(cx, newStub);
             return true;
         }
 
         // Keep track of the function's |prototype| property in type
         // information, for use during Ion compilation.
         if (IsIonEnabled(cx))
             types::EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));
 
@@ -7957,17 +7967,17 @@ TryAttachCallStub(JSContext *cx, ICCall_
                 constructing ? "yes" : "no");
         ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                         calleeScript, templateObject,
                                         constructing, script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     if (fun->isNative() && (!constructing || (constructing && fun->isNativeConstructor()))) {
         // Generalized native call stubs are not here yet!
         JS_ASSERT(!stub->nativeStubsAreGeneralized());
 
         // Check for JSOP_FUNAPPLY
@@ -7994,17 +8004,17 @@ TryAttachCallStub(JSContext *cx, ICCall_
         IonSpew(IonSpew_BaselineIC, "  Generating Call_Native stub (fun=%p, cons=%s)",
                 fun.get(), constructing ? "yes" : "no");
         ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                          fun, templateObject, constructing, script->pcToOffset(pc));
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
         if (!newStub)
             return false;
 
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
         return true;
     }
 
     return true;
 }
 
 static bool
 MaybeCloneFunctionAtCallsite(JSContext *cx, MutableHandleValue callee, HandleScript script,
@@ -9118,17 +9128,17 @@ DoIteratorMoreFallback(JSContext *cx, Ba
 
     if (iterValue.toObject().is<PropertyIteratorObject>() &&
         !stub->hasStub(ICStub::IteratorMore_Native))
     {
         ICIteratorMore_Native::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
         if (!newStub)
             return false;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
     }
 
     return true;
 }
 
 typedef bool (*DoIteratorMoreFallbackFn)(JSContext *, BaselineFrame *, ICIteratorMore_Fallback *,
                                          HandleValue, MutableHandleValue);
 static const VMFunction DoIteratorMoreFallbackInfo =
@@ -9192,26 +9202,26 @@ DoIteratorNextFallback(JSContext *cx, Ba
 {
     FallbackICSpew(cx, stub, "IteratorNext");
 
     RootedObject iteratorObject(cx, &iterValue.toObject());
     if (!IteratorNext(cx, iteratorObject, res))
         return false;
 
     if (!res.isString() && !stub->hasNonStringResult())
-        stub->setHasNonStringResult();
+        stub->setHasNonStringResult(cx);
 
     if (iteratorObject->is<PropertyIteratorObject>() &&
         !stub->hasStub(ICStub::IteratorNext_Native))
     {
         ICIteratorNext_Native::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
         if (!newStub)
             return false;
-        stub->addNewStub(newStub);
+        stub->addNewStub(cx, newStub);
     }
 
     return true;
 }
 
 typedef bool (*DoIteratorNextFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNext_Fallback *,
                                          HandleValue, MutableHandleValue);
 static const VMFunction DoIteratorNextFallbackInfo =
@@ -9365,17 +9375,17 @@ DoTypeOfFallback(JSContext *cx, Baseline
     JS_ASSERT(type != JSTYPE_NULL);
     if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
         // Create a new TypeOf stub.
         IonSpew(IonSpew_BaselineIC, "  Generating TypeOf stub for JSType (%d)", (int) type);
         ICTypeOf_Typed::Compiler compiler(cx, type, string);
         ICStub *typeOfStub = compiler.getStub(compiler.getStubSpace(frame->script()));
         if (!typeOfStub)
             return false;
-        stub->addNewStub(typeOfStub);
+        stub->addNewStub(cx, typeOfStub);
     }
 
     return true;
 }
 
 typedef bool (*DoTypeOfFallbackFn)(JSContext *, BaselineFrame *frame, ICTypeOf_Fallback *,
                                    HandleValue, MutableHandleValue);
 static const VMFunction DoTypeOfFallbackInfo =
@@ -9452,17 +9462,17 @@ DoRetSubFallback(JSContext *cx, Baseline
 
     // Attach an optimized stub for this pc offset.
     IonSpew(IonSpew_BaselineIC, "  Generating RetSub stub for pc offset %u", offset);
     ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
     ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
     if (!optStub)
         return false;
 
-    stub->addNewStub(optStub);
+    stub->addNewStub(cx, optStub);
     return true;
 }
 
 typedef bool(*DoRetSubFallbackFn)(JSContext *cx, BaselineFrame *, ICRetSub_Fallback *,
                                   HandleValue, uint8_t **);
 static const VMFunction DoRetSubFallbackInfo = FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback);
 
 typedef bool (*ThrowFn)(JSContext *, HandleValue);
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -511,17 +511,17 @@ class ICStubIterator
     ICStub *operator ->() const {
         return currentStub_;
     }
 
     bool atEnd() const {
         return currentStub_ == (ICStub *) fallbackStub_;
     }
 
-    void unlink(Zone *zone);
+    void unlink(JSContext *cx);
 };
 
 //
 // Base class for all IC stubs.
 //
 class ICStub
 {
     friend class ICFallbackStub;
@@ -682,16 +682,18 @@ class ICStub
         return next_;
     }
 
     inline bool hasNext() const {
         return next_ != nullptr;
     }
 
     inline void setNext(ICStub *stub) {
+        // Note: next_ only needs to be changed under the compilation lock for
+        // non-type-monitor/update ICs.
         next_ = stub;
     }
 
     inline ICStub **addressOfNext() {
         return &next_;
     }
 
     inline IonCode *ionCode() {
@@ -828,17 +830,18 @@ class ICFallbackStub : public ICStub
     void fixupICEntry(ICEntry *icEntry) {
         JS_ASSERT(icEntry_ == nullptr);
         JS_ASSERT(lastStubPtrAddr_ == nullptr);
         icEntry_ = icEntry;
         lastStubPtrAddr_ = icEntry_->addressOfFirstStub();
     }
 
     // Add a new stub to the IC chain terminated by this fallback stub.
-    void addNewStub(ICStub *stub) {
+    void addNewStub(JSContext *cx, ICStub *stub) {
+        AutoLockForCompilation lock(cx);
         JS_ASSERT(*lastStubPtrAddr_ == this);
         JS_ASSERT(stub->next() == nullptr);
         stub->setNext(this);
         *lastStubPtrAddr_ = stub;
         lastStubPtrAddr_ = stub->addressOfNext();
         numOptimizedStubs_++;
     }
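
The addNewStub() change above is the core of this patch's locking discipline: every mutation of an IC stub chain now happens while the compilation lock is held, so an off-thread Ion builder never observes a half-linked list. As a rough illustration only, the RAII shape of that pattern looks like the sketch below; RuntimeSketch, std::mutex and the *Sketch names are stand-ins, not the actual SpiderMonkey types.

    #include <mutex>

    // Hypothetical stand-in for the runtime object that owns the compilation lock.
    struct RuntimeSketch {
        std::mutex compilationLock;
    };

    // RAII guard: lock in the constructor, unlock in the destructor, mirroring
    // the role AutoLockForCompilation plays in the patch.
    class AutoLockForCompilationSketch {
        RuntimeSketch &rt_;
      public:
        explicit AutoLockForCompilationSketch(RuntimeSketch &rt) : rt_(rt) {
            rt_.compilationLock.lock();
        }
        ~AutoLockForCompilationSketch() {
            rt_.compilationLock.unlock();
        }
    };

    struct StubSketch {
        StubSketch *next = nullptr;
    };

    // Splice a new stub onto the tail of a chain only while the lock is held.
    void addNewStubSketch(RuntimeSketch &rt, StubSketch **lastStubPtrAddr, StubSketch *stub) {
        AutoLockForCompilationSketch lock(rt);
        stub->next = *lastStubPtrAddr;   // point at the old tail (the fallback stub)
        *lastStubPtrAddr = stub;         // publish the new stub as the tail
    }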
 
@@ -2434,23 +2437,25 @@ class ICBinaryArith_Fallback : public IC
         if (!code)
             return nullptr;
         return space->allocate<ICBinaryArith_Fallback>(code);
     }
 
     bool sawDoubleResult() const {
         return extra_ & SAW_DOUBLE_RESULT_BIT;
     }
-    void setSawDoubleResult() {
+    void setSawDoubleResult(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= SAW_DOUBLE_RESULT_BIT;
     }
     bool hadUnoptimizableOperands() const {
         return extra_ & UNOPTIMIZABLE_OPERANDS_BIT;
     }
-    void noteUnoptimizableOperands() {
+    void noteUnoptimizableOperands(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= UNOPTIMIZABLE_OPERANDS_BIT;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICStubCompiler {
       protected:
         bool generateStubCode(MacroAssembler &masm);
 
@@ -2841,24 +2846,26 @@ class ICGetElem_Fallback : public ICMoni
     static const uint32_t MAX_OPTIMIZED_STUBS = 16;
 
     static inline ICGetElem_Fallback *New(ICStubSpace *space, IonCode *code) {
         if (!code)
             return nullptr;
         return space->allocate<ICGetElem_Fallback>(code);
     }
 
-    void noteNonNativeAccess() {
+    void noteNonNativeAccess(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= EXTRA_NON_NATIVE;
     }
     bool hasNonNativeAccess() const {
         return extra_ & EXTRA_NON_NATIVE;
     }
 
-    void noteNegativeIndex() {
+    void noteNegativeIndex(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= EXTRA_NEGATIVE_INDEX;
     }
     bool hasNegativeIndex() const {
         return extra_ & EXTRA_NEGATIVE_INDEX;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICStubCompiler {
@@ -3436,17 +3443,18 @@ class ICSetElem_Fallback : public ICFall
     static const uint32_t MAX_OPTIMIZED_STUBS = 8;
 
     static inline ICSetElem_Fallback *New(ICStubSpace *space, IonCode *code) {
         if (!code)
             return nullptr;
         return space->allocate<ICSetElem_Fallback>(code);
     }
 
-    void noteArrayWriteHole() {
+    void noteArrayWriteHole(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ = 1;
     }
     bool hasArrayWriteHole() const {
         return extra_;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICStubCompiler {
@@ -4011,24 +4019,26 @@ class ICGetProp_Fallback : public ICMoni
         if (!code)
             return nullptr;
         return space->allocate<ICGetProp_Fallback>(code);
     }
 
     static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
     static const size_t ACCESSED_GETTER_BIT = 1;
 
-    void noteUnoptimizableAccess() {
+    void noteUnoptimizableAccess(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
     bool hadUnoptimizableAccess() const {
         return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
 
-    void noteAccessedGetter() {
+    void noteAccessedGetter(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= (1u << ACCESSED_GETTER_BIT);
     }
     bool hasAccessedGetter() const {
         return extra_ & (1u << ACCESSED_GETTER_BIT);
     }
 
     class Compiler : public ICStubCompiler {
       protected:
@@ -4825,17 +4835,18 @@ class ICSetProp_Fallback : public ICFall
 
     static inline ICSetProp_Fallback *New(ICStubSpace *space, IonCode *code) {
         if (!code)
             return nullptr;
         return space->allocate<ICSetProp_Fallback>(code);
     }
 
     static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
-    void noteUnoptimizableAccess() {
+    void noteUnoptimizableAccess(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
     bool hadUnoptimizableAccess() const {
         return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
 
     class Compiler : public ICStubCompiler {
       protected:
@@ -5716,17 +5727,18 @@ class ICIteratorNext_Fallback : public I
 
   public:
     static inline ICIteratorNext_Fallback *New(ICStubSpace *space, IonCode *code) {
         if (!code)
             return nullptr;
         return space->allocate<ICIteratorNext_Fallback>(code);
     }
 
-    void setHasNonStringResult() {
+    void setHasNonStringResult(JSContext *cx) {
+        AutoLockForCompilation lock(cx);
         JS_ASSERT(extra_ == 0);
         extra_ = 1;
     }
     bool hasNonStringResult() const {
         return extra_;
     }
 
     class Compiler : public ICStubCompiler {
--- a/js/src/jit/BaselineInspector.cpp
+++ b/js/src/jit/BaselineInspector.cpp
@@ -13,16 +13,18 @@
 using namespace js;
 using namespace js::jit;
 
 using mozilla::DebugOnly;
 
 bool
 SetElemICInspector::sawOOBDenseWrite() const
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!icEntry_)
         return false;
 
     // Check for a SetElem_DenseAdd stub.
     for (ICStub *stub = icEntry_->firstStub(); stub; stub = stub->next()) {
         if (stub->isSetElem_DenseAdd())
             return true;
     }
@@ -33,60 +35,68 @@ SetElemICInspector::sawOOBDenseWrite() c
         return stub->toSetElem_Fallback()->hasArrayWriteHole();
 
     return false;
 }
 
 bool
 SetElemICInspector::sawOOBTypedArrayWrite() const
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!icEntry_)
         return false;
 
     // Check for SetElem_TypedArray stubs with expectOutOfBounds set.
     for (ICStub *stub = icEntry_->firstStub(); stub; stub = stub->next()) {
         if (!stub->isSetElem_TypedArray())
             continue;
         if (stub->toSetElem_TypedArray()->expectOutOfBounds())
             return true;
     }
     return false;
 }
 
 bool
 SetElemICInspector::sawDenseWrite() const
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!icEntry_)
         return false;
 
     // Check for a SetElem_DenseAdd or SetElem_Dense stub.
     for (ICStub *stub = icEntry_->firstStub(); stub; stub = stub->next()) {
         if (stub->isSetElem_DenseAdd() || stub->isSetElem_Dense())
             return true;
     }
     return false;
 }
 
 bool
 SetElemICInspector::sawTypedArrayWrite() const
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!icEntry_)
         return false;
 
     // Check for a SetElem_TypedArray stub.
     for (ICStub *stub = icEntry_->firstStub(); stub; stub = stub->next()) {
         if (stub->isSetElem_TypedArray())
             return true;
     }
     return false;
 }
 
 bool
 BaselineInspector::maybeShapesForPropertyOp(jsbytecode *pc, ShapeVector &shapes)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     // Return a list of shapes seen by the baseline IC for the current op.
     // An empty list indicates no shapes are known, or there was an uncacheable
     // access.
     JS_ASSERT(shapes.empty());
 
     if (!hasBaselineScript())
         return true;
 
@@ -134,16 +144,18 @@ BaselineInspector::maybeShapesForPropert
         shapes.clear();
 
     return true;
 }
 
 ICStub *
 BaselineInspector::monomorphicStub(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return nullptr;
 
     const ICEntry &entry = icEntryFromPC(pc);
 
     ICStub *stub = entry.firstStub();
     ICStub *next = stub->next();
 
@@ -151,16 +163,18 @@ BaselineInspector::monomorphicStub(jsbyt
         return nullptr;
 
     return stub;
 }
 
 bool
 BaselineInspector::dimorphicStub(jsbytecode *pc, ICStub **pfirst, ICStub **psecond)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
 
     ICStub *stub = entry.firstStub();
     ICStub *next = stub->next();
     ICStub *after = next ? next->next() : nullptr;
@@ -171,16 +185,18 @@ BaselineInspector::dimorphicStub(jsbytec
     *pfirst = stub;
     *psecond = next;
     return true;
 }
 
 MIRType
 BaselineInspector::expectedResultType(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     // Look at the IC entries for this op to guess what type it will produce,
     // returning MIRType_None if the type cannot be determined.
 
     ICStub *stub = monomorphicStub(pc);
     if (!stub)
         return MIRType_None;
 
     switch (stub->kind()) {
@@ -217,16 +233,18 @@ static bool
 CanUseInt32Compare(ICStub::Kind kind)
 {
     return kind == ICStub::Compare_Int32 || kind == ICStub::Compare_Int32WithBoolean;
 }
 
 MCompare::CompareType
 BaselineInspector::expectedCompareType(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     ICStub *first = monomorphicStub(pc), *second = nullptr;
     if (!first && !dimorphicStub(pc, &first, &second))
         return MCompare::Compare_Unknown;
 
     if (CanUseInt32Compare(first->kind()) && (!second || CanUseInt32Compare(second->kind()))) {
         ICCompare_Int32WithBoolean *coerce =
             first->isCompare_Int32WithBoolean()
             ? first->toCompare_Int32WithBoolean()
@@ -299,16 +317,18 @@ TryToSpecializeBinaryArithOp(ICStub **st
     JS_ASSERT(sawInt32);
     *result = MIRType_Int32;
     return true;
 }
 
 MIRType
 BaselineInspector::expectedBinaryArithSpecialization(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     MIRType result;
     ICStub *stubs[2];
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
     if (stub->isBinaryArith_Fallback() &&
         stub->toBinaryArith_Fallback()->hadUnoptimizableOperands())
     {
@@ -327,72 +347,82 @@ BaselineInspector::expectedBinaryArithSp
     }
 
     return MIRType_None;
 }
 
 bool
 BaselineInspector::hasSeenNonNativeGetElement(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
 
     if (stub->isGetElem_Fallback())
         return stub->toGetElem_Fallback()->hasNonNativeAccess();
     return false;
 }
 
 bool
 BaselineInspector::hasSeenNegativeIndexGetElement(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
 
     if (stub->isGetElem_Fallback())
         return stub->toGetElem_Fallback()->hasNegativeIndex();
     return false;
 }
 
 bool
 BaselineInspector::hasSeenAccessedGetter(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
 
     if (stub->isGetProp_Fallback())
         return stub->toGetProp_Fallback()->hasAccessedGetter();
     return false;
 }
 
 bool
 BaselineInspector::hasSeenNonStringIterNext(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     JS_ASSERT(JSOp(*pc) == JSOP_ITERNEXT);
 
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
 
     return stub->toIteratorNext_Fallback()->hasNonStringResult();
 }
 
 bool
 BaselineInspector::hasSeenDoubleResult(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return false;
 
     const ICEntry &entry = icEntryFromPC(pc);
     ICStub *stub = entry.fallbackStub();
 
     JS_ASSERT(stub->isUnaryArith_Fallback() || stub->isBinaryArith_Fallback());
 
@@ -402,16 +432,18 @@ BaselineInspector::hasSeenDoubleResult(j
         return stub->toBinaryArith_Fallback()->sawDoubleResult();
 
     return false;
 }
 
 JSObject *
 BaselineInspector::getTemplateObject(jsbytecode *pc)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return nullptr;
 
     const ICEntry &entry = icEntryFromPC(pc);
     for (ICStub *stub = entry.firstStub(); stub; stub = stub->next()) {
         switch (stub->kind()) {
           case ICStub::NewArray_Fallback:
             return stub->toNewArray_Fallback()->templateObject();
@@ -429,16 +461,18 @@ BaselineInspector::getTemplateObject(jsb
     }
 
     return nullptr;
 }
 
 JSObject *
 BaselineInspector::getTemplateObjectForNative(jsbytecode *pc, Native native)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     if (!hasBaselineScript())
         return nullptr;
 
     const ICEntry &entry = icEntryFromPC(pc);
     for (ICStub *stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isCall_Native() && stub->toCall_Native()->callee()->native() == native)
             return stub->toCall_Native()->templateObject();
     }
@@ -462,31 +496,35 @@ BaselineInspector::templateCallObject()
     JS_ASSERT(res);
 
     return &res->as<CallObject>();
 }
 
 JSObject *
 BaselineInspector::commonGetPropFunction(jsbytecode *pc, Shape **lastProperty, JSFunction **commonGetter)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     const ICEntry &entry = icEntryFromPC(pc);
     for (ICStub *stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isGetProp_CallScripted() || stub->isGetProp_CallNative()) {
             ICGetPropCallGetter *nstub = static_cast<ICGetPropCallGetter *>(stub);
             *lastProperty = nstub->holderShape();
             *commonGetter = nstub->getter();
             return nstub->holder();
         }
     }
     return nullptr;
 }
 
 JSObject *
 BaselineInspector::commonSetPropFunction(jsbytecode *pc, Shape **lastProperty, JSFunction **commonSetter)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     const ICEntry &entry = icEntryFromPC(pc);
     for (ICStub *stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isSetProp_CallScripted() || stub->isSetProp_CallNative()) {
             ICSetPropCallSetter *nstub = static_cast<ICSetPropCallSetter *>(stub);
             *lastProperty = nstub->holderShape();
             *commonSetter = nstub->setter();
             return nstub->holder();
         }
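
Every BaselineInspector and SetElemICInspector entry point above now starts with JS_ASSERT(CurrentThreadCanReadCompilationData()), documenting that IC chains may only be read by a thread that is allowed to see compilation data. The predicate's definition is not part of this hunk, so the following is only an assumed sketch of the kind of check it could perform: the reader either holds the compilation lock or is the main thread while no compilation thread is active.

    #include <atomic>
    #include <thread>

    // Hypothetical runtime state; the field names are assumptions.
    struct RuntimeStateSketch {
        std::thread::id mainThread;
        std::atomic<std::thread::id> compilationLockOwner{};  // default id means "nobody"
    };

    bool CurrentThreadCanReadCompilationDataSketch(const RuntimeStateSketch &rt) {
        std::thread::id self = std::this_thread::get_id();
        // Safe if this thread currently owns the compilation lock...
        if (rt.compilationLockOwner.load() == self)
            return true;
        // ...or if it is the main thread and no one is compiling off thread.
        return self == rt.mainThread &&
               rt.compilationLockOwner.load() == std::thread::id();
    }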
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -232,17 +232,17 @@ jit::BaselineCompile(JSContext *cx, Hand
 
     AutoFlushCache afc("BaselineJIT", cx->runtime()->jitRuntime());
     MethodStatus status = compiler.compile();
 
     JS_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript());
     JS_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript());
 
     if (status == Method_CantCompile)
-        script->setBaselineScript(BASELINE_DISABLED_SCRIPT);
+        script->setBaselineScript(cx, BASELINE_DISABLED_SCRIPT);
 
     return status;
 }
 
 static MethodStatus
 CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr)
 {
     // Limit the locals on a given script so that stack check on baseline frames        
@@ -645,17 +645,17 @@ BaselineScript::copyPCMappingIndexEntrie
 {
     for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++)
         pcMappingIndexEntry(i) = entries[i];
 }
 
 uint8_t *
 BaselineScript::nativeCodeForPC(JSScript *script, jsbytecode *pc, PCMappingSlotInfo *slotInfo)
 {
-    JS_ASSERT(script->baselineScript() == this);
+    JS_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this);
 
     uint32_t pcOffset = script->pcToOffset(pc);
 
     // Look for the first PCMappingIndexEntry with pc > the pc we are
     // interested in.
     uint32_t i = 1;
     for (; i < numPCMappingIndexEntries(); i++) {
         if (pcMappingIndexEntry(i).pcOffset > pcOffset)
@@ -887,17 +887,17 @@ jit::FinishDiscardBaselineScript(FreeOp 
 
         // Reset |active| flag so that we don't need a separate script
         // iteration to unmark them.
         script->baselineScript()->resetActive();
         return;
     }
 
     BaselineScript *baseline = script->baselineScript();
-    script->setBaselineScript(nullptr);
+    script->setBaselineScript(nullptr, nullptr);
     BaselineScript::Destroy(fop, baseline);
 }
 
 void
 jit::JitCompartment::toggleBaselineStubBarriers(bool enabled)
 {
     for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
         IonCode *code = *e.front().value().unsafeGet();
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -284,18 +284,18 @@ struct BaselineScript
 
     // Toggle debug traps (used for breakpoints and step mode) in the script.
     // If |pc| is nullptr, toggle traps for all ops in the script. Else, only
     // toggle traps at |pc|.
     void toggleDebugTraps(JSScript *script, jsbytecode *pc);
 
     void toggleSPS(bool enable);
 
-    void noteAccessedGetter(uint32_t pcOffset);
-    void noteArrayWriteHole(uint32_t pcOffset);
+    void noteAccessedGetter(JSContext *cx, uint32_t pcOffset);
+    void noteArrayWriteHole(JSContext *cx, uint32_t pcOffset);
 
     static size_t offsetOfFlags() {
         return offsetof(BaselineScript, flags_);
     }
 
     static void writeBarrierPre(Zone *zone, BaselineScript *script);
 
     uint32_t *bytecodeTypeMap() {
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -3384,24 +3384,24 @@ CodeGenerator::visitNewCallObject(LNewCa
     JSObject *templateObj = lir->mir()->templateObject();
 
     // If we have a template object, we can inline call object creation.
     OutOfLineCode *ool;
     if (lir->slots()->isRegister()) {
         ool = oolCallVM(NewCallObjectInfo, lir,
                         (ArgList(), ImmGCPtr(lir->mir()->block()->info().script()),
                                     ImmGCPtr(templateObj->lastProperty()),
-                                    ImmGCPtr(templateObj->hasLazyType() ? nullptr : templateObj->type()),
+                                    ImmGCPtr(templateObj->hasSingletonType() ? nullptr : templateObj->type()),
                                     ToRegister(lir->slots())),
                         StoreRegisterTo(obj));
     } else {
         ool = oolCallVM(NewCallObjectInfo, lir,
                         (ArgList(), ImmGCPtr(lir->mir()->block()->info().script()),
                                     ImmGCPtr(templateObj->lastProperty()),
-                                    ImmGCPtr(templateObj->hasLazyType() ? nullptr : templateObj->type()),
+                                    ImmGCPtr(templateObj->hasSingletonType() ? nullptr : templateObj->type()),
                                     ImmPtr(nullptr)),
                         StoreRegisterTo(obj));
     }
     if (!ool)
         return false;
 
     if (lir->mir()->needsSingletonType()) {
         // Objects can only be given singleton types in VM calls.
@@ -7404,39 +7404,37 @@ CodeGenerator::emitInstanceOf(LInstructi
         objReg = ToRegister(ins->toInstanceOfO()->lhs());
     }
 
     // Crawl the lhs's prototype chain in a loop to search for prototypeObject.
     // This follows the main loop of js::IsDelegate, though additionally breaks
     // out of the loop on Proxy::LazyProto.
 
     // Load the lhs's prototype.
-    masm.loadPtr(Address(objReg, JSObject::offsetOfType()), output);
-    masm.loadPtr(Address(output, offsetof(types::TypeObject, proto)), output);
+    masm.loadObjProto(objReg, output);
 
     Label testLazy;
     {
         Label loopPrototypeChain;
         masm.bind(&loopPrototypeChain);
 
         // Test for the target prototype object.
         Label notPrototypeObject;
         masm.branchPtr(Assembler::NotEqual, output, ImmGCPtr(prototypeObject), &notPrototypeObject);
         masm.mov(ImmWord(1), output);
         masm.jump(&done);
         masm.bind(&notPrototypeObject);
 
-        JS_ASSERT(uintptr_t(Proxy::LazyProto) == 1);
+        JS_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
 
         // Test for nullptr or Proxy::LazyProto
         masm.branchPtr(Assembler::BelowOrEqual, output, ImmWord(1), &testLazy);
 
         // Load the current object's prototype.
-        masm.loadPtr(Address(output, JSObject::offsetOfType()), output);
-        masm.loadPtr(Address(output, offsetof(types::TypeObject, proto)), output);
+        masm.loadObjProto(output, output);
 
         masm.jump(&loopPrototypeChain);
     }
 
     // Make a VM call if an object with a lazy proto was found on the prototype
     // chain. This currently occurs only for cross compartment wrappers, which
     // we do not expect to be compared with non-wrapper functions from this
     // compartment. Otherwise, we stopped on a nullptr prototype and the output
--- a/js/src/jit/CompileInfo.h
+++ b/js/src/jit/CompileInfo.h
@@ -40,19 +40,19 @@ CountArgSlots(JSScript *script, JSFuncti
     return StartArgSlot(script) + (fun ? fun->nargs() + 1 : 0);
 }
 
 // Contains information about the compilation source for IR being generated.
 class CompileInfo
 {
   public:
     CompileInfo(JSScript *script, JSFunction *fun, jsbytecode *osrPc, bool constructing,
-                ExecutionMode executionMode)
+                ExecutionMode executionMode, bool scriptNeedsArgsObj)
       : script_(script), fun_(fun), osrPc_(osrPc), constructing_(constructing),
-        executionMode_(executionMode)
+        executionMode_(executionMode), scriptNeedsArgsObj_(scriptNeedsArgsObj)
     {
         JS_ASSERT_IF(osrPc, JSOp(*osrPc) == JSOP_LOOPENTRY);
 
         // The function here can flow in from anywhere so look up the canonical function to ensure that
         // we do not try to embed a nursery pointer in jit-code.
         if (fun_) {
             fun_ = fun_->nonLazyScript()->function();
             JS_ASSERT(fun_->isTenured());
@@ -63,17 +63,17 @@ class CompileInfo
         nargs_ = fun ? fun->nargs() : 0;
         nlocals_ = script->nfixed();
         nstack_ = script->nslots() - script->nfixed();
         nslots_ = nimplicit_ + nargs_ + nlocals_ + nstack_;
     }
 
     CompileInfo(unsigned nlocals, ExecutionMode executionMode)
       : script_(nullptr), fun_(nullptr), osrPc_(nullptr), constructing_(false),
-        executionMode_(executionMode)
+        executionMode_(executionMode), scriptNeedsArgsObj_(false)
     {
         nimplicit_ = 0;
         nargs_ = 0;
         nlocals_ = nlocals;
         nstack_ = 1;  /* For FunctionCompiler::pushPhiInput/popPhiOutput */
         nslots_ = nlocals_ + nstack_;
     }
 
@@ -245,20 +245,20 @@ class CompileInfo
 
     bool hasArguments() const {
         return script()->argumentsHasVarBinding();
     }
     bool argumentsAliasesFormals() const {
         return script()->argumentsAliasesFormals();
     }
     bool needsArgsObj() const {
-        return script()->needsArgsObj();
+        return scriptNeedsArgsObj_;
     }
     bool argsObjAliasesFormals() const {
-        return script()->argsObjAliasesFormals();
+        return scriptNeedsArgsObj_ && !script()->strict();
     }
 
     ExecutionMode executionMode() const {
         return executionMode_;
     }
 
     bool isParallelExecution() const {
         return executionMode_ == ParallelExecution;
@@ -270,14 +270,19 @@ class CompileInfo
     unsigned nlocals_;
     unsigned nstack_;
     unsigned nslots_;
     JSScript *script_;
     JSFunction *fun_;
     jsbytecode *osrPc_;
     bool constructing_;
     ExecutionMode executionMode_;
+
+    // Whether a script needs an arguments object can change during compilation,
+    // since the arguments optimization may be marked as failed on the main
+    // thread, so cache the value here and use it throughout for consistency.
+    bool scriptNeedsArgsObj_;
 };
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_CompileInfo_h */
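
The new scriptNeedsArgsObj_ member and constructor argument snapshot script->needsArgsObj() once, on the main thread, so the off-thread builder sees a stable answer even if the arguments optimization is later marked as failed. A compact sketch of this snapshot-at-construction pattern, with hypothetical names:

    // Hypothetical script whose flag the main thread may flip at any time.
    struct ScriptSketch {
        bool needsArgsObj;
        bool strict;
    };

    class CompileInfoSketch {
        bool scriptNeedsArgsObj_;   // snapshot; never re-read from the script
        bool strict_;
      public:
        explicit CompileInfoSketch(const ScriptSketch &script)
          : scriptNeedsArgsObj_(script.needsArgsObj), strict_(script.strict)
        {}
        // Every query during one compilation sees the same answer.
        bool needsArgsObj() const { return scriptNeedsArgsObj_; }
        bool argsObjAliasesFormals() const { return scriptNeedsArgsObj_ && !strict_; }
    };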
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -120,21 +120,23 @@ CompileRuntime::NaNValue()
 }
 
 const Value &
 CompileRuntime::positiveInfinityValue()
 {
     return runtime()->positiveInfinityValue;
 }
 
+#ifdef DEBUG
 bool
 CompileRuntime::isInsideNursery(gc::Cell *cell)
 {
     return UninlinedIsInsideNursery(runtime(), cell);
 }
+#endif
 
 const DOMCallbacks *
 CompileRuntime::DOMcallbacks()
 {
     return GetDOMCallbacks(runtime());
 }
 
 const MathCache *
@@ -223,8 +225,15 @@ CompileCompartment::jitCompartment()
     return compartment()->jitCompartment();
 }
 
 bool
 CompileCompartment::hasObjectMetadataCallback()
 {
     return compartment()->hasObjectMetadataCallback();
 }
+
+AutoLockForCompilation::AutoLockForCompilation(CompileCompartment *compartment
+                                               MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
+{
+    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    init(compartment->compartment()->runtimeFromAnyThread());
+}
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -61,17 +61,19 @@ class CompileRuntime
     bool jitSupportsFloatingPoint();
     bool hadOutOfMemory();
 
     const JSAtomState &names();
     const StaticStrings &staticStrings();
     const Value &NaNValue();
     const Value &positiveInfinityValue();
 
+#ifdef DEBUG
     bool isInsideNursery(gc::Cell *cell);
+#endif
 
     // DOM callbacks must be threadsafe (and will hopefully be removed soon).
     const DOMCallbacks *DOMcallbacks();
 
     const MathCache *maybeGetMathCache();
 
 #ifdef JSGC_GENERATIONAL
     const Nursery &gcNursery();
@@ -91,16 +93,18 @@ class CompileZone
     const void *addressOfFreeListFirst(gc::AllocKind allocKind);
     const void *addressOfFreeListLast(gc::AllocKind allocKind);
 };
 
 class CompileCompartment
 {
     JSCompartment *compartment();
 
+    friend class js::AutoLockForCompilation;
+
   public:
     static CompileCompartment *get(JSCompartment *comp);
 
     CompileZone *zone();
     CompileRuntime *runtime();
 
     const void *addressOfEnumerators();
 
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -44,16 +44,18 @@
 #include "jscompartmentinlines.h"
 #include "jsgcinlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 
 using namespace js;
 using namespace js::jit;
 
+using mozilla::Maybe;
+
 // Global variables.
 IonOptions jit::js_IonOptions;
 
 // Assert that IonCode is gc::Cell aligned.
 JS_STATIC_ASSERT(sizeof(IonCode) % gc::CellSize == 0);
 
 #ifdef JS_THREADSAFE
 static bool IonTLSInitialized = false;
@@ -511,16 +513,18 @@ JitCompartment::ensureIonStubsExist(JSCo
 #endif
 
     return true;
 }
 
 void
 jit::FinishOffThreadBuilder(IonBuilder *builder)
 {
+    builder->script()->runtimeFromMainThread()->removeCompilationThread();
+
     ExecutionMode executionMode = builder->info().executionMode();
 
     // Clear the recompiling flag if it would have failed.
     if (builder->script()->hasIonScript())
         builder->script()->ionScript()->clearRecompiling();
 
     // Clean up if compilation did not succeed.
     if (CompilingOffThread(builder->script(), executionMode))
@@ -1558,16 +1562,19 @@ AttachFinishedCompilations(JSContext *cx
                 success = codegen->link(cx, builder->constraints());
             }
 
             if (!success) {
                 // Silently ignore OOM during code generation; we're at an
                 // operation callback and can't propagate failures.
                 cx->clearPendingException();
             }
+        } else {
+            if (builder->abortReason() == AbortReason_Disable)
+                SetIonScript(builder->script(), builder->info().executionMode(), ION_DISABLED_SCRIPT);
         }
 
         FinishOffThreadBuilder(builder);
     }
 
     compilations.clear();
 #endif
 }
@@ -1658,21 +1665,23 @@ IonCompile(JSContext *cx, JSScript *scri
     if (!cx->compartment()->jitCompartment()->ensureIonStubsExist(cx))
         return AbortReason_Alloc;
 
     MIRGraph *graph = alloc->new_<MIRGraph>(temp);
     if (!graph)
         return AbortReason_Alloc;
 
     CompileInfo *info = alloc->new_<CompileInfo>(script, script->function(), osrPc, constructing,
-                                                 executionMode);
+                                                 executionMode, script->needsArgsObj());
     if (!info)
         return AbortReason_Alloc;
 
-    BaselineInspector inspector(script);
+    BaselineInspector *inspector = alloc->new_<BaselineInspector>(script);
+    if (!inspector)
+        return AbortReason_Alloc;
 
     BaselineFrameInspector *baselineFrameInspector = nullptr;
     if (baselineFrame) {
         baselineFrameInspector = NewBaselineFrameInspector(temp, baselineFrame);
         if (!baselineFrameInspector)
             return AbortReason_Alloc;
     }
 
@@ -1681,48 +1690,26 @@ IonCompile(JSContext *cx, JSScript *scri
     AutoTempAllocatorRooter root(cx, temp);
     types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(*temp);
     if (!constraints)
         return AbortReason_Alloc;
 
     IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr,
                                                   CompileCompartment::get(cx->compartment()),
                                                   temp, graph, constraints,
-                                                  &inspector, info, baselineFrameInspector);
+                                                  inspector, info, baselineFrameInspector);
     if (!builder)
         return AbortReason_Alloc;
 
     JS_ASSERT(recompile == HasIonScript(builder->script(), executionMode));
     JS_ASSERT(CanIonCompile(builder->script(), executionMode));
 
     RootedScript builderScript(cx, builder->script());
     IonSpewNewFunction(graph, builderScript);
 
-    mozilla::Maybe<AutoProtectHeapForCompilation> protect;
-    if (js_IonOptions.checkThreadSafety &&
-        cx->runtime()->gcIncrementalState == gc::NO_INCREMENTAL &&
-        !cx->runtime()->profilingScripts &&
-        !cx->runtime()->spsProfiler.enabled())
-    {
-        protect.construct(cx->runtime());
-    }
-
-    bool succeeded = builder->build();
-    builder->clearForBackEnd();
-
-    if (!succeeded) {
-        if (cx->isExceptionPending()) {
-            IonSpew(IonSpew_Abort, "Builder raised exception.");
-            return AbortReason_Error;
-        }
-
-        IonSpew(IonSpew_Abort, "Builder failed to build.");
-        return builder->abortReason();
-    }
-
     // If possible, compile the script off thread.
     if (OffThreadCompilationAvailable(cx)) {
         if (recompile) {
             JS_ASSERT(executionMode == SequentialExecution);
             builderScript->ionScript()->setRecompiling();
         } else {
             SetIonScript(builder->script(), executionMode, ION_COMPILING_SCRIPT);
         }
@@ -1734,24 +1721,43 @@ IonCompile(JSContext *cx, JSScript *scri
 
         // The allocator and associated data will be destroyed after being
         // processed in the finishedOffThreadCompilations list.
         autoDelete.forget();
 
         return AbortReason_NoAbort;
     }
 
+    Maybe<AutoEnterIonCompilation> ionCompiling;
+    ionCompiling.construct();
+
+    Maybe<AutoProtectHeapForIonCompilation> protect;
+    if (js_IonOptions.checkThreadSafety &&
+        cx->runtime()->gcIncrementalState == gc::NO_INCREMENTAL &&
+        !cx->runtime()->profilingScripts &&
+        !cx->runtime()->spsProfiler.enabled())
+    {
+        protect.construct(cx->runtime());
+    }
+
+    bool succeeded = builder->build();
+    builder->clearForBackEnd();
+
+    if (!succeeded)
+        return builder->abortReason();
+
     ScopedJSDeletePtr<CodeGenerator> codegen(CompileBackEnd(builder));
     if (!codegen) {
         IonSpew(IonSpew_Abort, "Failed during back-end compilation.");
         return AbortReason_Disable;
     }
 
     if (!protect.empty())
         protect.destroy();
+    ionCompiling.destroy();
 
     bool success = codegen->link(cx, builder->constraints());
 
     IonSpewEndFunction();
 
     return success ? AbortReason_NoAbort : AbortReason_Disable;
 }
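
Note how IonCompile() above only constructs the AutoEnterIonCompilation and heap-protection guards on the main-thread fallback path, via mozilla::Maybe's construct()/destroy(), and tears them down before linking. The same deferred-RAII shape is sketched below with std::optional as a stand-in; what the guard actually marks is an assumption.

    #include <optional>

    // Hypothetical guard; its body is illustrative only.
    struct IonCompilationGuardSketch {
        IonCompilationGuardSketch()  { /* e.g. note that an Ion compilation is active */ }
        ~IonCompilationGuardSketch() { /* clear that note */ }
    };

    void ionCompileSketch(bool offThreadAvailable) {
        if (offThreadAvailable) {
            // Off-thread path: hand the builder to a helper thread; the guard
            // is never constructed on this thread.
            return;
        }

        std::optional<IonCompilationGuardSketch> guard;
        guard.emplace();      // plays the role of ionCompiling.construct()

        // ... build the MIR graph and run the back end while the guard is alive ...

        guard.reset();        // plays the role of ionCompiling.destroy()
        // ... link the generated code without the guard held ...
    }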
 
--- a/js/src/jit/IonAnalysis.cpp
+++ b/js/src/jit/IonAnalysis.cpp
@@ -217,21 +217,20 @@ IsPhiObservable(MPhi *phi, Observability
     uint32_t slot = phi->slot();
     CompileInfo &info = phi->block()->info();
     JSFunction *fun = info.fun();
 
     // If the Phi is of the |this| value, it must always be observable.
     if (fun && slot == info.thisSlot())
         return true;
 
-    // If the function is heavyweight, and the Phi is of the |scopeChain|
-    // value, and the function may need an arguments object, then make sure
-    // to preserve the scope chain, because it may be needed to construct the
-    // arguments object during bailout.
-    if (fun && fun->isHeavyweight() && info.hasArguments() && slot == info.scopeChainSlot())
+    // If the function may need an arguments object, then make sure to preserve
+    // the scope chain, because it may be needed to construct the arguments
+    // object during bailout.
+    if (fun && info.hasArguments() && slot == info.scopeChainSlot())
         return true;
 
     // If the Phi is one of the formal arguments and we are using an arguments
     // object in the function, the phi might be observable after a bailout.
     // For inlined frames this is not needed, as they are captured in the inlineResumePoint.
     if (fun && info.hasArguments()) {
         uint32_t first = info.firstArgSlot();
         if (first <= slot && slot - first < info.nargs()) {
@@ -2172,17 +2171,18 @@ jit::AnalyzeNewScriptProperties(JSContex
             return true;
     }
 
     types::TypeScript::SetThis(cx, script, types::Type::ObjectType(type));
 
     MIRGraph graph(&temp);
     CompileInfo info(script, fun,
                      /* osrPc = */ nullptr, /* constructing = */ false,
-                     DefinitePropertiesAnalysis);
+                     DefinitePropertiesAnalysis,
+                     script->needsArgsObj());
 
     AutoTempAllocatorRooter root(cx, &temp);
 
     types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(temp);
     BaselineInspector inspector(script);
     IonBuilder builder(cx, CompileCompartment::get(cx->compartment()), &temp, &graph, constraints,
                        &inspector, &info, /* baselineFrame = */ nullptr);
 
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -130,26 +130,34 @@ IonBuilder::IonBuilder(JSContext *analys
     inliningDepth_(inliningDepth),
     numLoopRestarts_(0),
     failedBoundsCheck_(info->script()->failedBoundsCheck()),
     failedShapeGuard_(info->script()->failedShapeGuard()),
     nonStringIteration_(false),
     lazyArguments_(nullptr),
     inlineCallInfo_(nullptr)
 {
-    script_.init(info->script());
+    script_ = info->script();
     pc = info->startPC();
 
+#ifdef DEBUG
+    lock();
     JS_ASSERT(script()->hasBaselineScript());
+    unlock();
+#endif
     JS_ASSERT(!!analysisContext == (info->executionMode() == DefinitePropertiesAnalysis));
 }
 
 void
 IonBuilder::clearForBackEnd()
 {
+    // The lock is only still held at this point if there was a failure while building.
+    if (!lock_.empty())
+        lock_.destroy();
+
     JS_ASSERT(!analysisContext);
     baselineFrame_ = nullptr;
 
     // The caches below allocate data from the malloc heap. Release this before
     // later phases of compilation to avoid leaks, as the top level IonBuilder
     // is not explicitly destroyed. Note that builders for inner scripts are
     // constructed on the stack and will release this memory on destruction.
     gsn.purge();
@@ -576,22 +584,26 @@ IonBuilder::pushLoop(CFGState::State ini
     state.loop.initialStopAt = stopAt;
     state.loop.loopHead = loopHead;
     return cfgStack_.append(state);
 }
 
 bool
 IonBuilder::init()
 {
+    lock();
+
     if (!types::TypeScript::FreezeTypeSets(constraints(), script(),
                                            &thisTypes, &argTypes, &typeArray))
     {
         return false;
     }
 
+    unlock();
+
     if (!analysis().init(alloc(), gsn))
         return false;
 
     return true;
 }
 
 bool
 IonBuilder::build()
@@ -689,16 +701,18 @@ IonBuilder::build()
     if (info().hasArguments() && !info().argsObjAliasesFormals()) {
         lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
         current->add(lazyArguments_);
     }
 
     if (!traverseBytecode())
         return false;
 
+    unlock();
+
     if (!maybeAddOsrTypeBarriers())
         return false;
 
     if (!processIterators())
         return false;
 
     JS_ASSERT(loopDepth_ == 0);
     abortReason_ = AbortReason_NoAbort;
@@ -846,16 +860,17 @@ IonBuilder::buildInline(IonBuilder *call
     if (script_->argumentsHasVarBinding()) {
         lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
         current->add(lazyArguments_);
     }
 
     if (!traverseBytecode())
         return false;
 
+    unlock();
     return true;
 }
 
 void
 IonBuilder::rewriteParameter(uint32_t slotIdx, MDefinition *param, int32_t argIndex)
 {
     JS_ASSERT(param->isParameter() || param->isGetArgumentsObjectArg());
 
@@ -900,16 +915,19 @@ IonBuilder::initParameters()
 {
     if (!info().fun())
         return true;
 
     // If we are doing OSR on a frame which initially executed in the
     // interpreter and didn't accumulate type information, try to use that OSR
     // frame to determine possible initial types for 'this' and parameters.
 
+    // Hold the lock for the unknownProperties() tests performed under addType.
+    lock();
+
     if (thisTypes->empty() && baselineFrame_) {
         if (!thisTypes->addType(baselineFrame_->thisType, alloc_->lifoAlloc()))
             return false;
     }
 
     MParameter *param = MParameter::New(alloc(), MParameter::THIS_SLOT, thisTypes);
     current->add(param);
     current->initSlot(info().thisSlot(), param);
@@ -923,16 +941,18 @@ IonBuilder::initParameters()
                 return false;
         }
 
         param = MParameter::New(alloc(), i, types);
         current->add(param);
         current->initSlot(info().argSlotUnchecked(i), param);
     }
 
+    unlock();
+
     return true;
 }
 
 bool
 IonBuilder::initScopeChain(MDefinition *callee)
 {
     MInstruction *scope = nullptr;
 
@@ -945,16 +965,18 @@ IonBuilder::initScopeChain(MDefinition *
 
     // The scope chain is only tracked in scripts that have NAME opcodes which
     // will try to access the scope. For other scripts, the scope instructions
     // will be held live by resume points and code will still be generated for
     // them, so just use a constant undefined value.
     if (!script()->compileAndGo())
         return abort("non-CNG global scripts are not supported");
 
+    lock();
+
     if (JSFunction *fun = info().fun()) {
         if (!callee) {
             MCallee *calleeIns = MCallee::New(alloc());
             current->add(calleeIns);
             callee = calleeIns;
         }
         scope = MFunctionEnvironment::New(alloc(), callee);
         current->add(scope);
@@ -970,16 +992,18 @@ IonBuilder::initScopeChain(MDefinition *
             scope = createCallObject(callee, scope);
             if (!scope)
                 return false;
         }
     } else {
         scope = constant(ObjectValue(script()->global()));
     }
 
+    unlock();
+
     current->setScopeChain(scope);
     return true;
 }
 
 bool
 IonBuilder::initArgumentsObject()
 {
     IonSpew(IonSpew_MIR, "%s:%d - Emitting code to initialize arguments object! block=%p",
@@ -1163,16 +1187,24 @@ IonBuilder::maybeAddOsrTypeBarriers()
 // that there is no active block.
 //
 // For normal diamond join points, we construct Phi nodes as we add
 // predecessors. For loops, care must be taken to propagate Phi nodes back
 // through uses in the loop body.
 bool
 IonBuilder::traverseBytecode()
 {
+    // Hold the compilation lock while traversing bytecode, but release and
+    // reacquire it every few opcodes so that the main thread never blocks for
+    // long when it needs to update compilation data.
+    lock();
+
+    size_t lockOpcodeCount = 0;
+    static const size_t LOCK_OPCODE_GRANULARITY = 5;
+
     for (;;) {
         JS_ASSERT(pc < info().limitPC());
 
         for (;;) {
             if (!alloc().ensureBallast())
                 return false;
 
             // Check if we've hit an expected join point or edge in the bytecode.
@@ -1237,16 +1269,22 @@ IonBuilder::traverseBytecode()
         }
 #endif
 
         // Nothing in inspectOpcode() is allowed to advance the pc.
         JSOp op = JSOp(*pc);
         if (!inspectOpcode(op))
             return false;
 
+        if (++lockOpcodeCount == LOCK_OPCODE_GRANULARITY) {
+            unlock();
+            lock();
+            lockOpcodeCount = 0;
+        }
+
 #ifdef DEBUG
         for (size_t i = 0; i < popped.length(); i++) {
             // Call instructions can discard PassArg instructions. Ignore them.
             if (popped[i]->isPassArg() && !popped[i]->hasUses())
                 continue;
 
             switch (op) {
               case JSOP_POP:
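
The lock-cycling added to traverseBytecode() above holds the compilation lock across batches of LOCK_OPCODE_GRANULARITY opcodes, briefly releasing it between batches so the main thread can update compilation data without a long stall. A self-contained sketch of that cadence follows; std::mutex and the function are stand-ins for the patch's lock()/unlock() helpers.

    #include <mutex>

    void traverseSketch(std::mutex &compilationLock, int numOps) {
        static const int LOCK_OPCODE_GRANULARITY = 5;   // same constant as the patch

        compilationLock.lock();
        int sinceRelease = 0;
        for (int op = 0; op < numOps; op++) {
            // ... inspect one opcode while the lock is held ...
            if (++sinceRelease == LOCK_OPCODE_GRANULARITY) {
                compilationLock.unlock();   // let the main thread in briefly
                compilationLock.lock();     // then resume holding the lock
                sinceRelease = 0;
            }
        }
        compilationLock.unlock();
    }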
@@ -3840,23 +3878,26 @@ IonBuilder::inlineScriptedCall(CallInfo 
             callInfo.setThis(barrier);
         }
     }
 
     // Start inlining.
     LifoAlloc *lifoAlloc = alloc_->lifoAlloc();
     CompileInfo *info = lifoAlloc->new_<CompileInfo>(calleeScript, target,
                                                      (jsbytecode *)nullptr, callInfo.constructing(),
-                                                     this->info().executionMode());
+                                                     this->info().executionMode(),
+                                                     /* needsArgsObj = */ false);
     if (!info)
         return false;
 
     MIRGraphReturns returns(alloc());
     AutoAccumulateReturns aar(graph(), returns);
 
+    unlock();
+
     // Build the graph.
     IonBuilder inlineBuilder(analysisContext, compartment,
                              &alloc(), &graph(), constraints(), &inspector, info, nullptr,
                              inliningDepth_ + 1, loopDepth_);
     if (!inlineBuilder.buildInline(this, outerResumePoint, callInfo)) {
         if (analysisContext && analysisContext->isExceptionPending()) {
             IonSpew(IonSpew_Abort, "Inline builder raised exception.");
             abortReason_ = AbortReason_Error;
@@ -3870,16 +3911,18 @@ IonBuilder::inlineScriptedCall(CallInfo 
             abortReason_ = AbortReason_Inlining;
         } else if (inlineBuilder.abortReason_ == AbortReason_Inlining) {
             abortReason_ = AbortReason_Inlining;
         }
 
         return false;
     }
 
+    lock();
+
     // Create return block.
     jsbytecode *postCall = GetNextPc(pc);
     MBasicBlock *returnBlock = newBlock(nullptr, postCall);
     if (!returnBlock)
         return false;
     returnBlock->setCallerResumePoint(callerResumePoint_);
 
     // When profiling add Inline_Exit instruction to indicate end of inlined function.
@@ -4693,17 +4736,17 @@ IonBuilder::createThisScriptedSingleton(
     // Get the singleton prototype (if it exists)
     JSObject *proto = getSingletonPrototype(target);
     if (!proto)
         return nullptr;
 
     JSObject *templateObject = inspector->getTemplateObject(pc);
     if (!templateObject || !templateObject->is<JSObject>())
         return nullptr;
-    if (templateObject->getProto() != proto)
+    if (!templateObject->hasTenuredProto() || templateObject->getProto() != proto)
         return nullptr;
 
     if (!target->nonLazyScript()->types)
         return nullptr;
     if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(templateObject)))
         return nullptr;
 
     // For template objects with NewScript info, the appropriate allocation
@@ -5091,16 +5134,18 @@ IonBuilder::testShouldDOMCall(types::Typ
     if (jinfo->type != opType)
         return false;
 
     for (unsigned i = 0; i < inTypes->getObjectCount(); i++) {
         types::TypeObjectKey *curType = inTypes->getObject(i);
         if (!curType)
             continue;
 
+        if (!curType->hasTenuredProto())
+            return false;
         JSObject *proto = curType->proto().toObjectOrNull();
         if (!instanceChecker(proto, jinfo->protoID, jinfo->depth))
             return false;
     }
 
     return true;
 }
 
@@ -6026,16 +6071,18 @@ IonBuilder::testSingletonProperty(JSObje
             if (obj->hasSingletonType())
                 return property.singleton(constraints());
             return nullptr;
         }
 
         if (ClassHasResolveHook(compartment, obj->getClass(), name))
             return nullptr;
 
+        if (!obj->hasTenuredProto())
+            return nullptr;
         obj = obj->getProto();
     }
 
     return nullptr;
 }
 
 bool
 IonBuilder::testSingletonPropertyTypes(MDefinition *obj, JSObject *singleton, PropertyName *name,
@@ -6099,16 +6146,18 @@ IonBuilder::testSingletonPropertyTypes(M
             if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
                 return false;
             if (object->unknownProperties())
                 return false;
             types::HeapTypeSetKey property = object->property(NameToId(name));
             if (property.isOwnProperty(constraints()))
                 return false;
 
+            if (!object->hasTenuredProto())
+                return false;
             if (JSObject *proto = object->proto().toObjectOrNull()) {
                 // Test this type.
                 if (testSingletonProperty(proto, name) != singleton)
                     return false;
             } else {
                 // Can't be on the prototype chain with no prototypes...
                 return false;
             }
@@ -7892,16 +7941,18 @@ IonBuilder::objectsHaveCommonPrototype(t
                 if (!types->empty() || types->configuredProperty())
                     return false;
             }
             if (JSObject *obj = type->singleton()) {
                 if (types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
                     return false;
             }
 
+            if (!type->hasTenuredProto())
+                return false;
             JSObject *proto = type->proto().toObjectOrNull();
             if (proto == foundProto)
                 break;
             if (!proto) {
                 // The foundProto being searched for did not show up on the
                 // object's prototype chain.
                 return false;
             }
@@ -7991,17 +8042,17 @@ IonBuilder::annotateGetPropertyCache(MDe
 
     // Ensure that the relevant property typeset for each type object
     // is a single-object typeset containing a JSFunction
     for (unsigned int i = 0; i < objCount; i++) {
         types::TypeObject *baseTypeObj = objTypes->getTypeObject(i);
         if (!baseTypeObj)
             continue;
         types::TypeObjectKey *typeObj = types::TypeObjectKey::get(baseTypeObj);
-        if (typeObj->unknownProperties() || !typeObj->proto().isObject())
+        if (typeObj->unknownProperties() || !typeObj->hasTenuredProto() || !typeObj->proto().isObject())
             continue;
 
         const Class *clasp = typeObj->clasp();
         if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
             continue;
 
         types::HeapTypeSetKey ownTypes = typeObj->property(NameToId(name));
         if (ownTypes.isOwnProperty(constraints()))
--- a/js/src/jit/IonBuilder.h
+++ b/js/src/jit/IonBuilder.h
@@ -719,28 +719,28 @@ class IonBuilder : public MIRGenerator
         setCurrent(block);
     }
 
     void setCurrent(MBasicBlock *block) {
         current = block;
     }
 
     // A builder is inextricably tied to a particular script.
-    HeapPtrScript script_;
+    JSScript *script_;
 
     // If off thread compilation is successful, the final code generator is
     // attached here. Code has been generated, but not linked (there is not yet
     // an IonScript). This is heap allocated and must be explicitly destroyed,
     // which is done by FinishOffThreadBuilder().
     CodeGenerator *backgroundCodegen_;
 
   public:
     void clearForBackEnd();
 
-    JSScript *script() const { return script_.get(); }
+    JSScript *script() const { return script_; }
 
     CodeGenerator *backgroundCodegen() const { return backgroundCodegen_; }
     void setBackgroundCodegen(CodeGenerator *codegen) { backgroundCodegen_ = codegen; }
 
     AbortReason abortReason() { return abortReason_; }
 
     TypeRepresentationSetHash *getOrCreateReprSetHash(); // fallible
 
@@ -760,16 +760,27 @@ class IonBuilder : public MIRGenerator
     JSContext *analysisContext;
     BaselineFrameInspector *baselineFrame_;
     AbortReason abortReason_;
     TypeRepresentationSetHash *reprSetHash_;
 
     // Constraints for recording dependencies on type information.
     types::CompilerConstraintList *constraints_;
 
+    mozilla::Maybe<AutoLockForCompilation> lock_;
+
+    void lock() {
+        if (!analysisContext)
+            lock_.construct(compartment);
+    }
+    void unlock() {
+        if (!analysisContext)
+            lock_.destroy();
+    }
+
     // Basic analysis information about the script.
     BytecodeAnalysis analysis_;
     BytecodeAnalysis &analysis() {
         return analysis_;
     }
 
     types::TemporaryTypeSet *thisTypes, *argTypes, *typeArray;
     uint32_t typeArrayHint;
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -463,17 +463,17 @@ GeneratePrototypeGuards(JSContext *cx, I
      * in reshaping the holder, and thus the failure of the shape guard.
      */
     JS_ASSERT(obj != holder);
 
     if (obj->hasUncacheableProto()) {
         // Note: objectReg and scratchReg may be the same register, so we cannot
         // use objectReg in the rest of this function.
         masm.loadPtr(Address(objectReg, JSObject::offsetOfType()), scratchReg);
-        Address proto(scratchReg, offsetof(types::TypeObject, proto));
+        Address proto(scratchReg, types::TypeObject::offsetOfProto());
         masm.branchNurseryPtr(Assembler::NotEqual, proto,
                               ImmMaybeNurseryPtr(obj->getProto()), failures);
     }
 
     JSObject *pobj = IsCacheableDOMProxy(obj)
                      ? obj->getTaggedProto().toObjectOrNull()
                      : obj->getProto();
     if (!pobj)
@@ -791,21 +791,17 @@ GenerateReadSlot(JSContext *cx, IonScrip
                            &prototypeFailures);
         } else {
             // The property does not exist. Guard on everything in the
             // prototype chain.
             JSObject *proto = obj->getTaggedProto().toObjectOrNull();
             Register lastReg = object;
             JS_ASSERT(scratchReg != object);
             while (proto) {
-                Address addrType(lastReg, JSObject::offsetOfType());
-                masm.loadPtr(addrType, scratchReg);
-                Address addrProto(scratchReg, offsetof(types::TypeObject, proto));
-                masm.loadPtr(addrProto, scratchReg);
-                Address addrShape(scratchReg, JSObject::offsetOfShape());
+                masm.loadObjProto(lastReg, scratchReg);
 
                 // Guard the shape of the current prototype.
                 masm.branchPtr(Assembler::NotEqual,
                                Address(scratchReg, JSObject::offsetOfShape()),
                                ImmGCPtr(proto->lastProperty()),
                                &prototypeFailures);
 
                 proto = proto->getProto();
@@ -2579,18 +2575,17 @@ GenerateAddSlot(JSContext *cx, MacroAsse
     }
 
     JSObject *proto = obj->getProto();
     Register protoReg = object;
     while (proto) {
         Shape *protoShape = proto->lastProperty();
 
         // load next prototype
-        masm.loadPtr(Address(protoReg, JSObject::offsetOfType()), protoReg);
-        masm.loadPtr(Address(protoReg, offsetof(types::TypeObject, proto)), protoReg);
+        masm.loadObjProto(protoReg, protoReg);
 
         // Ensure that its shape matches.
         masm.branchTestObjShape(Assembler::NotEqual, protoReg, protoShape, &failuresPopObject);
 
         proto = proto->getProto();
     }
 
     masm.pop(object);     // restore object reg
--- a/js/src/jit/IonMacroAssembler.h
+++ b/js/src/jit/IonMacroAssembler.h
@@ -287,22 +287,22 @@ class MacroAssembler : public MacroAssem
     }
     void loadBaseShape(Register objReg, Register dest) {
         loadPtr(Address(objReg, JSObject::offsetOfShape()), dest);
 
         loadPtr(Address(dest, Shape::offsetOfBase()), dest);
     }
     void loadObjClass(Register objReg, Register dest) {
         loadPtr(Address(objReg, JSObject::offsetOfType()), dest);
-        loadPtr(Address(dest, offsetof(types::TypeObject, clasp)), dest);
+        loadPtr(Address(dest, types::TypeObject::offsetOfClasp()), dest);
     }
     void branchTestObjClass(Condition cond, Register obj, Register scratch, const js::Class *clasp,
                             Label *label) {
         loadPtr(Address(obj, JSObject::offsetOfType()), scratch);
-        branchPtr(cond, Address(scratch, offsetof(types::TypeObject, clasp)), ImmPtr(clasp), label);
+        branchPtr(cond, Address(scratch, types::TypeObject::offsetOfClasp()), ImmPtr(clasp), label);
     }
     void branchTestObjShape(Condition cond, Register obj, const Shape *shape, Label *label) {
         branchPtr(cond, Address(obj, JSObject::offsetOfShape()), ImmGCPtr(shape), label);
     }
     void branchTestObjShape(Condition cond, Register obj, Register shape, Label *label) {
         branchPtr(cond, Address(obj, JSObject::offsetOfShape()), shape, label);
     }
     void branchTestProxyHandlerFamily(Condition cond, Register proxy, Register scratch,
@@ -348,17 +348,17 @@ class MacroAssembler : public MacroAssem
     }
 
     void loadObjPrivate(Register obj, uint32_t nfixed, Register dest) {
         loadPtr(Address(obj, JSObject::getPrivateDataOffset(nfixed)), dest);
     }
 
     void loadObjProto(Register obj, Register dest) {
         loadPtr(Address(obj, JSObject::offsetOfType()), dest);
-        loadPtr(Address(dest, offsetof(types::TypeObject, proto)), dest);
+        loadPtr(Address(dest, types::TypeObject::offsetOfProto()), dest);
     }
 
     void loadStringLength(Register str, Register dest) {
         loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), dest);
         rshiftPtr(Imm32(JSString::LENGTH_SHIFT), dest);
     }
 
     void loadJSContext(const Register &dest) {
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -2924,17 +2924,23 @@ bool
 jit::PropertyReadNeedsTypeBarrier(JSContext *propertycx,
                                   types::CompilerConstraintList *constraints,
                                   types::TypeObjectKey *object, PropertyName *name,
                                   types::TemporaryTypeSet *observed, bool updateObserved)
 {
     // If this access has never executed, try to add types to the observed set
     // according to any property which exists on the object or its prototype.
     if (updateObserved && observed->empty() && name) {
-        JSObject *obj = object->singleton() ? object->singleton() : object->proto().toObjectOrNull();
+        JSObject *obj;
+        if (object->singleton())
+            obj = object->singleton();
+        else if (object->hasTenuredProto())
+            obj = object->proto().toObjectOrNull();
+        else
+            obj = nullptr;
 
         while (obj) {
             if (!obj->getClass()->isNative())
                 break;
 
             types::TypeObjectKey *typeObj = types::TypeObjectKey::get(obj);
             if (propertycx)
                 typeObj->ensureTrackedProperty(propertycx, NameToId(name));
@@ -2948,16 +2954,18 @@ jit::PropertyReadNeedsTypeBarrier(JSCont
                     if (types.length()) {
                         // Note: the return value here is ignored.
                         observed->addType(types[0], GetIonContext()->temp->lifoAlloc());
                         break;
                     }
                 }
             }
 
+            if (!obj->hasTenuredProto())
+                break;
             obj = obj->getProto();
         }
     }
 
     return PropertyReadNeedsTypeBarrier(constraints, object, name, observed);
 }
 
 bool
@@ -2999,17 +3007,21 @@ jit::PropertyReadOnPrototypeNeedsTypeBar
     types::TypeSet *types = obj->resultTypeSet();
     if (!types || types->unknownObject())
         return true;
 
     for (size_t i = 0; i < types->getObjectCount(); i++) {
         types::TypeObjectKey *object = types->getObject(i);
         if (!object)
             continue;
-        while (object->proto().isObject()) {
+        while (true) {
+            if (!object->hasTenuredProto())
+                return true;
+            if (!object->proto().isObject())
+                break;
             object = types::TypeObjectKey::get(object->proto().toObject());
             if (PropertyReadNeedsTypeBarrier(constraints, object, name, observed))
                 return true;
         }
     }
 
     return false;
 }
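
The hasTenuredProto() checks added above are one instance of a pattern used throughout this patch: a prototype link is only followed when the proto is known to be tenured, and the analysis gives up conservatively otherwise, because a nursery-allocated proto can be moved while a helper thread is compiling. A toy, self-contained sketch of that guard, with hypothetical types rather than the real JSObject/TypeObjectKey API:

#include <cstdio>

struct FakeObject {
    FakeObject *proto;
    bool protoIsTenured;

    bool hasTenuredProto() const { return protoIsTenured; }
    FakeObject *getProto() const { return proto; }
};

// Returns true when the walk must give up, mirroring the conservative
// "needs a type barrier" result in the hunk above.
static bool protoWalkMustBail(FakeObject *obj) {
    while (obj) {
        if (!obj->hasTenuredProto())
            return true;   // proto may still be in the nursery: don't touch it
        obj = obj->getProto();
    }
    return false;
}

int main() {
    FakeObject base = { nullptr, true };
    FakeObject derived = { &base, true };
    std::printf("%d\n", protoWalkMustBail(&derived));  // prints 0
    return 0;
}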
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -941,17 +941,17 @@ void
 AssertValidObjectPtr(JSContext *cx, JSObject *obj)
 {
     // Check what we can, so that we'll hopefully assert/crash if we get a
     // bogus object (pointer).
     JS_ASSERT(obj->compartment() == cx->compartment());
     JS_ASSERT(obj->runtimeFromMainThread() == cx->runtime());
 
     JS_ASSERT_IF(!obj->hasLazyType(),
-                 obj->type()->clasp == obj->lastProperty()->getObjectClass());
+                 obj->type()->clasp() == obj->lastProperty()->getObjectClass());
 
     if (obj->isTenured()) {
         JS_ASSERT(obj->isAligned());
         gc::AllocKind kind = obj->tenuredGetAllocKind();
         JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
         JS_ASSERT(obj->tenuredZone() == cx->zone());
     }
 }
--- a/js/src/jit/shared/CodeGenerator-x86-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-x86-shared.cpp
@@ -1720,17 +1720,17 @@ CodeGeneratorX86Shared::visitGuardObject
 
 bool
 CodeGeneratorX86Shared::visitGuardClass(LGuardClass *guard)
 {
     Register obj = ToRegister(guard->input());
     Register tmp = ToRegister(guard->tempInt());
 
     masm.loadPtr(Address(obj, JSObject::offsetOfType()), tmp);
-    masm.cmpPtr(Operand(tmp, offsetof(types::TypeObject, clasp)), ImmPtr(guard->mir()->getClass()));
+    masm.cmpPtr(Operand(tmp, types::TypeObject::offsetOfClasp()), ImmPtr(guard->mir()->getClass()));
     if (!bailoutIf(Assembler::NotEqual, guard->snapshot()))
         return false;
     return true;
 }
 
 bool
 CodeGeneratorX86Shared::visitEffectiveAddress(LEffectiveAddress *ins)
 {
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -111,27 +111,28 @@ JSCompartment::sweepCallsiteClones()
 
 JSFunction *
 js::ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
                                     JSScript *script, jsbytecode *pc)
 {
     JS_ASSERT(fun->nonLazyScript()->shouldCloneAtCallsite());
     JS_ASSERT(!fun->nonLazyScript()->enclosingStaticScope());
     JS_ASSERT(types::UseNewTypeForClone(fun));
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
 
     /*
      * If we start allocating function objects in the nursery, then the callsite
      * clone table will need a postbarrier.
      */
     JS_ASSERT(fun->isTenured());
 
     if (!table.initialized())
         return nullptr;
 
-    CallsiteCloneTable::Ptr p = table.lookup(CallsiteCloneKey(fun, script, script->pcToOffset(pc)));
+    CallsiteCloneTable::Ptr p = table.readonlyThreadsafeLookup(CallsiteCloneKey(fun, script, script->pcToOffset(pc)));
     if (p)
         return p->value();
 
     return nullptr;
 }
 
 JSFunction *
 js::CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun, HandleScript script, jsbytecode *pc)
@@ -148,16 +149,18 @@ js::CloneFunctionAtCallsite(JSContext *c
      * Store a link back to the original for function.caller and avoid cloning
      * clones.
      */
     clone->nonLazyScript()->setIsCallsiteClone(fun);
 
     typedef CallsiteCloneKey Key;
     typedef CallsiteCloneTable Table;
 
+    AutoLockForCompilation lock(cx);
+
     Table &table = cx->compartment()->callsiteClones;
     if (!table.initialized() && !table.init())
         return nullptr;
 
     if (!table.putNew(Key(fun, script, script->pcToOffset(pc)), clone))
         return nullptr;
 
     return clone;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -27,17 +27,20 @@ js_ReportOutOfMemory(js::ThreadSafeConte
 extern void
 js_ReportAllocationOverflow(js::ThreadSafeContext *cx);
 
 extern void
 js_ReportOverRecursed(js::ThreadSafeContext *cx);
 
 namespace js {
 
-namespace jit { class IonContext; }
+namespace jit {
+class IonContext;
+class CompileCompartment;
+}
 
 struct CallsiteCloneKey {
     /* The original function that we are cloning. */
     JSFunction *original;
 
     /* The script of the call. */
     JSScript *script;
 
@@ -1033,17 +1036,19 @@ class AutoLockForExclusiveAccess
 #ifdef JS_WORKER_THREADS
     JSRuntime *runtime;
 
     void init(JSRuntime *rt) {
         runtime = rt;
         if (runtime->numExclusiveThreads) {
             runtime->assertCanLock(JSRuntime::ExclusiveAccessLock);
             PR_Lock(runtime->exclusiveAccessLock);
+#ifdef DEBUG
             runtime->exclusiveAccessOwner = PR_GetCurrentThread();
+#endif
         } else {
             JS_ASSERT(!runtime->mainThreadHasExclusiveAccess);
             runtime->mainThreadHasExclusiveAccess = true;
         }
     }
 
   public:
     AutoLockForExclusiveAccess(ExclusiveContext *cx MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
@@ -1052,19 +1057,17 @@ class AutoLockForExclusiveAccess
     }
     AutoLockForExclusiveAccess(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         init(rt);
     }
     ~AutoLockForExclusiveAccess() {
         if (runtime->numExclusiveThreads) {
             JS_ASSERT(runtime->exclusiveAccessOwner == PR_GetCurrentThread());
-#ifdef DEBUG
             runtime->exclusiveAccessOwner = nullptr;
-#endif
             PR_Unlock(runtime->exclusiveAccessLock);
         } else {
             JS_ASSERT(runtime->mainThreadHasExclusiveAccess);
             runtime->mainThreadHasExclusiveAccess = false;
         }
     }
 #else // JS_WORKER_THREADS
   public:
@@ -1078,15 +1081,78 @@ class AutoLockForExclusiveAccess
         // An empty destructor is needed to avoid warnings from clang about
         // unused local variables of this type.
     }
 #endif // JS_WORKER_THREADS
 
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
+class AutoLockForCompilation
+{
+#ifdef JS_WORKER_THREADS
+    JSRuntime *runtime;
+
+    void init(JSRuntime *rt) {
+        runtime = rt;
+        if (runtime->numCompilationThreads) {
+            runtime->assertCanLock(JSRuntime::CompilationLock);
+            PR_Lock(runtime->compilationLock);
+#ifdef DEBUG
+            runtime->compilationLockOwner = PR_GetCurrentThread();
+#endif
+        } else {
+#ifdef DEBUG
+            JS_ASSERT(!runtime->mainThreadHasCompilationLock);
+            runtime->mainThreadHasCompilationLock = true;
+#endif
+        }
+    }
+
+  public:
+    AutoLockForCompilation(ExclusiveContext *cx MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
+        MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+        if (cx->isJSContext())
+            init(cx->asJSContext()->runtime());
+        else
+            runtime = nullptr;
+    }
+    AutoLockForCompilation(jit::CompileCompartment *compartment MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
+    ~AutoLockForCompilation() {
+        if (runtime) {
+            if (runtime->numCompilationThreads) {
+                JS_ASSERT(runtime->compilationLockOwner == PR_GetCurrentThread());
+#ifdef DEBUG
+                runtime->compilationLockOwner = nullptr;
+#endif
+                PR_Unlock(runtime->compilationLock);
+            } else {
+#ifdef DEBUG
+                JS_ASSERT(runtime->mainThreadHasCompilationLock);
+                runtime->mainThreadHasCompilationLock = false;
+#endif
+            }
+        }
+    }
+#else // JS_WORKER_THREADS
+  public:
+    AutoLockForCompilation(ExclusiveContext *cx MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
+        MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    }
+    AutoLockForCompilation(jit::CompileCompartment *compartment MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
+        MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    }
+    ~AutoLockForCompilation() {
+        // An empty destructor is needed to avoid warnings from clang about
+        // unused local variables of this type.
+    }
+#endif // JS_WORKER_THREADS
+
+    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
+};
+
 } /* namespace js */
 
 #ifdef _MSC_VER
 #pragma warning(pop)
 #endif
 
 #endif /* jscntxt_h */
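
AutoLockForCompilation, added above, either takes the runtime's compilation lock (when helper threads may be compiling) or merely records that the main thread currently owns the compilation data, which is what the CurrentThreadCanRead/WriteCompilationData assertions elsewhere in the patch check. A rough standalone analogue using std::mutex, with made-up names and none of the guard-object machinery; this is a sketch of the idea, not the real implementation:

#include <cassert>
#include <mutex>
#include <thread>

struct FakeRuntime {
    int numCompilationThreads = 0;        // > 0 once off thread builds can run
    std::mutex compilationLock;
    std::thread::id compilationLockOwner; // debug-only bookkeeping in the real code
    bool mainThreadHasCompilationLock = false;
};

class AutoLockForCompilationSketch {
    FakeRuntime &rt;

  public:
    explicit AutoLockForCompilationSketch(FakeRuntime &rt) : rt(rt) {
        if (rt.numCompilationThreads) {
            rt.compilationLock.lock();
            rt.compilationLockOwner = std::this_thread::get_id();
        } else {
            assert(!rt.mainThreadHasCompilationLock);
            rt.mainThreadHasCompilationLock = true;
        }
    }
    ~AutoLockForCompilationSketch() {
        if (rt.numCompilationThreads) {
            assert(rt.compilationLockOwner == std::this_thread::get_id());
            rt.compilationLockOwner = std::thread::id();
            rt.compilationLock.unlock();
        } else {
            assert(rt.mainThreadHasCompilationLock);
            rt.mainThreadHasCompilationLock = false;
        }
    }
};

int main() {
    FakeRuntime rt;
    {
        AutoLockForCompilationSketch lock(rt); // writes to compilation data go here
    }
    return 0;
}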
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -333,17 +333,17 @@ JSCompartment::wrap(JSContext *cx, Mutab
     RootedValue key(cx, ObjectValue(*obj));
     if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
         obj.set(&p->value().get().toObject());
         JS_ASSERT(obj->is<CrossCompartmentWrapperObject>());
         JS_ASSERT(obj->getParent() == global);
         return true;
     }
 
-    RootedObject proto(cx, Proxy::LazyProto);
+    RootedObject proto(cx, TaggedProto::LazyProto);
     RootedObject existing(cx, existingArg);
     if (existing) {
         /* Is it possible to reuse |existing|? */
         if (!existing->getTaggedProto().isLazy() ||
             // Note: don't use is<ObjectProxyObject>() here -- it also matches subclasses!
             existing->getClass() != &ProxyObject::uncallableClass_ ||
             existing->getParent() != global ||
             obj->isCallable())
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1119,35 +1119,31 @@ JSFunction::createScriptForLazilyInterpr
         if (cx->zone()->needsBarrier())
             LazyScript::writeBarrierPre(lazy);
 
         // Suppress GC for now although we should be able to remove this by
         // making 'lazy' a Rooted<LazyScript*> (which requires adding a
         // THING_ROOT_LAZY_SCRIPT).
         AutoSuppressGC suppressGC(cx);
 
-        fun->flags_ &= ~INTERPRETED_LAZY;
-        fun->flags_ |= INTERPRETED;
-
         RootedScript script(cx, lazy->maybeScript());
 
         if (script) {
-            fun->initScript(script);
+            AutoLockForCompilation lock(cx);
+            fun->setUnlazifiedScript(script);
             return true;
         }
 
-        fun->initScript(nullptr);
-
         if (fun != lazy->function()) {
             script = lazy->function()->getOrCreateScript(cx);
-            if (!script) {
-                fun->initLazyScript(lazy);
+            if (!script)
                 return false;
-            }
-            fun->initScript(script);
+
+            AutoLockForCompilation lock(cx);
+            fun->setUnlazifiedScript(script);
             return true;
         }
 
         // Lazy script caching is only supported for leaf functions. If a
         // script with inner functions was returned by the cache, those inner
         // functions would be delazified when deep cloning the script, even if
         // they have never executed.
         //
@@ -1157,50 +1153,48 @@ JSFunction::createScriptForLazilyInterpr
         if (!lazy->numInnerFunctions() && !JS::IsIncrementalGCInProgress(cx->runtime())) {
             LazyScriptCache::Lookup lookup(cx, lazy);
             cx->runtime()->lazyScriptCache.lookup(lookup, script.address());
         }
 
         if (script) {
             RootedObject enclosingScope(cx, lazy->enclosingScope());
             RootedScript clonedScript(cx, CloneScript(cx, enclosingScope, fun, script));
-            if (!clonedScript) {
-                fun->initLazyScript(lazy);
+            if (!clonedScript)
                 return false;
-            }
 
             clonedScript->setSourceObject(lazy->sourceObject());
 
             fun->initAtom(script->function()->displayAtom());
-            fun->initScript(clonedScript);
             clonedScript->setFunction(fun);
 
+            {
+                AutoLockForCompilation lock(cx);
+                fun->setUnlazifiedScript(clonedScript);
+            }
+
             CallNewScriptHook(cx, clonedScript, fun);
 
             lazy->initScript(clonedScript);
             return true;
         }
 
         JS_ASSERT(lazy->source()->hasSourceData());
 
         // Parse and compile the script from source.
         SourceDataCache::AutoSuppressPurge asp(cx);
         const jschar *chars = lazy->source()->chars(cx, asp);
-        if (!chars) {
-            fun->initLazyScript(lazy);
+        if (!chars)
             return false;
-        }
 
         const jschar *lazyStart = chars + lazy->begin();
         size_t lazyLength = lazy->end() - lazy->begin();
 
-        if (!frontend::CompileLazyFunction(cx, lazy, lazyStart, lazyLength)) {
-            fun->initLazyScript(lazy);
+        if (!frontend::CompileLazyFunction(cx, lazy, lazyStart, lazyLength))
             return false;
-        }
 
         script = fun->nonLazyScript();
 
         // Try to insert the newly compiled script into the lazy script cache.
         if (!lazy->numInnerFunctions()) {
             // A script's starting column isn't set by the bytecode emitter, so
             // specify this from the lazy script so that if an identical lazy
             // script is encountered later a match can be determined.
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -118,29 +118,38 @@ class JSFunction : public JSObject
     bool isInterpreted()            const { return flags() & (INTERPRETED | INTERPRETED_LAZY); }
     bool isNative()                 const { return !isInterpreted(); }
 
     /* Possible attributes of a native function: */
     bool isNativeConstructor()      const { return flags() & NATIVE_CTOR; }
 
     /* Possible attributes of an interpreted function: */
     bool isFunctionPrototype()      const { return flags() & IS_FUN_PROTO; }
-    bool isInterpretedLazy()        const { return flags() & INTERPRETED_LAZY; }
-    bool hasScript()                const { return flags() & INTERPRETED; }
     bool isExprClosure()            const { return flags() & EXPR_CLOSURE; }
     bool hasGuessedAtom()           const { return flags() & HAS_GUESSED_ATOM; }
     bool isLambda()                 const { return flags() & LAMBDA; }
     bool isSelfHostedBuiltin()      const { return flags() & SELF_HOSTED; }
     bool isSelfHostedConstructor()  const { return flags() & SELF_HOSTED_CTOR; }
     bool hasRest()                  const { return flags() & HAS_REST; }
     bool isWrappable()              const {
         JS_ASSERT_IF(flags() & SH_WRAPPABLE, isSelfHostedBuiltin());
         return flags() & SH_WRAPPABLE;
     }
 
+    // Functions can change between being lazily interpreted and having scripts
+    // only while the compilation lock is held.
+    bool isInterpretedLazy()        const {
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
+        return flags() & INTERPRETED_LAZY;
+    }
+    bool hasScript()                const {
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
+        return flags() & INTERPRETED;
+    }
+
     bool hasJITCode() const {
         if (!hasScript())
             return false;
 
         return nonLazyScript()->hasBaselineScript() || nonLazyScript()->hasIonScript();
     }
 
     // Arrow functions are a little weird.
@@ -316,31 +325,34 @@ class JSFunction : public JSObject
             initScript(script);
         }
         JS_ASSERT(hasScript());
         return u.i.s.script_;
     }
 
     JSScript *nonLazyScript() const {
         JS_ASSERT(hasScript());
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
         return u.i.s.script_;
     }
 
     js::HeapPtrScript &mutableScript() {
         JS_ASSERT(isInterpreted());
         return *(js::HeapPtrScript *)&u.i.s.script_;
     }
 
     js::LazyScript *lazyScript() const {
         JS_ASSERT(isInterpretedLazy() && u.i.s.lazy_);
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
         return u.i.s.lazy_;
     }
 
     js::LazyScript *lazyScriptOrNull() const {
         JS_ASSERT(isInterpretedLazy());
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
         return u.i.s.lazy_;
     }
 
     js::GeneratorKind generatorKind() const {
         if (!isInterpreted())
             return js::NotGenerator;
         if (hasScript())
             return nonLazyScript()->generatorKind();
@@ -352,25 +364,35 @@ class JSFunction : public JSObject
 
     bool isGenerator() const { return generatorKind() != js::NotGenerator; }
 
     bool isLegacyGenerator() const { return generatorKind() == js::LegacyGenerator; }
 
     bool isStarGenerator() const { return generatorKind() == js::StarGenerator; }
 
     void setScript(JSScript *script_) {
-        JS_ASSERT(isInterpreted());
+        JS_ASSERT(hasScript());
         mutableScript() = script_;
     }
 
     void initScript(JSScript *script_) {
-        JS_ASSERT(isInterpreted());
+        JS_ASSERT(hasScript());
         mutableScript().init(script_);
     }
 
+    void setUnlazifiedScript(JSScript *script) {
+        // Note: createScriptForLazilyInterpretedFunction triggers a barrier on
+        // the lazy script before it is overwritten here.
+        JS_ASSERT(js::CurrentThreadCanWriteCompilationData());
+        JS_ASSERT(isInterpretedLazy());
+        flags_ &= ~INTERPRETED_LAZY;
+        flags_ |= INTERPRETED;
+        initScript(script);
+    }
+
     void initLazyScript(js::LazyScript *lazy) {
         JS_ASSERT(isInterpreted());
         flags_ &= ~INTERPRETED;
         flags_ |= INTERPRETED_LAZY;
         u.i.s.lazy_ = lazy;
     }
 
     JSNative native() const {
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -680,16 +680,18 @@ types::NewCompilerConstraintList(jit::Te
 }
 
 /* static */ bool
 TypeScript::FreezeTypeSets(CompilerConstraintList *constraints, JSScript *script,
                            TemporaryTypeSet **pThisTypes,
                            TemporaryTypeSet **pArgTypes,
                            TemporaryTypeSet **pBytecodeTypes)
 {
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+
     LifoAlloc *alloc = constraints->alloc();
     StackTypeSet *existing = script->types->typeArray();
 
     size_t count = NumTypeSets(script);
     TemporaryTypeSet *types = alloc->newArrayUninitialized<TemporaryTypeSet>(count);
     if (!types)
         return false;
     PodZero(types, count);
@@ -786,23 +788,36 @@ CompilerConstraintInstance<T>::generateT
     return true;
 }
 
 } /* anonymous namespace */
 
 const Class *
 TypeObjectKey::clasp()
 {
-    return isTypeObject() ? asTypeObject()->clasp : asSingleObject()->getClass();
+    return isTypeObject() ? asTypeObject()->clasp() : asSingleObject()->getClass();
 }
 
 TaggedProto
 TypeObjectKey::proto()
 {
-    return isTypeObject() ? TaggedProto(asTypeObject()->proto) : asSingleObject()->getTaggedProto();
+    JS_ASSERT(hasTenuredProto());
+    return isTypeObject() ? asTypeObject()->proto() : asSingleObject()->getTaggedProto();
+}
+
+bool
+ObjectImpl::hasTenuredProto() const
+{
+    return type_->hasTenuredProto();
+}
+
+bool
+TypeObjectKey::hasTenuredProto()
+{
+    return isTypeObject() ? asTypeObject()->hasTenuredProto() : asSingleObject()->hasTenuredProto();
 }
 
 JSObject *
 TypeObjectKey::singleton()
 {
     return isTypeObject() ? asTypeObject()->singleton : asSingleObject();
 }
 
@@ -831,16 +846,17 @@ TypeObjectKey::unknownProperties()
         return type->unknownProperties();
     return false;
 }
 
 HeapTypeSetKey
 TypeObjectKey::property(jsid id)
 {
     JS_ASSERT(!unknownProperties());
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
 
     HeapTypeSetKey property;
     property.object_ = this;
     property.id_ = id;
     if (TypeObject *type = maybeType())
         property.maybeTypes_ = type->maybeGetProperty(id);
 
     return property;
@@ -1424,18 +1440,20 @@ ObjectStateChange(ExclusiveContext *cxAr
 {
     if (object->unknownProperties())
         return;
 
     /* All constraints listening to state changes are on the empty id. */
     HeapTypeSet *types = object->maybeGetProperty(JSID_EMPTY);
 
     /* Mark as unknown after getting the types, to avoid assertion. */
-    if (markingUnknown)
-        object->flags |= OBJECT_FLAG_DYNAMIC_MASK | OBJECT_FLAG_UNKNOWN_PROPERTIES;
+    if (markingUnknown) {
+        AutoLockForCompilation lock(cxArg);
+        object->addFlags(OBJECT_FLAG_DYNAMIC_MASK | OBJECT_FLAG_UNKNOWN_PROPERTIES);
+    }
 
     if (types) {
         if (JSContext *cx = cxArg->maybeJSContext()) {
             TypeConstraint *constraint = types->constraintList;
             while (constraint) {
                 constraint->newObjectState(cx, object);
                 constraint = constraint->next;
             }
@@ -1673,16 +1691,19 @@ TemporaryTypeSet::getCommonPrototype()
     JSObject *proto = nullptr;
     unsigned count = getObjectCount();
 
     for (unsigned i = 0; i < count; i++) {
         TypeObjectKey *object = getObject(i);
         if (!object)
             continue;
 
+        if (!object->hasTenuredProto())
+            return nullptr;
+
         TaggedProto nproto = object->proto();
         if (proto) {
             if (nproto != proto)
                 return nullptr;
         } else {
             if (!nproto.isObject())
                 return nullptr;
             proto = nproto.toObject();
@@ -1732,28 +1753,34 @@ TypeZone::init(JSContext *cx)
     {
         return;
     }
 
     inferenceEnabled = true;
 }
 
 TypeObject *
-TypeCompartment::newTypeObject(ExclusiveContext *cx, const Class *clasp, Handle<TaggedProto> proto, bool unknown)
+TypeCompartment::newTypeObject(ExclusiveContext *cx, const Class *clasp, Handle<TaggedProto> proto,
+                               TypeObjectFlags initialFlags)
 {
     JS_ASSERT_IF(proto.isObject(), cx->isInsideCurrentCompartment(proto.toObject()));
 
+    if (!cx->typeInferenceEnabled())
+        initialFlags |= OBJECT_FLAG_UNKNOWN_MASK;
+
+    if (cx->isJSContext()) {
+        if (proto.isObject() && IsInsideNursery(cx->asJSContext()->runtime(), proto.toObject()))
+            initialFlags |= OBJECT_FLAG_NURSERY_PROTO;
+    }
+
     TypeObject *object = gc::NewGCThing<TypeObject, CanGC>(cx, gc::FINALIZE_TYPE_OBJECT,
                                                            sizeof(TypeObject), gc::TenuredHeap);
     if (!object)
         return nullptr;
-    new(object) TypeObject(clasp, proto, unknown);
-
-    if (!cx->typeInferenceEnabled())
-        object->flags |= OBJECT_FLAG_UNKNOWN_MASK;
+    new(object) TypeObject(clasp, proto, initialFlags);
 
     return object;
 }
 
 static inline jsbytecode *
 PreviousOpcode(HandleScript script, jsbytecode *pc)
 {
     ScriptAnalysis *analysis = script->analysis();
@@ -1866,22 +1893,21 @@ TypeCompartment::addAllocationSiteTypeOb
     }
 
     if (!res) {
         RootedObject proto(cx);
         if (!js_GetClassPrototype(cx, key.kind, &proto, nullptr))
             return nullptr;
 
         Rooted<TaggedProto> tagged(cx, TaggedProto(proto));
-        res = newTypeObject(cx, GetClassForProtoKey(key.kind), tagged);
+        res = newTypeObject(cx, GetClassForProtoKey(key.kind), tagged, OBJECT_FLAG_FROM_ALLOCATION_SITE);
         if (!res) {
             cx->compartment()->types.setPendingNukeTypes(cx);
             return nullptr;
         }
-        res->flags |= OBJECT_FLAG_FROM_ALLOCATION_SITE;
         key.script = keyScript;
     }
 
     if (JSOp(*pc) == JSOP_NEWOBJECT) {
         /*
          * This object is always constructed the same way and will not be
          * observed by other code before all properties have been added. Mark
          * all the properties as definite properties of the object.
@@ -2005,16 +2031,18 @@ PrototypeHasIndexedProperty(CompilerCons
         TypeObjectKey *type = TypeObjectKey::get(obj);
         if (ClassCanHaveExtraProperties(type->clasp()))
             return true;
         if (type->unknownProperties())
             return true;
         HeapTypeSetKey index = type->property(JSID_VOID);
         if (index.configured(constraints) || index.isOwnProperty(constraints))
             return true;
+        if (!obj->hasTenuredProto())
+            return true;
         obj = obj->getProto();
     } while (obj);
 
     return false;
 }
 
 bool
 types::ArrayPrototypeHasIndexedProperty(CompilerConstraintList *constraints, JSScript *script)
@@ -2170,20 +2198,19 @@ TypeZone::addPendingRecompile(JSContext 
     if (script->function() && !script->function()->hasLazyType())
         ObjectStateChange(cx, script->function()->type(), false);
 }
 
 void
 TypeCompartment::markSetsUnknown(JSContext *cx, TypeObject *target)
 {
     JS_ASSERT(this == &cx->compartment()->types);
-    JS_ASSERT(!(target->flags & OBJECT_FLAG_SETS_MARKED_UNKNOWN));
+    JS_ASSERT(!(target->flags() & OBJECT_FLAG_SETS_MARKED_UNKNOWN));
     JS_ASSERT(!target->singleton);
     JS_ASSERT(target->unknownProperties());
-    target->flags |= OBJECT_FLAG_SETS_MARKED_UNKNOWN;
 
     AutoEnterAnalysis enter(cx);
 
     /*
      * Mark both persistent and transient type sets which contain obj as having
      * a generic object type. It is not sufficient to mark just the persistent
      * sets, as analysis of individual opcodes can pull type objects from
      * static information (like initializer objects at various offsets).
@@ -2214,16 +2241,19 @@ TypeCompartment::markSetsUnknown(JSConte
             unsigned count = TypeScript::NumTypeSets(script);
             StackTypeSet *typeArray = script->types->typeArray();
             for (unsigned i = 0; i < count; i++) {
                 if (typeArray[i].hasType(Type::ObjectType(target)))
                     typeArray[i].addType(cx, Type::AnyObjectType());
             }
         }
     }
+
+    AutoLockForCompilation lock(cx);
+    target->addFlags(OBJECT_FLAG_SETS_MARKED_UNKNOWN);
 }
 
 void
 TypeCompartment::print(JSContext *cx, bool force)
 {
 #ifdef DEBUG
     gc::AutoSuppressGC suppressGC(cx);
 
@@ -2506,17 +2536,17 @@ TypeCompartment::fixObjectType(Exclusive
         entry.value = obj->getSlot(shape->slot());
         shape = shape->previous();
     }
 
     ObjectTableKey::Lookup lookup(properties.begin(), properties.length(), obj->numFixedSlots());
     ObjectTypeTable::AddPtr p = objectTypeTable->lookupForAdd(lookup);
 
     if (p) {
-        JS_ASSERT(obj->getProto() == p->value().object->proto);
+        JS_ASSERT(obj->getProto() == p->value().object->proto().toObject());
         JS_ASSERT(obj->lastProperty() == p->value().shape);
 
         UpdateObjectTableEntryTypes(cx, p->value(), properties.begin(), properties.length());
         obj->setType(p->value().object);
         return;
     }
 
     /* Make a new type to use for the object and similar future ones. */
@@ -2609,17 +2639,17 @@ TypeCompartment::newTypedObject(JSContex
     if (!p)
         return nullptr;
 
     RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, allocKind));
     if (!obj) {
         cx->clearPendingException();
         return nullptr;
     }
-    JS_ASSERT(obj->getProto() == p->value().object->proto);
+    JS_ASSERT(obj->getProto() == p->value().object->proto().toObject());
 
     RootedShape shape(cx, p->value().shape);
     if (!JSObject::setLastProperty(cx, obj, shape)) {
         cx->clearPendingException();
         return nullptr;
     }
 
     UpdateObjectTableEntryTypes(cx, p->value(), properties, nproperties);
@@ -2630,40 +2660,72 @@ TypeCompartment::newTypedObject(JSContex
     obj->setType(p->value().object);
     return obj;
 }
 
 /////////////////////////////////////////////////////////////////////
 // TypeObject
 /////////////////////////////////////////////////////////////////////
 
+#ifdef DEBUG
+void
+TypeObject::assertCanAccessProto()
+{
+    // The proto pointer for type objects representing singletons may change.
+    JS_ASSERT_IF(singleton, CurrentThreadCanReadCompilationData());
+
+    // Any proto pointer which is in the nursery may be moved, so it must not
+    // be accessed during off thread compilation.
+#if defined(JSGC_GENERATIONAL) && defined(JS_WORKER_THREADS)
+    PerThreadData *pt = TlsPerThreadData.get();
+    TaggedProto proto(proto_);
+    JS_ASSERT_IF(proto.isObject() && !proto.toObject()->isTenured(),
+                 !pt || !pt->ionCompiling);
+#endif
+}
+#endif // DEBUG
+
+void
+TypeObject::setProto(JSContext *cx, TaggedProto proto)
+{
+    JS_ASSERT(CurrentThreadCanWriteCompilationData());
+    JS_ASSERT(singleton);
+
+    if (proto.isObject() && IsInsideNursery(cx->runtime(), proto.toObject()))
+        addFlags(OBJECT_FLAG_NURSERY_PROTO);
+
+    setProtoUnchecked(proto);
+}
+
 static inline void
 UpdatePropertyType(ExclusiveContext *cx, HeapTypeSet *types, JSObject *obj, Shape *shape,
                    bool indexed)
 {
     if (!shape->writable())
         types->setConfiguredProperty(cx);
 
     if (shape->hasGetterValue() || shape->hasSetterValue()) {
         types->setConfiguredProperty(cx);
-        types->addType(cx, Type::UnknownType());
+        if (!types->TypeSet::addType(Type::UnknownType(), &cx->typeLifoAlloc()))
+            cx->compartment()->types.setPendingNukeTypes(cx);
     } else if (shape->hasDefaultGetter() && shape->hasSlot()) {
         if (!indexed && types->canSetDefinite(shape->slot()))
             types->setDefinite(shape->slot());
 
         const Value &value = obj->nativeGetSlot(shape->slot());
 
         /*
          * Don't add initial undefined types for properties of global objects
          * that are not collated into the JSID_VOID property (see propertySet
          * comment).
          */
         if (indexed || !value.isUndefined() || !CanHaveEmptyPropertyTypesForOwnProperty(obj)) {
             Type type = GetValueType(value);
-            types->addType(cx, type);
+            if (!types->TypeSet::addType(type, &cx->typeLifoAlloc()))
+                cx->compartment()->types.setPendingNukeTypes(cx);
         }
     }
 }
 
 bool
 TypeObject::addProperty(ExclusiveContext *cx, jsid id, Property **pprop)
 {
     JS_ASSERT(!*pprop);
@@ -2691,17 +2753,18 @@ TypeObject::addProperty(ExclusiveContext
                 shape = shape->previous();
             }
 
             /* Also get values of any dense elements in the object. */
             for (size_t i = 0; i < singleton->getDenseInitializedLength(); i++) {
                 const Value &value = singleton->getDenseElement(i);
                 if (!value.isMagic(JS_ELEMENTS_HOLE)) {
                     Type type = GetValueType(value);
-                    base->types.addType(cx, type);
+                    if (!base->types.TypeSet::addType(type, &cx->typeLifoAlloc()))
+                        cx->compartment()->types.setPendingNukeTypes(cx);
                 }
             }
         } else if (!JSID_IS_EMPTY(id)) {
             RootedId rootedId(cx, id);
             Shape *shape = singleton->nativeLookup(cx, rootedId);
             if (shape)
                 UpdatePropertyType(cx, &base->types, rSingleton, shape, false);
         }
@@ -2866,43 +2929,46 @@ TypeObject::markStateChange(ExclusiveCon
             JS_ASSERT(!types->constraintList);
         }
     }
 }
 
 void
 TypeObject::setFlags(ExclusiveContext *cx, TypeObjectFlags flags)
 {
-    if ((this->flags & flags) == flags)
+    if (hasAllFlags(flags))
         return;
 
     AutoEnterAnalysis enter(cx);
 
     if (singleton) {
         /* Make sure flags are consistent with persistent object state. */
         JS_ASSERT_IF(flags & OBJECT_FLAG_ITERATED,
                      singleton->lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON));
     }
 
-    this->flags |= flags;
+    {
+        AutoLockForCompilation lock(cx);
+        addFlags(flags);
+    }
 
     InferSpew(ISpewOps, "%s: setFlags 0x%x", TypeObjectString(this), flags);
 
     ObjectStateChange(cx, this, false);
 }
 
 void
 TypeObject::markUnknown(ExclusiveContext *cx)
 {
     AutoEnterAnalysis enter(cx);
 
     JS_ASSERT(cx->compartment()->activeAnalysis);
     JS_ASSERT(!unknownProperties());
 
-    if (!(flags & OBJECT_FLAG_ADDENDUM_CLEARED))
+    if (!(flags() & OBJECT_FLAG_ADDENDUM_CLEARED))
         clearAddendum(cx);
 
     InferSpew(ISpewOps, "UnknownProperties: %s", TypeObjectString(this));
 
     ObjectStateChange(cx, this, true);
 
     /*
      * Existing constraints may have already been added to this object, which we need
@@ -2921,18 +2987,21 @@ TypeObject::markUnknown(ExclusiveContext
             prop->types.setConfiguredProperty(cx);
         }
     }
 }
 
 void
 TypeObject::clearAddendum(ExclusiveContext *cx)
 {
-    JS_ASSERT(!(flags & OBJECT_FLAG_ADDENDUM_CLEARED));
-    flags |= OBJECT_FLAG_ADDENDUM_CLEARED;
+    JS_ASSERT(!(flags() & OBJECT_FLAG_ADDENDUM_CLEARED));
+    {
+        AutoLockForCompilation lock(cx);
+        addFlags(OBJECT_FLAG_ADDENDUM_CLEARED);
+    }
 
     /*
      * It is possible for the object to not have a new script or other
      * addendum yet, but to have one added in the future. When
      * analyzing properties of new scripts we mix in adding
      * constraints to trigger clearNewScript with changes to the type
      * sets themselves (from breakTypeBarriers). It is possible that
      * we could trigger one of these constraints before
@@ -3067,21 +3136,21 @@ TypeObject::clearNewScriptAddendum(Exclu
 void
 TypeObject::clearTypedObjectAddendum(ExclusiveContext *cx)
 {
 }
 
 void
 TypeObject::print()
 {
-    TaggedProto tagged(proto);
+    TaggedProto tagged(proto());
     fprintf(stderr, "%s : %s",
-           TypeObjectString(this),
-           tagged.isObject() ? TypeString(Type::ObjectType(proto))
-                            : (tagged.isLazy() ? "(lazy)" : "(null)"));
+            TypeObjectString(this),
+            tagged.isObject() ? TypeString(Type::ObjectType(tagged.toObject()))
+                              : (tagged.isLazy() ? "(lazy)" : "(null)"));
 
     if (unknownProperties()) {
         fprintf(stderr, " unknown");
     } else {
         if (!hasAnyFlags(OBJECT_FLAG_SPARSE_INDEXES))
             fprintf(stderr, " dense");
         if (!hasAnyFlags(OBJECT_FLAG_NON_PACKED))
             fprintf(stderr, " packed");
@@ -3137,17 +3206,17 @@ class TypeConstraintClearDefiniteGetterS
         if (!object->hasNewScript())
             return;
         /*
          * Clear out the newScript shape and definite property information from
          * an object if the source type set could be a setter or could be
          * non-writable, both of which are indicated by the source type set
          * being marked as configured.
          */
-        if (!(object->flags & OBJECT_FLAG_ADDENDUM_CLEARED) && source->configuredProperty())
+        if (!(object->flags() & OBJECT_FLAG_ADDENDUM_CLEARED) && source->configuredProperty())
             object->clearAddendum(cx);
     }
 
     void newType(JSContext *cx, TypeSet *source, Type type) {}
 
     TypeConstraint *sweep(TypeZone &zone) {
         if (IsTypeObjectAboutToBeFinalized(&object))
             return nullptr;
@@ -3161,17 +3230,17 @@ class TypeConstraintClearDefiniteGetterS
 bool
 types::AddClearDefiniteGetterSetterForPrototypeChain(JSContext *cx, TypeObject *type, jsid id)
 {
     /*
      * Ensure that if the properties named here could have a getter, setter or
      * a permanent property in any transitive prototype, the definite
      * properties get cleared from the type.
      */
-    RootedObject parent(cx, type->proto);
+    RootedObject parent(cx, type->proto().toObjectOrNull());
     while (parent) {
         TypeObject *parentObject = parent->getType(cx);
         if (!parentObject || parentObject->unknownProperties())
             return false;
         HeapTypeSet *parentTypes = parentObject->getProperty(cx, id);
         if (!parentTypes || parentTypes->configuredProperty())
             return false;
         parentTypes->add(cx, cx->typeLifoAlloc().new_<TypeConstraintClearDefiniteGetterSetter>(type));
@@ -3191,17 +3260,17 @@ class TypeConstraintClearDefiniteSingle 
 
     TypeConstraintClearDefiniteSingle(TypeObject *object)
         : object(object)
     {}
 
     const char *kind() { return "clearDefiniteSingle"; }
 
     void newType(JSContext *cx, TypeSet *source, Type type) {
-        if (object->flags & OBJECT_FLAG_ADDENDUM_CLEARED)
+        if (object->flags() & OBJECT_FLAG_ADDENDUM_CLEARED)
             return;
 
         if (source->baseFlags() || source->getObjectCount() > 1)
             object->clearAddendum(cx);
     }
 
     TypeConstraint *sweep(TypeZone &zone) {
         if (IsTypeObjectAboutToBeFinalized(&object))
@@ -3274,35 +3343,35 @@ CheckNewScriptProperties(JSContext *cx, 
     Vector<TypeNewScript::Initializer> initializerList(cx);
 
     if (!jit::AnalyzeNewScriptProperties(cx, fun, type, baseobj, &initializerList)) {
         cx->compartment()->types.setPendingNukeTypes(cx);
         return;
     }
 
     if (baseobj->slotSpan() == 0 ||
-        !!(type->flags & OBJECT_FLAG_ADDENDUM_CLEARED))
+        !!(type->flags() & OBJECT_FLAG_ADDENDUM_CLEARED))
     {
-        if (type->addendum)
+        if (type->hasNewScript())
             type->clearAddendum(cx);
         return;
     }
 
     /*
      * If the type already has a new script, we are just regenerating the type
      * constraints and don't need to make another TypeNewScript. Make sure that
      * the properties added to baseobj match the type's definite properties.
      */
     if (type->hasNewScript()) {
         if (!type->matchDefiniteProperties(baseobj))
             type->clearAddendum(cx);
         return;
     }
-    JS_ASSERT(!type->addendum);
-    JS_ASSERT(!(type->flags & OBJECT_FLAG_ADDENDUM_CLEARED));
+    JS_ASSERT(!type->hasNewScript());
+    JS_ASSERT(!(type->flags() & OBJECT_FLAG_ADDENDUM_CLEARED));
 
     gc::AllocKind kind = gc::GetGCObjectKind(baseobj->slotSpan());
 
     /* We should not have overflowed the maximum number of fixed slots for an object. */
     JS_ASSERT(gc::GetGCKindSlots(kind) >= baseobj->slotSpan());
 
     TypeNewScript::Initializer done(TypeNewScript::Initializer::DONE, 0);
 
@@ -3331,17 +3400,21 @@ CheckNewScriptProperties(JSContext *cx, 
     do {
         p = cx->calloc_(numBytes);
     } while (IsPoisonedPtr(p));
     newScript = (TypeNewScript *) p;
 #else
     newScript = (TypeNewScript *) cx->calloc_(numBytes);
 #endif
     new (newScript) TypeNewScript();
-    type->addendum = newScript;
+
+    {
+        AutoLockForCompilation lock(cx);
+        type->setAddendum(newScript);
+    }
 
     if (!newScript) {
         cx->compartment()->types.setPendingNukeTypes(cx);
         return;
     }
 
     newScript->fun = fun;
     newScript->templateObject = baseobj;
@@ -3628,36 +3701,53 @@ JSObject::splicePrototype(JSContext *cx,
     if (!cx->typeInferenceEnabled()) {
         TypeObject *type = cx->getNewType(clasp, proto);
         if (!type)
             return false;
         self->type_ = type;
         return true;
     }
 
-    type->clasp = clasp;
-    type->proto = proto.raw();
+    {
+        AutoLockForCompilation lock(cx);
+        type->setClasp(clasp);
+        type->setProto(cx, proto);
+    }
 
     return true;
 }
 
 /* static */ TypeObject *
 JSObject::makeLazyType(JSContext *cx, HandleObject obj)
 {
     JS_ASSERT(obj->hasLazyType());
     JS_ASSERT(cx->compartment() == obj->compartment());
 
     /* De-lazification of functions can GC, so we need to do it up here. */
     if (obj->is<JSFunction>() && obj->as<JSFunction>().isInterpretedLazy()) {
         RootedFunction fun(cx, &obj->as<JSFunction>());
         if (!fun->getOrCreateScript(cx))
             return nullptr;
     }
+
+    // Find flags which need to be specified immediately on the object.
+    // Don't track whether singletons are packed.
+    TypeObjectFlags initialFlags = OBJECT_FLAG_NON_PACKED;
+
+    if (obj->lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON))
+        initialFlags |= OBJECT_FLAG_ITERATED;
+
+    if (obj->isIndexed())
+        initialFlags |= OBJECT_FLAG_SPARSE_INDEXES;
+
+    if (obj->is<ArrayObject>() && obj->as<ArrayObject>().length() > INT32_MAX)
+        initialFlags |= OBJECT_FLAG_LENGTH_OVERFLOW;
+
     Rooted<TaggedProto> proto(cx, obj->getTaggedProto());
-    TypeObject *type = cx->compartment()->types.newTypeObject(cx, obj->getClass(), proto);
+    TypeObject *type = cx->compartment()->types.newTypeObject(cx, obj->getClass(), proto, initialFlags);
     if (!type) {
         if (cx->typeInferenceEnabled())
             cx->compartment()->types.setPendingNukeTypes(cx);
         return nullptr;
     }
 
     if (!cx->typeInferenceEnabled()) {
         /* This can only happen if types were previously nuked. */
@@ -3669,51 +3759,37 @@ JSObject::makeLazyType(JSContext *cx, Ha
 
     /* Fill in the type according to the state of this object. */
 
     type->singleton = obj;
 
     if (obj->is<JSFunction>() && obj->as<JSFunction>().isInterpreted())
         type->interpretedFunction = &obj->as<JSFunction>();
 
-    if (obj->lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON))
-        type->flags |= OBJECT_FLAG_ITERATED;
-
-    /*
-     * Adjust flags for objects which will have the wrong flags set by just
-     * looking at the class prototype key.
-     */
-
-    /* Don't track whether singletons are packed. */
-    type->flags |= OBJECT_FLAG_NON_PACKED;
-
-    if (obj->isIndexed())
-        type->flags |= OBJECT_FLAG_SPARSE_INDEXES;
-
-    if (obj->is<ArrayObject>() && obj->as<ArrayObject>().length() > INT32_MAX)
-        type->flags |= OBJECT_FLAG_LENGTH_OVERFLOW;
-
-    obj->type_ = type;
+    {
+        AutoLockForCompilation lock(cx);
+        obj->type_ = type;
+    }
 
     return type;
 }
 
 /* static */ inline HashNumber
 TypeObjectWithNewScriptEntry::hash(const Lookup &lookup)
 {
     return PointerHasher<JSObject *, 3>::hash(lookup.hashProto.raw()) ^
            PointerHasher<const Class *, 3>::hash(lookup.clasp) ^
            PointerHasher<JSFunction *, 3>::hash(lookup.newFunction);
 }
 
 /* static */ inline bool
 TypeObjectWithNewScriptEntry::match(const TypeObjectWithNewScriptEntry &key, const Lookup &lookup)
 {
-    return key.object->proto == lookup.matchProto.raw() &&
-           key.object->clasp == lookup.clasp &&
+    return key.object->proto() == lookup.matchProto &&
+           key.object->clasp() == lookup.clasp &&
            key.newFunction == lookup.newFunction;
 }
 
 #ifdef DEBUG
 bool
 JSObject::hasNewType(const Class *clasp, TypeObject *type)
 {
     TypeObjectWithNewScriptSet &table = compartment()->newTypeObjects;
@@ -3800,34 +3876,40 @@ ExclusiveContext::getNewType(const Class
         else
             fun = nullptr;
     }
 
     TypeObjectWithNewScriptSet::AddPtr p =
         newTypeObjects.lookupForAdd(TypeObjectWithNewScriptSet::Lookup(clasp, proto, fun));
     if (p) {
         TypeObject *type = p->object;
-        JS_ASSERT(type->clasp == clasp);
-        JS_ASSERT(type->proto.get() == proto.raw());
+        JS_ASSERT(type->clasp() == clasp);
+        JS_ASSERT(type->proto() == proto);
         JS_ASSERT_IF(type->hasNewScript(), type->newScript()->fun == fun);
         return type;
     }
 
     AutoEnterAnalysis enter(this);
 
     if (proto.isObject() && !proto.toObject()->setDelegate(this))
         return nullptr;
 
-    bool markUnknown =
-        proto.isObject()
-        ? proto.toObject()->lastProperty()->hasObjectFlag(BaseShape::NEW_TYPE_UNKNOWN)
-        : true;
+    TypeObjectFlags initialFlags = 0;
+    if (!proto.isObject() || proto.toObject()->lastProperty()->hasObjectFlag(BaseShape::NEW_TYPE_UNKNOWN)) {
+        // The new type is not present in any type sets, so mark the object as
+        // unknown in all type sets it appears in. This allows the prototype of
+        // such objects to mutate freely without triggering an expensive walk of
+        // the compartment's type sets. (While scripts normally don't mutate
+        // __proto__, the browser will for proxies and such, and we need to
+        // accommodate this behavior.)
+        initialFlags = OBJECT_FLAG_UNKNOWN_MASK | OBJECT_FLAG_SETS_MARKED_UNKNOWN;
+    }
 
     Rooted<TaggedProto> protoRoot(this, proto);
-    TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot, markUnknown);
+    TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot, initialFlags);
     if (!type)
         return nullptr;
 
     if (!newTypeObjects.add(p, TypeObjectWithNewScriptEntry(type, fun)))
         return nullptr;
 
 #ifdef JSGC_GENERATIONAL
     if (proto.isObject() && hasNursery() && nursery().isInside(proto.toObject())) {
@@ -3867,27 +3949,16 @@ ExclusiveContext::getNewType(const Class
         if (obj->is<ErrorObject>()) {
             AddTypeProperty(this, type, "fileName", types::Type::StringType());
             AddTypeProperty(this, type, "lineNumber", types::Type::Int32Type());
             AddTypeProperty(this, type, "columnNumber", types::Type::Int32Type());
             AddTypeProperty(this, type, "stack", types::Type::StringType());
         }
     }
 
-    /*
-     * The new type is not present in any type sets, so mark the object as
-     * unknown in all type sets it appears in. This allows the prototype of
-     * such objects to mutate freely without triggering an expensive walk of
-     * the compartment's type sets. (While scripts normally don't mutate
-     * __proto__, the browser will for proxies and such, and we need to
-     * accommodate this behavior).
-     */
-    if (type->unknownProperties())
-        type->flags |= OBJECT_FLAG_SETS_MARKED_UNKNOWN;
-
     return type;
 }
 
 TypeObject *
 ExclusiveContext::getLazyType(const Class *clasp, TaggedProto proto)
 {
     JS_ASSERT_IF(proto.isObject(), compartment() == proto.toObject()->compartment());
 
@@ -3902,17 +3973,17 @@ ExclusiveContext::getLazyType(const Clas
     if (p) {
         TypeObject *type = p->object;
         JS_ASSERT(type->lazy());
 
         return type;
     }
 
     Rooted<TaggedProto> protoRoot(this, proto);
-    TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot, false);
+    TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot);
     if (!type)
         return nullptr;
 
     if (!table.add(p, TypeObjectWithNewScriptEntry(type, nullptr)))
         return nullptr;
 
     type->singleton = (JSObject *) TypeObject::LAZY_SINGLETON;
 
@@ -4154,18 +4225,18 @@ JSCompartment::sweepNewTypeObjectTable(T
     if (table.initialized()) {
         for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObjectWithNewScriptEntry entry = e.front();
             if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet())) {
                 e.removeFront();
             } else if (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)) {
                 e.removeFront();
             } else if (entry.object != e.front().object) {
-                TypeObjectWithNewScriptSet::Lookup lookup(entry.object->clasp,
-                                                          entry.object->proto.get(),
+                TypeObjectWithNewScriptSet::Lookup lookup(entry.object->clasp(),
+                                                          entry.object->proto(),
                                                           entry.newFunction);
                 e.rekeyFront(lookup, entry);
             }
         }
     }
 }
 
 TypeCompartment::~TypeCompartment()
@@ -4423,17 +4494,17 @@ TypeObject::addTypedObjectAddendum(JSCon
                                    TypeTypedObject::Kind kind,
                                    TypeRepresentation *repr)
 {
     if (!cx->typeInferenceEnabled())
         return true;
 
     JS_ASSERT(repr);
 
-    if (flags & OBJECT_FLAG_ADDENDUM_CLEARED)
+    if (flags() & OBJECT_FLAG_ADDENDUM_CLEARED)
         return true;
 
     JS_ASSERT(!unknownProperties());
 
     if (addendum) {
         JS_ASSERT(hasTypedObject());
         JS_ASSERT(typedObject()->typeRepr == repr);
         return true;
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -20,30 +20,48 @@
 #include "ds/LifoAlloc.h"
 #include "gc/Barrier.h"
 #include "gc/Marking.h"
 #include "js/Utility.h"
 #include "js/Vector.h"
 
 namespace js {
 
+#ifdef DEBUG
+bool CurrentThreadCanWriteCompilationData();
+bool CurrentThreadCanReadCompilationData();
+#endif
+
 class TypeRepresentation;
 
 class TaggedProto
 {
   public:
+    static JSObject * const LazyProto;
+
     TaggedProto() : proto(nullptr) {}
     TaggedProto(JSObject *proto) : proto(proto) {}
 
     uintptr_t toWord() const { return uintptr_t(proto); }
 
-    inline bool isLazy() const;
-    inline bool isObject() const;
-    inline JSObject *toObject() const;
-    inline JSObject *toObjectOrNull() const;
+    bool isLazy() const {
+        return proto == LazyProto;
+    }
+    bool isObject() const {
+        /* Skip nullptr and LazyProto. */
+        return uintptr_t(proto) > uintptr_t(TaggedProto::LazyProto);
+    }
+    JSObject *toObject() const {
+        JS_ASSERT(isObject());
+        return proto;
+    }
+    JSObject *toObjectOrNull() const {
+        JS_ASSERT(!proto || isObject());
+        return proto;
+    }
     JSObject *raw() const { return proto; }
 
     bool operator ==(const TaggedProto &other) { return proto == other.proto; }
     bool operator !=(const TaggedProto &other) { return proto != other.proto; }
 
   private:
     JSObject *proto;
 };
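
The inlined isLazy()/isObject() bodies above rely on LazyProto being a small, non-null sentinel pointer, so nullptr, the sentinel, and real object pointers can be told apart with a single unsigned comparison. A self-contained sketch of that tagging scheme; the sentinel value 0x1 is assumed here for illustration only (the actual constant is defined elsewhere in the patch):

#include <cassert>
#include <cstdint>

struct Obj {};  // stand-in for JSObject

// Assumed sentinel value, for illustration only.
static Obj *const LazyProtoSentinel = reinterpret_cast<Obj *>(0x1);

struct TaggedProtoSketch {
    Obj *proto;

    bool isLazy() const { return proto == LazyProtoSentinel; }
    // Real object pointers compare above the sentinel; nullptr and the
    // sentinel itself are excluded.
    bool isObject() const {
        return reinterpret_cast<std::uintptr_t>(proto) >
               reinterpret_cast<std::uintptr_t>(LazyProtoSentinel);
    }
};

int main() {
    Obj real;
    assert(!TaggedProtoSketch{nullptr}.isObject());
    assert(TaggedProtoSketch{LazyProtoSentinel}.isLazy());
    assert(TaggedProtoSketch{&real}.isObject());
    return 0;
}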
@@ -72,20 +90,20 @@ template<class Outer>
 class TaggedProtoOperations
 {
     const TaggedProto *value() const {
         return static_cast<const Outer*>(this)->extract();
     }
 
   public:
     uintptr_t toWord() const { return value()->toWord(); }
-    inline bool isLazy() const;
-    inline bool isObject() const;
-    inline JSObject *toObject() const;
-    inline JSObject *toObjectOrNull() const;
+    inline bool isLazy() const { return value()->isLazy(); }
+    inline bool isObject() const { return value()->isObject(); }
+    inline JSObject *toObject() const { return value()->toObject(); }
+    inline JSObject *toObjectOrNull() const { return value()->toObjectOrNull(); }
     JSObject *raw() const { return value()->raw(); }
 };
 
 template <>
 class HandleBase<TaggedProto> : public TaggedProtoOperations<Handle<TaggedProto> >
 {
     friend class TaggedProtoOperations<Handle<TaggedProto> >;
     const TaggedProto * extract() const {
@@ -379,16 +397,22 @@ typedef uint32_t TypeFlags;
 enum MOZ_ENUM_TYPE(uint32_t) {
     /* Whether this type object is associated with some allocation site. */
     OBJECT_FLAG_FROM_ALLOCATION_SITE  = 0x1,
 
     /* If set, addendum information should not be installed on this object. */
     OBJECT_FLAG_ADDENDUM_CLEARED      = 0x2,
 
     /*
+     * If set, the object's prototype might be in the nursery and can't be
+     * used during Ion compilation (which may be occurring off thread).
+     */
+    OBJECT_FLAG_NURSERY_PROTO         = 0x4,
+
+    /*
      * Whether we have ensured all type sets in the compartment contain
      * ANYOBJECT instead of this object.
      */
     OBJECT_FLAG_SETS_MARKED_UNKNOWN   = 0x8,
 
     /* Mask/shift for the number of properties in propertySet */
     OBJECT_FLAG_PROPERTY_COUNT_MASK   = 0xfff0,
     OBJECT_FLAG_PROPERTY_COUNT_SHIFT  = 4,
@@ -498,17 +522,17 @@ class TypeSet
         JS_ASSERT(definiteProperty());
         return (flags >> TYPE_FLAG_DEFINITE_SHIFT) - 1;
     }
 
     /* Join two type sets into a new set. The result should not be modified further. */
     static TemporaryTypeSet *unionSets(TypeSet *a, TypeSet *b, LifoAlloc *alloc);
 
     /* Add a type to this set using the specified allocator. */
-    inline bool addType(Type type, LifoAlloc *alloc, bool *padded = nullptr);
+    inline bool addType(Type type, LifoAlloc *alloc);
 
     /* Get a list of all types in this set. */
     typedef Vector<Type, 1, SystemAllocPolicy> TypeList;
     bool enumerateTypes(TypeList *list);
 
     /*
      * Iterate through the objects in this set. getObjectCount overapproximates
      * in the hash case (see SET_ARRAY_SIZE in jsinferinlines.h), and getObject
@@ -852,78 +876,139 @@ struct TypeTypedObject : public TypeObje
  * information is sensitive to changes in the property's type. Future changes
  * to the property (whether those uncovered by analysis or those occurring
  * in the VM) will treat these properties like those of any other type object.
  */
 
 /* Type information about an object accessed by a script. */
 struct TypeObject : gc::BarrieredCell<TypeObject>
 {
-    /* Class shared by objects using this type. */
-    const Class *clasp;
+  private:
+    /* Class shared by objects using this type. */
+    const Class *clasp_;
 
     /* Prototype shared by objects using this type. */
-    HeapPtrObject proto;
+    HeapPtrObject proto_;
+
+#ifdef DEBUG
+    void assertCanAccessProto();
+#else
+    void assertCanAccessProto() {}
+#endif
+
+  public:
+
+    const Class *clasp() {
+        return clasp_;
+    }
+
+    void setClasp(const Class *clasp) {
+        JS_ASSERT(CurrentThreadCanWriteCompilationData());
+        JS_ASSERT(singleton);
+        clasp_ = clasp;
+    }
+
+    TaggedProto proto() {
+        assertCanAccessProto();
+        return TaggedProto(proto_);
+    }
+
+    HeapPtrObject &protoRaw() {
+        // For use during marking only; don't call this otherwise.
+        return proto_;
+    }
+
+    void setProto(JSContext *cx, TaggedProto proto);
+    void setProtoUnchecked(TaggedProto proto) {
+        proto_ = proto.raw();
+    }
 
     /*
      * Whether there is a singleton JS object with this type. That JS object
      * must appear in type sets instead of this; we include the back reference
      * here to allow reverting the JS object to a lazy type.
      */
     HeapPtrObject singleton;
 
     /*
      * Value held by singleton if this is a standin type for a singleton JS
      * object whose type has not been constructed yet.
      */
     static const size_t LAZY_SINGLETON = 1;
     bool lazy() const { return singleton == (JSObject *) LAZY_SINGLETON; }
 
+  private:
     /* Flags for this object. */
-    TypeObjectFlags flags;
+    TypeObjectFlags flags_;
 
     /*
      * This field allows various special classes of objects to attach
      * additional information to a type object:
      *
      * - `TypeNewScript`: If addendum is a `TypeNewScript`, it
      *   indicates that objects of this type have always been
      *   constructed using 'new' on the specified script, which adds
      *   some number of properties to the object in a definite order
      *   before the object escapes.
      */
     HeapPtr<TypeObjectAddendum> addendum;
+  public:
+
+    TypeObjectFlags flags() const {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
+        return flags_;
+    }
+
+    void addFlags(TypeObjectFlags flags) {
+        JS_ASSERT(CurrentThreadCanWriteCompilationData());
+        flags_ |= flags;
+    }
+
+    void clearFlags(TypeObjectFlags flags) {
+        JS_ASSERT(CurrentThreadCanWriteCompilationData());
+        flags_ &= ~flags;
+    }
 
     bool hasNewScript() const {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
         return addendum && addendum->isNewScript();
     }
 
     TypeNewScript *newScript() {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
         return addendum->asNewScript();
     }
 
     bool hasTypedObject() {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
         return addendum && addendum->isTypedObject();
     }
 
     TypeTypedObject *typedObject() {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
         return addendum->asTypedObject();
     }
 
+    void setAddendum(TypeObjectAddendum *addendum) {
+        JS_ASSERT(CurrentThreadCanWriteCompilationData());
+        this->addendum = addendum;
+    }
+
     /*
      * Tag the type object for a binary data type descriptor, instance,
      * or handle with the type representation of the data it points at.
      * If this type object is already tagged with a binary data addendum,
      * this addendum must already be associated with the same TypeRepresentation,
      * and the method has no effect.
      */
     bool addTypedObjectAddendum(JSContext *cx,
                                 TypeTypedObject::Kind kind ,
                                 TypeRepresentation *repr);
 
+  private:
     /*
      * Properties of this object. This may contain JSID_VOID, representing the
      * types of all integer indexes of the object, and/or JSID_EMPTY, holding
      * constraints listening to changes to the object's state.
      *
      * The type sets in the properties of a type object describe the possible
      * values that can be read out of that property in actual JS objects.
      * Properties only account for native properties (those with a slot and no
@@ -948,55 +1033,60 @@ struct TypeObject : gc::BarrieredCell<Ty
      *    There is another exception for array lengths, which are special cased
      *    by the compiler and VM and are not reflected in property types.
      *
      * We establish these by using write barriers on calls to setProperty and
      * defineProperty which are on native properties, and on any jitcode which
      * might update the property with a new type.
      */
     Property **propertySet;
+  public:
 
     /* If this is an interpreted function, the function object. */
     HeapPtrFunction interpretedFunction;
 
 #if JS_BITS_PER_WORD == 32
     uint32_t padding;
 #endif
 
-    inline TypeObject(const Class *clasp, TaggedProto proto, bool unknown);
+    inline TypeObject(const Class *clasp, TaggedProto proto, TypeObjectFlags initialFlags);
 
     bool hasAnyFlags(TypeObjectFlags flags) {
         JS_ASSERT((flags & OBJECT_FLAG_DYNAMIC_MASK) == flags);
-        return !!(this->flags & flags);
+        return !!(this->flags() & flags);
     }
     bool hasAllFlags(TypeObjectFlags flags) {
         JS_ASSERT((flags & OBJECT_FLAG_DYNAMIC_MASK) == flags);
-        return (this->flags & flags) == flags;
+        return (this->flags() & flags) == flags;
     }
 
     bool unknownProperties() {
-        JS_ASSERT_IF(flags & OBJECT_FLAG_UNKNOWN_PROPERTIES,
+        JS_ASSERT_IF(flags() & OBJECT_FLAG_UNKNOWN_PROPERTIES,
                      hasAllFlags(OBJECT_FLAG_DYNAMIC_MASK));
-        return !!(flags & OBJECT_FLAG_UNKNOWN_PROPERTIES);
+        return !!(flags() & OBJECT_FLAG_UNKNOWN_PROPERTIES);
     }
 
     bool shouldPreTenure() {
         return hasAnyFlags(OBJECT_FLAG_PRE_TENURE) && !unknownProperties();
     }
 
+    bool hasTenuredProto() const {
+        return !(flags() & OBJECT_FLAG_NURSERY_PROTO);
+    }
+
     gc::InitialHeap initialHeap(CompilerConstraintList *constraints);
 
     bool canPreTenure() {
         // Only types associated with particular allocation sites or 'new'
         // scripts can be marked as needing pretenuring. Other types can be
         // used for different purposes across the compartment and can't use
         // this bit reliably.
         if (unknownProperties())
             return false;
-        return (flags & OBJECT_FLAG_FROM_ALLOCATION_SITE) || hasNewScript();
+        return (flags() & OBJECT_FLAG_FROM_ALLOCATION_SITE) || hasNewScript();
     }
 
     void setShouldPreTenure(ExclusiveContext *cx) {
         JS_ASSERT(canPreTenure());
         setFlags(cx, OBJECT_FLAG_PRE_TENURE);
     }
 
     /*
@@ -1041,22 +1131,30 @@ struct TypeObject : gc::BarrieredCell<Ty
      * Type objects don't have explicit finalizers. Memory owned by a type
      * object pending deletion is released when weak references are swept
      * from all the compartment's type objects.
      */
     void finalize(FreeOp *fop) {}
 
     static inline ThingRootKind rootKind() { return THING_ROOT_TYPE_OBJECT; }
 
+    static inline uint32_t offsetOfClasp() {
+        return offsetof(TypeObject, clasp_);
+    }
+
+    static inline uint32_t offsetOfProto() {
+        return offsetof(TypeObject, proto_);
+    }
+
   private:
     inline uint32_t basePropertyCount() const;
     inline void setBasePropertyCount(uint32_t count);
 
     static void staticAsserts() {
-        JS_STATIC_ASSERT(offsetof(TypeObject, proto) == offsetof(js::shadow::TypeObject, proto));
+        JS_STATIC_ASSERT(offsetof(TypeObject, proto_) == offsetof(js::shadow::TypeObject, proto));
     }
 };
 
 /*
  * Entries for the per-compartment set of type objects which are 'new' types to
  * use for some prototype and constructed with an optional script. This also
  * includes entries for the set of lazy type objects in the compartment, which
  * use a null script (though there are only a few of these per compartment).
@@ -1242,16 +1340,17 @@ struct TypeObjectKey
     }
     JSObject *asSingleObject() {
         JS_ASSERT(isSingleObject());
         return (JSObject *) (uintptr_t(this) & ~1);
     }
 
     const Class *clasp();
     TaggedProto proto();
+    bool hasTenuredProto();
     JSObject *singleton();
     TypeNewScript *newScript();
 
     bool unknownProperties();
     bool hasFlags(CompilerConstraintList *constraints, TypeObjectFlags flags);
     void watchStateChangeForInlinedCall(CompilerConstraintList *constraints);
     void watchStateChangeForNewScriptTemplate(CompilerConstraintList *constraints);
     void watchStateChangeForTypedArrayBuffer(CompilerConstraintList *constraints);
@@ -1411,17 +1510,17 @@ struct TypeCompartment
 
     /*
      * Make a function or non-function object associated with an optional
      * script. The 'key' parameter here may be an array, typed array, function
      * or JSProto_Object to indicate a type whose class is unknown (not just
      * js_ObjectClass).
      */
     TypeObject *newTypeObject(ExclusiveContext *cx, const Class *clasp, Handle<TaggedProto> proto,
-                              bool unknown = false);
+                              TypeObjectFlags initialFlags = 0);
 
     /* Get or make an object for an allocation site, and add to the allocation site table. */
     TypeObject *addAllocationSiteTypeObject(JSContext *cx, AllocationSiteKey key);
 
     /* Mark all types as needing destruction once inference has 'finished'. */
     void setPendingNukeTypes(ExclusiveContext *cx);
 
     /* Mark any type set containing obj as having a generic object type. */
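
The jsinfer.h changes above hide clasp_, proto_ and flags_ behind accessors that assert the new compilation-data rules: threads participating in an Ion compilation may read this state, but mutations must happen while the compilation lock is held, and the new OBJECT_FLAG_NURSERY_PROTO bit lets hasTenuredProto() reject prototypes an off-thread compiler must not dereference. A minimal, self-contained C++ sketch of that read/write discipline follows; the CanReadCompilationData/CanWriteCompilationData stubs and the TypeObjectModel class are hypothetical stand-ins, not the real SpiderMonkey API.

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-ins for js::CurrentThreadCanReadCompilationData() and
    // js::CurrentThreadCanWriteCompilationData(); the real predicates consult
    // the runtime's compilation lock and per-thread compilation state.
    static bool CanReadCompilationData()  { return true; }
    static bool CanWriteCompilationData() { return true; }

    typedef uint32_t Flags;
    static const Flags NURSERY_PROTO = 0x4;    // cf. OBJECT_FLAG_NURSERY_PROTO

    class TypeObjectModel {
        Flags flags_;                           // state shared with off-thread readers
      public:
        explicit TypeObjectModel(Flags initial) : flags_(initial) {}

        Flags flags() const {                   // any compilation thread may read
            assert(CanReadCompilationData());
            return flags_;
        }
        void addFlags(Flags f) {                // writes require the compilation lock
            assert(CanWriteCompilationData());
            flags_ |= f;
        }
        bool hasTenuredProto() const {          // off-thread code must not touch a
            return !(flags() & NURSERY_PROTO);  // prototype that may be in the nursery
        }
    };
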
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -20,71 +20,16 @@
 #include "vm/BooleanObject.h"
 #include "vm/NumberObject.h"
 #include "vm/StringObject.h"
 #include "vm/TypedArrayObject.h"
 
 #include "jsanalyzeinlines.h"
 #include "jscntxtinlines.h"
 
-inline bool
-js::TaggedProto::isObject() const
-{
-    /* Skip nullptr and Proxy::LazyProto. */
-    return uintptr_t(proto) > uintptr_t(Proxy::LazyProto);
-}
-
-inline bool
-js::TaggedProto::isLazy() const
-{
-    return proto == Proxy::LazyProto;
-}
-
-inline JSObject *
-js::TaggedProto::toObject() const
-{
-    JS_ASSERT(isObject());
-    return proto;
-}
-
-inline JSObject *
-js::TaggedProto::toObjectOrNull() const
-{
-    JS_ASSERT(!proto || isObject());
-    return proto;
-}
-
-template<class Outer>
-inline bool
-js::TaggedProtoOperations<Outer>::isLazy() const
-{
-    return value()->isLazy();
-}
-
-template<class Outer>
-inline bool
-js::TaggedProtoOperations<Outer>::isObject() const
-{
-    return value()->isObject();
-}
-
-template<class Outer>
-inline JSObject *
-js::TaggedProtoOperations<Outer>::toObject() const
-{
-    return value()->toObject();
-}
-
-template<class Outer>
-inline JSObject *
-js::TaggedProtoOperations<Outer>::toObjectOrNull() const
-{
-    return value()->toObjectOrNull();
-}
-
 namespace js {
 namespace types {
 
 /////////////////////////////////////////////////////////////////////
 // CompilerOutput & RecompileInfo
 /////////////////////////////////////////////////////////////////////
 
 inline jit::IonScript *
@@ -529,17 +474,17 @@ MarkTypeObjectFlags(ExclusiveContext *cx
  */
 inline void
 MarkTypeObjectUnknownProperties(JSContext *cx, TypeObject *obj,
                                 bool markSetsUnknown = false)
 {
     if (cx->typeInferenceEnabled()) {
         if (!obj->unknownProperties())
             obj->markUnknown(cx);
-        if (markSetsUnknown && !(obj->flags & OBJECT_FLAG_SETS_MARKED_UNKNOWN))
+        if (markSetsUnknown && !(obj->flags() & OBJECT_FLAG_SETS_MARKED_UNKNOWN))
             cx->compartment()->types.markSetsUnknown(cx, obj);
     }
 }
 
 /*
  * Mark any property which has been deleted or configured to be non-writable or
  * have a getter/setter.
  */
@@ -600,30 +545,32 @@ extern void TypeDynamicResult(JSContext 
 TypeScript::NumTypeSets(JSScript *script)
 {
     return script->nTypeSets() + analyze::LocalSlot(script, 0);
 }
 
 /* static */ inline StackTypeSet *
 TypeScript::ThisTypes(JSScript *script)
 {
-    return script->types->typeArray() + script->nTypeSets() + js::analyze::ThisSlot();
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+    return script->types->typeArray() + script->nTypeSets() + analyze::ThisSlot();
 }
 
 /*
  * Note: for non-escaping arguments and locals, argTypes/localTypes reflect
  * only the initial type of the variable (e.g. passed values for argTypes,
  * or undefined for localTypes) and not types from subsequent assignments.
  */
 
 /* static */ inline StackTypeSet *
 TypeScript::ArgTypes(JSScript *script, unsigned i)
 {
     JS_ASSERT(i < script->function()->nargs());
-    return script->types->typeArray() + script->nTypeSets() + js::analyze::ArgSlot(i);
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+    return script->types->typeArray() + script->nTypeSets() + analyze::ArgSlot(i);
 }
 
 template <typename TYPESET>
 /* static */ inline TYPESET *
 TypeScript::BytecodeTypes(JSScript *script, jsbytecode *pc, uint32_t *hint, TYPESET *typeArray)
 {
     JS_ASSERT(js_CodeSpec[*pc].format & JOF_TYPESET);
 #ifdef JS_ION
@@ -1084,44 +1031,38 @@ TypeSet::setBaseObjectCount(uint32_t cou
 inline void
 TypeSet::clearObjects()
 {
     setBaseObjectCount(0);
     objectSet = nullptr;
 }
 
 bool
-TypeSet::addType(Type type, LifoAlloc *alloc, bool *padded)
+TypeSet::addType(Type type, LifoAlloc *alloc)
 {
-    JS_ASSERT_IF(padded, !*padded);
-
     if (unknown())
         return true;
 
     if (type.isUnknown()) {
         flags |= TYPE_FLAG_BASE_MASK;
         clearObjects();
         JS_ASSERT(unknown());
-        if (padded)
-            *padded = true;
         return true;
     }
 
     if (type.isPrimitive()) {
         TypeFlags flag = PrimitiveTypeFlag(type.primitive());
         if (flags & flag)
             return true;
 
         /* If we add float to a type set it is also considered to contain int. */
         if (flag == TYPE_FLAG_DOUBLE)
             flag |= TYPE_FLAG_INT32;
 
         flags |= flag;
-        if (padded)
-            *padded = true;
         return true;
     }
 
     if (flags & TYPE_FLAG_ANYOBJECT)
         return true;
     if (type.isAnyObject())
         goto unknownObject;
 
@@ -1151,33 +1092,34 @@ TypeSet::addType(Type type, LifoAlloc *a
 
     if (false) {
     unknownObject:
         type = Type::AnyObjectType();
         flags |= TYPE_FLAG_ANYOBJECT;
         clearObjects();
     }
 
-    if (padded)
-        *padded = true;
     return true;
 }
 
 inline void
 ConstraintTypeSet::addType(ExclusiveContext *cxArg, Type type)
 {
     JS_ASSERT(cxArg->compartment()->activeAnalysis);
 
-    bool added = false;
-    if (!TypeSet::addType(type, &cxArg->typeLifoAlloc(), &added)) {
-        cxArg->compartment()->types.setPendingNukeTypes(cxArg);
+    if (hasType(type))
         return;
+
+    {
+        AutoLockForCompilation lock(cxArg);
+        if (!TypeSet::addType(type, &cxArg->typeLifoAlloc())) {
+            cxArg->compartment()->types.setPendingNukeTypes(cxArg);
+            return;
+        }
     }
-    if (!added)
-        return;
 
     InferSpew(ISpewOps, "addType: %sT%p%s %s",
               InferSpewColor(this), this, InferSpewColorReset(),
               TypeString(type));
 
     /* Propagate the type to all constraints. */
     if (JSContext *cx = cxArg->maybeJSContext()) {
         TypeConstraint *constraint = constraintList;
@@ -1270,104 +1212,115 @@ TypeSet::getTypeOrSingleObject(JSContext
 }
 
 inline const Class *
 TypeSet::getObjectClass(unsigned i) const
 {
     if (JSObject *object = getSingleObject(i))
         return object->getClass();
     if (TypeObject *object = getTypeObject(i))
-        return object->clasp;
+        return object->clasp();
     return nullptr;
 }
 
 /////////////////////////////////////////////////////////////////////
 // TypeObject
 /////////////////////////////////////////////////////////////////////
 
-inline TypeObject::TypeObject(const Class *clasp, TaggedProto proto, bool unknown)
+inline TypeObject::TypeObject(const Class *clasp, TaggedProto proto, TypeObjectFlags initialFlags)
 {
     mozilla::PodZero(this);
 
     /* Inner objects may not appear on prototype chains. */
     JS_ASSERT_IF(proto.isObject(), !proto.toObject()->getClass()->ext.outerObject);
 
-    this->clasp = clasp;
-    this->proto = proto.raw();
-
-    if (unknown)
-        flags |= OBJECT_FLAG_UNKNOWN_MASK;
+    this->clasp_ = clasp;
+    this->proto_ = proto.raw();
+    this->flags_ = initialFlags;
 
     InferSpew(ISpewOps, "newObject: %s", TypeObjectString(this));
 }
 
 inline uint32_t
 TypeObject::basePropertyCount() const
 {
-    return (flags & OBJECT_FLAG_PROPERTY_COUNT_MASK) >> OBJECT_FLAG_PROPERTY_COUNT_SHIFT;
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
+    return (flags() & OBJECT_FLAG_PROPERTY_COUNT_MASK) >> OBJECT_FLAG_PROPERTY_COUNT_SHIFT;
 }
 
 inline void
 TypeObject::setBasePropertyCount(uint32_t count)
 {
+    // Note: Callers must ensure they are performing threadsafe operations.
     JS_ASSERT(count <= OBJECT_FLAG_PROPERTY_COUNT_LIMIT);
-    flags = (flags & ~OBJECT_FLAG_PROPERTY_COUNT_MASK)
-          | (count << OBJECT_FLAG_PROPERTY_COUNT_SHIFT);
+    flags_ = (flags() & ~OBJECT_FLAG_PROPERTY_COUNT_MASK)
+           | (count << OBJECT_FLAG_PROPERTY_COUNT_SHIFT);
 }
 
 inline HeapTypeSet *
 TypeObject::getProperty(ExclusiveContext *cx, jsid id)
 {
     JS_ASSERT(cx->compartment()->activeAnalysis);
 
     JS_ASSERT(JSID_IS_VOID(id) || JSID_IS_EMPTY(id) || JSID_IS_STRING(id));
     JS_ASSERT_IF(!JSID_IS_EMPTY(id), id == IdToTypeId(id));
     JS_ASSERT(!unknownProperties());
 
-    uint32_t propertyCount = basePropertyCount();
-    Property **pprop = HashSetInsert<jsid,Property,Property>
-        (cx->typeLifoAlloc(), propertySet, propertyCount, id);
-    if (!pprop) {
-        cx->compartment()->types.setPendingNukeTypes(cx);
-        return nullptr;
-    }
+    if (HeapTypeSet *types = maybeGetProperty(id))
+        return types;
+
+    uint32_t propertyCount;
+    Property **pprop;
+    {
+        AutoLockForCompilation lock(cx);
 
-    if (!*pprop) {
+        propertyCount = basePropertyCount();
+        pprop = HashSetInsert<jsid,Property,Property>
+            (cx->typeLifoAlloc(), propertySet, propertyCount, id);
+        if (!pprop) {
+            cx->compartment()->types.setPendingNukeTypes(cx);
+            return nullptr;
+        }
+
+        JS_ASSERT(!*pprop);
+
         setBasePropertyCount(propertyCount);
         if (!addProperty(cx, id, pprop)) {
             setBasePropertyCount(0);
             propertySet = nullptr;
             return nullptr;
         }
-        if (propertyCount == OBJECT_FLAG_PROPERTY_COUNT_LIMIT) {
-            markUnknown(cx);
+    }
+
+    if (propertyCount == OBJECT_FLAG_PROPERTY_COUNT_LIMIT) {
+        markUnknown(cx);
 
-            /*
-             * Return an arbitrary property in the object, as all have unknown
-             * type and are treated as configured.
-             */
-            unsigned count = getPropertyCount();
-            for (unsigned i = 0; i < count; i++) {
-                if (Property *prop = getProperty(i))
-                    return &prop->types;
-            }
+        /*
+         * Return an arbitrary property in the object, as all have unknown
+         * type and are treated as configured.
+         */
+        unsigned count = getPropertyCount();
+        for (unsigned i = 0; i < count; i++) {
+            if (Property *prop = getProperty(i))
+                return &prop->types;
+        }
 
-            MOZ_ASSUME_UNREACHABLE("Missing property");
-        }
+        MOZ_ASSUME_UNREACHABLE("Missing property");
     }
 
     return &(*pprop)->types;
 }
 
 inline HeapTypeSet *
 TypeObject::maybeGetProperty(jsid id)
 {
     JS_ASSERT(JSID_IS_VOID(id) || JSID_IS_EMPTY(id) || JSID_IS_STRING(id));
     JS_ASSERT_IF(!JSID_IS_EMPTY(id), id == IdToTypeId(id));
     JS_ASSERT(!unknownProperties());
+    JS_ASSERT(CurrentThreadCanReadCompilationData());
 
     Property *prop = HashSetLookup<jsid,Property,Property>
         (propertySet, basePropertyCount(), id);
 
     return prop ? &prop->types : nullptr;
 }
 
 inline unsigned
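
ConstraintTypeSet::addType and TypeObject::getProperty above now share a check-then-lock shape: an unlocked read (hasType, maybeGetProperty) handles the common already-present case, and AutoLockForCompilation is taken only around the mutation an off-thread compiler could otherwise observe half-done. A rough standalone model of that pattern, with std::mutex standing in for the compilation lock and std::set standing in for the type set's storage (both are assumptions for illustration only):

    #include <mutex>
    #include <set>

    struct TypeSetModel {
        std::set<int> types;          // stands in for the type set's contents
        std::mutex compilationLock;   // stands in for the runtime compilation lock

        bool hasType(int t) const { return types.count(t) != 0; }

        void addType(int t) {
            if (hasType(t))           // unlocked fast path: type already present
                return;
            {
                std::lock_guard<std::mutex> lock(compilationLock);
                types.insert(t);      // mutation guarded from off-thread readers
            }
            // constraint propagation and spew happen here, outside the lock
        }
    };
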
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1932,16 +1932,17 @@ GlobalObject::initIteratorClasses(JSCont
         RootedObject genFunction(cx, NewFunctionWithProto(cx, NullPtr(), Generator, 1,
                                                           JSFunction::NATIVE_CTOR, global, name,
                                                           &function.toObject()));
         if (!genFunction)
             return false;
         if (!LinkConstructorAndPrototype(cx, genFunction, genFunctionProto))
             return false;
 
+        AutoLockForCompilation lock(cx);
         global->setSlot(STAR_GENERATOR_OBJECT_PROTO, ObjectValue(*genObjectProto));
         global->setConstructor(JSProto_GeneratorFunction, ObjectValue(*genFunction));
         global->setPrototype(JSProto_GeneratorFunction, ObjectValue(*genFunctionProto));
     }
 
     if (global->getPrototype(JSProto_StopIteration).isUndefined()) {
         proto = global->createBlankPrototype(cx, &StopIterationObject::class_);
         if (!proto || !JSObject::freeze(cx, proto))
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1268,17 +1268,17 @@ NewObject(ExclusiveContext *cx, const Cl
     JS_ASSERT_IF(parent, &parent->global() == cx->global());
 
     RootedTypeObject type(cx, type_);
 
     JSObject *metadata = nullptr;
     if (!NewObjectMetadata(cx, &metadata))
         return nullptr;
 
-    RootedShape shape(cx, EmptyShape::getInitialShape(cx, clasp, TaggedProto(type->proto),
+    RootedShape shape(cx, EmptyShape::getInitialShape(cx, clasp, type->proto(),
                                                       parent, metadata, kind));
     if (!shape)
         return nullptr;
 
     gc::InitialHeap heap = GetInitialHeap(newKind, clasp);
     JSObject *obj = JSObject::create(cx, kind, heap, shape, type);
     if (!obj)
         return nullptr;
@@ -1452,17 +1452,17 @@ js::NewObjectWithType(JSContext *cx, Han
 
     JS_ASSERT(allocKind <= gc::FINALIZE_OBJECT_LAST);
     if (CanBeFinalizedInBackground(allocKind, &JSObject::class_))
         allocKind = GetBackgroundAllocKind(allocKind);
 
     NewObjectCache &cache = cx->runtime()->newObjectCache;
 
     NewObjectCache::EntryIndex entry = -1;
-    if (parent == type->proto->getParent() &&
+    if (parent == type->proto().toObject()->getParent() &&
         newKind == GenericObject &&
         !cx->compartment()->hasObjectMetadataCallback())
     {
         if (cache.lookupType(&JSObject::class_, type, allocKind, &entry)) {
             JSObject *obj = cache.newObjectFromHit(cx, entry, GetInitialHeap(newKind, &JSObject::class_));
             if (obj)
                 return obj;
         }
@@ -1980,19 +1980,22 @@ JSObject::TradeGuts(JSContext *cx, JSObj
 {
     JS_ASSERT(a->compartment() == b->compartment());
     JS_ASSERT(a->is<JSFunction>() == b->is<JSFunction>());
 
     /*
      * Swap the object's types, to restore their initial type information.
      * The prototypes and classes of the objects were swapped in ReserveForTradeGuts.
      */
-    TypeObject *tmp = a->type_;
-    a->type_ = b->type_;
-    b->type_ = tmp;
+    {
+        AutoLockForCompilation lock(cx);
+        TypeObject *tmp = a->type_;
+        a->type_ = b->type_;
+        b->type_ = tmp;
+    }
 
     /* Don't try to swap a JSFunction for a plain function JSObject. */
     JS_ASSERT_IF(a->is<JSFunction>(), a->tenuredSizeOfThis() == b->tenuredSizeOfThis());
 
     /*
      * Regexp guts are more complicated -- we would need to migrate the
      * refcounted JIT code blob for them across compartments instead of just
      * swapping guts.
@@ -2014,19 +2017,22 @@ JSObject::TradeGuts(JSContext *cx, JSObj
         /*
          * If the objects are the same size, then we make no assumptions about
          * whether they have dynamically allocated slots and instead just copy
          * them over wholesale.
          */
         char tmp[mozilla::tl::Max<sizeof(JSFunction), sizeof(JSObject_Slots16)>::value];
         JS_ASSERT(size <= sizeof(tmp));
 
-        js_memcpy(tmp, a, size);
-        js_memcpy(a, b, size);
-        js_memcpy(b, tmp, size);
+        {
+            AutoLockForCompilation lock(cx);
+            js_memcpy(tmp, a, size);
+            js_memcpy(a, b, size);
+            js_memcpy(b, tmp, size);
+        }
 
 #ifdef JSGC_GENERATIONAL
         /*
          * Trigger post barriers for fixed slots. JSObject bits are barriered
          * below, in common with the other case.
          */
         for (size_t i = 0; i < a->numFixedSlots(); ++i) {
             HeapSlot::writeBarrierPost(cx->runtime(), a, HeapSlot::Slot, i, a->getSlot(i));
@@ -2054,19 +2060,22 @@ JSObject::TradeGuts(JSContext *cx, JSObj
             js_free(a->slots);
         if (b->hasDynamicSlots())
             js_free(b->slots);
 
         void *apriv = a->hasPrivate() ? a->getPrivate() : nullptr;
         void *bpriv = b->hasPrivate() ? b->getPrivate() : nullptr;
 
         char tmp[sizeof(JSObject)];
-        js_memcpy(&tmp, a, sizeof tmp);
-        js_memcpy(a, b, sizeof tmp);
-        js_memcpy(b, &tmp, sizeof tmp);
+        {
+            AutoLockForCompilation lock(cx);
+            js_memcpy(&tmp, a, sizeof tmp);
+            js_memcpy(a, b, sizeof tmp);
+            js_memcpy(b, &tmp, sizeof tmp);
+        }
 
         if (a->isNative())
             a->shape_->setNumFixedSlots(reserved.newafixed);
         else
             a->shape_ = reserved.newashape;
 
         a->slots = reserved.newaslots;
         a->initSlotRange(0, reserved.bvals.begin(), bcap);
@@ -2962,17 +2971,21 @@ js::SetClassAndProto(JSContext *cx, Hand
      * are unknown. Type sets containing this object will contain the original
      * type but not the new type of the object, so we need to go and scan the
      * entire compartment for type sets which have these objects and mark them
      * as containing generic objects.
      */
     MarkTypeObjectUnknownProperties(cx, obj->type(), true);
     MarkTypeObjectUnknownProperties(cx, type, true);
 
-    obj->setType(type);
+    {
+        AutoLockForCompilation lock(cx);
+        obj->setType(type);
+    }
+
     *succeeded = true;
     return true;
 }
 
 bool
 js_GetClassObject(ExclusiveContext *cxArg, JSObject *obj, JSProtoKey key, MutableHandleObject objp)
 {
     Rooted<GlobalObject*> global(cxArg, &obj->global());
@@ -4025,17 +4038,17 @@ NativeGetInline(JSContext *cx,
         jsbytecode *pc;
         JSScript *script = cx->currentScript(&pc);
 #ifdef JS_ION
         if (script && script->hasBaselineScript()) {
             switch (JSOp(*pc)) {
               case JSOP_GETPROP:
               case JSOP_CALLPROP:
               case JSOP_LENGTH:
-                script->baselineScript()->noteAccessedGetter(script->pcToOffset(pc));
+                script->baselineScript()->noteAccessedGetter(cx, script->pcToOffset(pc));
                 break;
               default:
                 break;
             }
         }
 #endif
     }
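
Elsewhere in jsobj.cpp, the raw byte swaps in TradeGuts and the type switch in SetClassAndProto are now wrapped in AutoLockForCompilation so that an off-thread Ion compilation never observes an object whose guts are half exchanged. A small illustration of guarding a memcpy-based swap with a lock; the GutsModel struct and the use of std::mutex are stand-ins, not the engine's types:

    #include <cstring>
    #include <mutex>

    struct GutsModel { void *shape; void *type; char slots[64]; };

    static std::mutex compilationLock;

    // Swap the raw bytes of two objects atomically with respect to readers
    // that honor the same lock (mirrors the TradeGuts critical section).
    void tradeGuts(GutsModel *a, GutsModel *b) {
        char tmp[sizeof(GutsModel)];
        std::lock_guard<std::mutex> lock(compilationLock);
        std::memcpy(tmp, a, sizeof tmp);
        std::memcpy(a, b, sizeof tmp);
        std::memcpy(b, tmp, sizeof tmp);
    }
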
 
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -469,17 +469,17 @@ class JSObject : public js::ObjectImpl
      *    check if the proto is an object, nullptr, or lazily computed.
      * 3. JSObject::getProto(cx, obj, &proto) computes the proto of an object.
      *    If obj is a proxy and the proto is lazy, this code may allocate or
      *    GC in order to compute the proto. Currently, it will not run JS code.
      */
     bool uninlinedIsProxy() const;
     JSObject *getProto() const {
         JS_ASSERT(!uninlinedIsProxy());
-        return js::ObjectImpl::getProto();
+        return getTaggedProto().toObjectOrNull();
     }
     static inline bool getProto(JSContext *cx, js::HandleObject obj,
                                 js::MutableHandleObject protop);
     // Returns false on error, success of operation in outparam.
     static inline bool setProto(JSContext *cx, JS::HandleObject obj,
                                 JS::HandleObject proto, bool *succeeded);
 
     // uninlinedSetType() is the same as setType(), but not inlined.
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -388,29 +388,32 @@ JSObject::clearType(JSContext *cx, js::H
 
     obj->type_ = type;
     return true;
 }
 
 inline void
 JSObject::setType(js::types::TypeObject *newType)
 {
+    // Note: This is usually called for newly created objects that haven't
+    // escaped to script yet, so we don't require the compilation lock to be
+    // held here.
     JS_ASSERT(newType);
     JS_ASSERT(!hasSingletonType());
     type_ = newType;
 }
 
 /* static */ inline bool
 JSObject::getProto(JSContext *cx, js::HandleObject obj, js::MutableHandleObject protop)
 {
     if (obj->getTaggedProto().isLazy()) {
         JS_ASSERT(obj->is<js::ProxyObject>());
         return js::Proxy::getPrototypeOf(cx, obj, protop);
     } else {
-        protop.set(obj->js::ObjectImpl::getProto());
+        protop.set(obj->getTaggedProto().toObjectOrNull());
         return true;
     }
 }
 
 /* static */ inline bool
 JSObject::setProto(JSContext *cx, JS::HandleObject obj, JS::HandleObject proto, bool *succeeded)
 {
     /* Proxies live in their own little world. */
@@ -456,21 +459,21 @@ JSObject::create(js::ExclusiveContext *c
                  js::HeapSlot *extantSlots /* = nullptr */)
 {
     /*
      * Callers must use dynamicSlotsCount to size the initial slot array of the
      * object. We can't check the allocated capacity of the dynamic slots, but
      * make sure their presence is consistent with the shape.
      */
     JS_ASSERT(shape && type);
-    JS_ASSERT(type->clasp == shape->getObjectClass());
-    JS_ASSERT(type->clasp != &js::ArrayObject::class_);
-    JS_ASSERT(js::gc::GetGCKindSlots(kind, type->clasp) == shape->numFixedSlots());
-    JS_ASSERT_IF(type->clasp->flags & JSCLASS_BACKGROUND_FINALIZE, IsBackgroundFinalized(kind));
-    JS_ASSERT_IF(type->clasp->finalize, heap == js::gc::TenuredHeap);
+    JS_ASSERT(type->clasp() == shape->getObjectClass());
+    JS_ASSERT(type->clasp() != &js::ArrayObject::class_);
+    JS_ASSERT(js::gc::GetGCKindSlots(kind, type->clasp()) == shape->numFixedSlots());
+    JS_ASSERT_IF(type->clasp()->flags & JSCLASS_BACKGROUND_FINALIZE, IsBackgroundFinalized(kind));
+    JS_ASSERT_IF(type->clasp()->finalize, heap == js::gc::TenuredHeap);
     JS_ASSERT_IF(extantSlots, dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan()));
 
     js::HeapSlot *slots = extantSlots;
     if (!slots) {
         size_t nDynamicSlots = dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan());
         if (nDynamicSlots) {
             slots = cx->pod_malloc<js::HeapSlot>(nDynamicSlots);
             if (!slots)
@@ -490,36 +493,36 @@ JSObject::create(js::ExclusiveContext *c
         cx->asJSContext()->runtime()->gcNursery.notifyInitialSlots(obj, slots);
 #endif
 
     obj->shape_.init(shape);
     obj->type_.init(type);
     obj->slots = slots;
     obj->elements = js::emptyObjectElements;
 
-    const js::Class *clasp = type->clasp;
+    const js::Class *clasp = type->clasp();
     if (clasp->hasPrivate())
         obj->privateRef(shape->numFixedSlots()) = nullptr;
 
     size_t span = shape->slotSpan();
     if (span && clasp != &js::ArrayBufferObject::class_)
         obj->initializeSlotRange(0, span);
 
     return obj;
 }
 
 /* static */ inline js::ArrayObject *
 JSObject::createArray(js::ExclusiveContext *cx, js::gc::AllocKind kind, js::gc::InitialHeap heap,
                       js::HandleShape shape, js::HandleTypeObject type,
                       uint32_t length)
 {
     JS_ASSERT(shape && type);
-    JS_ASSERT(type->clasp == shape->getObjectClass());
-    JS_ASSERT(type->clasp == &js::ArrayObject::class_);
-    JS_ASSERT_IF(type->clasp->finalize, heap == js::gc::TenuredHeap);
+    JS_ASSERT(type->clasp() == shape->getObjectClass());
+    JS_ASSERT(type->clasp() == &js::ArrayObject::class_);
+    JS_ASSERT_IF(type->clasp()->finalize, heap == js::gc::TenuredHeap);
 
     /*
      * Arrays use their fixed slots to store elements, and must have enough
      * space for the elements header and also be marked as having no space for
      * named properties stored in those fixed slots.
      */
     JS_ASSERT(shape->numFixedSlots() == 0);
 
@@ -974,18 +977,21 @@ DefineConstructorAndPrototype(JSContext 
     JS_ASSERT(!global->nativeEmpty()); /* reserved slots already allocated */
     JS_ASSERT(ctor);
     JS_ASSERT(proto);
 
     RootedId id(cx, NameToId(ClassName(key, cx)));
     JS_ASSERT(!global->nativeLookup(cx, id));
 
     /* Set these first in case AddTypePropertyId looks for this class. */
-    global->setConstructor(key, ObjectValue(*ctor));
-    global->setPrototype(key, ObjectValue(*proto));
+    {
+        AutoLockForCompilation lock(cx);
+        global->setConstructor(key, ObjectValue(*ctor));
+        global->setPrototype(key, ObjectValue(*proto));
+    }
     global->setConstructorPropertySlot(key, ObjectValue(*ctor));
 
     if (!global->addDataProperty(cx, id, GlobalObject::constructorPropertySlot(key), 0)) {
         global->setConstructor(key, UndefinedValue());
         global->setPrototype(key, UndefinedValue());
         global->setConstructorPropertySlot(key, UndefinedValue());
         return false;
     }
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -2743,17 +2743,17 @@ Proxy::regexp_toShared(JSContext *cx, Ha
 
 bool
 Proxy::defaultValue(JSContext *cx, HandleObject proxy, JSType hint, MutableHandleValue vp)
 {
     JS_CHECK_RECURSION(cx, return false);
     return proxy->as<ProxyObject>().handler()->defaultValue(cx, proxy, hint, vp);
 }
 
-JSObject * const Proxy::LazyProto = reinterpret_cast<JSObject *>(0x1);
+JSObject * const TaggedProto::LazyProto = reinterpret_cast<JSObject *>(0x1);
 
 bool
 Proxy::getPrototypeOf(JSContext *cx, HandleObject proxy, MutableHandleObject proto)
 {
     JS_ASSERT(proxy->getTaggedProto().isLazy());
     JS_CHECK_RECURSION(cx, return false);
     return proxy->as<ProxyObject>().handler()->getPrototypeOf(cx, proxy, proto);
 }
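
Moving the LazyProto sentinel from Proxy onto TaggedProto lets the inline predicates in jsinfer.h avoid proxy headers entirely. The encoding relies on the sentinel being the non-null, non-pointer value 0x1, so isObject() reduces to one unsigned comparison. A tiny sketch of the tagging scheme (illustrative names, not the real class):

    #include <cassert>
    #include <cstdint>

    struct Obj;  // opaque stand-in for JSObject

    struct TaggedProtoModel {
        // Sentinel meaning "this proto must be computed lazily" (proxy protos).
        static Obj * const LazyProto;

        Obj *proto;  // nullptr, LazyProto, or a real object pointer

        bool isLazy() const   { return proto == LazyProto; }
        bool isObject() const {
            // Skip nullptr (0) and LazyProto (0x1): real pointers compare higher.
            return uintptr_t(proto) > uintptr_t(LazyProto);
        }
        Obj *toObject() const { assert(isObject()); return proto; }
    };

    Obj * const TaggedProtoModel::LazyProto = reinterpret_cast<Obj *>(0x1);
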
--- a/js/src/jsproxy.h
+++ b/js/src/jsproxy.h
@@ -319,18 +319,16 @@ class Proxy
     static bool unwatch(JSContext *cx, HandleObject proxy, HandleId id);
 
     static bool slice(JSContext *cx, HandleObject obj, uint32_t begin, uint32_t end,
                       HandleObject result);
 
     /* IC entry path for handling __noSuchMethod__ on access. */
     static bool callProp(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
                          MutableHandleValue vp);
-
-    static JSObject * const LazyProto;
 };
 
 // Use these in places where you don't want to #include vm/ProxyObject.h.
 extern JS_FRIEND_DATA(const js::Class* const) CallableProxyClassPtr;
 extern JS_FRIEND_DATA(const js::Class* const) UncallableProxyClassPtr;
 extern JS_FRIEND_DATA(const js::Class* const) OuterWindowProxyClassPtr;
 
 inline bool IsProxyClass(const Class *clasp)
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -3010,17 +3010,20 @@ JSScript::argumentsOptimizationFailed(JS
      * is nothing to do; GuardFunApplySpeculation will patch in the real
      * argsobj.
      */
     if (script->needsArgsObj())
         return true;
 
     JS_ASSERT(!script->isGenerator());
 
-    script->needsArgsObj_ = true;
+    {
+        AutoLockForCompilation lock(cx);
+        script->needsArgsObj_ = true;
+    }
 
 #ifdef JS_ION
     /*
      * Since we can't invalidate baseline scripts, set a flag that's checked from
      * JIT code to indicate the arguments optimization failed and JSOP_ARGUMENTS
      * should create an arguments object next time.
      */
     if (script->hasBaselineScript())
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -972,17 +972,21 @@ class JSScript : public js::gc::Barriere
      * needsArgsObj which is set by ScriptAnalysis::analyzeSSA before running
      * the script the first time. When !needsArgsObj, the prologue may simply
      * write MagicValue(JS_OPTIMIZED_ARGUMENTS) to 'arguments's slot and any
      * uses of 'arguments' will be guaranteed to handle this magic value.
      * To avoid spurious arguments object creation, we maintain the invariant
      * that needsArgsObj is only called after the script has been analyzed.
      */
     bool analyzedArgsUsage() const { return !needsArgsAnalysis_; }
-    bool needsArgsObj() const { JS_ASSERT(analyzedArgsUsage()); return needsArgsObj_; }
+    bool needsArgsObj() const {
+        JS_ASSERT(analyzedArgsUsage());
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
+        return needsArgsObj_;
+    }
     void setNeedsArgsObj(bool needsArgsObj);
     static bool argumentsOptimizationFailed(JSContext *cx, js::HandleScript script);
 
     /*
      * Arguments access (via JSOP_*ARG* opcodes) must access the canonical
      * location for the argument. If an arguments object exists AND this is a
      * non-strict function (where 'arguments' aliases formals), then all access
      * must go through the arguments object. Otherwise, the local slot is the
@@ -1022,26 +1026,27 @@ class JSScript : public js::gc::Barriere
     void setIonScript(js::jit::IonScript *ionScript) {
         if (hasIonScript())
             js::jit::IonScript::writeBarrierPre(tenuredZone(), ion);
         ion = ionScript;
         updateBaselineOrIonRaw();
     }
 
     bool hasBaselineScript() const {
+        JS_ASSERT(js::CurrentThreadCanReadCompilationData());
         return baseline && baseline != BASELINE_DISABLED_SCRIPT;
     }
     bool canBaselineCompile() const {
         return baseline != BASELINE_DISABLED_SCRIPT;
     }
     js::jit::BaselineScript *baselineScript() const {
         JS_ASSERT(hasBaselineScript());
         return baseline;
     }
-    inline void setBaselineScript(js::jit::BaselineScript *baselineScript);
+    inline void setBaselineScript(JSContext *maybecx, js::jit::BaselineScript *baselineScript);
 
     void updateBaselineOrIonRaw();
 
     bool hasParallelIonScript() const {
         return parallelIon && parallelIon != ION_DISABLED_SCRIPT && parallelIon != ION_COMPILING_SCRIPT;
     }
 
     bool canParallelIonCompile() const {
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -120,18 +120,21 @@ JSScript::setIsCallsiteClone(JSObject *f
     shouldCloneAtCallsite_ = false;
     isCallsiteClone_ = true;
     JS_ASSERT(isCallsiteClone());
     JS_ASSERT(fun->is<JSFunction>());
     enclosingScopeOrOriginalFunction_ = fun;
 }
 
 inline void
-JSScript::setBaselineScript(js::jit::BaselineScript *baselineScript) {
+JSScript::setBaselineScript(JSContext *maybecx, js::jit::BaselineScript *baselineScript) {
 #ifdef JS_ION
     if (hasBaselineScript())
         js::jit::BaselineScript::writeBarrierPre(tenuredZone(), baseline);
 #endif
+    mozilla::Maybe<js::AutoLockForCompilation> lock;
+    if (maybecx)
+        lock.construct(maybecx);
     baseline = baselineScript;
     updateBaselineOrIonRaw();
 }
 
 #endif /* jsscriptinlines_h */
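
setBaselineScript now threads through a possibly-null JSContext and constructs the compilation lock only when a context is available, via mozilla::Maybe<AutoLockForCompilation>. A hedged equivalent of that optional-lock shape using the standard library (ContextModel and ScriptModel are invented for the example):

    #include <mutex>

    struct ContextModel { std::mutex compilationLock; };

    struct ScriptModel {
        void *baseline = nullptr;

        // maybecx may be null; take the lock only when a context exists,
        // mirroring Maybe<AutoLockForCompilation>::construct(maybecx).
        void setBaselineScript(ContextModel *maybecx, void *baselineScript) {
            std::unique_lock<std::mutex> lock;   // holds nothing yet
            if (maybecx)
                lock = std::unique_lock<std::mutex>(maybecx->compilationLock);
            baseline = baselineScript;           // guarded when maybecx != nullptr
        }
    };
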
--- a/js/src/jsworkers.cpp
+++ b/js/src/jsworkers.cpp
@@ -86,16 +86,18 @@ js::StartOffThreadIonCompile(JSContext *
     WorkerThreadState &state = *cx->runtime()->workerThreadState;
     JS_ASSERT(state.numThreads);
 
     AutoLockWorkerThreadState lock(state);
 
     if (!state.ionWorklist.append(builder))
         return false;
 
+    cx->runtime()->addCompilationThread();
+
     state.notifyAll(WorkerThreadState::PRODUCER);
     return true;
 }
 
 /*
  * Move an IonBuilder for which compilation has either finished, failed, or
  * been cancelled into the Ion compartment's finished compilations list.
  * All off thread compilations which are started must eventually be finished.
@@ -610,28 +612,30 @@ WorkerThreadState::finishParseTask(JSCon
     // to the corresponding prototype in the new compartment. This will briefly
     // create cross compartment pointers, which will be fixed by the
     // MergeCompartments call below.
     for (gc::CellIter iter(parseTask->cx->zone(), gc::FINALIZE_TYPE_OBJECT);
          !iter.done();
          iter.next())
     {
         types::TypeObject *object = iter.get<types::TypeObject>();
-        TaggedProto proto(object->proto);
+        TaggedProto proto(object->proto());
         if (!proto.isObject())
             continue;
 
         JSProtoKey key = js_IdentifyClassPrototype(proto.toObject());
         if (key == JSProto_Null)
             continue;
 
         JSObject *newProto = GetClassPrototypePure(&parseTask->scopeChain->global(), key);
         JS_ASSERT(newProto);
 
-        object->proto = newProto;
+        // Note: this is safe to do without requiring the compilation lock, as
+        // the new type is not yet available to compilation threads.
+        object->setProtoUnchecked(newProto);
     }
 
     // Move the parsed script and all its contents into the desired compartment.
     gc::MergeCompartments(parseTask->cx->compartment(), parseTask->scopeChain->compartment());
 
     RootedScript script(rt, parseTask->script);
 
     // If we have a context, report any error or warnings generated during the
@@ -755,17 +759,21 @@ WorkerThread::handleIonWorkload(WorkerTh
                         ionBuilder->script());
 #endif
 
     state.unlock();
     {
         jit::IonContext ictx(jit::CompileRuntime::get(runtime),
                              jit::CompileCompartment::get(ionBuilder->script()->compartment()),
                              &ionBuilder->alloc());
-        ionBuilder->setBackgroundCodegen(jit::CompileBackEnd(ionBuilder));
+        AutoEnterIonCompilation ionCompiling;
+        bool succeeded = ionBuilder->build();
+        ionBuilder->clearForBackEnd();
+        if (succeeded)
+            ionBuilder->setBackgroundCodegen(jit::CompileBackEnd(ionBuilder));
     }
     state.lock();
 
     FinishOffThreadIonCompile(ionBuilder);
     ionBuilder = nullptr;
 
     // Ping the main thread so that the compiled code can be incorporated
     // at the next operation callback. Don't interrupt Ion code for this, as
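
This jsworkers.cpp hunk is the heart of the patch: instead of receiving a fully built MIR graph from the main thread, the worker now runs IonBuilder::build() itself inside an AutoEnterIonCompilation scope (the runtime having registered a compilation thread via addCompilationThread when the task was queued), which is what the CurrentThreadCanRead/WriteCompilationData assertions key off, and it only invokes the back end when graph construction succeeded. A rough, self-contained model of that worker-side flow; every type and helper here is a hypothetical stand-in:

    #include <cstdio>

    // Hypothetical thread-local flag mirroring PerThreadData::ionCompiling.
    static thread_local bool ionCompiling = false;

    struct AutoEnterIonCompilation {
        AutoEnterIonCompilation()  { ionCompiling = true; }
        ~AutoEnterIonCompilation() { ionCompiling = false; }
    };

    struct BuilderModel {
        bool build()           { return true; }   // construct the MIR graph off thread
        void clearForBackEnd() {}                 // drop main-thread-only state
        bool compileBackEnd()  { return true; }   // lowering, regalloc, codegen
    };

    void handleIonWorkload(BuilderModel *builder) {
        AutoEnterIonCompilation guard;            // reads of compilation data allowed
        bool succeeded = builder->build();
        builder->clearForBackEnd();
        if (succeeded && !builder->compileBackEnd())
            std::fprintf(stderr, "back end failed\n");
        // the finished builder is then handed back to the main thread
    }
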
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -43,17 +43,17 @@ Wrapper::New(JSContext *cx, JSObject *ob
 {
     JS_ASSERT(parent);
 
     AutoMarkInDeadZone amd(cx->zone());
 
     RootedValue priv(cx, ObjectValue(*obj));
     ProxyOptions options;
     options.setCallable(obj->isCallable());
-    return NewProxyObject(cx, handler, priv, Proxy::LazyProto, parent, options);
+    return NewProxyObject(cx, handler, priv, TaggedProto::LazyProto, parent, options);
 }
 
 JSObject *
 Wrapper::Renew(JSContext *cx, JSObject *existing, JSObject *obj, Wrapper *handler)
 {
     JS_ASSERT(!obj->isCallable());
     existing->as<ProxyObject>().renew(cx, handler, ObjectValue(*obj));
     return existing;
@@ -136,17 +136,17 @@ Wrapper Wrapper::singletonWithPrototype(
 
 extern JSObject *
 js::TransparentObjectWrapper(JSContext *cx, HandleObject existing, HandleObject obj,
                              HandleObject wrappedProto, HandleObject parent,
                              unsigned flags)
 {
     // Allow wrapping outer window proxies.
     JS_ASSERT(!obj->is<WrapperObject>() || obj->getClass()->ext.innerObject);
-    JS_ASSERT(wrappedProto == Proxy::LazyProto);
+    JS_ASSERT(wrappedProto == TaggedProto::LazyProto);
     return Wrapper::New(cx, obj, parent, &CrossCompartmentWrapper::singleton);
 }
 
 ErrorCopier::~ErrorCopier()
 {
     JSContext *cx = ac.ref().context()->asJSContext();
     if (ac.ref().origin() != cx->compartment() && cx->isExceptionPending()) {
         RootedValue exc(cx, cx->getPendingException());
--- a/js/src/vm/GlobalObject.cpp
+++ b/js/src/vm/GlobalObject.cpp
@@ -664,26 +664,52 @@ GlobalObject::addDebugger(JSContext *cx,
 
 bool
 GlobalObject::getSelfHostedFunction(JSContext *cx, HandleAtom selfHostedName, HandleAtom name,
                                     unsigned nargs, MutableHandleValue funVal)
 {
     RootedId shId(cx, AtomToId(selfHostedName));
     RootedObject holder(cx, cx->global()->intrinsicsHolder());
 
-    if (HasDataProperty(cx, holder, shId, funVal.address()))
+    if (cx->global()->maybeGetIntrinsicValue(shId, funVal.address()))
         return true;
 
     if (!cx->runtime()->maybeWrappedSelfHostedFunction(cx, shId, funVal))
         return false;
     if (!funVal.isUndefined())
         return true;
 
     JSFunction *fun = NewFunction(cx, NullPtr(), nullptr, nargs, JSFunction::INTERPRETED_LAZY,
                                   holder, name, JSFunction::ExtendedFinalizeKind, SingletonObject);
     if (!fun)
         return false;
     fun->setIsSelfHostedBuiltin();
     fun->setExtendedSlot(0, StringValue(selfHostedName));
     funVal.setObject(*fun);
 
-    return JSObject::defineGeneric(cx, holder, shId, funVal, nullptr, nullptr, 0);
+    return cx->global()->addIntrinsicValue(cx, shId, funVal);
 }
+
+bool
+GlobalObject::addIntrinsicValue(JSContext *cx, HandleId id, HandleValue value)
+{
+    RootedObject holder(cx, intrinsicsHolder());
+
+    // Work directly with the shape machinery underlying the object, so that we
+    // don't take the compilation lock until we are ready to update the object
+    // without triggering a GC.
+
+    uint32_t slot = holder->slotSpan();
+    RootedShape last(cx, holder->lastProperty());
+    Rooted<UnownedBaseShape*> base(cx, last->base()->unowned());
+
+    StackShape child(base, id, slot, holder->numFixedSlots(), 0, 0, 0);
+    RootedShape shape(cx, cx->compartment()->propertyTree.getChild(cx, last, holder->numFixedSlots(), child));
+    if (!shape)
+        return false;
+
+    AutoLockForCompilation lock(cx);
+    if (!JSObject::setLastProperty(cx, holder, shape))
+        return false;
+
+    holder->setSlot(shape->slot(), value);
+    return true;
+}
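
GlobalObject::addIntrinsicValue is deliberate about ordering: the Shape creation, which can allocate and trigger a GC, happens before AutoLockForCompilation is taken, and only the non-allocating installation (setLastProperty plus setSlot) runs under the lock. The same allocate-outside, publish-inside shape, sketched with standard-library stand-ins:

    #include <mutex>
    #include <vector>

    struct HolderModel {
        std::mutex compilationLock;
        std::vector<int> slots;

        bool addIntrinsic(int value) {
            // May allocate (and, in the engine, GC): grow storage up front,
            // mirroring how propertyTree.getChild() builds the Shape before locking.
            slots.reserve(slots.size() + 1);

            // Non-allocating publication under the compilation lock, mirroring
            // setLastProperty() followed by setSlot().
            std::lock_guard<std::mutex> lock(compilationLock);
            slots.push_back(value);   // capacity already reserved, so no realloc
            return true;
        }
    };
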
--- a/js/src/vm/GlobalObject.h
+++ b/js/src/vm/GlobalObject.h
@@ -508,36 +508,40 @@ class GlobalObject : public JSObject
         return &self->getPrototype(JSProto_DataView).toObject();
     }
 
     JSObject *intrinsicsHolder() {
         JS_ASSERT(!getSlotRefForCompilation(INTRINSICS).isUndefined());
         return &getSlotRefForCompilation(INTRINSICS).toObject();
     }
 
-    bool maybeGetIntrinsicValue(PropertyName *name, Value *vp) {
+    bool maybeGetIntrinsicValue(jsid id, Value *vp) {
+        JS_ASSERT(CurrentThreadCanReadCompilationData());
         JSObject *holder = intrinsicsHolder();
-        if (Shape *shape = holder->nativeLookupPure(name)) {
+        if (Shape *shape = holder->nativeLookupPure(id)) {
             *vp = holder->getSlot(shape->slot());
             return true;
         }
         return false;
     }
+    bool maybeGetIntrinsicValue(PropertyName *name, Value *vp) {
+        return maybeGetIntrinsicValue(NameToId(name), vp);
+    }
 
     bool getIntrinsicValue(JSContext *cx, HandlePropertyName name, MutableHandleValue value) {
         if (maybeGetIntrinsicValue(name, value.address()))
             return true;
-        Rooted<GlobalObject*> self(cx, this);
         if (!cx->runtime()->cloneSelfHostedValue(cx, name, value))
             return false;
-        RootedObject holder(cx, self->intrinsicsHolder());
         RootedId id(cx, NameToId(name));
-        return JS_DefinePropertyById(cx, holder, id, value, nullptr, nullptr, 0);
+        return addIntrinsicValue(cx, id, value);
     }
 
+    bool addIntrinsicValue(JSContext *cx, HandleId id, HandleValue value);
+
     bool setIntrinsicValue(JSContext *cx, PropertyName *name, HandleValue value) {
 #ifdef DEBUG
         RootedObject self(cx, this);
         JS_ASSERT(cx->runtime()->isSelfHostingGlobal(self));
 #endif
         RootedObject holder(cx, intrinsicsHolder());
         RootedValue valCopy(cx, value);
         return JSObject::setProperty(cx, holder, holder, name, &valCopy, false);
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -1289,17 +1289,17 @@ SetObjectElementOperation(JSContext *cx,
 
 #ifdef JS_ION
     if (obj->isNative() && JSID_IS_INT(id)) {
         uint32_t length = obj->getDenseInitializedLength();
         int32_t i = JSID_TO_INT(id);
         if ((uint32_t)i >= length) {
             // Annotate script if provided with information (e.g. baseline)
             if (script && script->hasBaselineScript() && *pc == JSOP_SETELEM)
-                script->baselineScript()->noteArrayWriteHole(script->pcToOffset(pc));
+                script->baselineScript()->noteArrayWriteHole(cx, script->pcToOffset(pc));
         }
     }
 #endif
 
     if (obj->isNative() && !JSID_IS_INT(id) && !obj->setHadElementsAccess(cx))
         return false;
 
     RootedValue tmp(cx, value);
--- a/js/src/vm/ObjectImpl.cpp
+++ b/js/src/vm/ObjectImpl.cpp
@@ -355,17 +355,17 @@ js::ObjectImpl::numFixedSlotsForCompilat
 
 void
 js::ObjectImpl::markChildren(JSTracer *trc)
 {
     MarkTypeObject(trc, &type_, "type");
 
     MarkShape(trc, &shape_, "shape");
 
-    const Class *clasp = type_->clasp;
+    const Class *clasp = type_->clasp();
     JSObject *obj = asObjectPtr();
     if (clasp->trace)
         clasp->trace(trc, obj);
 
     if (shape_->isNative()) {
         MarkObjectSlots(trc, obj, 0, obj->slotSpan());
         gc::MarkArraySlots(trc, obj->getDenseInitializedLength(), obj->getDenseElements(), "objectElements");
     }
@@ -533,17 +533,17 @@ DenseElementsHeader::defineElement(JSCon
 
 JSObject *
 js::ArrayBufferDelegate(JSContext *cx, Handle<ObjectImpl*> obj)
 {
     MOZ_ASSERT(obj->hasClass(&ArrayBufferObject::class_));
     if (obj->getPrivate())
         return static_cast<JSObject *>(obj->getPrivate());
     JSObject *delegate = NewObjectWithGivenProto(cx, &JSObject::class_,
-                                                 obj->getProto(), nullptr);
+                                                 obj->getTaggedProto(), nullptr);
     obj->setPrivateGCThing(delegate);
     return delegate;
 }
 
 template <typename T>
 bool
 TypedElementsHeader<T>::defineElement(JSContext *cx, Handle<ObjectImpl*> obj,
                                       uint32_t index, const PropDesc &desc, bool shouldThrow,
@@ -679,17 +679,17 @@ js::GetProperty(JSContext *cx, Handle<Ob
             MOZ_ASSUME_UNREACHABLE("NYI: proxy [[GetP]]");
 
         AutoPropDescRooter desc(cx);
         if (!GetOwnProperty(cx, current, pid, resolveFlags, &desc.getPropDesc()))
             return false;
 
         /* No property?  Recur or bottom out. */
         if (desc.isUndefined()) {
-            current = current->getProto();
+            current = current->getTaggedProto().toObjectOrNull();
             if (current)
                 continue;
 
             vp.setUndefined();
             return true;
         }
 
         /* If it's a data property, return the value. */
@@ -741,17 +741,17 @@ js::GetElement(JSContext *cx, Handle<Obj
             MOZ_ASSUME_UNREACHABLE("NYI: proxy [[GetP]]");
 
         PropDesc desc;
         if (!GetOwnElement(cx, current, index, resolveFlags, &desc))
             return false;
 
         /* No property?  Recur or bottom out. */
         if (desc.isUndefined()) {
-            current = current->getProto();
+            current = current->getTaggedProto().toObjectOrNull();
             if (current)
                 continue;
 
             vp->setUndefined();
             return true;
         }
 
         /* If it's a data property, return the value. */
@@ -806,17 +806,17 @@ js::HasElement(JSContext *cx, Handle<Obj
         if (!GetOwnElement(cx, current, index, resolveFlags, &prop))
             return false;
 
         if (!prop.isUndefined()) {
             *found = true;
             return true;
         }
 
-        current = current->getProto();
+        current = current->getTaggedProto().toObjectOrNull();
         if (current)
             continue;
 
         *found = false;
         return true;
     } while (false);
 
     MOZ_ASSUME_UNREACHABLE("buggy control flow");
@@ -1004,17 +1004,17 @@ js::SetElement(JSContext *cx, Handle<Obj
 
                 *succeeded = true;
                 return Invoke(cx, args);
             }
 
             MOZ_ASSUME_UNREACHABLE("NYI: setting PropertyOp-based property");
         }
 
-        current = current->getProto();
+        current = current->getTaggedProto().toObjectOrNull();
         if (current)
             continue;
 
         PropDesc newDesc(v, PropDesc::Writable, PropDesc::Enumerable, PropDesc::Configurable);
         return DefineElement(cx, receiver, index, newDesc, false, resolveFlags, succeeded);
     } while (false);
 
     MOZ_ASSUME_UNREACHABLE("buggy control flow");
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -975,22 +975,24 @@ class ObjectImpl : public gc::BarrieredC
     JSObject * asObjectPtr() { return reinterpret_cast<JSObject *>(this); }
     const JSObject * asObjectPtr() const { return reinterpret_cast<const JSObject *>(this); }
 
     friend inline Value ObjectValue(ObjectImpl &obj);
 
     /* These functions are public, and they should remain public. */
 
   public:
-    JSObject * getProto() const {
-        return type_->proto;
+    js::TaggedProto getTaggedProto() const {
+        return type_->proto();
     }
 
+    bool hasTenuredProto() const;
+
     const Class *getClass() const {
-        return type_->clasp;
+        return type_->clasp();
     }
 
     static inline bool
     isExtensible(ExclusiveContext *cx, Handle<ObjectImpl*> obj, bool *extensible);
 
     // Indicates whether a non-proxy is extensible.  Don't call on proxies!
     // This method really shouldn't exist -- but there are a few internal
     // places that want it (JITs and the like), and it'd be a pain to mark them
@@ -1167,20 +1169,16 @@ class ObjectImpl : public gc::BarrieredC
     }
 
     /*
      * These functions are currently public for simplicity; in the long run
      * it may make sense to make at least some of them private.
      */
 
   public:
-    js::TaggedProto getTaggedProto() const {
-        return TaggedProto(getProto());
-    }
-
     Shape * lastProperty() const {
         MOZ_ASSERT(shape_);
         return shape_;
     }
 
     bool generateOwnShape(ThreadSafeContext *cx, js::Shape *newShape = nullptr) {
         return replaceWithNewEquivalentShape(cx, lastProperty(), newShape);
     }
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -70,16 +70,19 @@ PerThreadData::PerThreadData(JSRuntime *
     runtime_(runtime),
     ionTop(nullptr),
     ionJSContext(nullptr),
     ionStackLimit(0),
     activation_(nullptr),
     asmJSActivationStack_(nullptr),
     dtoaState(nullptr),
     suppressGC(0),
+#ifdef DEBUG
+    ionCompiling(false),
+#endif
     activeCompilations(0)
 {}
 
 PerThreadData::~PerThreadData()
 {
     if (dtoaState)
         js_DestroyDtoaState(dtoaState);
 
@@ -130,16 +133,22 @@ JSRuntime::JSRuntime(JSUseHelperThreads 
     operationCallbackLockTaken(false),
 #endif
 #ifdef JS_WORKER_THREADS
     workerThreadState(nullptr),
     exclusiveAccessLock(nullptr),
     exclusiveAccessOwner(nullptr),
     mainThreadHasExclusiveAccess(false),
     numExclusiveThreads(0),
+    compilationLock(nullptr),
+#ifdef DEBUG
+    compilationLockOwner(nullptr),
+    mainThreadHasCompilationLock(false),
+#endif
+    numCompilationThreads(0),
 #endif
     systemZone(nullptr),
     numCompartments(0),
     localeCallbacks(nullptr),
     defaultLocale(nullptr),
     defaultVersion_(JSVERSION_DEFAULT),
 #ifdef JS_THREADSAFE
     ownerThread_(nullptr),
@@ -357,16 +366,20 @@ JSRuntime::init(uint32_t maxbytes)
     if (!gcLock)
         return false;
 #endif
 
 #ifdef JS_WORKER_THREADS
     exclusiveAccessLock = PR_NewLock();
     if (!exclusiveAccessLock)
         return false;
+
+    compilationLock = PR_NewLock();
+    if (!compilationLock)
+        return false;
 #endif
 
     if (!mainThread.init())
         return false;
 
     js::TlsPerThreadData.set(&mainThread);
     mainThread.addToThreadList();
 
@@ -478,16 +491,20 @@ JSRuntime::~JSRuntime()
 
     JS_ASSERT(!exclusiveAccessOwner);
     if (exclusiveAccessLock)
         PR_DestroyLock(exclusiveAccessLock);
 
     // Avoid bogus asserts during teardown.
     JS_ASSERT(!numExclusiveThreads);
     mainThreadHasExclusiveAccess = true;
+
+    JS_ASSERT(!compilationLockOwner);
+    if (compilationLock)
+        PR_DestroyLock(compilationLock);
 #endif
 
 #ifdef JS_THREADSAFE
     JS_ASSERT(!operationCallbackOwner);
     if (operationCallbackLock)
         PR_DestroyLock(operationCallbackLock);
 #endif
 
@@ -811,17 +828,17 @@ bool
 JSRuntime::activeGCInAtomsZone()
 {
     Zone *zone = atomsCompartment_->zone();
     return zone->needsBarrier() || zone->isGCScheduled() || zone->wasGCStarted();
 }
 
 #if defined(DEBUG) && !defined(XP_WIN)
 
-AutoProtectHeapForCompilation::AutoProtectHeapForCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
+AutoProtectHeapForIonCompilation::AutoProtectHeapForIonCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : runtime(rt)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
 
     JS_ASSERT(!runtime->heapProtected_);
     runtime->heapProtected_ = true;
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
@@ -829,17 +846,17 @@ AutoProtectHeapForCompilation::AutoProte
         // Note: Don't protect the last page in the chunk, which stores
         // immutable info and needs to be accessible for runtimeFromAnyThread()
         // in AutoThreadSafeAccess.
         if (mprotect(chunk, ChunkSize - sizeof(Arena), PROT_NONE))
             MOZ_CRASH();
     }
 }
 
-AutoProtectHeapForCompilation::~AutoProtectHeapForCompilation()
+AutoProtectHeapForIonCompilation::~AutoProtectHeapForIonCompilation()
 {
     JS_ASSERT(runtime->heapProtected_);
     JS_ASSERT(runtime->unprotectedArenas.empty());
     runtime->heapProtected_ = false;
 
     for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) {
         Chunk *chunk = r.front();
         if (mprotect(chunk, ChunkSize - sizeof(Arena), PROT_READ | PROT_WRITE))
@@ -931,17 +948,17 @@ js::CurrentThreadCanAccessRuntime(JSRunt
 }
 
 bool
 js::CurrentThreadCanAccessZone(Zone *zone)
 {
     return true;
 }
 
-#endif
+#endif // JS_THREADSAFE
 
 #ifdef DEBUG
 
 void
 JSRuntime::assertCanLock(RuntimeLock which)
 {
 #ifdef JS_WORKER_THREADS
     // In the switch below, each case falls through to the one below it. None
@@ -949,18 +966,78 @@ JSRuntime::assertCanLock(RuntimeLock whi
     // it must be done in the order below.
     switch (which) {
       case ExclusiveAccessLock:
         JS_ASSERT(exclusiveAccessOwner != PR_GetCurrentThread());
       case WorkerThreadStateLock:
         JS_ASSERT_IF(workerThreadState, !workerThreadState->isLocked());
       case OperationCallbackLock:
         JS_ASSERT(!currentThreadOwnsOperationCallbackLock());
+      case CompilationLock:
+        JS_ASSERT(compilationLockOwner != PR_GetCurrentThread());
       case GCLock:
         JS_ASSERT(gcLockOwner != PR_GetCurrentThread());
         break;
       default:
         MOZ_CRASH();
     }
-#endif // JS_THREADSAFE
+#endif // JS_WORKER_THREADS
+}
+
+AutoEnterIonCompilation::AutoEnterIonCompilation(MOZ_GUARD_OBJECT_NOTIFIER_ONLY_PARAM_IN_IMPL)
+{
+    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+
+#ifdef JS_WORKER_THREADS
+    PerThreadData *pt = js::TlsPerThreadData.get();
+    JS_ASSERT(!pt->ionCompiling);
+    pt->ionCompiling = true;
+#endif
+}
+
+AutoEnterIonCompilation::~AutoEnterIonCompilation()
+{
+#ifdef JS_WORKER_THREADS
+    PerThreadData *pt = js::TlsPerThreadData.get();
+    JS_ASSERT(pt->ionCompiling);
+    pt->ionCompiling = false;
+#endif
+}
+
+bool
+js::CurrentThreadCanWriteCompilationData()
+{
+#ifdef JS_WORKER_THREADS
+    PerThreadData *pt = TlsPerThreadData.get();
+
+    // Data can only be read, not written, during compilation.
+    if (pt->ionCompiling)
+        return false;
+
+    // Ignore what threads with exclusive contexts are doing; these never
+    // run scripts or have associated compilation threads.
+    JSRuntime *rt = pt->runtimeIfOnOwnerThread();
+    if (!rt)
+        return true;
+
+    return rt->currentThreadHasCompilationLock();
+#else
+    return true;
+#endif
+}
+
+bool
+js::CurrentThreadCanReadCompilationData()
+{
+#ifdef JS_WORKER_THREADS
+    PerThreadData *pt = TlsPerThreadData.get();
+
+    // Data can always be read freely outside of compilation.
+    if (!pt || !pt->ionCompiling)
+        return true;
+
+    return pt->runtime_->currentThreadHasCompilationLock();
+#else
+    return true;
+#endif
 }
 
 #endif // DEBUG
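
A hedged illustration of how the two debug predicates above are meant to be consumed: accessors on data that the main thread mutates and Ion compilation threads read assert the matching predicate before touching the field. The SharedFlag class and its method names are invented for this sketch; only JS_ASSERT and the js::CurrentThreadCan{Read,Write}CompilationData predicates come from this patch, and the sketch assumes it is compiled inside the SpiderMonkey tree.

    // Sketch only: an imaginary field shared between the main thread and Ion
    // compilation threads.
    class SharedFlag
    {
        bool value_;

      public:
        void set(bool value) {
            // Writers must be outside compilation, or hold the compilation
            // lock; both cases are checked inside the predicate.
            JS_ASSERT(js::CurrentThreadCanWriteCompilationData());
            value_ = value;
        }
        bool get() const {
            // Reads are always safe off a compilation thread; on one, the
            // compilation lock must be held.
            JS_ASSERT(js::CurrentThreadCanReadCompilationData());
            return value_;
        }
    };
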
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -530,16 +530,19 @@ class PerThreadData : public PerThreadDa
      * thread than the JSRuntime's owner thread all reads/writes must be
      * synchronized (by rt->operationCallbackLock).
      */
   private:
     friend class js::Activation;
     friend class js::ActivationIterator;
     friend class js::jit::JitActivation;
     friend class js::AsmJSActivation;
+#ifdef DEBUG
+    friend bool js::CurrentThreadCanReadCompilationData();
+#endif
 
     /*
      * Points to the most recent activation running on the thread.
      * See Activation comment in vm/Stack.h.
      */
     js::Activation *activation_;
 
     /* See AsmJSActivation comment. Protected by rt->operationCallbackLock. */
@@ -572,17 +575,22 @@ class PerThreadData : public PerThreadDa
      * to suppress GC when reporting an OOM (see js_ReportOutOfMemory) and in
      * debugging facilities that cannot tolerate a GC and would rather OOM
      * immediately, such as utilities exposed to GDB. Setting this flag is
      * extremely dangerous and should only be used when in an OOM situation or
      * in non-exposed debugging facilities.
      */
     int32_t suppressGC;
 
-    // Whether there is an active compilation on this thread.
+#ifdef DEBUG
+    // Whether this thread is actively Ion compiling.
+    bool ionCompiling;
+#endif
+
+    // Number of active bytecode compilations on this thread.
     unsigned activeCompilations;
 
     PerThreadData(JSRuntime *runtime);
     ~PerThreadData();
 
     bool init();
     void addToThreadList();
     void removeFromThreadList();
@@ -672,17 +680,18 @@ struct MallocProvider
 
 namespace gc {
 class MarkingValidator;
 } // namespace gc
 
 typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
 
 class AutoLockForExclusiveAccess;
-class AutoProtectHeapForCompilation;
+class AutoLockForCompilation;
+class AutoProtectHeapForIonCompilation;
 
 void RecomputeStackLimit(JSRuntime *rt, StackKind kind);
 
 } // namespace js
 
 struct JSRuntime : public JS::shadow::Runtime,
                    public js::MallocProvider<JSRuntime>
 {
@@ -722,16 +731,17 @@ struct JSRuntime : public JS::shadow::Ru
 
     // There are several per-runtime locks indicated by the enum below. When
     // acquiring multiple of these locks, the acquisition must be done in the
     // order below to avoid deadlocks.
     enum RuntimeLock {
         ExclusiveAccessLock,
         WorkerThreadStateLock,
         OperationCallbackLock,
+        CompilationLock,
         GCLock
     };
 #ifdef DEBUG
     void assertCanLock(RuntimeLock which);
 #else
     void assertCanLock(RuntimeLock which) {}
 #endif
 
@@ -800,39 +810,90 @@ struct JSRuntime : public JS::shadow::Ru
     mozilla::DebugOnly<PRThread *> exclusiveAccessOwner;
     mozilla::DebugOnly<bool> mainThreadHasExclusiveAccess;
 
     /* Number of non-main threads with an ExclusiveContext. */
     size_t numExclusiveThreads;
 
     friend class js::AutoLockForExclusiveAccess;
 
+    /*
+     * Lock taken when using data that can be modified by the main thread but
+     * read by Ion compilation threads. Any time either the main thread writes
+     * such data or the compilation thread reads it, this lock must be taken.
+     * Note that no externally visible data is modified by the compilation
+     * thread, so the main thread never needs to take this lock when reading.
+     */
+    PRLock *compilationLock;
+#ifdef DEBUG
+    PRThread *compilationLockOwner;
+    bool mainThreadHasCompilationLock;
+#endif
+
+    /* Number of in-flight Ion compilations. */
+    size_t numCompilationThreads;
+
+    friend class js::AutoLockForCompilation;
+#ifdef DEBUG
+    friend bool js::CurrentThreadCanWriteCompilationData();
+    friend bool js::CurrentThreadCanReadCompilationData();
+#endif
+
   public:
     void setUsedByExclusiveThread(JS::Zone *zone);
     void clearUsedByExclusiveThread(JS::Zone *zone);
 
 #endif // JS_THREADSAFE && JS_ION
 
+#ifdef DEBUG
     bool currentThreadHasExclusiveAccess() {
-#if defined(JS_WORKER_THREADS) && defined(DEBUG)
+#ifdef JS_WORKER_THREADS
         return (!numExclusiveThreads && mainThreadHasExclusiveAccess) ||
-            exclusiveAccessOwner == PR_GetCurrentThread();
+               exclusiveAccessOwner == PR_GetCurrentThread();
 #else
         return true;
 #endif
     }
+#endif // DEBUG
 
     bool exclusiveThreadsPresent() const {
 #ifdef JS_WORKER_THREADS
         return numExclusiveThreads > 0;
 #else
         return false;
 #endif
     }
 
+    void addCompilationThread() {
+        numCompilationThreads++;
+    }
+    void removeCompilationThread() {
+        JS_ASSERT(numCompilationThreads);
+        numCompilationThreads--;
+    }
+
+    bool compilationThreadsPresent() const {
+#ifdef JS_WORKER_THREADS
+        return numCompilationThreads > 0;
+#else
+        return false;
+#endif
+    }
+
+#ifdef DEBUG
+    bool currentThreadHasCompilationLock() {
+#ifdef JS_WORKER_THREADS
+        return (!numCompilationThreads && mainThreadHasCompilationLock) ||
+               compilationLockOwner == PR_GetCurrentThread();
+#else
+        return true;
+#endif
+    }
+#endif // DEBUG
+
     /* Embedders can use this zone however they wish. */
     JS::Zone            *systemZone;
 
     /* List of compartments and zones (protected by the GC lock). */
     js::ZoneVector      zones;
 
     /* How many compartments there are across all zones. */
     size_t              numCompartments;
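
The compilationLock comment above describes an asymmetric protocol: the main thread takes the lock only when writing compiler-visible data, and only while compilation threads exist, whereas Ion compilation threads take it for every read and never write. The following standalone sketch models that protocol with std::mutex standing in for PRLock and plain statics standing in for the JSRuntime and PerThreadData members; none of it is SpiderMonkey code.

    #include <cassert>
    #include <mutex>

    // Stand-ins for JSRuntime::compilationLock / numCompilationThreads and
    // PerThreadData::ionCompiling.
    static std::mutex compilationLock;
    static int numCompilationThreads = 0;     // only touched by the main thread
    static thread_local bool ionCompiling = false;
    static int compilerVisibleDatum = 0;      // written by main thread, read by compilers

    // Main thread: writes are locked whenever compilation threads are present;
    // reads never need the lock because compilation threads do not write.
    void MainThreadWrite(int value)
    {
        assert(!ionCompiling);
        if (numCompilationThreads > 0) {
            std::lock_guard<std::mutex> guard(compilationLock);
            compilerVisibleDatum = value;
        } else {
            compilerVisibleDatum = value;
        }
    }

    int MainThreadRead()
    {
        assert(!ionCompiling);
        return compilerVisibleDatum;          // no lock needed on the main thread
    }

    // Compilation thread: every read of main-thread-mutable data is locked.
    int CompilationThreadRead()
    {
        assert(ionCompiling);
        std::lock_guard<std::mutex> guard(compilationLock);
        return compilerVisibleDatum;
    }
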
@@ -1426,17 +1487,17 @@ struct JSRuntime : public JS::shadow::Ru
 
 #if !EXPOSE_INTL_API
     /* Number localization, used by jsnum.cpp. */
     const char          *thousandsSeparator;
     const char          *decimalSeparator;
     const char          *numGrouping;
 #endif
 
-    friend class js::AutoProtectHeapForCompilation;
+    friend class js::AutoProtectHeapForIonCompilation;
     friend class js::AutoThreadSafeAccess;
     mozilla::DebugOnly<bool> heapProtected_;
 #ifdef DEBUG
     js::Vector<js::gc::ArenaHeader *, 0, js::SystemAllocPolicy> unprotectedArenas;
 
   public:
     bool heapProtected() {
         return heapProtected_;
@@ -2044,26 +2105,46 @@ class RuntimeAllocPolicy
     void *calloc_(size_t bytes) { return runtime->calloc_(bytes); }
     void *realloc_(void *p, size_t bytes) { return runtime->realloc_(p, bytes); }
     void free_(void *p) { js_free(p); }
     void reportAllocOverflow() const {}
 };
 
 extern const JSSecurityCallbacks NullSecurityCallbacks;
 
-class AutoProtectHeapForCompilation
+// Debugging RAII class which marks the current thread as performing an Ion
+// compilation, for use by CurrentThreadCan{Read,Write}CompilationData.
+class AutoEnterIonCompilation
+{
+  public:
+#ifdef DEBUG
+    AutoEnterIonCompilation(MOZ_GUARD_OBJECT_NOTIFIER_ONLY_PARAM);
+    ~AutoEnterIonCompilation();
+#else
+    AutoEnterIonCompilation(MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
+    {
+        MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    }
+#endif
+    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
+};
+
+// Debugging RAII class which protects the entire GC heap for the duration of
+// an Ion compilation. When used, only the main thread will be active, and all
+// accesses to GC things must be wrapped by an AutoThreadSafeAccess instance.
+class AutoProtectHeapForIonCompilation
 {
   public:
 #if defined(DEBUG) && !defined(XP_WIN)
     JSRuntime *runtime;
 
-    AutoProtectHeapForCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-    ~AutoProtectHeapForCompilation();
+    AutoProtectHeapForIonCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
+    ~AutoProtectHeapForIonCompilation();
 #else
-    AutoProtectHeapForCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
+    AutoProtectHeapForIonCompilation(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     }
 #endif
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 } /* namespace js */
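
A hypothetical composition of the two guards declared above (the function and its body are invented for illustration; only the guard classes come from this patch):

    // Sketch only: marking a thread as Ion compiling and, in debug builds,
    // protecting the GC heap while the MIR graph is built.
    static void
    IllustrateCompilationGuards(JSRuntime *rt)
    {
        // Flips the per-thread ionCompiling flag consulted by the
        // CurrentThreadCan{Read,Write}CompilationData assertions.
        js::AutoEnterIonCompilation enter;

        // In DEBUG, non-Windows builds the constructor mprotects the GC chunks,
        // so stray GC-thing accesses crash unless wrapped in
        // AutoThreadSafeAccess; elsewhere it is a no-op. Per its comment, it is
        // only meaningful when the main thread is the only active thread.
        js::AutoProtectHeapForIonCompilation protect(rt);

        // ... build the MIR graph here ...
    }
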
--- a/js/src/vm/SPSProfiler.cpp
+++ b/js/src/vm/SPSProfiler.cpp
@@ -51,23 +51,24 @@ SPSProfiler::setProfilingStack(ProfileEn
 void
 SPSProfiler::enable(bool enabled)
 {
     JS_ASSERT(installed());
 
     if (enabled_ == enabled)
         return;
 
-    enabled_ = enabled;
     /*
      * Ensure all future generated code will be instrumented, or that all
      * currently instrumented code is discarded
      */
     ReleaseAllJITCode(rt->defaultFreeOp());
 
+    enabled_ = enabled;
+
 #ifdef JS_ION
     /* Toggle SPS-related jumps on baseline jitcode.
      * The call to |ReleaseAllJITCode| above will release most baseline jitcode, but not
      * jitcode for scripts with active frames on the stack.  These scripts need to have
      * their profiler state toggled so they behave properly.
      */
     jit::ToggleBaselineSPS(rt, enabled);
 #endif
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -953,20 +953,21 @@ JSRuntime::cloneSelfHostedFunctionScript
     // JSFunction::generatorKind can't handle lazy self-hosted functions, so we make sure there
     // aren't any.
     JS_ASSERT(!sourceFun->isGenerator());
     RootedScript sourceScript(cx, sourceFun->nonLazyScript());
     JS_ASSERT(!sourceScript->enclosingStaticScope());
     JSScript *cscript = CloneScript(cx, NullPtr(), targetFun, sourceScript);
     if (!cscript)
         return false;
-    targetFun->setScript(cscript);
     cscript->setFunction(targetFun);
+
     JS_ASSERT(sourceFun->nargs() == targetFun->nargs());
     targetFun->setFlags(sourceFun->flags() | JSFunction::EXTENDED);
+    targetFun->setScript(cscript);
     return true;
 }
 
 bool
 JSRuntime::cloneSelfHostedValue(JSContext *cx, Handle<PropertyName*> name, MutableHandleValue vp)
 {
     RootedObject shg(cx, selfHostingGlobal_);
     RootedValue val(cx);
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -333,17 +333,17 @@ GetViewListRef(ArrayBufferObject *obj)
 
 /* static */ bool
 ArrayBufferObject::neuterViews(JSContext *cx, Handle<ArrayBufferObject*> buffer)
 {
     ArrayBufferViewObject *view;
     size_t numViews = 0;
     for (view = GetViewList(buffer); view; view = view->nextView()) {
         numViews++;
-        view->neuter();
+        view->neuter(cx);
 
         // Notify compiled jit code that the base pointer has moved.
         MarkObjectStateChange(cx, view);
     }
 
     // neuterAsmJSArrayBuffer adjusts state specific to the ArrayBuffer data
     // itself, but it only affects the behavior of views
     if (buffer->isAsmJSArrayBuffer()) {
@@ -1177,18 +1177,20 @@ TypedArrayObject::isArrayIndex(jsid id, 
             *ip = index;
         return true;
     }
 
     return false;
 }
 
 void
-TypedArrayObject::neuter()
+TypedArrayObject::neuter(JSContext *cx)
 {
+    AutoLockForCompilation lock(cx);
+
     setSlot(LENGTH_SLOT, Int32Value(0));
     setSlot(BYTELENGTH_SLOT, Int32Value(0));
     setSlot(BYTEOFFSET_SLOT, Int32Value(0));
     setPrivate(nullptr);
 }
 
 bool
 TypedArrayObject::obj_lookupGeneric(JSContext *cx, HandleObject tarray, HandleId id,
@@ -2620,22 +2622,22 @@ ArrayBufferViewObject::prependToViews(Ar
 
     // Move the multiview buffer list link into this view since we're
     // prepending it to the list.
     setBufferLink(viewsHead->bufferLink());
     viewsHead->setBufferLink(UNSET_BUFFER_LINK);
 }
 
 void
-ArrayBufferViewObject::neuter()
+ArrayBufferViewObject::neuter(JSContext *cx)
 {
     if (is<DataViewObject>())
         as<DataViewObject>().neuter();
     else
-        as<TypedArrayObject>().neuter();
+        as<TypedArrayObject>().neuter(cx);
 }
 
 // this default implementation is only valid for integer types
 // less than 32-bits in size.
 template<typename NativeType>
 void
 TypedArrayObjectTemplate<NativeType>::copyIndexToValue(JSObject *tarray, uint32_t index,
                                                        MutableHandleValue vp)
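
The neuter() change above is an instance of a pattern this patch applies wherever the main thread mutates object state that an off-thread Ion compilation might read: the write is bracketed by AutoLockForCompilation. A hedged sketch of that pattern follows; the function name and the mutation are invented, and only the lock usage mirrors TypedArrayObject::neuter().

    // Sketch only: main-thread code about to change compiler-visible object
    // state takes the compilation lock so a concurrent MIR build cannot
    // observe the object mid-update.
    static void
    MutateCompilerVisibleState(JSContext *cx)
    {
        js::AutoLockForCompilation lock(cx);
        // ... update the slots / private data that Ion reads off thread ...
    }
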
--- a/js/src/vm/TypedArrayObject.h
+++ b/js/src/vm/TypedArrayObject.h
@@ -271,17 +271,17 @@ class ArrayBufferViewObject : public JSO
     ArrayBufferViewObject *nextView() const {
         return static_cast<ArrayBufferViewObject*>(getFixedSlot(NEXT_VIEW_SLOT).toPrivate());
     }
 
     inline void setNextView(ArrayBufferViewObject *view);
 
     void prependToViews(ArrayBufferViewObject *viewsHead);
 
-    void neuter();
+    void neuter(JSContext *cx);
 
     static void trace(JSTracer *trc, JSObject *obj);
 };
 
 /*
  * TypedArrayObject
  *
  * The non-templated base class for the specific typed implementations.
@@ -348,17 +348,17 @@ class TypedArrayObject : public ArrayBuf
     }
     void *viewData() const {
         return static_cast<void*>(getPrivate(DATA_SLOT));
     }
 
     inline bool isArrayIndex(jsid id, uint32_t *ip = nullptr);
     void copyTypedArrayElement(uint32_t index, MutableHandleValue vp);
 
-    void neuter();
+    void neuter(JSContext *cx);
 
     static uint32_t slotWidth(int atype) {
         switch (atype) {
           case ScalarTypeRepresentation::TYPE_INT8:
           case ScalarTypeRepresentation::TYPE_UINT8:
           case ScalarTypeRepresentation::TYPE_UINT8_CLAMPED:
             return 1;
           case ScalarTypeRepresentation::TYPE_INT16: