Bug 1499644 - Move IC data out of BaselineScript. r=tcampbell
author      Jan de Mooij <jdemooij@mozilla.com>
date        Thu, 29 Nov 2018 19:08:28 +0000
changeset   448766 6453222232be364fb8ce3fd29b6cbcd480e5f2e3
parent      448765 1cce2b80c6452dce775e3fa1c180cc1c7c4e9012
child       448767 ecb2818bcd055bc5381d0702c955ee78a64dc41c
child       448837 da158202751e9481710c64264d5f0e9161f6eb36
child       448849 57f8289e1159f57271001c480258598f90305ed1
push id     35126
push user   rmaries@mozilla.com
push date   Thu, 29 Nov 2018 21:44:05 +0000
treeherder  mozilla-central@6453222232be
reviewers   tcampbell
bugs        1499644, 1499324
milestone   65.0a1
first release with: nightly 65.0a1 build 20181129214405 (6453222232be) on linux32, linux64, mac, win32, and win64
Bug 1499644 - Move IC data out of BaselineScript. r=tcampbell

ICEntries and the fallback stub space are now stored in ICScript. The ICScript* is stored in TypeScript to not increase sizeof(JSScript).

We need this for bug 1499324, but it also lets us greatly simplify the BaselineDebugModeOSR code, as this patch shows.

Note: some ICScript method definitions are still in BaselineJIT.cpp instead of BaselineIC.cpp to make this patch easier to review. We could move them to BaselineIC.cpp as a follow-up change.

Differential Revision: https://phabricator.services.mozilla.com/D11746
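To make the new layout concrete, here is a minimal C++ sketch of the ownership chain this patch sets up. The types are simplified stand-ins (standard containers instead of the SpiderMonkey allocators), not the actual declarations: the Baseline IC entries and the fallback stub space move out of BaselineScript into an ICScript that hangs off the script's TypeScript.

    // Minimal sketch (assumed, simplified types; not the real SpiderMonkey
    // classes) of the ownership chain after this patch:
    //
    //   JSScript -> TypeScript -> ICScript -> { ICEntry[], FallbackICStubSpace }
    #include <cstdint>
    #include <memory>
    #include <vector>

    struct ICStub {};               // fallback or CacheIR stub (details omitted)
    struct FallbackICStubSpace {};  // bump allocator owning the fallback stubs

    struct ICEntry {
        ICStub* firstStub;          // head of the stub chain for this IC
        uint32_t pcOffset;          // bytecode offset; a sentinel marks non-op ICs
    };

    class ICScript {
        FallbackICStubSpace fallbackStubSpace_;
        std::vector<ICEntry> icEntries_;  // created eagerly, in bytecode order
      public:
        ICEntry& icEntry(size_t index) { return icEntries_[index]; }
    };

    class TypeScript {
        std::unique_ptr<ICScript> icScript_;  // keeps sizeof(JSScript) unchanged
      public:
        ICScript* icScript() const { return icScript_.get(); }
    };

Because the ICScript and its stubs now survive a Baseline recompile for debug mode, the patch can delete the stub-cloning and stub-frame-patching machinery in BaselineDebugModeOSR.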
files:
js/src/gc/Zone.cpp
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineCompiler.h
js/src/jit/BaselineDebugModeOSR.cpp
js/src/jit/BaselineDebugModeOSR.h
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/BaselineInspector.cpp
js/src/jit/BaselineInspector.h
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/CacheIRCompiler.cpp
js/src/jit/CacheIRCompiler.h
js/src/jit/Ion.cpp
js/src/jit/arm/SharedICHelpers-arm.h
js/src/jit/arm64/SharedICHelpers-arm64.h
js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
js/src/jit/none/SharedICHelpers-none.h
js/src/jit/x64/SharedICHelpers-x64.h
js/src/jit/x86/SharedICHelpers-x86.h
js/src/vm/Interpreter.cpp
js/src/vm/JSScript-inl.h
js/src/vm/JSScript.cpp
js/src/vm/JSScript.h
js/src/vm/NativeObject.cpp
js/src/vm/TypeInference.cpp
js/src/vm/TypeInference.h
js/src/vm/UnboxedObject.cpp
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -4,25 +4,27 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "gc/Zone-inl.h"
 
 #include "gc/FreeOp.h"
 #include "gc/Policy.h"
 #include "gc/PublicIterators.h"
+#include "jit/BaselineIC.h"
 #include "jit/BaselineJIT.h"
 #include "jit/Ion.h"
 #include "jit/JitRealm.h"
 #include "vm/Debugger.h"
 #include "vm/Runtime.h"
 #include "wasm/WasmInstance.h"
 
 #include "gc/GC-inl.h"
 #include "gc/Marking-inl.h"
+#include "vm/JSScript-inl.h"
 #include "vm/Realm-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 Zone * const Zone::NotOnList = reinterpret_cast<Zone*>(1);
 
 JS::Zone::Zone(JSRuntime* rt)
@@ -255,21 +257,30 @@ Zone::discardJitCode(FreeOp* fop, bool d
         /*
          * Make it impossible to use the control flow graphs cached on the
          * BaselineScript. They get deleted.
          */
         if (script->hasBaselineScript()) {
             script->baselineScript()->setControlFlowGraph(nullptr);
         }
 
-        // Try to release the script's TypeScript. This should happen last
-        // because we can't do this when the script still has JIT code.
+        // Try to release the script's TypeScript. This should happen after
+        // releasing JIT code because we can't do this when the script still has
+        // JIT code.
         if (releaseTypes) {
             script->maybeReleaseTypes();
         }
+
+        // The optimizedStubSpace will be purged below so make sure ICScript
+        // doesn't point into it. We do this after (potentially) releasing types
+        // because TypeScript contains the ICScript* and there's no need to
+        // purge stubs if we just destroyed the TypeScript.
+        if (discardBaselineCode && script->hasICScript()) {
+            script->icScript()->purgeOptimizedStubs(script->zone());
+        }
     }
 
     /*
      * When scripts contains pointers to nursery things, the store buffer
      * can contain entries that point into the optimized stub space. Since
      * this method can be called outside the context of a GC, this situation
      * could result in us trying to mark invalid store buffer entries.
      *
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -1058,16 +1058,17 @@ InitFromBailout(JSContext* cx, size_t fr
         op = JSOp(*pc);
         if (skippedLoopEntry && script->trackRecordReplayProgress()) {
             mozilla::recordreplay::AdvanceExecutionProgressCounter();
         }
     }
 
     const uint32_t pcOff = script->pcToOffset(pc);
     BaselineScript* baselineScript = script->baselineScript();
+    ICScript* icScript = script->icScript();
 
 #ifdef DEBUG
     uint32_t expectedDepth;
     bool reachablePC;
     if (!ReconstructStackDepth(cx, script, resumeAfter ? GetNextPc(pc) : pc, &expectedDepth, &reachablePC)) {
         return false;
     }
 
@@ -1115,34 +1116,34 @@ InitFromBailout(JSContext* cx, size_t fr
         // If the bailout was a resumeAfter, and the opcode is monitored,
         // then the bailed out state should be in a position to enter
         // into the ICTypeMonitor chain for the op.
         bool enterMonitorChain = false;
         if (resumeAfter && (CodeSpec[op].format & JOF_TYPESET)) {
             // Not every monitored op has a monitored fallback stub, e.g.
             // JSOP_NEWOBJECT, which always returns the same type for a
             // particular script/pc location.
-            ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+            ICEntry& icEntry = icScript->icEntryFromPCOffset(pcOff);
             ICFallbackStub* fallbackStub = icEntry.firstStub()->getChainFallback();
             if (fallbackStub->isMonitoredFallback()) {
                 enterMonitorChain = true;
             }
         }
 
         uint32_t numUses = js::StackUses(pc);
 
         if (resumeAfter && !enterMonitorChain) {
             pc = GetNextPc(pc);
         }
 
         builder.setResumePC(pc);
         builder.setResumeFramePtr(prevFramePtr);
 
         if (enterMonitorChain) {
-            ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+            ICEntry& icEntry = icScript->icEntryFromPCOffset(pcOff);
             ICFallbackStub* fallbackStub = icEntry.firstStub()->getChainFallback();
             MOZ_ASSERT(fallbackStub->isMonitoredFallback());
             JitSpew(JitSpew_BaselineBailouts, "      [TYPE-MONITOR CHAIN]");
 
             ICTypeMonitor_Fallback* typeMonitorFallback =
                 fallbackStub->toMonitoredFallbackStub()->getFallbackMonitorStub(cx, script);
             if (!typeMonitorFallback) {
                 return false;
@@ -1312,17 +1313,17 @@ InitFromBailout(JSContext* cx, size_t fr
                                                     FrameType::BaselineJS,
                                                     BaselineStubFrameLayout::Size());
     if (!builder.writeWord(baselineFrameDescr, "Descriptor")) {
         return false;
     }
 
     // Calculate and write out return address.
     // The icEntry in question MUST have an inlinable fallback stub.
-    ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+    ICEntry& icEntry = icScript->icEntryFromPCOffset(pcOff);
     MOZ_ASSERT(IsInlinableFallback(icEntry.firstStub()->getChainFallback()));
 
     RetAddrEntry& retAddrEntry =
         baselineScript->retAddrEntryFromPCOffset(pcOff, RetAddrEntry::Kind::IC);
     if (!builder.writePtr(baselineScript->returnAddressForEntry(retAddrEntry), "ReturnAddr")) {
         return false;
     }
 
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -2409,75 +2409,16 @@ ICCacheIR_Monitored::stubDataStart()
 }
 
 uint8_t*
 ICCacheIR_Updated::stubDataStart()
 {
     return reinterpret_cast<uint8_t*>(this) + stubInfo_->stubDataOffset();
 }
 
-/* static */ ICCacheIR_Regular*
-ICCacheIR_Regular::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                         ICCacheIR_Regular& other)
-{
-    const CacheIRStubInfo* stubInfo = other.stubInfo();
-    MOZ_ASSERT(stubInfo->makesGCCalls());
-
-    size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
-    void* newStub = space->alloc(bytesNeeded);
-    if (!newStub) {
-        return nullptr;
-    }
-
-    ICCacheIR_Regular* res = new(newStub) ICCacheIR_Regular(other.jitCode(), stubInfo);
-    stubInfo->copyStubData(&other, res);
-    return res;
-}
-
-
-/* static */ ICCacheIR_Monitored*
-ICCacheIR_Monitored::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                           ICCacheIR_Monitored& other)
-{
-    const CacheIRStubInfo* stubInfo = other.stubInfo();
-    MOZ_ASSERT(stubInfo->makesGCCalls());
-
-    size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
-    void* newStub = space->alloc(bytesNeeded);
-    if (!newStub) {
-        return nullptr;
-    }
-
-    ICCacheIR_Monitored* res = new(newStub) ICCacheIR_Monitored(other.jitCode(), firstMonitorStub,
-                                                                stubInfo);
-    stubInfo->copyStubData(&other, res);
-    return res;
-}
-
-/* static */ ICCacheIR_Updated*
-ICCacheIR_Updated::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                         ICCacheIR_Updated& other)
-{
-    const CacheIRStubInfo* stubInfo = other.stubInfo();
-    MOZ_ASSERT(stubInfo->makesGCCalls());
-
-    size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
-    void* newStub = space->alloc(bytesNeeded);
-    if (!newStub) {
-        return nullptr;
-    }
-
-    ICCacheIR_Updated* res = new(newStub) ICCacheIR_Updated(other.jitCode(), stubInfo);
-    res->updateStubGroup() = other.updateStubGroup();
-    res->updateStubId() = other.updateStubId();
-
-    stubInfo->copyStubData(&other, res);
-    return res;
-}
-
 bool
 BaselineCacheIRCompiler::emitCallStringConcatResult()
 {
     AutoOutputRegister output(*this);
     Register lhs = allocator.useRegister(masm, reader.stringOperandId());
     Register rhs = allocator.useRegister(masm, reader.stringOperandId());
     AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
 
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -45,20 +45,18 @@ BaselineCompiler::BaselineCompiler(JSCon
   : cx(cx),
     script(script),
     pc(script->code()),
     ionCompileable_(jit::IsIonEnabled(cx) && CanIonCompileScript(cx, script)),
     compileDebugInstrumentation_(script->isDebuggee()),
     alloc_(alloc),
     analysis_(alloc, script),
     frame(script, masm),
-    stubSpace_(),
-    icEntries_(),
     pcMappingEntries_(),
-    icLoadLabels_(),
+    icEntryIndex_(0),
     pushedBeforeCall_(0),
 #ifdef DEBUG
     inCall_(false),
 #endif
     profilerPushToggleOffset_(),
     profilerEnterFrameToggleOffset_(),
     profilerExitFrameToggleOffset_(),
     traceLoggerToggleOffsets_(cx),
@@ -241,17 +239,16 @@ BaselineCompiler::compile()
     size_t resumeEntries =
         script->hasResumeOffsets() ? script->resumeOffsets().size() : 0;
     UniquePtr<BaselineScript> baselineScript(
         BaselineScript::New(script, bailoutPrologueOffset_.offset(),
                             debugOsrPrologueOffset_.offset(),
                             debugOsrEpilogueOffset_.offset(),
                             profilerEnterFrameToggleOffset_.offset(),
                             profilerExitFrameToggleOffset_.offset(),
-                            icEntries_.length(),
                             retAddrEntries_.length(),
                             pcMappingIndexEntries.length(),
                             pcEntries.length(),
                             bytecodeTypeMapEntries,
                             resumeEntries,
                             traceLoggerToggleOffsets_.length()),
         JS::DeletePolicy<BaselineScript>(cx->runtime()));
     if (!baselineScript) {
@@ -267,42 +264,26 @@ BaselineCompiler::compile()
             script->filename(), script->lineno(), script->column());
 
     MOZ_ASSERT(pcMappingIndexEntries.length() > 0);
     baselineScript->copyPCMappingIndexEntries(&pcMappingIndexEntries[0]);
 
     MOZ_ASSERT(pcEntries.length() > 0);
     baselineScript->copyPCMappingEntries(pcEntries);
 
-    // Copy ICEntries and RetAddrEntries.
-    if (icEntries_.length() > 0) {
-        baselineScript->copyICEntries(script, &icEntries_[0]);
-    }
+    // Copy RetAddrEntries.
     if (retAddrEntries_.length() > 0) {
         baselineScript->copyRetAddrEntries(script, &retAddrEntries_[0]);
     }
 
-    // Adopt fallback stubs from the compiler into the baseline script.
-    baselineScript->adoptFallbackStubs(&stubSpace_);
-
     // If profiler instrumentation is enabled, toggle instrumentation on.
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
         baselineScript->toggleProfilerInstrumentation(true);
     }
 
-    // Patch IC loads using IC entries.
-    for (size_t i = 0; i < icLoadLabels_.length(); i++) {
-        CodeOffset label = icLoadLabels_[i].label;
-        size_t icEntry = icLoadLabels_[i].icEntry;
-        ICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
-        Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
-                                           ImmPtr(entryAddr),
-                                           ImmPtr((void*)-1));
-    }
-
     if (modifiesArguments_) {
         baselineScript->setModifiesArguments();
     }
     if (analysis_.usesEnvironmentChain()) {
         baselineScript->setUsesEnvironmentChain();
     }
 
 #ifdef JS_TRACE_LOGGING
@@ -580,44 +561,46 @@ BaselineCompiler::emitOutOfLinePostBarri
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));
 
     masm.popValue(R0);
     masm.ret();
     return true;
 }
 
 bool
-BaselineCompiler::emitIC(ICStub* stub, bool isForOp)
-{
-    MOZ_ASSERT_IF(isForOp, BytecodeOpHasIC(JSOp(*pc)));
-
-    if (!stub) {
-        return false;
-    }
-
-    CodeOffset patchOffset, callOffset;
-    EmitCallIC(masm, &patchOffset, &callOffset);
-
-    // ICs need both an ICEntry and a RetAddrEntry.
-
-    RetAddrEntry::Kind kind = isForOp ? RetAddrEntry::Kind::IC : RetAddrEntry::Kind::NonOpIC;
+BaselineCompiler::emitNextIC()
+{
+    // Emit a call to an IC stored in ICScript. Calls to this must match the
+    // ICEntry order in ICScript: first the non-op IC entries for |this| and
+    // formal arguments, then the for-op IC entries for JOF_IC ops.
+
+    uint32_t pcOffset = script->pcToOffset(pc);
+
+    // We don't use every ICEntry and we can skip unreachable ops, so we have
+    // to loop until we find an ICEntry for the current pc.
+    const ICEntry* entry;
+    do {
+        entry = &script->icScript()->icEntry(icEntryIndex_);
+        icEntryIndex_++;
+    } while (entry->pcOffset() < pcOffset);
+
+    MOZ_RELEASE_ASSERT(entry->pcOffset() == pcOffset);
+    MOZ_ASSERT_IF(entry->isForOp(), BytecodeOpHasIC(JSOp(*pc)));
+
+    CodeOffset callOffset;
+    EmitCallIC(masm, entry, &callOffset);
+
+    RetAddrEntry::Kind kind =
+        entry->isForOp() ? RetAddrEntry::Kind::IC : RetAddrEntry::Kind::NonOpIC;
+
     if (!retAddrEntries_.emplaceBack(script->pcToOffset(pc), kind, callOffset)) {
         ReportOutOfMemory(cx);
         return false;
     }
 
-    if (!icEntries_.emplaceBack(stub, script->pcToOffset(pc), isForOp)) {
-        ReportOutOfMemory(cx);
-        return false;
-    }
-
-    if (!addICLoadLabel(patchOffset)) {
-        return false;
-    }
-
     return true;
 }
 
 void
 BaselineCompiler::prepareVMCall()
 {
     pushedBeforeCall_ = masm.framePushed();
 #ifdef DEBUG
@@ -942,18 +925,17 @@ BaselineCompiler::emitWarmUpCounterIncre
     masm.branchPtr(Assembler::Equal,
                    Address(scriptReg, JSScript::offsetOfIonScript()),
                    ImmPtr(ION_COMPILING_SCRIPT), &skipCall);
 
     // Try to compile and/or finish a compilation.
     if (JSOp(*pc) == JSOP_LOOPENTRY) {
         // During the loop entry we can try to OSR into ion.
         // The ic has logic for this.
-        ICWarmUpCounter_Fallback::Compiler stubCompiler(cx);
-        if (!emitNonOpIC(stubCompiler.getStub(&stubSpace_))) {
+        if (!emitNextIC()) {
             return false;
         }
     } else {
         // To call stubs we need to have an opcode. This code handles the
         // prologue and there is no dedicatd opcode present. Therefore use an
         // annotated vm call.
         prepareVMCall();
 
@@ -977,27 +959,25 @@ BaselineCompiler::emitArgumentTypeChecks
 {
     if (!function()) {
         return true;
     }
 
     frame.pushThis();
     frame.popRegsAndSync(1);
 
-    ICTypeMonitor_Fallback::Compiler compiler(cx, uint32_t(0));
-    if (!emitNonOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     for (size_t i = 0; i < function()->nargs(); i++) {
         frame.pushArg(i);
         frame.popRegsAndSync(1);
 
-        ICTypeMonitor_Fallback::Compiler compiler(cx, i + 1);
-        if (!emitNonOpIC(compiler.getStub(&stubSpace_))) {
+        if (!emitNextIC()) {
             return false;
         }
     }
 
     return true;
 }
 
 bool
@@ -1430,18 +1410,17 @@ BaselineCompiler::emit_JSOP_GOTO()
 
 bool
 BaselineCompiler::emitToBoolean()
 {
     Label skipIC;
     masm.branchTestBoolean(Assembler::Equal, R0, &skipIC);
 
     // Call IC
-    ICToBool_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     masm.bind(&skipIC);
     return true;
 }
 
 bool
@@ -1526,18 +1505,17 @@ BaselineCompiler::emit_JSOP_POS()
     // Keep top stack value in R0.
     frame.popRegsAndSync(1);
 
     // Inline path for int32 and double.
     Label done;
     masm.branchTestNumber(Assembler::Equal, R0, &done);
 
     // Call IC.
-    ICToNumber_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     masm.bind(&done);
     frame.push(R0);
     return true;
 }
 
@@ -2154,35 +2132,33 @@ BaselineCompiler::emit_JSOP_POW()
 
 bool
 BaselineCompiler::emitBinaryArith()
 {
     // Keep top JSStack value in R0 and R2
     frame.popRegsAndSync(2);
 
     // Call IC
-    ICBinaryArith_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emitUnaryArith()
 {
     // Keep top stack value in R0.
     frame.popRegsAndSync(1);
 
     // Call IC
-    ICUnaryArith_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -2238,18 +2214,17 @@ bool
 BaselineCompiler::emitCompare()
 {
     // CODEGEN
 
     // Keep top JSStack value in R0 and R1.
     frame.popRegsAndSync(2);
 
     // Call IC.
-    ICCompare_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0, JSVAL_TYPE_BOOLEAN);
     return true;
 }
 
@@ -2309,23 +2284,17 @@ BaselineCompiler::emit_JSOP_NEWARRAY()
     uint32_t length = GET_UINT32(pc);
     MOZ_ASSERT(length <= INT32_MAX,
                "the bytecode emitter must fail to compile code that would "
                "produce JSOP_NEWARRAY with a length exceeding int32_t range");
 
     // Pass length in R0.
     masm.move32(Imm32(AssertedCast<int32_t>(length)), R0.scratchReg());
 
-    ObjectGroup* group = ObjectGroup::allocationSiteGroup(cx, script, pc, JSProto_Array);
-    if (!group) {
-        return false;
-    }
-
-    ICNewArray_Fallback::Compiler stubCompiler(cx, group);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 typedef ArrayObject* (*NewArrayCopyOnWriteFn)(JSContext*, HandleArrayObject, gc::InitialHeap);
@@ -2367,47 +2336,44 @@ BaselineCompiler::emit_JSOP_INITELEM_ARR
     uint32_t index = GET_UINT32(pc);
     MOZ_ASSERT(index <= INT32_MAX,
                "the bytecode emitter must fail to compile code that would "
                "produce JSOP_INITELEM_ARRAY with a length exceeding "
                "int32_t range");
     masm.moveValue(Int32Value(AssertedCast<int32_t>(index)), R1);
 
     // Call IC.
-    ICSetElem_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Pop the rhs, so that the object is on the top of the stack.
     frame.pop();
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_NEWOBJECT()
 {
     frame.syncStack(0);
 
-    ICNewObject_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_NEWINIT()
 {
     frame.syncStack(0);
 
-    ICNewObject_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
@@ -2423,18 +2389,17 @@ BaselineCompiler::emit_JSOP_INITELEM()
     // Push the object to store the result of the IC.
     frame.push(R0);
     frame.syncStack(0);
 
     // Keep RHS on the stack.
     frame.pushScratchValue();
 
     // Call IC.
-    ICSetElem_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Pop the rhs, so that the object is on the top of the stack.
     frame.pop();
     return true;
 }
 
@@ -2474,18 +2439,17 @@ bool
 BaselineCompiler::emit_JSOP_INITPROP()
 {
     // Load lhs in R0, rhs in R1.
     frame.syncStack(0);
     masm.loadValue(frame.addressOfStackValue(frame.peek(-2)), R0);
     masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R1);
 
     // Call IC.
-    ICSetProp_Fallback::Compiler compiler(cx);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Leave the object on the stack.
     frame.pop();
     return true;
 }
 
@@ -2503,18 +2467,17 @@ BaselineCompiler::emit_JSOP_INITHIDDENPR
 
 bool
 BaselineCompiler::emit_JSOP_GETELEM()
 {
     // Keep top two stack values in R0 and R1.
     frame.popRegsAndSync(2);
 
     // Call IC.
-    ICGetElem_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -2526,18 +2489,17 @@ BaselineCompiler::emit_JSOP_GETELEM_SUPE
     frame.pop();
 
     // Keep receiver and index in R0 and R1.
     frame.popRegsAndSync(2);
 
     // Keep obj on the stack.
     frame.pushScratchValue();
 
-    ICGetElem_Fallback::Compiler stubCompiler(cx, /* hasReceiver = */ true);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.pop(); // This value is also popped in InitFromBailout.
     frame.push(R0);
     return true;
 }
 
@@ -2556,18 +2518,17 @@ BaselineCompiler::emit_JSOP_SETELEM()
 
     // Keep object and index in R0 and R1.
     frame.popRegsAndSync(2);
 
     // Keep RHS on the stack.
     frame.pushScratchValue();
 
     // Call IC.
-    ICSetElem_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_STRICTSETELEM()
@@ -2648,32 +2609,30 @@ BaselineCompiler::emit_JSOP_STRICTDELELE
     return emit_JSOP_DELELEM();
 }
 
 bool
 BaselineCompiler::emit_JSOP_IN()
 {
     frame.popRegsAndSync(2);
 
-    ICIn_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_HASOWN()
 {
     frame.popRegsAndSync(2);
 
-    ICHasOwn_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
@@ -2699,18 +2658,17 @@ BaselineCompiler::emit_JSOP_GETGNAME()
         return true;
     }
 
     frame.syncStack(0);
 
     masm.movePtr(ImmGCPtr(&script->global().lexicalEnvironment()), R0.scratchReg());
 
     // Call IC.
-    ICGetName_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -2774,18 +2732,17 @@ BaselineCompiler::emit_JSOP_SETPROP()
     // Keep lhs in R0, rhs in R1.
     frame.popRegsAndSync(2);
 
     // Keep RHS on the stack.
     frame.push(R1);
     frame.syncStack(0);
 
     // Call IC.
-    ICSetProp_Fallback::Compiler compiler(cx);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_STRICTSETPROP()
@@ -2860,18 +2817,17 @@ BaselineCompiler::emit_JSOP_STRICTSETPRO
 
 bool
 BaselineCompiler::emit_JSOP_GETPROP()
 {
     // Keep object in R0.
     frame.popRegsAndSync(1);
 
     // Call IC.
-    ICGetProp_Fallback::Compiler compiler(cx);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -2896,18 +2852,17 @@ BaselineCompiler::emit_JSOP_GETBOUNDNAME
 bool
 BaselineCompiler::emit_JSOP_GETPROP_SUPER()
 {
     // Receiver -> R1, Object -> R0
     frame.popRegsAndSync(1);
     masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R1);
     frame.pop();
 
-    ICGetProp_Fallback::Compiler compiler(cx, /* hasReceiver = */ true);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 
@@ -2983,18 +2938,17 @@ BaselineCompiler::emit_JSOP_GETALIASEDVA
 {
     frame.syncStack(0);
 
     Address address = getEnvironmentCoordinateAddress(R0.scratchReg());
     masm.loadValue(address, R0);
 
     if (ionCompileable_) {
         // No need to monitor types if we know Ion can't compile this script.
-        ICTypeMonitor_Fallback::Compiler compiler(cx, nullptr);
-        if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+        if (!emitNextIC()) {
             return false;
         }
     }
 
     frame.push(R0);
     return true;
 }
 
@@ -3010,18 +2964,17 @@ BaselineCompiler::emit_JSOP_SETALIASEDVA
         frame.syncStack(0);
         masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R1);
 
         // Load and box lhs into R0.
         getEnvironmentCoordinateObject(R2.scratchReg());
         masm.tagValue(JSVAL_TYPE_OBJECT, R2.scratchReg(), R0);
 
         // Call SETPROP IC.
-        ICSetProp_Fallback::Compiler compiler(cx);
-        if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+        if (!emitNextIC()) {
             return false;
         }
 
         return true;
     }
 
     // Keep rvalue in R0.
     frame.popRegsAndSync(1);
@@ -3050,18 +3003,17 @@ BaselineCompiler::emit_JSOP_SETALIASEDVA
 bool
 BaselineCompiler::emit_JSOP_GETNAME()
 {
     frame.syncStack(0);
 
     masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
 
     // Call IC.
-    ICGetName_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -3072,18 +3024,17 @@ BaselineCompiler::emit_JSOP_BINDNAME()
 
     if (*pc == JSOP_BINDGNAME && !script->hasNonSyntacticScope()) {
         masm.movePtr(ImmGCPtr(&script->global().lexicalEnvironment()), R0.scratchReg());
     } else {
         masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
     }
 
     // Call IC.
-    ICBindName_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
@@ -3140,33 +3091,31 @@ BaselineCompiler::emit_JSOP_GETIMPORT()
     if (targetEnv->getSlot(shape->slot()).isMagic(JS_UNINITIALIZED_LEXICAL)) {
         if (!emitUninitializedLexicalCheck(R0)) {
             return false;
         }
     }
 
     if (ionCompileable_) {
         // No need to monitor types if we know Ion can't compile this script.
-        ICTypeMonitor_Fallback::Compiler compiler(cx, nullptr);
-        if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+        if (!emitNextIC()) {
             return false;
         }
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_GETINTRINSIC()
 {
     frame.syncStack(0);
 
-    ICGetIntrinsic_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 typedef bool (*DefVarFn)(JSContext*, HandlePropertyName, unsigned, HandleObject);
@@ -3370,18 +3319,17 @@ BaselineCompiler::emit_JSOP_INITELEM_INC
     // Keep the object and rhs on the stack.
     frame.syncStack(0);
 
     // Load object in R0, index in R1.
     masm.loadValue(frame.addressOfStackValue(frame.peek(-3)), R0);
     masm.loadValue(frame.addressOfStackValue(frame.peek(-2)), R1);
 
     // Call IC.
-    ICSetElem_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Pop the rhs
     frame.pop();
 
     // Increment index
     Address indexAddr = frame.addressOfStackValue(frame.peek(-1));
@@ -3668,19 +3616,17 @@ BaselineCompiler::emitCall()
 
     bool construct = JSOp(*pc) == JSOP_NEW || JSOp(*pc) == JSOP_SUPERCALL;
     uint32_t argc = GET_ARGC(pc);
 
     frame.syncStack(0);
     masm.move32(Imm32(argc), R0.scratchReg());
 
     // Call IC
-    ICCall_Fallback::Compiler stubCompiler(cx, /* isConstructing = */ construct,
-                                           /* isSpread = */ false);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Update FrameInfo.
     frame.popn(2 + argc + construct);
     frame.push(R0);
     return true;
 }
@@ -3690,19 +3636,17 @@ BaselineCompiler::emitSpreadCall()
 {
     MOZ_ASSERT(IsCallPC(pc));
 
     frame.syncStack(0);
     masm.move32(Imm32(1), R0.scratchReg());
 
     // Call IC
     bool construct = JSOp(*pc) == JSOP_SPREADNEW || JSOp(*pc) == JSOP_SPREADSUPERCALL;
-    ICCall_Fallback::Compiler stubCompiler(cx, /* isConstructing = */ construct,
-                                           /* isSpread = */ true);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Update FrameInfo.
     frame.popn(3 + construct);
     frame.push(R0);
     return true;
 }
@@ -3848,32 +3792,30 @@ BaselineCompiler::emit_JSOP_GIMPLICITTHI
     return emit_JSOP_IMPLICITTHIS();
 }
 
 bool
 BaselineCompiler::emit_JSOP_INSTANCEOF()
 {
     frame.popRegsAndSync(2);
 
-    ICInstanceOf_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_TYPEOF()
 {
     frame.popRegsAndSync(1);
 
-    ICTypeOf_Fallback::Compiler stubCompiler(cx);
-    if (!emitOpIC(stubCompiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
@@ -4531,33 +4473,31 @@ BaselineCompiler::emit_JSOP_TABLESWITCH(
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_ITER()
 {
     frame.popRegsAndSync(1);
 
-    ICGetIterator_Fallback::Compiler compiler(cx);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_MOREITER()
 {
     frame.syncStack(0);
     masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R0);
 
-    ICIteratorMore_Fallback::Compiler compiler(cx);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     frame.push(R0);
     return true;
 }
 
 bool
@@ -4588,18 +4528,21 @@ BaselineCompiler::emit_JSOP_ISNOITER()
 bool
 BaselineCompiler::emit_JSOP_ENDITER()
 {
     if (!emit_JSOP_JUMPTARGET()) {
         return false;
     }
     frame.popRegsAndSync(1);
 
-    ICIteratorClose_Fallback::Compiler compiler(cx);
-    return emitOpIC(compiler.getStub(&stubSpace_));
+    if (!emitNextIC()) {
+        return false;
+    }
+
+    return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_ISGENCLOSING()
 {
     return emitIsMagicValue();
 }
 
@@ -4825,26 +4768,17 @@ BaselineCompiler::emit_JSOP_RUNONCE()
     return callVM(RunOnceScriptPrologueInfo);
 }
 
 bool
 BaselineCompiler::emit_JSOP_REST()
 {
     frame.syncStack(0);
 
-    ArrayObject* templateObject =
-        ObjectGroup::newArrayObject(cx, nullptr, 0, TenuredObject,
-                                    ObjectGroup::NewArrayKind::UnknownIndex);
-    if (!templateObject) {
-        return false;
-    }
-
-    // Call IC.
-    ICRest_Fallback::Compiler compiler(cx, templateObject);
-    if (!emitOpIC(compiler.getStub(&stubSpace_))) {
+    if (!emitNextIC()) {
         return false;
     }
 
     // Mark R0 as pushed stack value.
     frame.push(R0);
     return true;
 }
 
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -257,43 +257,34 @@ class BaselineCompiler final
     StackMacroAssembler masm;
     bool ionCompileable_;
     bool compileDebugInstrumentation_;
 
     TempAllocator& alloc_;
     BytecodeAnalysis analysis_;
     FrameInfo frame;
 
-    FallbackICStubSpace stubSpace_;
-    js::Vector<ICEntry, 16, SystemAllocPolicy> icEntries_;
     js::Vector<RetAddrEntry, 16, SystemAllocPolicy> retAddrEntries_;
 
     // Stores the native code offset for a bytecode pc.
     struct PCMappingEntry
     {
         uint32_t pcOffset;
         uint32_t nativeOffset;
         PCMappingSlotInfo slotInfo;
 
         // If set, insert a PCMappingIndexEntry before encoding the
         // current entry.
         bool addIndexEntry;
     };
 
     js::Vector<PCMappingEntry, 16, SystemAllocPolicy> pcMappingEntries_;
 
-    // Labels for the 'movWithPatch' for loading IC entry pointers in
-    // the generated IC-calling code in the main jitcode.  These need
-    // to be patched with the actual icEntry offsets after the BaselineScript
-    // has been allocated.
-    struct ICLoadLabel {
-        size_t icEntry;
-        CodeOffset label;
-    };
-    js::Vector<ICLoadLabel, 16, SystemAllocPolicy> icLoadLabels_;
+    // Index of the current ICEntry in the script's ICScript.
+    uint32_t icEntryIndex_;
 
     uint32_t pushedBeforeCall_;
 #ifdef DEBUG
     bool inCall_;
 #endif
 
     CodeOffset profilerPushToggleOffset_;
     CodeOffset profilerEnterFrameToggleOffset_;
@@ -346,28 +337,16 @@ class BaselineCompiler final
     MOZ_MUST_USE bool appendRetAddrEntry(RetAddrEntry::Kind kind, uint32_t retOffset) {
         if (!retAddrEntries_.emplaceBack(script->pcToOffset(pc), kind, CodeOffset(retOffset))) {
             ReportOutOfMemory(cx);
             return false;
         }
         return true;
     }
 
-    bool addICLoadLabel(CodeOffset label) {
-        MOZ_ASSERT(!icEntries_.empty());
-        ICLoadLabel loadLabel;
-        loadLabel.label = label;
-        loadLabel.icEntry = icEntries_.length() - 1;
-        if (!icLoadLabels_.append(loadLabel)) {
-            ReportOutOfMemory(cx);
-            return false;
-        }
-        return true;
-    }
-
     JSFunction* function() const {
         // Not delazifying here is ok as the function is guaranteed to have
         // been delazified before compilation started.
         return script->functionNonDelazifying();
     }
 
     ModuleObject* module() const {
         return script->module();
@@ -415,24 +394,17 @@ class BaselineCompiler final
 
     MOZ_MUST_USE bool emitCheckThis(ValueOperand val, bool reinit=false);
     void emitLoadReturnValue(ValueOperand val);
 
     void emitInitializeLocals();
     MOZ_MUST_USE bool emitPrologue();
     MOZ_MUST_USE bool emitEpilogue();
     MOZ_MUST_USE bool emitOutOfLinePostBarrierSlot();
-    MOZ_MUST_USE bool emitIC(ICStub* stub, bool isForOp);
-    MOZ_MUST_USE bool emitOpIC(ICStub* stub) {
-        return emitIC(stub, true);
-    }
-    MOZ_MUST_USE bool emitNonOpIC(ICStub* stub) {
-        return emitIC(stub, false);
-    }
-
+    MOZ_MUST_USE bool emitNextIC();
     MOZ_MUST_USE bool emitStackCheck();
     MOZ_MUST_USE bool emitInterruptCheck();
     MOZ_MUST_USE bool emitWarmUpCounterIncrement(bool allowOsr=true);
     MOZ_MUST_USE bool emitArgumentTypeChecks();
     void emitIsDebuggeeCheck();
     MOZ_MUST_USE bool emitDebugPrologue();
     MOZ_MUST_USE bool emitDebugTrap();
     MOZ_MUST_USE bool emitTraceLoggerEnter();
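
As an aside on the new compiler protocol: ICScript::create (added in BaselineIC.cpp further below) lays out ICEntries eagerly, in bytecode order, and BaselineCompiler::emitNextIC consumes them with the icEntryIndex_ cursor declared here, skipping entries for ops the compiler never reached. A small stand-alone model of that matching loop, using hypothetical names rather than the real jit classes:

    // Stand-alone model (hypothetical names) of the entry-matching loop in
    // BaselineCompiler::emitNextIC.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Entry { uint32_t pcOffset; };     // stands in for ICEntry

    class EntryCursor {
        const std::vector<Entry>& entries_;  // laid out in bytecode order
        size_t index_ = 0;                   // stands in for icEntryIndex_
      public:
        explicit EntryCursor(const std::vector<Entry>& entries) : entries_(entries) {}

        // Return the entry for the op at |pcOffset|; entries belonging to
        // skipped (unreachable) ops have smaller offsets and are passed over.
        const Entry& next(uint32_t pcOffset) {
            const Entry* entry;
            do {
                entry = &entries_[index_++];
            } while (entry->pcOffset < pcOffset);
            assert(entry->pcOffset == pcOffset);
            return *entry;
        }
    };

The assert on matching offsets mirrors the MOZ_RELEASE_ASSERT in emitNextIC.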
--- a/js/src/jit/BaselineDebugModeOSR.cpp
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -18,77 +18,65 @@
 
 using namespace js;
 using namespace js::jit;
 
 struct DebugModeOSREntry
 {
     JSScript* script;
     BaselineScript* oldBaselineScript;
-    ICStub* oldStub;
-    ICStub* newStub;
     BaselineDebugModeOSRInfo* recompInfo;
     uint32_t pcOffset;
     RetAddrEntry::Kind frameKind;
 
     explicit DebugModeOSREntry(JSScript* script)
       : script(script),
         oldBaselineScript(script->baselineScript()),
-        oldStub(nullptr),
-        newStub(nullptr),
         recompInfo(nullptr),
         pcOffset(uint32_t(-1)),
         frameKind(RetAddrEntry::Kind::Invalid)
     { }
 
     DebugModeOSREntry(JSScript* script, uint32_t pcOffset)
       : script(script),
         oldBaselineScript(script->baselineScript()),
-        oldStub(nullptr),
-        newStub(nullptr),
         recompInfo(nullptr),
         pcOffset(pcOffset),
         frameKind(RetAddrEntry::Kind::Invalid)
     { }
 
     DebugModeOSREntry(JSScript* script, const RetAddrEntry& retAddrEntry)
       : script(script),
         oldBaselineScript(script->baselineScript()),
-        oldStub(nullptr),
-        newStub(nullptr),
         recompInfo(nullptr),
         pcOffset(retAddrEntry.pcOffset()),
         frameKind(retAddrEntry.kind())
     {
 #ifdef DEBUG
         MOZ_ASSERT(pcOffset == retAddrEntry.pcOffset());
         MOZ_ASSERT(frameKind == retAddrEntry.kind());
 #endif
     }
 
     DebugModeOSREntry(JSScript* script, BaselineDebugModeOSRInfo* info)
       : script(script),
         oldBaselineScript(script->baselineScript()),
-        oldStub(nullptr),
-        newStub(nullptr),
         recompInfo(nullptr),
         pcOffset(script->pcToOffset(info->pc)),
         frameKind(info->frameKind)
     {
 #ifdef DEBUG
         MOZ_ASSERT(pcOffset == script->pcToOffset(info->pc));
         MOZ_ASSERT(frameKind == info->frameKind);
 #endif
     }
 
     DebugModeOSREntry(DebugModeOSREntry&& other)
       : script(other.script),
         oldBaselineScript(other.oldBaselineScript),
-        oldStub(other.oldStub),
-        newStub(other.newStub),
         recompInfo(other.recompInfo ? other.takeRecompInfo() : nullptr),
         pcOffset(other.pcOffset),
         frameKind(other.frameKind)
     { }
 
     ~DebugModeOSREntry() {
         // Note that this is nulled out when the recompInfo is taken by the
         // frame. The frame then has the responsibility of freeing the
@@ -127,22 +115,16 @@ struct DebugModeOSREntry
         jsbytecode* pc = script->offsetToPC(pcOffset);
 
         // XXX: Work around compiler error disallowing using bitfields
         // with the template magic of new_.
         RetAddrEntry::Kind kind = frameKind;
         recompInfo = cx->new_<BaselineDebugModeOSRInfo>(pc, kind);
         return !!recompInfo;
     }
-
-    ICFallbackStub* fallbackStub() const {
-        MOZ_ASSERT(script);
-        MOZ_ASSERT(oldStub);
-        return script->baselineScript()->icEntryFromPCOffset(pcOffset).fallbackStub();
-    }
 };
 
 typedef Vector<DebugModeOSREntry> DebugModeOSREntryVector;
 
 class UniqueScriptOSREntryIter
 {
     const DebugModeOSREntryVector& entries_;
     size_t index_;
@@ -179,26 +161,24 @@ class UniqueScriptOSREntryIter
         return *this;
     }
 };
 
 static bool
 CollectJitStackScripts(JSContext* cx, const Debugger::ExecutionObservableSet& obs,
                        const ActivationIterator& activation, DebugModeOSREntryVector& entries)
 {
-    ICStub* prevFrameStubPtr = nullptr;
     bool needsRecompileHandler = false;
     for (OnlyJSJitFrameIter iter(activation); !iter.done(); ++iter) {
         const JSJitFrameIter& frame = iter.frame();
         switch (frame.type()) {
           case FrameType::BaselineJS: {
             JSScript* script = frame.script();
 
             if (!obs.shouldRecompileOrInvalidate(script)) {
-                prevFrameStubPtr = nullptr;
                 break;
             }
 
             BaselineFrame* baselineFrame = frame.baselineFrame();
 
             if (BaselineDebugModeOSRInfo* info = baselineFrame->getDebugModeOSRInfo()) {
                 // If patching a previously patched yet unpopped frame, we can
                 // use the BaselineDebugModeOSRInfo on the frame directly to
@@ -230,24 +210,20 @@ CollectJitStackScripts(JSContext* cx, co
 
             if (entries.back().needsRecompileInfo()) {
                 if (!entries.back().allocateRecompileInfo(cx)) {
                     return false;
                 }
 
                 needsRecompileHandler |= true;
             }
-            entries.back().oldStub = prevFrameStubPtr;
-            prevFrameStubPtr = nullptr;
             break;
           }
 
           case FrameType::BaselineStub:
-            prevFrameStubPtr =
-                reinterpret_cast<BaselineStubFrameLayout*>(frame.fp())->maybeStubPtr();
             break;
 
           case FrameType::IonJS: {
             InlineFrameIterator inlineIter(cx, &frame);
             while (true) {
                 if (obs.shouldRecompileOrInvalidate(inlineIter.script())) {
                     if (!entries.append(DebugModeOSREntry(inlineIter.script()))) {
                         return false;
@@ -342,38 +318,30 @@ SpewPatchBaselineFrameFromExceptionHandl
 {
     JitSpew(JitSpew_BaselineDebugModeOSR,
             "Patch return %p -> %p on BaselineJS frame (%s:%u:%u) from exception handler at %s",
             oldReturnAddress, newReturnAddress, script->filename(), script->lineno(),
             script->column(), CodeName[(JSOp)*pc]);
 }
 
 static void
-SpewPatchStubFrame(ICStub* oldStub, ICStub* newStub)
-{
-    JitSpew(JitSpew_BaselineDebugModeOSR,
-            "Patch   stub %p -> %p on BaselineStub frame (%s)",
-            oldStub, newStub, newStub ? ICStub::KindString(newStub->kind()) : "exception handler");
-}
-
-static void
 PatchBaselineFramesForDebugMode(JSContext* cx,
                                 const Debugger::ExecutionObservableSet& obs,
                                 const ActivationIterator& activation,
                                 DebugModeOSREntryVector& entries, size_t* start)
 {
     //
     // Recompile Patching Overview
     //
     // When toggling debug mode with live baseline scripts on the stack, we
     // could have entered the VM via the following ways from the baseline
     // script.
     //
     // Off to On:
-    //  A. From a "can call" stub.
+    //  A. From a "can call" IC stub.
     //  B. From a VM call.
     //  H. From inside HandleExceptionBaseline
     //  I. From inside the interrupt handler via the prologue stack check.
     //  J. From the warmup counter in the prologue.
     //
     // On to Off:
     //  - All the ways above.
     //  C. From the debug trap handler.
@@ -419,23 +387,19 @@ PatchBaselineFramesForDebugMode(JSContex
             MOZ_ASSERT(pcOffset < script->length());
 
             BaselineScript* bl = script->baselineScript();
             RetAddrEntry::Kind kind = entry.frameKind;
 
             if (kind == RetAddrEntry::Kind::IC) {
                 // Case A above.
                 //
-                // Patching these cases needs to patch both the stub frame and
-                // the baseline frame. The stub frame is patched below. For
-                // the baseline frame here, we resume right after the IC
-                // returns.
-                //
-                // Since we're using the same IC stub code, we can resume
-                // directly to the IC resume address.
+                // For the baseline frame here, we resume right after the IC
+                // returns. Since we're using the same IC stubs and stub code,
+                // we don't have to patch the stub or stub frame.
                 RetAddrEntry& retAddrEntry = bl->retAddrEntryFromPCOffset(pcOffset, kind);
                 uint8_t* retAddr = bl->returnAddressForEntry(retAddrEntry);
                 SpewPatchBaselineFrame(prev->returnAddress(), retAddr, script, kind, pc);
                 DebugModeOSRVolatileJitFrameIter::forwardLiveIterators(
                     cx, prev->returnAddress(), retAddr);
                 prev->setReturnAddress(retAddr);
                 entryIndex++;
                 break;
@@ -585,61 +549,16 @@ PatchBaselineFramesForDebugMode(JSContex
             prev->setReturnAddress(reinterpret_cast<uint8_t*>(handlerAddr));
             frame.baselineFrame()->setDebugModeOSRInfo(recompInfo);
             frame.baselineFrame()->setOverridePc(recompInfo->pc);
 
             entryIndex++;
             break;
           }
 
-          case FrameType::BaselineStub: {
-            JSJitFrameIter prev(iter.frame());
-            ++prev;
-            BaselineFrame* prevFrame = prev.baselineFrame();
-            if (!obs.shouldRecompileOrInvalidate(prevFrame->script())) {
-                break;
-            }
-
-            DebugModeOSREntry& entry = entries[entryIndex];
-
-            // If the script wasn't recompiled, there's nothing to patch.
-            if (!entry.recompiled()) {
-                break;
-            }
-
-            BaselineStubFrameLayout* layout =
-                reinterpret_cast<BaselineStubFrameLayout*>(frame.fp());
-            MOZ_ASSERT(layout->maybeStubPtr() == entry.oldStub);
-
-            // Patch baseline stub frames for case A above.
-            //
-            // We need to patch the stub frame to point to an ICStub belonging
-            // to the recompiled baseline script. These stubs are allocated up
-            // front in CloneOldBaselineStub. They share the same JitCode as
-            // the old baseline script's stubs, so we don't need to patch the
-            // exit frame's return address.
-            //
-            // Subtlety here: the debug trap handler of case C above pushes a
-            // stub frame with a null stub pointer. This handler will exist
-            // across recompiling the script, so we don't patch anything for
-            // such stub frames. We will return to that handler, which takes
-            // care of cleaning up the stub frame.
-            //
-            // Note that for stub pointers that are already on the C stack
-            // (i.e. fallback calls), we need to check for recompilation using
-            // DebugModeOSRVolatileStub.
-            if (layout->maybeStubPtr()) {
-                MOZ_ASSERT(entry.newStub || prevFrame->isHandlingException());
-                SpewPatchStubFrame(entry.oldStub, entry.newStub);
-                layout->setStubPtr(entry.newStub);
-            }
-
-            break;
-          }
-
           case FrameType::IonJS: {
             // Nothing to patch.
             InlineFrameIterator inlineIter(cx, &frame);
             while (true) {
                 if (obs.shouldRecompileOrInvalidate(inlineIter.script())) {
                     entryIndex++;
                 }
                 if (!inlineIter.more()) {
@@ -707,123 +626,16 @@ RecompileBaselineScriptForDebugMode(JSCo
     }
 
     // Don't destroy the old baseline script yet, since if we fail any of the
     // recompiles we need to rollback all the old baseline scripts.
     MOZ_ASSERT(script->baselineScript()->hasDebugInstrumentation() == observing);
     return true;
 }
 
-#define PATCHABLE_ICSTUB_KIND_LIST(_)           \
-    _(CacheIR_Monitored)                        \
-    _(CacheIR_Regular)                          \
-    _(CacheIR_Updated)                          \
-    _(Call_Scripted)                            \
-    _(Call_AnyScripted)                         \
-    _(Call_Native)                              \
-    _(Call_ClassHook)                           \
-    _(Call_ScriptedApplyArray)                  \
-    _(Call_ScriptedApplyArguments)              \
-    _(Call_ScriptedFunCall)
-
-static bool
-CloneOldBaselineStub(JSContext* cx, DebugModeOSREntryVector& entries, size_t entryIndex)
-{
-    DebugModeOSREntry& entry = entries[entryIndex];
-    if (!entry.oldStub) {
-        return true;
-    }
-
-    ICStub* oldStub = entry.oldStub;
-    MOZ_ASSERT(oldStub->makesGCCalls());
-
-    // If this script was not recompiled (because it already had the correct
-    // debug instrumentation), don't clone to avoid attaching duplicate stubs.
-    if (!entry.recompiled()) {
-        entry.newStub = nullptr;
-        return true;
-    }
-
-    if (entry.frameKind == RetAddrEntry::Kind::Invalid) {
-        // The exception handler can modify the frame's override pc while
-        // unwinding scopes. This is fine, but if we have a stub frame, the code
-        // code below will get confused: the entry's pcOffset doesn't match the
-        // stub that's still on the stack. To prevent that, we just set the new
-        // stub to nullptr as we will never return to this stub frame anyway.
-        entry.newStub = nullptr;
-        return true;
-    }
-
-    // Get the new fallback stub from the recompiled baseline script.
-    ICFallbackStub* fallbackStub = entry.fallbackStub();
-
-    // Some stubs are monitored, get the first stub in the monitor chain from
-    // the new fallback stub if so. We do this before checking for fallback
-    // stubs below, to ensure monitored fallback stubs have a type monitor
-    // chain.
-    ICStub* firstMonitorStub;
-    if (fallbackStub->isMonitoredFallback()) {
-        ICMonitoredFallbackStub* monitored = fallbackStub->toMonitoredFallbackStub();
-        ICTypeMonitor_Fallback* fallback = monitored->getFallbackMonitorStub(cx, entry.script);
-        if (!fallback) {
-            return false;
-        }
-        firstMonitorStub = fallback->firstMonitorStub();
-    } else {
-        firstMonitorStub = nullptr;
-    }
-
-    // We don't need to clone fallback stubs, as they are guaranteed to
-    // exist. Furthermore, their JitCode is cached and should be the same even
-    // across the recompile.
-    if (oldStub->isFallback()) {
-        MOZ_ASSERT(oldStub->jitCode() == fallbackStub->jitCode());
-        entry.newStub = fallbackStub;
-        return true;
-    }
-
-    // Check if we have already cloned the stub on a younger frame. Ignore
-    // frames that entered the exception handler (entries[i].newStub is nullptr
-    // in that case, see above).
-    for (size_t i = 0; i < entryIndex; i++) {
-        if (oldStub == entries[i].oldStub && entries[i].frameKind != RetAddrEntry::Kind::Invalid) {
-            MOZ_ASSERT(entries[i].newStub);
-            entry.newStub = entries[i].newStub;
-            return true;
-        }
-    }
-
-    ICStubSpace* stubSpace = ICStubCompiler::StubSpaceForStub(oldStub->makesGCCalls(),
-                                                              entry.script);
-
-    // Clone the existing stub into the recompiled IC.
-    //
-    // Note that since JitCode is a GC thing, cloning an ICStub with the same
-    // JitCode ensures it won't be collected.
-    switch (oldStub->kind()) {
-#define CASE_KIND(kindName)                                                  \
-      case ICStub::kindName:                                                 \
-        entry.newStub = IC##kindName::Clone(cx, stubSpace, firstMonitorStub, \
-                                            *oldStub->to##kindName());       \
-        break;
-        PATCHABLE_ICSTUB_KIND_LIST(CASE_KIND)
-#undef CASE_KIND
-
-      default:
-        MOZ_CRASH("Bad stub kind");
-    }
-
-    if (!entry.newStub) {
-        return false;
-    }
-
-    fallbackStub->addNewStub(entry.newStub);
-    return true;
-}
-
 static bool
 InvalidateScriptsInZone(JSContext* cx, Zone* zone, const Vector<DebugModeOSREntry>& entries)
 {
     RecompileInfoVector invalid;
     for (UniqueScriptOSREntryIter iter(entries); !iter.done(); ++iter) {
         JSScript* script = iter.entry().script;
         if (script->zone() != zone) {
             continue;
@@ -913,19 +725,17 @@ jit::RecompileOnStackBaselineScriptsForD
     }
 
     // Try to recompile all the scripts. If we encounter an error, we need to
     // roll back as if none of the compilations happened, so that we don't
     // crash.
     for (size_t i = 0; i < entries.length(); i++) {
         JSScript* script = entries[i].script;
         AutoRealm ar(cx, script);
-        if (!RecompileBaselineScriptForDebugMode(cx, script, observing) ||
-            !CloneOldBaselineStub(cx, entries, i))
-        {
+        if (!RecompileBaselineScriptForDebugMode(cx, script, observing)) {
             UndoRecompileBaselineScriptsForDebugMode(cx, entries);
             return false;
         }
     }
 
     // If all recompiles succeeded, destroy the old baseline scripts and patch
     // the live frames.
     //
--- a/js/src/jit/BaselineDebugModeOSR.h
+++ b/js/src/jit/BaselineDebugModeOSR.h
@@ -17,65 +17,16 @@
 namespace js {
 namespace jit {
 
 // Note that this file and the corresponding .cpp implement debug mode
 // on-stack recompilation. This is to be distinguished from ordinary
 // Baseline->Ion OSR, which is used to jump into compiled loops.
 
 //
-// A volatile location due to recompilation of an on-stack baseline script
-// (e.g., for debug mode toggling).
-//
-// It is usually used in fallback stubs which may trigger on-stack
-// recompilation by calling out into the VM. Example use:
-//
-//     DebugModeOSRVolatileStub<FallbackStubT*> stub(frame, stub_)
-//
-//     // Call out to the VM
-//     // Other effectful operations like TypeScript::Monitor
-//
-//     if (stub.invalid()) {
-//         return true;
-//     }
-//
-//     // First use of stub after VM call.
-//
-template <typename T>
-class DebugModeOSRVolatileStub
-{
-    T stub_;
-    BaselineFrame* frame_;
-    uint32_t pcOffset_;
-
-  public:
-    DebugModeOSRVolatileStub(BaselineFrame* frame, ICFallbackStub* stub)
-      : stub_(static_cast<T>(stub)),
-        frame_(frame),
-        pcOffset_(stub->icEntry()->pcOffset())
-    { }
-
-    bool invalid() const {
-        MOZ_ASSERT(!frame_->isHandlingException());
-        ICEntry& entry = frame_->script()->baselineScript()->icEntryFromPCOffset(pcOffset_);
-        return stub_ != entry.fallbackStub();
-    }
-
-    operator const T&() const { MOZ_ASSERT(!invalid()); return stub_; }
-    T operator->() const { MOZ_ASSERT(!invalid()); return stub_; }
-    T* address() { MOZ_ASSERT(!invalid()); return &stub_; }
-    const T* address() const { MOZ_ASSERT(!invalid()); return &stub_; }
-    T& get() { MOZ_ASSERT(!invalid()); return stub_; }
-    const T& get() const { MOZ_ASSERT(!invalid()); return stub_; }
-
-    bool operator!=(const T& other) const { MOZ_ASSERT(!invalid()); return stub_ != other; }
-    bool operator==(const T& other) const { MOZ_ASSERT(!invalid()); return stub_ == other; }
-};
-
-//
 // A frame iterator that updates internal JSJitFrameIter in case of
 // recompilation of an on-stack baseline script.
 //
 
 class DebugModeOSRVolatileJitFrameIter : public JitFrameIter
 {
     DebugModeOSRVolatileJitFrameIter** stack;
     DebugModeOSRVolatileJitFrameIter* prev;
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -118,16 +118,345 @@ ICEntry::fallbackStub() const
 void
 ICEntry::trace(JSTracer* trc)
 {
     for (ICStub* stub = firstStub(); stub; stub = stub->next()) {
         stub->trace(trc);
     }
 }
 
+/* static */ UniquePtr<ICScript>
+ICScript::create(JSContext* cx, JSScript* script)
+{
+    FallbackICStubSpace stubSpace;
+    js::Vector<ICEntry, 16, SystemAllocPolicy> icEntries;
+
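+    // Helper that appends an ICEntry for |stub| to |icEntries|. A null |pc|
+    // marks a non-op (prologue type-monitor) entry; see ICEntry::NonOpPCOffset.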
+    auto addIC = [cx, &icEntries, script](jsbytecode* pc, ICStub* stub) {
+        if (!stub) {
+            MOZ_ASSERT(cx->isExceptionPending());
+            return false;
+        }
+        uint32_t offset = pc ? script->pcToOffset(pc) : ICEntry::NonOpPCOffset;
+        if (!icEntries.emplaceBack(stub, offset)) {
+            ReportOutOfMemory(cx);
+            return false;
+        }
+        return true;
+    };
+
+    // Add ICEntries and fallback stubs for this/argument type checks.
+    // Note: we pass a nullptr pc to indicate this is a non-op IC.
+    // See ICEntry::NonOpPCOffset.
+    if (JSFunction* fun = script->functionNonDelazifying()) {
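+        // Index 0 is the type monitor IC for |this|; formal argument |i| uses
+        // index |i| + 1, matching the entry order documented on ICScript.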
+        ICTypeMonitor_Fallback::Compiler compiler(cx, uint32_t(0));
+        if (!addIC(nullptr, compiler.getStub(&stubSpace))) {
+            return nullptr;
+        }
+
+        for (size_t i = 0; i < fun->nargs(); i++) {
+            ICTypeMonitor_Fallback::Compiler compiler(cx, i + 1);
+            if (!addIC(nullptr, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+        }
+    }
+
+    jsbytecode const* pcEnd = script->codeEnd();
+
+    // Add ICEntries and fallback stubs for JOF_IC bytecode ops.
+    for (jsbytecode* pc = script->code(); pc < pcEnd; pc = GetNextPc(pc)) {
+        JSOp op = JSOp(*pc);
+        if (!BytecodeOpHasIC(op)) {
+            continue;
+        }
+
+        switch (op) {
+          case JSOP_NOT:
+          case JSOP_AND:
+          case JSOP_OR:
+          case JSOP_IFEQ:
+          case JSOP_IFNE: {
+            ICToBool_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_BITNOT:
+          case JSOP_NEG: {
+            ICUnaryArith_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_BITOR:
+          case JSOP_BITXOR:
+          case JSOP_BITAND:
+          case JSOP_LSH:
+          case JSOP_RSH:
+          case JSOP_URSH:
+          case JSOP_ADD:
+          case JSOP_SUB:
+          case JSOP_MUL:
+          case JSOP_DIV:
+          case JSOP_MOD:
+          case JSOP_POW: {
+            ICBinaryArith_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_EQ:
+          case JSOP_NE:
+          case JSOP_LT:
+          case JSOP_LE:
+          case JSOP_GT:
+          case JSOP_GE:
+          case JSOP_STRICTEQ:
+          case JSOP_STRICTNE: {
+            ICCompare_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_POS: {
+            ICToNumber_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_LOOPENTRY: {
+            ICWarmUpCounter_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_NEWARRAY: {
+            ObjectGroup* group = ObjectGroup::allocationSiteGroup(cx, script, pc, JSProto_Array);
+            if (!group) {
+                return nullptr;
+            }
+            ICNewArray_Fallback::Compiler stubCompiler(cx, group);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_NEWOBJECT:
+          case JSOP_NEWINIT: {
+            ICNewObject_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_INITELEM:
+          case JSOP_INITHIDDENELEM:
+          case JSOP_INITELEM_ARRAY:
+          case JSOP_INITELEM_INC:
+          case JSOP_SETELEM:
+          case JSOP_STRICTSETELEM: {
+            ICSetElem_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_INITPROP:
+          case JSOP_INITLOCKEDPROP:
+          case JSOP_INITHIDDENPROP:
+          case JSOP_SETALIASEDVAR:
+          case JSOP_INITGLEXICAL:
+          case JSOP_INITALIASEDLEXICAL:
+          case JSOP_SETPROP:
+          case JSOP_STRICTSETPROP:
+          case JSOP_SETNAME:
+          case JSOP_STRICTSETNAME:
+          case JSOP_SETGNAME:
+          case JSOP_STRICTSETGNAME: {
+            ICSetProp_Fallback::Compiler compiler(cx);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_GETPROP:
+          case JSOP_CALLPROP:
+          case JSOP_LENGTH:
+          case JSOP_GETPROP_SUPER:
+          case JSOP_GETBOUNDNAME: {
+            bool hasReceiver = (op == JSOP_GETPROP_SUPER);
+            ICGetProp_Fallback::Compiler compiler(cx, hasReceiver);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_GETELEM:
+          case JSOP_CALLELEM:
+          case JSOP_GETELEM_SUPER: {
+            bool hasReceiver = (op == JSOP_GETELEM_SUPER);
+            ICGetElem_Fallback::Compiler stubCompiler(cx, hasReceiver);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_IN: {
+            ICIn_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_HASOWN: {
+            ICHasOwn_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_GETNAME:
+          case JSOP_GETGNAME: {
+            ICGetName_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_BINDNAME:
+          case JSOP_BINDGNAME: {
+            ICBindName_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_GETALIASEDVAR:
+          case JSOP_GETIMPORT: {
+            ICTypeMonitor_Fallback::Compiler compiler(cx, nullptr);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_GETINTRINSIC: {
+            ICGetIntrinsic_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_CALL:
+          case JSOP_CALL_IGNORES_RV:
+          case JSOP_CALLITER:
+          case JSOP_SUPERCALL:
+          case JSOP_FUNCALL:
+          case JSOP_FUNAPPLY:
+          case JSOP_NEW:
+          case JSOP_EVAL:
+          case JSOP_STRICTEVAL: {
+            bool construct = JSOp(*pc) == JSOP_NEW || JSOp(*pc) == JSOP_SUPERCALL;
+            ICCall_Fallback::Compiler stubCompiler(cx, /* isConstructing = */ construct,
+                                                   /* isSpread = */ false);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_SPREADCALL:
+          case JSOP_SPREADSUPERCALL:
+          case JSOP_SPREADNEW:
+          case JSOP_SPREADEVAL:
+          case JSOP_STRICTSPREADEVAL: {
+            bool construct = JSOp(*pc) == JSOP_SPREADNEW || JSOp(*pc) == JSOP_SPREADSUPERCALL;
+            ICCall_Fallback::Compiler stubCompiler(cx, /* isConstructing = */ construct,
+                                                   /* isSpread = */ true);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_INSTANCEOF: {
+            ICInstanceOf_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_TYPEOF:
+          case JSOP_TYPEOFEXPR: {
+            ICTypeOf_Fallback::Compiler stubCompiler(cx);
+            if (!addIC(pc, stubCompiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_ITER: {
+            ICGetIterator_Fallback::Compiler compiler(cx);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_MOREITER: {
+            ICIteratorMore_Fallback::Compiler compiler(cx);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_ENDITER: {
+            ICIteratorClose_Fallback::Compiler compiler(cx);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          case JSOP_REST: {
+            ArrayObject* templateObject =
+                ObjectGroup::newArrayObject(cx, nullptr, 0, TenuredObject,
+                                            ObjectGroup::NewArrayKind::UnknownIndex);
+            if (!templateObject) {
+                return nullptr;
+            }
+            ICRest_Fallback::Compiler compiler(cx, templateObject);
+            if (!addIC(pc, compiler.getStub(&stubSpace))) {
+                return nullptr;
+            }
+            break;
+          }
+          default:
+            MOZ_CRASH("JOF_IC op not handled");
+        }
+    }
+
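+    // Allocate the ICScript with the ICEntry array stored inline behind it;
+    // see ICScript::icEntryList.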
+    UniquePtr<ICScript> icScript(
+        script->zone()->pod_malloc_with_extra<ICScript, ICEntry>(icEntries.length()));
+    if (!icScript) {
+        ReportOutOfMemory(cx);
+        return nullptr;
+    }
+    new (icScript.get()) ICScript(icEntries.length());
+
+    // Adopt fallback stubs into the ICScript.
+    icScript->fallbackStubSpace_.adoptFrom(&stubSpace);
+
+    if (icEntries.length() > 0) {
+        icScript->initICEntries(script, &icEntries[0]);
+    }
+
+    return icScript;
+}
+
 ICStubConstIterator&
 ICStubConstIterator::operator++()
 {
     MOZ_ASSERT(currentStub_ != nullptr);
     currentStub_ = currentStub_->next();
     return *this;
 }
 
@@ -712,17 +1041,17 @@ ICMonitoredStub::ICMonitoredStub(Kind ki
 }
 
 bool
 ICMonitoredFallbackStub::initMonitoringChain(JSContext* cx, JSScript* script)
 {
     MOZ_ASSERT(fallbackMonitorStub_ == nullptr);
 
     ICTypeMonitor_Fallback::Compiler compiler(cx, this);
-    ICStubSpace* space = script->baselineScript()->fallbackStubSpace();
+    ICStubSpace* space = script->icScript()->fallbackStubSpace();
     ICTypeMonitor_Fallback* stub = compiler.getStub(space);
     if (!stub) {
         return false;
     }
     fallbackMonitorStub_ = stub;
     return true;
 }
 
@@ -747,16 +1076,25 @@ ICUpdatedStub::initUpdatingChain(JSConte
     if (!stub) {
         return false;
     }
 
     firstUpdateStub_ = stub;
     return true;
 }
 
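+// Stubs that may trigger GC must live in the per-script fallback stub space;
+// all other stubs can use the JitZone's optimized stub space, which can be
+// discarded more eagerly.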
+/* static */ ICStubSpace*
+ICStubCompiler::StubSpaceForStub(bool makesGCCalls, JSScript* script)
+{
+    if (makesGCCalls) {
+        return script->icScript()->fallbackStubSpace();
+    }
+    return script->zone()->jitZone()->optimizedStubSpace();
+}
+
 JitCode*
 ICStubCompiler::getStubCode()
 {
     JitRealm* realm = cx->realm()->jitRealm();
 
     // Check for existing cached stubcode.
     uint32_t stubKey = getKey();
     JitCode* stubCode = realm->getStubCode(stubKey);
@@ -886,19 +1224,18 @@ ICStubCompiler::pushStubPayload(MacroAss
 
 void
 ICStubCompiler::PushStubPayload(MacroAssembler& masm, Register scratch)
 {
     pushStubPayload(masm, scratch);
     masm.adjustFrame(sizeof(intptr_t));
 }
 
-//
 void
-BaselineScript::noteAccessedGetter(uint32_t pcOffset)
+ICScript::noteAccessedGetter(uint32_t pcOffset)
 {
     ICEntry& entry = icEntryFromPCOffset(pcOffset);
     ICFallbackStub* stub = entry.fallbackStub();
 
     if (stub->isGetProp_Fallback()) {
         stub->toGetProp_Fallback()->noteAccessedGetter();
     }
 }
@@ -1693,21 +2030,19 @@ StripPreliminaryObjectStubs(JSContext* c
     }
 }
 
 //
 // GetElem_Fallback
 //
 
 static bool
-DoGetElemFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_, HandleValue lhs,
+DoGetElemFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub, HandleValue lhs,
                   HandleValue rhs, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetElem_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(frame->script());
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
 
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetElem(%s)", CodeName[op]);
@@ -1760,21 +2095,16 @@ DoGetElemFallback(JSContext* cx, Baselin
 
     if (!isOptimizedArgs) {
         if (!GetElementOperation(cx, op, lhsCopy, rhs, res)) {
             return false;
         }
         TypeScript::Monitor(cx, script, pc, types, res);
     }
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     if (attached) {
         return true;
     }
@@ -1785,22 +2115,20 @@ DoGetElemFallback(JSContext* cx, Baselin
     if (rhs.isNumber() && rhs.toNumber() < 0) {
         stub->noteNegativeIndex();
     }
 
     return true;
 }
 
 static bool
-DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_,
+DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub,
                        HandleValue lhs, HandleValue rhs, HandleValue receiver,
                        MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetElem_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(frame->script());
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
 
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetElemSuper(%s)", CodeName[op]);
@@ -1838,21 +2166,16 @@ DoGetElemSuperFallback(JSContext* cx, Ba
 
     // |lhs| is [[HomeObject]].[[Prototype]] which must be Object
     RootedObject lhsObj(cx, &lhs.toObject());
     if (!GetObjectElementOperation(cx, op, lhsObj, receiver, rhs, res)) {
         return false;
     }
     TypeScript::Monitor(cx, script, pc, types, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     if (attached) {
         return true;
     }
@@ -1926,21 +2249,19 @@ SetUpdateStubData(ICCacheIR_Updated* stu
 {
     if (info->isSet()) {
         stub->updateStubGroup() = info->group();
         stub->updateStubId() = info->id();
     }
 }
 
 static bool
-DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_, Value* stack,
+DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub, Value* stack,
                   HandleValue objv, HandleValue index, HandleValue rhs)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICSetElem_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     RootedScript outerScript(cx, script);
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "SetElem(%s)", CodeName[JSOp(*pc)]);
 
@@ -2029,21 +2350,16 @@ DoSetElemFallback(JSContext* cx, Baselin
     if (op == JSOP_INITHIDDENELEM) {
         return true;
     }
 
     // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
     MOZ_ASSERT(stack[2] == objv);
     stack[2] = rhs;
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     if (attached) {
         return true;
     }
 
     // The SetObjectElement call might have entered this IC recursively, so try
     // to transition.
     if (stub->state().maybeTransition()) {
         stub->discardStubs(cx);
@@ -2117,17 +2433,17 @@ ICSetElem_Fallback::Compiler::generateSt
 
     masm.push(ICStubReg);
     pushStubPayload(masm, R0.scratchReg());
 
     return tailCallVM(DoSetElemFallbackInfo, masm);
 }
 
 void
-BaselineScript::noteHasDenseAdd(uint32_t pcOffset)
+ICScript::noteHasDenseAdd(uint32_t pcOffset)
 {
     ICEntry& entry = icEntryFromPCOffset(pcOffset);
     ICFallbackStub* stub = entry.fallbackStub();
 
     if (stub->isSetElem_Fallback()) {
         stub->toSetElem_Fallback()->noteHasDenseAdd();
     }
 }
@@ -2217,21 +2533,19 @@ StoreToTypedArray(JSContext* cx, MacroAs
                   const ValueOperand& value, const BaseIndex& dest, Register scratch,
                   Label* failure);
 
 //
 // In_Fallback
 //
 
 static bool
-DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub_,
+DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub,
              HandleValue key, HandleValue objValue, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICIn_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     FallbackICSpew(cx, stub, "In");
 
     if (!objValue.isObject()) {
         ReportInNotObjectError(cx, key, -2, objValue, -1);
         return false;
     }
@@ -2271,21 +2585,19 @@ ICIn_Fallback::Compiler::generateStubCod
     return tailCallVM(DoInFallbackInfo, masm);
 }
 
 //
 // HasOwn_Fallback
 //
 
 static bool
-DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICHasOwn_Fallback* stub_,
+DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICHasOwn_Fallback* stub,
                  HandleValue keyValue, HandleValue objValue, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICIn_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     FallbackICSpew(cx, stub, "HasOwn");
 
     TryAttachStub<HasPropIRGenerator>("HasOwn", cx, frame, stub,
         BaselineCacheIRStubKind::Regular, CacheKind::HasOwn,
         keyValue, objValue);
 
@@ -2322,21 +2634,19 @@ ICHasOwn_Fallback::Compiler::generateStu
 }
 
 
 //
 // GetName_Fallback
 //
 
 static bool
-DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub_,
+DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub,
                   HandleObject envChain, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetName_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetName(%s)", CodeName[JSOp(*pc)]);
 
     MOZ_ASSERT(op == JSOP_GETNAME || op == JSOP_GETGNAME);
@@ -2355,21 +2665,16 @@ DoGetNameFallback(JSContext* cx, Baselin
         if (!GetEnvironmentName<GetNameMode::Normal>(cx, envChain, name, res)) {
             return false;
         }
     }
 
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
     TypeScript::Monitor(cx, script, pc, types, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     return true;
 }
 
@@ -2440,21 +2745,19 @@ ICBindName_Fallback::Compiler::generateS
     return tailCallVM(DoBindNameFallbackInfo, masm);
 }
 
 //
 // GetIntrinsic_Fallback
 //
 
 static bool
-DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame, ICGetIntrinsic_Fallback* stub_,
+DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame, ICGetIntrinsic_Fallback* stub,
                        MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetIntrinsic(%s)", CodeName[JSOp(*pc)]);
 
     MOZ_ASSERT(op == JSOP_GETINTRINSIC);
@@ -2464,21 +2767,16 @@ DoGetIntrinsicFallback(JSContext* cx, Ba
     }
 
     // An intrinsic operation will always produce the same result, so only
     // needs to be monitored once. Attach a stub to load the resulting constant
     // directly.
 
     TypeScript::Monitor(cx, script, pc, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     TryAttachStub<GetIntrinsicIRGenerator>("GetIntrinsic", cx, frame, stub, BaselineCacheIRStubKind::Regular, res);
 
     return true;
 }
 
 typedef bool (*DoGetIntrinsicFallbackFn)(JSContext*, BaselineFrame*, ICGetIntrinsic_Fallback*,
                                          MutableHandleValue);
 static const VMFunction DoGetIntrinsicFallbackInfo =
@@ -2528,25 +2826,23 @@ ComputeGetPropResult(JSContext* cx, Base
             }
         }
     }
 
     return true;
 }
 
 static bool
-DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_,
+DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub,
                   MutableHandleValue val, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
-    jsbytecode* pc = stub_->icEntry()->pc(script);
+    jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetProp(%s)", CodeName[op]);
 
     MOZ_ASSERT(op == JSOP_GETPROP ||
                op == JSOP_CALLPROP ||
                op == JSOP_LENGTH ||
                op == JSOP_GETBOUNDNAME);
 
@@ -2588,38 +2884,31 @@ DoGetPropFallback(JSContext* cx, Baselin
 
     if (!ComputeGetPropResult(cx, frame, op, name, val, res)) {
         return false;
     }
 
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
     TypeScript::Monitor(cx, script, pc, types, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
     return true;
 }
 
 static bool
-DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_,
+DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub,
                        HandleValue receiver, MutableHandleValue val, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
-    jsbytecode* pc = stub_->icEntry()->pc(script);
+    jsbytecode* pc = stub->icEntry()->pc(script);
     FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName[JSOp(*pc)]);
 
     MOZ_ASSERT(JSOp(*pc) == JSOP_GETPROP_SUPER);
 
     RootedPropertyName name(cx, script->getName(pc));
 
     // There are some reasons we can fail to attach a stub that are temporary.
     // We want to avoid calling noteUnoptimizableAccess() if the reason we
@@ -2659,21 +2948,16 @@ DoGetPropSuperFallback(JSContext* cx, Ba
     RootedObject valObj(cx, &val.toObject());
     if (!GetProperty(cx, valObj, receiver, name, res)) {
         return false;
     }
 
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
     TypeScript::Monitor(cx, script, pc, types, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     return true;
 }
 
@@ -2749,21 +3033,19 @@ ICGetProp_Fallback::Compiler::postGenera
     cx->realm()->jitRealm()->initBailoutReturnAddr(address, getKey(), kind);
 }
 
 //
 // SetProp_Fallback
 //
 
 static bool
-DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_, Value* stack,
+DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub, Value* stack,
                   HandleValue lhs, HandleValue rhs)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICSetProp_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "SetProp(%s)", CodeName[op]);
 
     MOZ_ASSERT(op == JSOP_SETPROP ||
@@ -2873,21 +3155,16 @@ DoSetPropFallback(JSContext* cx, Baselin
             return false;
         }
     }
 
     // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS.
     MOZ_ASSERT(stack[1] == lhs);
     stack[1] = rhs;
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     if (attached) {
         return true;
     }
 
     // The SetProperty call might have entered this IC recursively, so try
     // to transition.
     if (stub->state().maybeTransition()) {
         stub->discardStubs(cx);
@@ -3565,21 +3842,19 @@ TryAttachConstStringSplit(JSContext* cx,
     }
 
     stub->addNewStub(newStub);
     *attached = true;
     return true;
 }
 
 static bool
-DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, uint32_t argc,
+DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub, uint32_t argc,
                Value* vp, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICCall_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "Call(%s)", CodeName[op]);
 
     MOZ_ASSERT(argc == GET_ARGC(pc));
@@ -3670,21 +3945,16 @@ DoCallFallback(JSContext* cx, BaselineFr
         }
 
         res.set(callArgs.rval());
     }
 
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
     TypeScript::Monitor(cx, script, pc, types, res);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     // Try to transition again in case we called this IC recursively.
     if (stub->state().maybeTransition()) {
         stub->discardStubs(cx);
@@ -3704,21 +3974,19 @@ DoCallFallback(JSContext* cx, BaselineFr
         if (canAttachStub) {
             stub->state().trackNotAttached();
         }
     }
     return true;
 }
 
 static bool
-DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, Value* vp,
+DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub, Value* vp,
                      MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICCall_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     bool constructing = (op == JSOP_SPREADNEW || op == JSOP_SPREADSUPERCALL);
     FallbackICSpew(cx, stub, "SpreadCall(%s)", CodeName[op]);
 
@@ -3738,21 +4006,16 @@ DoSpreadCallFallback(JSContext* cx, Base
     {
         return false;
     }
 
     if (!SpreadCallOperation(cx, script, pc, thisv, callee, arr, newTarget, res)) {
         return false;
     }
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     // Add a type monitor stub for the resulting value.
     StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
     if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
         return false;
     }
 
     return true;
 }
@@ -5136,34 +5399,27 @@ ICGetIterator_Fallback::Compiler::genera
     return tailCallVM(DoGetIteratorFallbackInfo, masm);
 }
 
 //
 // IteratorMore_Fallback
 //
 
 static bool
-DoIteratorMoreFallback(JSContext* cx, BaselineFrame* frame, ICIteratorMore_Fallback* stub_,
+DoIteratorMoreFallback(JSContext* cx, BaselineFrame* frame, ICIteratorMore_Fallback* stub,
                        HandleObject iterObj, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICIteratorMore_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     FallbackICSpew(cx, stub, "IteratorMore");
 
     if (!IteratorMore(cx, iterObj, res)) {
         return false;
     }
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     if (!res.isMagic(JS_NO_ITER_VALUE) && !res.isString()) {
         stub->setHasNonStringResult();
     }
 
     if (iterObj->is<PropertyIteratorObject>() &&
         !stub->hasStub(ICStub::IteratorMore_Native))
     {
         ICIteratorMore_Native::Compiler compiler(cx);
@@ -5270,21 +5526,19 @@ ICIteratorClose_Fallback::Compiler::gene
     return tailCallVM(DoIteratorCloseFallbackInfo, masm);
 }
 
 //
 // InstanceOf_Fallback
 //
 
 static bool
-DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub_,
+DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub,
                      HandleValue lhs, HandleValue rhs, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICInstanceOf_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     FallbackICSpew(cx, stub, "InstanceOf");
 
     if (!rhs.isObject()) {
         ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, nullptr);
         return false;
     }
@@ -5292,21 +5546,16 @@ DoInstanceOfFallback(JSContext* cx, Base
     RootedObject obj(cx, &rhs.toObject());
     bool cond = false;
     if (!HasInstance(cx, obj, lhs, &cond)) {
         return false;
     }
 
     res.setBoolean(cond);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     if (!obj->is<JSFunction>()) {
         // ensure we've recorded at least one failure, so we can detect there was a non-optimizable case
         if (!stub->state().hasFailures()) {
             stub->state().trackNotAttached();
         }
         return true;
     }
 
@@ -5401,31 +5650,16 @@ ICCall_Scripted::ICCall_Scripted(JitCode
                                  JSFunction* callee, JSObject* templateObject,
                                  uint32_t pcOffset)
   : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
     callee_(callee),
     templateObject_(templateObject),
     pcOffset_(pcOffset)
 { }
 
-/* static */ ICCall_Scripted*
-ICCall_Scripted::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                       ICCall_Scripted& other)
-{
-    return New<ICCall_Scripted>(cx, space, other.jitCode(), firstMonitorStub, other.callee_,
-                                other.templateObject_, other.pcOffset_);
-}
-
-/* static */ ICCall_AnyScripted*
-ICCall_AnyScripted::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                          ICCall_AnyScripted& other)
-{
-    return New<ICCall_AnyScripted>(cx, space, other.jitCode(), firstMonitorStub, other.pcOffset_);
-}
-
 ICCall_Native::ICCall_Native(JitCode* stubCode, ICStub* firstMonitorStub,
                              JSFunction* callee, JSObject* templateObject,
                              uint32_t pcOffset)
   : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
     callee_(callee),
     templateObject_(templateObject),
     pcOffset_(pcOffset)
 {
@@ -5433,24 +5667,16 @@ ICCall_Native::ICCall_Native(JitCode* st
     // The simulator requires VM calls to be redirected to a special swi
     // instruction to handle them. To make this work, we store the redirected
     // pointer in the stub.
     native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void*, callee->native()),
                                                 Args_General3);
 #endif
 }
 
-/* static */ ICCall_Native*
-ICCall_Native::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                     ICCall_Native& other)
-{
-    return New<ICCall_Native>(cx, space, other.jitCode(), firstMonitorStub, other.callee_,
-                              other.templateObject_, other.pcOffset_);
-}
-
 ICCall_ClassHook::ICCall_ClassHook(JitCode* stubCode, ICStub* firstMonitorStub,
                                    const Class* clasp, Native native,
                                    JSObject* templateObject, uint32_t pcOffset)
   : ICMonitoredStub(ICStub::Call_ClassHook, stubCode, firstMonitorStub),
     clasp_(clasp),
     native_(JS_FUNC_TO_DATA_PTR(void*, native)),
     templateObject_(templateObject),
     pcOffset_(pcOffset)
@@ -5458,55 +5684,16 @@ ICCall_ClassHook::ICCall_ClassHook(JitCo
 #ifdef JS_SIMULATOR
     // The simulator requires VM calls to be redirected to a special swi
     // instruction to handle them. To make this work, we store the redirected
     // pointer in the stub.
     native_ = Simulator::RedirectNativeFunction(native_, Args_General3);
 #endif
 }
 
-/* static */ ICCall_ClassHook*
-ICCall_ClassHook::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                        ICCall_ClassHook& other)
-{
-    ICCall_ClassHook* res = New<ICCall_ClassHook>(cx, space, other.jitCode(), firstMonitorStub,
-                                                  other.clasp(), nullptr, other.templateObject_,
-                                                  other.pcOffset_);
-    if (res) {
-        res->native_ = other.native();
-    }
-    return res;
-}
-
-/* static */ ICCall_ScriptedApplyArray*
-ICCall_ScriptedApplyArray::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                 ICCall_ScriptedApplyArray& other)
-{
-    return New<ICCall_ScriptedApplyArray>(cx, space, other.jitCode(), firstMonitorStub,
-                                          other.pcOffset_);
-}
-
-/* static */ ICCall_ScriptedApplyArguments*
-ICCall_ScriptedApplyArguments::Clone(JSContext* cx,
-                                     ICStubSpace* space,
-                                     ICStub* firstMonitorStub,
-                                     ICCall_ScriptedApplyArguments& other)
-{
-    return New<ICCall_ScriptedApplyArguments>(cx, space, other.jitCode(), firstMonitorStub,
-                                              other.pcOffset_);
-}
-
-/* static */ ICCall_ScriptedFunCall*
-ICCall_ScriptedFunCall::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                              ICCall_ScriptedFunCall& other)
-{
-    return New<ICCall_ScriptedFunCall>(cx, space, other.jitCode(), firstMonitorStub,
-                                       other.pcOffset_);
-}
-
 //
 // Rest_Fallback
 //
 
 static bool
 DoRestFallback(JSContext* cx, BaselineFrame* frame, ICRest_Fallback* stub,
                MutableHandleValue res)
 {
@@ -5543,18 +5730,16 @@ ICRest_Fallback::Compiler::generateStubC
 //
 // UnaryArith_Fallback
 //
 
 static bool
 DoUnaryArithFallback(JSContext* cx, BaselineFrame* frame, ICUnaryArith_Fallback* stub,
                      HandleValue val, MutableHandleValue res)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICUnaryArith_Fallback*> debug_stub(frame, stub);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName[op]);
 
     switch (op) {
@@ -5572,21 +5757,16 @@ DoUnaryArithFallback(JSContext* cx, Base
             return false;
         }
         break;
       }
       default:
         MOZ_CRASH("Unexpected op");
     }
 
-    // Check if debug mode toggling made the stub invalid.
-    if (debug_stub.invalid()) {
-        return true;
-    }
-
     if (res.isDouble()) {
         stub->setSawDoubleResult();
     }
 
     TryAttachStub<UnaryArithIRGenerator>("UnaryArith", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, val, res);
     return true;
 }
 
@@ -5615,21 +5795,19 @@ ICUnaryArith_Fallback::Compiler::generat
     return tailCallVM(DoUnaryArithFallbackInfo, masm);
 }
 
 //
 // BinaryArith_Fallback
 //
 
 static bool
-DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame, ICBinaryArith_Fallback* stub_,
+DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame, ICBinaryArith_Fallback* stub,
                       HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICBinaryArith_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "CacheIRBinaryArith(%s,%d,%d)", CodeName[op],
             int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
             int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));
@@ -5707,21 +5885,16 @@ DoBinaryArithFallback(JSContext* cx, Bas
             return false;
         }
         break;
       }
       default:
         MOZ_CRASH("Unhandled baseline arith op");
     }
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     if (ret.isDouble()) {
         stub->setSawDoubleResult();
     }
 
     TryAttachStub<BinaryArithIRGenerator>("BinaryArith", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, lhs, rhs, ret);
     return true;
 }
 
@@ -5751,21 +5924,19 @@ ICBinaryArith_Fallback::Compiler::genera
 
     return tailCallVM(DoBinaryArithFallbackInfo, masm);
 }
 
 //
 // Compare_Fallback
 //
 static bool
-DoCompareFallback(JSContext* cx, BaselineFrame* frame, ICCompare_Fallback* stub_, HandleValue lhs,
+DoCompareFallback(JSContext* cx, BaselineFrame* frame, ICCompare_Fallback* stub, HandleValue lhs,
                   HandleValue rhs, MutableHandleValue ret)
 {
-    // This fallback stub may trigger debug mode toggling.
-    DebugModeOSRVolatileStub<ICCompare_Fallback*> stub(frame, stub_);
     stub->incrementEnteredCount();
 
     RootedScript script(cx, frame->script());
     jsbytecode* pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
 
     FallbackICSpew(cx, stub, "Compare(%s)", CodeName[op]);
 
@@ -5819,21 +5990,16 @@ DoCompareFallback(JSContext* cx, Baselin
         break;
       default:
         MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
         return false;
     }
 
     ret.setBoolean(out);
 
-    // Check if debug mode toggling made the stub invalid.
-    if (stub.invalid()) {
-        return true;
-    }
-
     TryAttachStub<CompareIRGenerator>("Compare", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, lhs, rhs);
     return true;
 }
 
 typedef bool (*DoCompareFallbackFn)(JSContext*, BaselineFrame*, ICCompare_Fallback*,
                                     HandleValue, HandleValue, MutableHandleValue);
 static const VMFunction DoCompareFallbackInfo =
     FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, "DoCompareFallback", TailCall,
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -217,63 +217,161 @@ void TypeFallbackICSpew(JSContext* cx, I
 // An entry in the BaselineScript IC descriptor table. There's one ICEntry per
 // IC.
 class ICEntry
 {
     // A pointer to the first IC stub for this instruction.
     ICStub* firstStub_;
 
     // The PC of this IC's bytecode op within the JSScript.
-    uint32_t pcOffset_ : 31;
-    uint32_t isForOp_ : 1;
+    uint32_t pcOffset_;
 
   public:
-    ICEntry(ICStub* firstStub, uint32_t pcOffset, bool isForOp)
-      : firstStub_(firstStub), pcOffset_(pcOffset), isForOp_(uint32_t(isForOp))
-    {
-        // The offset must fit in at least 31 bits, since we shave off 1 for
-        // the isForOp_ flag.
-        MOZ_ASSERT(pcOffset_ == pcOffset);
-        JS_STATIC_ASSERT(BaselineMaxScriptLength <= (1u << 31) - 1);
-        MOZ_ASSERT(pcOffset <= BaselineMaxScriptLength);
-    }
+    // Non-op ICs are Baseline ICs used for function argument/this type
+    // monitoring in the script's prologue. All other ICs are "for op" ICs.
+    // Note: the last bytecode op in a script is always a return so UINT32_MAX
+    // is never a valid bytecode offset.
+    static constexpr uint32_t NonOpPCOffset = UINT32_MAX;
+
+    ICEntry(ICStub* firstStub, uint32_t pcOffset)
+      : firstStub_(firstStub), pcOffset_(pcOffset)
+    {}
 
     ICStub* firstStub() const {
         MOZ_ASSERT(firstStub_);
         return firstStub_;
     }
 
     ICFallbackStub* fallbackStub() const;
 
     void setFirstStub(ICStub* stub) {
         firstStub_ = stub;
     }
 
     uint32_t pcOffset() const {
-        return pcOffset_;
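+        // A non-op IC reports offset 0, i.e. the start of the script.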
+        return pcOffset_ == NonOpPCOffset ? 0 : pcOffset_;
     }
     jsbytecode* pc(JSScript* script) const {
-        return script->offsetToPC(pcOffset_);
+        return script->offsetToPC(pcOffset());
     }
 
     static inline size_t offsetOfFirstStub() {
         return offsetof(ICEntry, firstStub_);
     }
 
     inline ICStub** addressOfFirstStub() {
         return &firstStub_;
     }
 
     bool isForOp() const {
-        return !!isForOp_;
+        return pcOffset_ != NonOpPCOffset;
     }
 
     void trace(JSTracer* trc);
 };
 
+// [SMDOC] ICScript
+//
+// ICScript contains IC data used by Baseline (Ion has its own IC chains, stored
+// in IonScript).
+//
+// For each IC we store an ICEntry, which points to the first ICStub in the
+// chain. Note that multiple stubs in the same zone can share Baseline IC code.
+// This works because the stub data is stored in the ICStub instead of being
+// baked into the stub code.
+//
+// Storing this separately from BaselineScript simplifies debug mode OSR because
+// the ICScript can be reused when we replace the BaselineScript. It also makes
+// it easier to experiment with interpreter ICs in the future because the
+// interpreter and Baseline JIT will be able to use exactly the same IC data.
+//
+// ICScript contains the following:
+//
+// * Fallback stub space: this stores all fallback stubs and the "can GC" stubs.
+//   These stubs are never purged before destroying the ICScript. (Other stubs
+//   are stored in the optimized stub space in the JitZone and can be
+//   discarded more eagerly. See ICScript::purgeOptimizedStubs.)
+//
+// * List of IC entries, in the following order:
+//
+//   - Type monitor IC for |this|.
+//   - Type monitor IC for each formal argument.
+//   - IC for each JOF_IC bytecode op.
+//
+// ICScript is stored in TypeScript and allocated/destroyed at the same time.
+class ICScript
+{
+    // Allocated space for fallback stubs.
+    FallbackICStubSpace fallbackStubSpace_ = {};
+
+    uint32_t numICEntries_;
+
+    explicit ICScript(uint32_t numICEntries)
+      : numICEntries_(numICEntries)
+    {}
+
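+    // The ICEntry array is allocated inline, directly after the ICScript
+    // header (see ICScript::create).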
+    ICEntry* icEntryList() {
+        return reinterpret_cast<ICEntry*>(reinterpret_cast<uint8_t*>(this) +
+                                          sizeof(ICScript));
+    }
+
+    void initICEntries(JSScript* script, const ICEntry* entries);
+
+  public:
+    static MOZ_MUST_USE js::UniquePtr<ICScript> create(JSContext* cx, JSScript* script);
+
+    ~ICScript() {
+        // The contents of the fallback stub space are removed and freed
+        // separately after the next minor GC. See prepareForDestruction.
+        MOZ_ASSERT(fallbackStubSpace_.isEmpty());
+    }
+    void prepareForDestruction(Zone* zone) {
+        // When the script contains pointers to nursery things, the store buffer can
+        // contain entries that point into the fallback stub space. Since we can
+        // destroy scripts outside the context of a GC, this situation could result
+        // in us trying to mark invalid store buffer entries.
+        //
+        // Defer freeing any allocated blocks until after the next minor GC.
+        fallbackStubSpace_.freeAllAfterMinorGC(zone);
+    }
+
+    FallbackICStubSpace* fallbackStubSpace() {
+        return &fallbackStubSpace_;
+    }
+
+    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, size_t* data,
+                                size_t* fallbackStubs) const {
+        *data += mallocSizeOf(this);
+
+        // |data| already includes the ICStubSpace itself, so use
+        // sizeOfExcludingThis.
+        *fallbackStubs += fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf);
+    }
+
+    size_t numICEntries() const {
+        return numICEntries_;
+    }
+
+    ICEntry& icEntry(size_t index) {
+        MOZ_ASSERT(index < numICEntries());
+        return icEntryList()[index];
+    }
+
+    void noteAccessedGetter(uint32_t pcOffset);
+    void noteHasDenseAdd(uint32_t pcOffset);
+
+    void trace(JSTracer* trc);
+    void purgeOptimizedStubs(Zone* zone);
+
+    ICEntry* maybeICEntryFromPCOffset(uint32_t pcOffset);
+    ICEntry* maybeICEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry);
+
+    ICEntry& icEntryFromPCOffset(uint32_t pcOffset);
+    ICEntry& icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry);
+};
+
 class ICMonitoredStub;
 class ICMonitoredFallbackStub;
 class ICUpdatedStub;
 
 // Constant iterator that traverses arbitrary chains of ICStubs.
 // No requirements are made of the ICStub used to construct this
 // iterator, aside from that the stub be part of a nullptr-terminated
 // chain.
@@ -777,19 +875,16 @@ class ICCacheIR_Trait
 class ICCacheIR_Regular : public ICStub, public ICCacheIR_Trait<ICCacheIR_Regular>
 {
   public:
     ICCacheIR_Regular(JitCode* stubCode, const CacheIRStubInfo* stubInfo)
       : ICStub(ICStub::CacheIR_Regular, stubCode),
         ICCacheIR_Trait(stubInfo)
     {}
 
-    static ICCacheIR_Regular* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                    ICCacheIR_Regular& other);
-
     void notePreliminaryObject() {
         extra_ = 1;
     }
     bool hasPreliminaryObject() const {
         return extra_;
     }
 
     uint8_t* stubDataStart();
@@ -830,19 +925,16 @@ class ICCacheIR_Monitored : public ICMon
 
   public:
     ICCacheIR_Monitored(JitCode* stubCode, ICStub* firstMonitorStub,
                         const CacheIRStubInfo* stubInfo)
       : ICMonitoredStub(ICStub::CacheIR_Monitored, stubCode, firstMonitorStub),
         ICCacheIR_Trait(stubInfo)
     {}
 
-    static ICCacheIR_Monitored* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                      ICCacheIR_Monitored& other);
-
     void notePreliminaryObject() {
         extra_ = 1;
     }
     bool hasPreliminaryObject() const {
         return extra_;
     }
 
     uint8_t* stubDataStart();
@@ -925,19 +1017,16 @@ class ICCacheIR_Updated : public ICUpdat
   public:
     ICCacheIR_Updated(JitCode* stubCode, const CacheIRStubInfo* stubInfo)
       : ICUpdatedStub(ICStub::CacheIR_Updated, stubCode),
         ICCacheIR_Trait(stubInfo),
         updateStubGroup_(nullptr),
         updateStubId_(JSID_EMPTY)
     {}
 
-    static ICCacheIR_Updated* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                    ICCacheIR_Updated& other);
-
     GCPtrObjectGroup& updateStubGroup() {
         return updateStubGroup_;
     }
     GCPtrId& updateStubId() {
         return updateStubId_;
     }
 
     void notePreliminaryObject() {
@@ -1046,22 +1135,18 @@ class ICStubCompiler
     template <typename T, typename... Args>
     T* newStub(Args&&... args) {
         return ICStub::New<T>(cx, std::forward<Args>(args)...);
     }
 
   public:
     virtual ICStub* getStub(ICStubSpace* space) = 0;
 
-    static ICStubSpace* StubSpaceForStub(bool makesGCCalls, JSScript* outerScript) {
-        if (makesGCCalls) {
-            return outerScript->baselineScript()->fallbackStubSpace();
-        }
-        return outerScript->zone()->jitZone()->optimizedStubSpace();
-    }
+    static ICStubSpace* StubSpaceForStub(bool makesGCCalls, JSScript* script);
+
     ICStubSpace* getStubSpace(JSScript* outerScript) {
         return StubSpaceForStub(ICStub::NonCacheIRStubMakesGCCalls(kind), outerScript);
     }
 };
 
 // WarmUpCounter_Fallback
 
 // A WarmUpCounter IC chain has only the fallback stub.
@@ -2109,19 +2194,16 @@ class ICCall_Scripted : public ICMonitor
     GCPtrObject templateObject_;
     uint32_t pcOffset_;
 
     ICCall_Scripted(JitCode* stubCode, ICStub* firstMonitorStub,
                     JSFunction* callee, JSObject* templateObject,
                     uint32_t pcOffset);
 
   public:
-    static ICCall_Scripted* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                  ICCall_Scripted& other);
-
     GCPtrFunction& callee() {
         return callee_;
     }
     GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     static size_t offsetOfCallee() {
@@ -2140,19 +2222,16 @@ class ICCall_AnyScripted : public ICMoni
     uint32_t pcOffset_;
 
     ICCall_AnyScripted(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset)
       : ICMonitoredStub(ICStub::Call_AnyScripted, stubCode, firstMonitorStub),
         pcOffset_(pcOffset)
     { }
 
   public:
-    static ICCall_AnyScripted* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                     ICCall_AnyScripted& other);
-
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_AnyScripted, pcOffset_);
     }
 };
 
 // Compiler for Call_Scripted and Call_AnyScripted stubs.
 class ICCallScriptedCompiler : public ICCallStubCompiler {
   protected:
@@ -2221,19 +2300,16 @@ class ICCall_Native : public ICMonitored
     void* native_;
 #endif
 
     ICCall_Native(JitCode* stubCode, ICStub* firstMonitorStub,
                   JSFunction* callee, JSObject* templateObject,
                   uint32_t pcOffset);
 
   public:
-    static ICCall_Native* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                ICCall_Native& other);
-
     GCPtrFunction& callee() {
         return callee_;
     }
     GCPtrObject& templateObject() {
         return templateObject_;
     }
 
     static size_t offsetOfCallee() {
@@ -2303,19 +2379,16 @@ class ICCall_ClassHook : public ICMonito
     GCPtrObject templateObject_;
     uint32_t pcOffset_;
 
     ICCall_ClassHook(JitCode* stubCode, ICStub* firstMonitorStub,
                      const Class* clasp, Native native, JSObject* templateObject,
                      uint32_t pcOffset);
 
   public:
-    static ICCall_ClassHook* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
-                                   ICCall_ClassHook& other);
-
     const Class* clasp() {
         return clasp_;
     }
     void* native() {
         return native_;
     }
     GCPtrObject& templateObject() {
         return templateObject_;
@@ -2381,21 +2454,16 @@ class ICCall_ScriptedApplyArray : public
     uint32_t pcOffset_;
 
     ICCall_ScriptedApplyArray(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset)
       : ICMonitoredStub(ICStub::Call_ScriptedApplyArray, stubCode, firstMonitorStub),
         pcOffset_(pcOffset)
     {}
 
   public:
-    static ICCall_ScriptedApplyArray* Clone(JSContext* cx,
-                                            ICStubSpace* space,
-                                            ICStub* firstMonitorStub,
-                                            ICCall_ScriptedApplyArray& other);
-
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_ScriptedApplyArray, pcOffset_);
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
       protected:
         ICStub* firstMonitorStub_;
@@ -2424,21 +2492,16 @@ class ICCall_ScriptedApplyArguments : pu
     uint32_t pcOffset_;
 
     ICCall_ScriptedApplyArguments(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset)
       : ICMonitoredStub(ICStub::Call_ScriptedApplyArguments, stubCode, firstMonitorStub),
         pcOffset_(pcOffset)
     {}
 
   public:
-    static ICCall_ScriptedApplyArguments* Clone(JSContext* cx,
-                                                ICStubSpace* space,
-                                                ICStub* firstMonitorStub,
-                                                ICCall_ScriptedApplyArguments& other);
-
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_ScriptedApplyArguments, pcOffset_);
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
       protected:
         ICStub* firstMonitorStub_;
@@ -2468,19 +2531,16 @@ class ICCall_ScriptedFunCall : public IC
     uint32_t pcOffset_;
 
     ICCall_ScriptedFunCall(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset)
       : ICMonitoredStub(ICStub::Call_ScriptedFunCall, stubCode, firstMonitorStub),
         pcOffset_(pcOffset)
     {}
 
   public:
-    static ICCall_ScriptedFunCall* Clone(JSContext* cx, ICStubSpace* space,
-                                         ICStub* firstMonitorStub, ICCall_ScriptedFunCall& other);
-
     static size_t offsetOfPCOffset() {
         return offsetof(ICCall_ScriptedFunCall, pcOffset_);
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
       protected:
         ICStub* firstMonitorStub_;
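
The Clone() declarations removed throughout this header were used when recompiling a script for debug mode (BaselineDebugModeOSR), which had to copy call stubs into the new BaselineScript's fallback stub space. With the IC entries and fallback stub space now owned by ICScript, which survives Baseline recompilation, the existing stub chains are reused in place. A minimal sketch of that ownership change, using hypothetical stand-in types rather than the real ICStub/BaselineScript classes:

#include <memory>
#include <vector>

struct FakeStub { int kind = 0; };

// Before: the compiled code object owned its stubs, so recompiling (e.g. for
// debug mode) meant cloning every stub into the new owner.
struct OldCompiledCode {
    std::vector<std::unique_ptr<FakeStub>> stubs;
};

// After: stubs hang off a separate IC data object with a longer lifetime; a
// recompiled code object keeps pointing at the same chains, so no Clone().
struct FakeICData {
    std::vector<std::unique_ptr<FakeStub>> stubs;
};

struct NewCompiledCode {
    FakeICData* icData = nullptr;  // shared across recompilations, not copied
};
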
--- a/js/src/jit/BaselineInspector.cpp
+++ b/js/src/jit/BaselineInspector.cpp
@@ -231,29 +231,59 @@ GetCacheIRReceiverForUnboxedProperty(ICC
     if (!reader.matchOp(CacheOp::StoreUnboxedProperty)) {
         return false;
     }
 
     *receiver = ReceiverGuard(group, nullptr);
     return true;
 }
 
+ICScript*
+BaselineInspector::icScript() const
+{
+    return script->icScript();
+}
+
+ICEntry&
+BaselineInspector::icEntryFromPC(jsbytecode* pc)
+{
+    ICEntry* entry = maybeICEntryFromPC(pc);
+    MOZ_ASSERT(entry);
+    return *entry;
+}
+
+ICEntry*
+BaselineInspector::maybeICEntryFromPC(jsbytecode* pc)
+{
+    MOZ_ASSERT(hasICScript());
+    MOZ_ASSERT(isValidPC(pc));
+    ICEntry* ent =
+        icScript()->maybeICEntryFromPCOffset(script->pcToOffset(pc), prevLookedUpEntry);
+    if (!ent) {
+        return nullptr;
+    }
+
+    MOZ_ASSERT(ent->isForOp());
+    prevLookedUpEntry = ent;
+    return ent;
+}
+
 bool
 BaselineInspector::maybeInfoForPropertyOp(jsbytecode* pc, ReceiverVector& receivers,
                                           ObjectGroupVector& convertUnboxedGroups)
 {
     // Return a list of the receivers seen by the baseline IC for the current
     // op. Empty lists indicate no receivers are known, or there was an
     // uncacheable access. convertUnboxedGroups is used for unboxed object
     // groups which have been seen, but have had instances converted to native
     // objects and should be eagerly converted by Ion.
     MOZ_ASSERT(receivers.empty());
     MOZ_ASSERT(convertUnboxedGroups.empty());
 
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return true;
     }
 
     MOZ_ASSERT(isValidPC(pc));
     const ICEntry& entry = icEntryFromPC(pc);
 
     ICStub* stub = entry.firstStub();
     while (stub->next()) {
@@ -294,17 +324,17 @@ BaselineInspector::maybeInfoForPropertyO
     }
 
     return true;
 }
 
 ICStub*
 BaselineInspector::monomorphicStub(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     // IonBuilder::analyzeNewLoopTypes may call this (via expectedResultType
     // below) on code that's unreachable, according to BytecodeAnalysis. Use
     // maybeICEntryFromPC to handle this.
     const ICEntry* entry = maybeICEntryFromPC(pc);
     if (!entry) {
@@ -319,17 +349,17 @@ BaselineInspector::monomorphicStub(jsbyt
     }
 
     return stub;
 }
 
 bool
 BaselineInspector::dimorphicStub(jsbytecode* pc, ICStub** pfirst, ICStub** psecond)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
 
     ICStub* stub = entry.firstStub();
     ICStub* next = stub->next();
     ICStub* after = next ? next->next() : nullptr;
@@ -678,17 +708,17 @@ TryToSpecializeBinaryArithOp(ICStub** st
     MOZ_ASSERT(sawInt32);
     *result = MIRType::Int32;
     return true;
 }
 
 MIRType
 BaselineInspector::expectedBinaryArithSpecialization(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return MIRType::None;
     }
 
     MIRType result;
     ICStub* stubs[2];
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICFallbackStub* stub = entry.fallbackStub();
@@ -711,33 +741,33 @@ BaselineInspector::expectedBinaryArithSp
     }
 
     return MIRType::None;
 }
 
 bool
 BaselineInspector::hasSeenNegativeIndexGetElement(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* stub = entry.fallbackStub();
 
     if (stub->isGetElem_Fallback()) {
         return stub->toGetElem_Fallback()->hasNegativeIndex();
     }
     return false;
 }
 
 bool
 BaselineInspector::hasSeenAccessedGetter(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* stub = entry.fallbackStub();
 
     if (stub->isGetProp_Fallback()) {
         return stub->toGetProp_Fallback()->hasAccessedGetter();
@@ -745,30 +775,30 @@ BaselineInspector::hasSeenAccessedGetter
     return false;
 }
 
 bool
 BaselineInspector::hasSeenNonStringIterMore(jsbytecode* pc)
 {
     MOZ_ASSERT(JSOp(*pc) == JSOP_MOREITER);
 
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* stub = entry.fallbackStub();
 
     return stub->toIteratorMore_Fallback()->hasNonStringResult();
 }
 
 bool
 BaselineInspector::hasSeenDoubleResult(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* stub = entry.fallbackStub();
 
     MOZ_ASSERT(stub->isUnaryArith_Fallback() || stub->isBinaryArith_Fallback());
 
@@ -777,17 +807,17 @@ BaselineInspector::hasSeenDoubleResult(j
     }
 
     return stub->toBinaryArith_Fallback()->sawDoubleResult();
 }
 
 JSObject*
 BaselineInspector::getTemplateObject(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         switch (stub->kind()) {
           case ICStub::NewArray_Fallback:
             return stub->toNewArray_Fallback()->templateObject();
@@ -806,17 +836,17 @@ BaselineInspector::getTemplateObject(jsb
     }
 
     return nullptr;
 }
 
 ObjectGroup*
 BaselineInspector::getTemplateObjectGroup(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         switch (stub->kind()) {
           case ICStub::NewArray_Fallback:
             return stub->toNewArray_Fallback()->templateGroup();
@@ -828,17 +858,17 @@ BaselineInspector::getTemplateObjectGrou
     return nullptr;
 }
 
 JSFunction*
 BaselineInspector::getSingleCallee(jsbytecode* pc)
 {
     MOZ_ASSERT(*pc == JSOP_NEW);
 
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* stub = entry.firstStub();
 
     if (entry.fallbackStub()->state().hasFailures()) {
         return nullptr;
@@ -849,17 +879,17 @@ BaselineInspector::getSingleCallee(jsbyt
     }
 
     return stub->toCall_Scripted()->callee();
 }
 
 JSObject*
 BaselineInspector::getTemplateObjectForNative(jsbytecode* pc, Native native)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isCall_Native() && stub->toCall_Native()->callee()->native() == native) {
             return stub->toCall_Native()->templateObject();
         }
@@ -867,17 +897,17 @@ BaselineInspector::getTemplateObjectForN
 
     return nullptr;
 }
 
 bool
 BaselineInspector::isOptimizableConstStringSplit(jsbytecode* pc, JSString** strOut,
                                                  JSString** sepOut, ArrayObject** objOut)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
 
    // If a ConstStringSplit stub is attached, it must be the only optimized stub attached.
     if (entry.fallbackStub()->numOptimizedStubs() != 1) {
         return false;
@@ -892,54 +922,54 @@ BaselineInspector::isOptimizableConstStr
     *sepOut = stub->toCall_ConstStringSplit()->expectedSep();
     *objOut = stub->toCall_ConstStringSplit()->templateObject();
     return true;
 }
 
 JSObject*
 BaselineInspector::getTemplateObjectForClassHook(jsbytecode* pc, const Class* clasp)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return nullptr;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isCall_ClassHook() && stub->toCall_ClassHook()->clasp() == clasp) {
             return stub->toCall_ClassHook()->templateObject();
         }
     }
 
     return nullptr;
 }
 
 LexicalEnvironmentObject*
 BaselineInspector::templateNamedLambdaObject()
 {
-    if (!hasBaselineScript()) {
+    if (!script->hasBaselineScript()) {
         return nullptr;
     }
 
-    JSObject* res = baselineScript()->templateEnvironment();
+    JSObject* res = script->baselineScript()->templateEnvironment();
     if (script->bodyScope()->hasEnvironment()) {
         res = res->enclosingEnvironment();
     }
     MOZ_ASSERT(res);
 
     return &res->as<LexicalEnvironmentObject>();
 }
 
 CallObject*
 BaselineInspector::templateCallObject()
 {
-    if (!hasBaselineScript()) {
+    if (!script->hasBaselineScript()) {
         return nullptr;
     }
 
-    JSObject* res = baselineScript()->templateEnvironment();
+    JSObject* res = script->baselineScript()->templateEnvironment();
     MOZ_ASSERT(res);
 
     return &res->as<CallObject>();
 }
 
 static bool
 MatchCacheIRReceiverGuard(CacheIRReader& reader, ICStub* stub, const CacheIRStubInfo* stubInfo,
                           ObjOperandId objId, ReceiverGuard* receiver)
@@ -1217,17 +1247,17 @@ AddCacheIRGetPropFunction(ICCacheIR_Moni
 bool
 BaselineInspector::commonGetPropFunction(jsbytecode* pc, bool innerized,
                                          JSObject** holder, Shape** holderShape,
                                          JSFunction** commonGetter, Shape** globalShape,
                                          bool* isOwnProperty,
                                          ReceiverVector& receivers,
                                          ObjectGroupVector& convertUnboxedGroups)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     MOZ_ASSERT(receivers.empty());
     MOZ_ASSERT(convertUnboxedGroups.empty());
 
     *globalShape = nullptr;
     *commonGetter = nullptr;
@@ -1286,17 +1316,17 @@ GetMegamorphicGetterSetterFunction(ICStu
     JSObject* obj = isGetter ? propShape->getterObject() : propShape->setterObject();
     return &obj->as<JSFunction>();
 }
 
 bool
 BaselineInspector::megamorphicGetterSetterFunction(jsbytecode* pc, bool isGetter,
                                                    JSFunction** getterOrSetter)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     *getterOrSetter = nullptr;
     const ICEntry& entry = icEntryFromPC(pc);
 
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         if (stub->isCacheIR_Monitored()) {
@@ -1443,17 +1473,17 @@ AddCacheIRSetPropFunction(ICCacheIR_Upda
 }
 
 bool
 BaselineInspector::commonSetPropFunction(jsbytecode* pc, JSObject** holder, Shape** holderShape,
                                          JSFunction** commonSetter, bool* isOwnProperty,
                                          ReceiverVector& receivers,
                                          ObjectGroupVector& convertUnboxedGroups)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     MOZ_ASSERT(receivers.empty());
     MOZ_ASSERT(convertUnboxedGroups.empty());
 
     *commonSetter = nullptr;
     const ICEntry& entry = icEntryFromPC(pc);
@@ -1543,17 +1573,17 @@ BaselineInspector::maybeInfoForProtoRead
 {
     // This is like maybeInfoForPropertyOp, but for when the property exists on
     // the prototype.
 
     MOZ_ASSERT(receivers.empty());
     MOZ_ASSERT(convertUnboxedGroups.empty());
     MOZ_ASSERT(!*holder);
 
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return true;
     }
 
     MOZ_ASSERT(isValidPC(pc));
     const ICEntry& entry = icEntryFromPC(pc);
 
     ICStub* stub = entry.firstStub();
     while (stub->next()) {
@@ -1608,17 +1638,17 @@ GetCacheIRExpectedInputType(ICCacheIR_Mo
 
     MOZ_ASSERT_UNREACHABLE("Unexpected instruction");
     return MIRType::Value;
 }
 
 MIRType
 BaselineInspector::expectedPropertyAccessInputType(jsbytecode* pc)
 {
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return MIRType::Value;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     MIRType type = MIRType::None;
 
     for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
         MIRType stubType = MIRType::None;
@@ -1648,17 +1678,17 @@ BaselineInspector::expectedPropertyAcces
     return (type == MIRType::None) ? MIRType::Value : type;
 }
 
 bool
 BaselineInspector::instanceOfData(jsbytecode* pc, Shape** shape, uint32_t* slot,
                                   JSObject** prototypeObject)
 {
     MOZ_ASSERT(*pc == JSOP_INSTANCEOF);
-    if (!hasBaselineScript()) {
+    if (!hasICScript()) {
         return false;
     }
 
     const ICEntry& entry = icEntryFromPC(pc);
     ICStub* firstStub = entry.firstStub();
 
     // Ensure singleton instanceof stub
     if (!firstStub->next() ||
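
Most of the BaselineInspector changes above are mechanical: queries that read IC chains now gate on hasICScript() instead of hasBaselineScript(), while templateNamedLambdaObject() and templateCallObject() still require an actual BaselineScript because the template environment lives there. The stub-chain walks themselves are unchanged; a self-contained sketch of that walking pattern, with hypothetical stand-in types rather than the real ICEntry/ICStub classes:

#include <vector>

struct FakeStub {
    enum Kind { CacheIR, Fallback } kind;
    int receiverId;    // stands in for the guarded Shape/ObjectGroup
    FakeStub* next;    // the trailing fallback stub has next == nullptr
};

// Walk the optimized stubs in front of the fallback stub and collect the
// receivers they guard on; report failure for stub kinds we do not recognize,
// mirroring the "bail out on anything unexpected" behaviour above.
bool CollectReceivers(FakeStub* first, std::vector<int>& receivers) {
    for (FakeStub* stub = first; stub->next; stub = stub->next) {
        if (stub->kind != FakeStub::CacheIR) {
            receivers.clear();
            return false;
        }
        receivers.push_back(stub->receiverId);
    }
    return true;
}
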
--- a/js/src/jit/BaselineInspector.h
+++ b/js/src/jit/BaselineInspector.h
@@ -47,58 +47,36 @@ class BaselineInspector
 
   public:
     explicit BaselineInspector(JSScript* script)
       : script(script), prevLookedUpEntry(nullptr)
     {
         MOZ_ASSERT(script);
     }
 
-    bool hasBaselineScript() const {
-        return script->hasBaselineScript();
+    bool hasICScript() const {
+        return script->hasICScript();
     }
 
-    BaselineScript* baselineScript() const {
-        return script->baselineScript();
-    }
+    ICScript* icScript() const;
 
   private:
 #ifdef DEBUG
     bool isValidPC(jsbytecode* pc) {
         return script->containsPC(pc);
     }
 #endif
 
-    ICEntry& icEntryFromPC(jsbytecode* pc) {
-        MOZ_ASSERT(hasBaselineScript());
-        MOZ_ASSERT(isValidPC(pc));
-        ICEntry& ent =
-            baselineScript()->icEntryFromPCOffset(script->pcToOffset(pc), prevLookedUpEntry);
-        MOZ_ASSERT(ent.isForOp());
-        prevLookedUpEntry = &ent;
-        return ent;
-    }
-
-    ICEntry* maybeICEntryFromPC(jsbytecode* pc) {
-        MOZ_ASSERT(hasBaselineScript());
-        MOZ_ASSERT(isValidPC(pc));
-        ICEntry* ent =
-            baselineScript()->maybeICEntryFromPCOffset(script->pcToOffset(pc), prevLookedUpEntry);
-        if (!ent) {
-            return nullptr;
-        }
-        MOZ_ASSERT(ent->isForOp());
-        prevLookedUpEntry = ent;
-        return ent;
-    }
+    ICEntry& icEntryFromPC(jsbytecode* pc);
+    ICEntry* maybeICEntryFromPC(jsbytecode* pc);
 
     template <typename ICInspectorType>
     ICInspectorType makeICInspector(jsbytecode* pc, ICStub::Kind expectedFallbackKind) {
         ICEntry* ent = nullptr;
-        if (hasBaselineScript()) {
+        if (hasICScript()) {
             ent = &icEntryFromPC(pc);
             MOZ_ASSERT(ent->fallbackStub()->kind() == expectedFallbackKind);
         }
         return ICInspectorType(this, pc, ent);
     }
 
     ICStub* monomorphicStub(jsbytecode* pc);
     MOZ_MUST_USE bool dimorphicStub(jsbytecode* pc, ICStub** pfirst, ICStub** psecond);
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -348,42 +348,38 @@ jit::CanEnterBaselineMethod(JSContext* c
 
 BaselineScript*
 BaselineScript::New(JSScript* jsscript,
                     uint32_t bailoutPrologueOffset,
                     uint32_t debugOsrPrologueOffset,
                     uint32_t debugOsrEpilogueOffset,
                     uint32_t profilerEnterToggleOffset,
                     uint32_t profilerExitToggleOffset,
-                    size_t icEntries,
                     size_t retAddrEntries,
                     size_t pcMappingIndexEntries, size_t pcMappingSize,
                     size_t bytecodeTypeMapEntries,
                     size_t resumeEntries,
                     size_t traceLoggerToggleOffsetEntries)
 {
     static const unsigned DataAlignment = sizeof(uintptr_t);
 
-    size_t icEntriesSize = icEntries * sizeof(ICEntry);
     size_t retAddrEntriesSize = retAddrEntries * sizeof(RetAddrEntry);
     size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
     size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);
     size_t resumeEntriesSize = resumeEntries * sizeof(uintptr_t);
     size_t tlEntriesSize = traceLoggerToggleOffsetEntries * sizeof(uint32_t);
 
-    size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
     size_t paddedRetAddrEntriesSize = AlignBytes(retAddrEntriesSize, DataAlignment);
     size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
     size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
     size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);
     size_t paddedResumeEntriesSize = AlignBytes(resumeEntriesSize, DataAlignment);
     size_t paddedTLEntriesSize = AlignBytes(tlEntriesSize, DataAlignment);
 
-    size_t allocBytes = paddedICEntriesSize +
-                        paddedRetAddrEntriesSize +
+    size_t allocBytes = paddedRetAddrEntriesSize +
                         paddedPCMappingIndexEntriesSize +
                         paddedPCMappingSize +
                         paddedBytecodeTypesMapSize +
                         paddedResumeEntriesSize +
                         paddedTLEntriesSize;
 
     BaselineScript* script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
     if (!script) {
@@ -393,20 +389,16 @@ BaselineScript::New(JSScript* jsscript,
                                 debugOsrPrologueOffset,
                                 debugOsrEpilogueOffset,
                                 profilerEnterToggleOffset,
                                 profilerExitToggleOffset);
 
     size_t offsetCursor = sizeof(BaselineScript);
     MOZ_ASSERT(offsetCursor == AlignBytes(sizeof(BaselineScript), DataAlignment));
 
-    script->icEntriesOffset_ = offsetCursor;
-    script->icEntries_ = icEntries;
-    offsetCursor += paddedICEntriesSize;
-
     script->retAddrEntriesOffset_ = offsetCursor;
     script->retAddrEntries_ = retAddrEntries;
     offsetCursor += paddedRetAddrEntriesSize;
 
     script->pcMappingIndexOffset_ = offsetCursor;
     script->pcMappingIndexEntries_ = pcMappingIndexEntries;
     offsetCursor += paddedPCMappingIndexEntriesSize;
 
@@ -428,17 +420,21 @@ BaselineScript::New(JSScript* jsscript,
     return script;
 }
 
 void
 BaselineScript::trace(JSTracer* trc)
 {
     TraceEdge(trc, &method_, "baseline-method");
     TraceNullableEdge(trc, &templateEnv_, "baseline-template-environment");
+}
 
+void
+ICScript::trace(JSTracer* trc)
+{
     // Mark all IC stub codes hanging off the IC stub entries.
     for (size_t i = 0; i < numICEntries(); i++) {
         ICEntry& ent = icEntry(i);
         ent.trace(trc);
     }
 }
 
 /* static */
@@ -459,26 +455,16 @@ BaselineScript::Trace(JSTracer* trc, Bas
 void
 BaselineScript::Destroy(FreeOp* fop, BaselineScript* script)
 {
 
     MOZ_ASSERT(!script->hasPendingIonBuilder());
 
     script->unlinkDependentWasmImports(fop);
 
-    /*
-     * When the script contains pointers to nursery things, the store buffer can
-     * contain entries that point into the fallback stub space. Since we can
-     * destroy scripts outside the context of a GC, this situation could result
-     * in us trying to mark invalid store buffer entries.
-     *
-     * Defer freeing any allocated blocks until after the next minor GC.
-     */
-    script->fallbackStubSpace_.freeAllAfterMinorGC(script->method()->zone());
-
     fop->delete_(script);
 }
 
 void
 JS::DeletePolicy<js::jit::BaselineScript>::operator()(const js::jit::BaselineScript* script)
 {
     BaselineScript::Destroy(rt_->defaultFreeOp(), const_cast<BaselineScript*>(script));
 }
@@ -530,23 +516,16 @@ BaselineScript::removeDependentWasmImpor
     for (DependentWasmImport& dep : *dependentWasmImports_) {
         if (dep.instance == &instance && dep.importIndex == idx) {
             dependentWasmImports_->erase(&dep);
             break;
         }
     }
 }
 
-ICEntry&
-BaselineScript::icEntry(size_t index)
-{
-    MOZ_ASSERT(index < numICEntries());
-    return icEntryList()[index];
-}
-
 RetAddrEntry&
 BaselineScript::retAddrEntry(size_t index)
 {
     MOZ_ASSERT(index < numRetAddrEntries());
     return retAddrEntryList()[index];
 }
 
 PCMappingIndexEntry&
@@ -568,25 +547,25 @@ BaselineScript::pcMappingReader(size_t i
 
     return CompactBufferReader(dataStart, dataEnd);
 }
 
 struct ICEntries
 {
     using EntryT = ICEntry;
 
-    BaselineScript* const baseline_;
+    ICScript* const icScript_;
 
-    explicit ICEntries(BaselineScript* baseline) : baseline_(baseline) {}
+    explicit ICEntries(ICScript* icScript) : icScript_(icScript) {}
 
     size_t numEntries() const {
-        return baseline_->numICEntries();
+        return icScript_->numICEntries();
     }
     ICEntry& operator[](size_t index) const {
-        return baseline_->icEntry(index);
+        return icScript_->icEntry(index);
     }
 };
 
 struct RetAddrEntries
 {
     using EntryT = RetAddrEntry;
 
     BaselineScript* const baseline_;
@@ -623,21 +602,21 @@ BaselineScript::retAddrEntryFromReturnOf
                        &loc);
 
     MOZ_ASSERT(found);
     MOZ_ASSERT(loc < numRetAddrEntries());
     MOZ_ASSERT(retAddrEntry(loc).returnOffset().offset() == returnOffset.offset());
     return retAddrEntry(loc);
 }
 
-template <typename Entries>
+template <typename Entries, typename ScriptT>
 static inline bool
-ComputeBinarySearchMid(BaselineScript* baseline, uint32_t pcOffset, size_t* loc)
+ComputeBinarySearchMid(ScriptT* script, uint32_t pcOffset, size_t* loc)
 {
-    Entries entries(baseline);
+    Entries entries(script);
     return BinarySearchIf(entries, 0, entries.numEntries(),
                           [pcOffset](typename Entries::EntryT& entry) {
                               uint32_t entryOffset = entry.pcOffset();
                               if (pcOffset < entryOffset) {
                                   return -1;
                               }
                               if (entryOffset < pcOffset) {
                                   return 1;
@@ -649,17 +628,17 @@ ComputeBinarySearchMid(BaselineScript* b
 
 uint8_t*
 BaselineScript::returnAddressForEntry(const RetAddrEntry& ent)
 {
     return method()->raw() + ent.returnOffset().offset();
 }
 
 ICEntry*
-BaselineScript::maybeICEntryFromPCOffset(uint32_t pcOffset)
+ICScript::maybeICEntryFromPCOffset(uint32_t pcOffset)
 {
     // Multiple IC entries can have the same PC offset, but this method only looks for
     // those which have isForOp() set.
     size_t mid;
     if (!ComputeBinarySearchMid<ICEntries>(this, pcOffset, &mid)) {
         return nullptr;
     }
 
@@ -683,25 +662,25 @@ BaselineScript::maybeICEntryFromPCOffset
         if (icEntry(i).isForOp()) {
             return &icEntry(i);
         }
     }
     return nullptr;
 }
 
 ICEntry&
-BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
+ICScript::icEntryFromPCOffset(uint32_t pcOffset)
 {
     ICEntry* entry = maybeICEntryFromPCOffset(pcOffset);
     MOZ_RELEASE_ASSERT(entry);
     return *entry;
 }
 
 ICEntry*
-BaselineScript::maybeICEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry)
+ICScript::maybeICEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry)
 {
    // Do a linear forward search from the last queried PC offset, or fall back to
    // a binary search if the last offset is too far away.
     if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
         (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
     {
         ICEntry* firstEntry = &icEntry(0);
         ICEntry* lastEntry = &icEntry(numICEntries() - 1);
@@ -714,17 +693,17 @@ BaselineScript::maybeICEntryFromPCOffset
         }
         return nullptr;
     }
 
     return maybeICEntryFromPCOffset(pcOffset);
 }
 
 ICEntry&
-BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry)
+ICScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry)
 {
     ICEntry* entry = maybeICEntryFromPCOffset(pcOffset, prevLookedUpEntry);
     MOZ_RELEASE_ASSERT(entry);
     return *entry;
 }
 
 RetAddrEntry&
 BaselineScript::retAddrEntryFromPCOffset(uint32_t pcOffset, RetAddrEntry::Kind kind)
@@ -798,23 +777,23 @@ BaselineScript::computeResumeNativeOffse
     };
 
     mozilla::Span<const uint32_t> pcOffsets = script->resumeOffsets();
     uint8_t** nativeOffsets = resumeEntryList();
     std::transform(pcOffsets.begin(), pcOffsets.end(), nativeOffsets, computeNative);
 }
 
 void
-BaselineScript::copyICEntries(JSScript* script, const ICEntry* entries)
+ICScript::initICEntries(JSScript* script, const ICEntry* entries)
 {
     // Fix up the return offset in the IC entries and copy them in.
     // Also write out the IC entry ptrs in any fallback stubs that were added.
     for (uint32_t i = 0; i < numICEntries(); i++) {
         ICEntry& realEntry = icEntry(i);
-        realEntry = entries[i];
+        new (&realEntry) ICEntry(entries[i]);
 
         // If the attached stub is a fallback stub, then fix it up with
         // a pointer to the (now available) realEntry.
         if (realEntry.firstStub()->isFallback()) {
             realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry);
         }
 
         if (realEntry.firstStub()->isTypeMonitor_Fallback()) {
@@ -828,22 +807,16 @@ void
 BaselineScript::copyRetAddrEntries(JSScript* script, const RetAddrEntry* entries)
 {
     for (uint32_t i = 0; i < numRetAddrEntries(); i++) {
         retAddrEntry(i) = entries[i];
     }
 }
 
 void
-BaselineScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
-{
-    fallbackStubSpace_.adoptFrom(stubSpace);
-}
-
-void
 BaselineScript::copyPCMappingEntries(const CompactBufferWriter& entries)
 {
     MOZ_ASSERT(entries.length() > 0);
     MOZ_ASSERT(entries.length() == pcMappingSize_);
 
     memcpy(pcMappingData(), entries.buffer(), entries.length());
 }
 
@@ -1102,17 +1075,17 @@ BaselineScript::toggleProfilerInstrument
     } else {
         Assembler::ToggleToJmp(enterToggleLocation);
         Assembler::ToggleToJmp(exitToggleLocation);
         flags_ &= ~uint32_t(PROFILER_INSTRUMENTATION_ON);
     }
 }
 
 void
-BaselineScript::purgeOptimizedStubs(Zone* zone)
+ICScript::purgeOptimizedStubs(Zone* zone)
 {
     JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
 
     for (size_t i = 0; i < numICEntries(); i++) {
         ICEntry& entry = icEntry(i);
         ICStub* lastStub = entry.firstStub();
         while (lastStub->next()) {
             lastStub = lastStub->next();
@@ -1194,27 +1167,28 @@ HasEnteredCounters(ICEntry& entry)
         stub = stub->next();
     }
     return false;
 }
 
 void
 jit::JitSpewBaselineICStats(JSScript* script, const char* dumpReason)
 {
-    MOZ_ASSERT(script->hasBaselineScript());
-    BaselineScript* blScript = script->baselineScript();
+    MOZ_ASSERT(script->hasICScript());
     JSContext* cx = TlsContext.get();
     AutoStructuredSpewer spew(cx, SpewChannel::BaselineICStats, script);
     if (!spew) {
         return;
     }
+
+    ICScript* icScript = script->icScript();
     spew->property("reason", dumpReason);
     spew->beginListProperty("entries");
-    for (size_t i = 0; i < blScript->numICEntries(); i++) {
-        ICEntry& entry = blScript->icEntry(i);
+    for (size_t i = 0; i < icScript->numICEntries(); i++) {
+        ICEntry& entry = icScript->icEntry(i);
         if (!HasEnteredCounters(entry)) {
             continue;
         }
 
         uint32_t pcOffset = entry.pcOffset();
         jsbytecode* pc = entry.pc(script);
 
         unsigned column;
@@ -1249,20 +1223,16 @@ jit::JitSpewBaselineICStats(JSScript* sc
 void
 jit::FinishDiscardBaselineScript(FreeOp* fop, JSScript* script)
 {
     if (!script->hasBaselineScript()) {
         return;
     }
 
     if (script->baselineScript()->active()) {
-        // Script is live on the stack. Keep the BaselineScript, but destroy
-        // stubs allocated in the optimized stub space.
-        script->baselineScript()->purgeOptimizedStubs(script->zone());
-
         // Reset |active| flag so that we don't need a separate script
         // iteration to unmark them.
         script->baselineScript()->resetActive();
 
         // The baseline caches have been wiped out, so the script will need to
         // warm back up before it can be inlined during Ion compilation.
         script->baselineScript()->clearIonCompiledOrInlined();
         return;
@@ -1272,18 +1242,23 @@ jit::FinishDiscardBaselineScript(FreeOp*
     script->setBaselineScript(fop->runtime(), nullptr);
     BaselineScript::Destroy(fop, baseline);
 }
 
 void
 jit::AddSizeOfBaselineData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf, size_t* data,
                            size_t* fallbackStubs)
 {
+    if (script->hasICScript()) {
+        // ICScript is stored in TypeScript but we report its size here and not
+        // in TypeScript::sizeOfIncludingThis.
+        script->icScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
+    }
     if (script->hasBaselineScript()) {
-        script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
+        script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data);
     }
 }
 
 void
 jit::ToggleBaselineProfiling(JSRuntime* runtime, bool enable)
 {
     JitRuntime* jrt = runtime->jitRuntime();
     if (!jrt) {
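
ICScript::maybeICEntryFromPCOffset keeps the lookup strategy it inherited from BaselineScript: IC entries are sorted by pcOffset, so a query first tries a short linear scan forward from the previously returned entry and only falls back to BinarySearchIf when the requested offset is more than a few bytes away. A standalone sketch of that strategy over a plain sorted array (hypothetical code, not the real ICEntries adapter):

#include <algorithm>
#include <cstdint>
#include <vector>

// Returns the index of pcOffset in offsets (sorted ascending), or -1 if it is
// not present. prev is the index returned by the previous query, or -1.
int FindOffset(const std::vector<uint32_t>& offsets, uint32_t pcOffset, int prev) {
    // Cheap path: scan forward a little from the last hit.
    if (prev >= 0 && size_t(prev) < offsets.size() &&
        pcOffset >= offsets[prev] && pcOffset - offsets[prev] <= 10)
    {
        for (size_t i = size_t(prev); i < offsets.size() && offsets[i] <= pcOffset; i++) {
            if (offsets[i] == pcOffset) {
                return int(i);
            }
        }
        return -1;
    }

    // Otherwise do a binary search, like BinarySearchIf in the code above.
    auto it = std::lower_bound(offsets.begin(), offsets.end(), pcOffset);
    if (it == offsets.end() || *it != pcOffset) {
        return -1;
    }
    return int(it - offsets.begin());
}
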
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -240,19 +240,16 @@ struct BaselineScript final
     // Code pointer containing the actual method.
     HeapPtr<JitCode*> method_ = nullptr;
 
     // For functions with a call object, template objects to use for the call
     // object and decl env object (linked via the call object's enclosing
     // scope).
     HeapPtr<EnvironmentObject*> templateEnv_ = nullptr;
 
-    // Allocated space for fallback stubs.
-    FallbackICStubSpace fallbackStubSpace_ = {};
-
     // If non-null, the list of wasm::Modules that contain an optimized call
     // directly to this script.
     Vector<DependentWasmImport>* dependentWasmImports_ = nullptr;
 
     // Early Ion bailouts will enter at this address. This is after frame
     // construction and before environment chain is initialized.
     uint32_t bailoutPrologueOffset_;
 
@@ -308,19 +305,16 @@ struct BaselineScript final
     };
 
   private:
     uint32_t flags_ = 0;
 
   private:
     void trace(JSTracer* trc);
 
-    uint32_t icEntriesOffset_ = 0;
-    uint32_t icEntries_ = 0;
-
     uint32_t retAddrEntriesOffset_ = 0;
     uint32_t retAddrEntries_ = 0;
 
     uint32_t pcMappingIndexOffset_ = 0;
     uint32_t pcMappingIndexEntries_ = 0;
 
     uint32_t pcMappingOffset_ = 0;
     uint32_t pcMappingSize_ = 0;
@@ -365,51 +359,37 @@ struct BaselineScript final
       : bailoutPrologueOffset_(bailoutPrologueOffset),
         debugOsrPrologueOffset_(debugOsrPrologueOffset),
         debugOsrEpilogueOffset_(debugOsrEpilogueOffset),
         profilerEnterToggleOffset_(profilerEnterToggleOffset),
         profilerExitToggleOffset_(profilerExitToggleOffset)
     { }
 
   public:
-    ~BaselineScript() {
-        // The contents of the fallback stub space are removed and freed
-        // separately after the next minor GC. See BaselineScript::Destroy.
-        MOZ_ASSERT(fallbackStubSpace_.isEmpty());
-    }
-
     static BaselineScript* New(JSScript* jsscript,
                                uint32_t bailoutPrologueOffset,
                                uint32_t debugOsrPrologueOffset,
                                uint32_t debugOsrEpilogueOffset,
                                uint32_t profilerEnterToggleOffset,
                                uint32_t profilerExitToggleOffset,
-                               size_t icEntries,
                                size_t retAddrEntries,
                                size_t pcMappingIndexEntries, size_t pcMappingSize,
                                size_t bytecodeTypeMapEntries,
                                size_t resumeEntries,
                                size_t traceLoggerToggleOffsetEntries);
 
     static void Trace(JSTracer* trc, BaselineScript* script);
     static void Destroy(FreeOp* fop, BaselineScript* script);
 
-    void purgeOptimizedStubs(Zone* zone);
-
     static inline size_t offsetOfMethod() {
         return offsetof(BaselineScript, method_);
     }
 
-    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, size_t* data,
-                                size_t* fallbackStubs) const {
+    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, size_t* data) const {
         *data += mallocSizeOf(this);
-
-        // |data| already includes the ICStubSpace itself, so use
-        // sizeOfExcludingThis.
-        *fallbackStubs += fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf);
     }
 
     bool active() const {
         return flags_ & ACTIVE;
     }
     void setActive() {
         flags_ |= ACTIVE;
     }
@@ -453,34 +433,28 @@ struct BaselineScript final
     }
     uint8_t* debugOsrPrologueEntryAddr() const {
         return method_->raw() + debugOsrPrologueOffset_;
     }
     uint8_t* debugOsrEpilogueEntryAddr() const {
         return method_->raw() + debugOsrEpilogueOffset_;
     }
 
-    ICEntry* icEntryList() {
-        return (ICEntry*)(reinterpret_cast<uint8_t*>(this) + icEntriesOffset_);
-    }
     RetAddrEntry* retAddrEntryList() {
         return (RetAddrEntry*)(reinterpret_cast<uint8_t*>(this) + retAddrEntriesOffset_);
     }
     uint8_t** resumeEntryList() {
         return (uint8_t**)(reinterpret_cast<uint8_t*>(this) + resumeEntriesOffset_);
     }
     PCMappingIndexEntry* pcMappingIndexEntryList() {
         return (PCMappingIndexEntry*)(reinterpret_cast<uint8_t*>(this) + pcMappingIndexOffset_);
     }
     uint8_t* pcMappingData() {
         return reinterpret_cast<uint8_t*>(this) + pcMappingOffset_;
     }
-    FallbackICStubSpace* fallbackStubSpace() {
-        return &fallbackStubSpace_;
-    }
 
     JitCode* method() const {
         return method_;
     }
     void setMethod(JitCode* code) {
         MOZ_ASSERT(!method_);
         method_ = code;
     }
@@ -492,45 +466,30 @@ struct BaselineScript final
         MOZ_ASSERT(!templateEnv_);
         templateEnv_ = templateEnv;
     }
 
     bool containsCodeAddress(uint8_t* addr) const {
         return method()->raw() <= addr && addr <= method()->raw() + method()->instructionsSize();
     }
 
-    ICEntry* maybeICEntryFromPCOffset(uint32_t pcOffset);
-    ICEntry* maybeICEntryFromPCOffset(uint32_t pcOffset,
-                                      ICEntry* prevLookedUpEntry);
-
-    ICEntry& icEntry(size_t index);
-    ICEntry& icEntryFromPCOffset(uint32_t pcOffset);
-    ICEntry& icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry);
-
     uint8_t* returnAddressForEntry(const RetAddrEntry& ent);
 
     RetAddrEntry& retAddrEntry(size_t index);
     RetAddrEntry& retAddrEntryFromPCOffset(uint32_t pcOffset, RetAddrEntry::Kind kind);
     RetAddrEntry& prologueRetAddrEntry(RetAddrEntry::Kind kind);
     RetAddrEntry& retAddrEntryFromReturnOffset(CodeOffset returnOffset);
     RetAddrEntry& retAddrEntryFromReturnAddress(uint8_t* returnAddr);
 
-    size_t numICEntries() const {
-        return icEntries_;
-    }
-
     size_t numRetAddrEntries() const {
         return retAddrEntries_;
     }
 
-    void copyICEntries(JSScript* script, const ICEntry* entries);
     void copyRetAddrEntries(JSScript* script, const RetAddrEntry* entries);
 
-    void adoptFallbackStubs(FallbackICStubSpace* stubSpace);
-
     // Copy resumeOffsets list from |script| and convert the pcOffsets
     // to native addresses in the Baseline code.
     void computeResumeNativeOffsets(JSScript* script);
 
     PCMappingIndexEntry& pcMappingIndexEntry(size_t index);
     CompactBufferReader pcMappingReader(size_t indexEntry);
 
     size_t numPCMappingIndexEntries() const {
@@ -581,19 +540,16 @@ struct BaselineScript final
 
     uint32_t* traceLoggerToggleOffsets() {
         MOZ_ASSERT(traceLoggerToggleOffsetsOffset_);
         return reinterpret_cast<uint32_t*>(reinterpret_cast<uint8_t*>(this) +
                                            traceLoggerToggleOffsetsOffset_);
     }
 #endif
 
-    void noteAccessedGetter(uint32_t pcOffset);
-    void noteHasDenseAdd(uint32_t pcOffset);
-
     static size_t offsetOfFlags() {
         return offsetof(BaselineScript, flags_);
     }
     static size_t offsetOfResumeEntriesOffset() {
         return offsetof(BaselineScript, resumeEntriesOffset_);
     }
 
     static void writeBarrierPre(Zone* zone, BaselineScript* script);
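
BaselineScript keeps its single-allocation layout: one malloc'd block holding the header followed by padded, variable-length trailing arrays, each located through an offset recorded in the header; the IC entry array has simply dropped out of that layout. A minimal sketch of the same technique with hypothetical types and plain malloc instead of pod_malloc_with_extra:

#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <new>

struct FakeHeader {
    uint32_t retAddrOffset = 0;   // byte offset of the trailing array
    uint32_t numRetAddrs = 0;

    uint32_t* retAddrList() {
        return reinterpret_cast<uint32_t*>(reinterpret_cast<uint8_t*>(this) + retAddrOffset);
    }
};

// Caller frees the result with free().
FakeHeader* NewFakeHeader(size_t numRetAddrs) {
    const size_t alignment = sizeof(uintptr_t);
    auto pad = [&](size_t n) { return (n + alignment - 1) & ~(alignment - 1); };

    size_t trailingBytes = pad(numRetAddrs * sizeof(uint32_t));
    void* raw = malloc(pad(sizeof(FakeHeader)) + trailingBytes);
    if (!raw) {
        return nullptr;
    }

    FakeHeader* header = new (raw) FakeHeader();
    size_t offsetCursor = pad(sizeof(FakeHeader));
    header->retAddrOffset = uint32_t(offsetCursor);   // record where the array starts
    header->numRetAddrs = uint32_t(numRetAddrs);
    memset(header->retAddrList(), 0, numRetAddrs * sizeof(uint32_t));
    return header;
}
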
--- a/js/src/jit/CacheIRCompiler.cpp
+++ b/js/src/jit/CacheIRCompiler.cpp
@@ -938,65 +938,16 @@ CacheIRStubInfo::stubDataSize() const
         StubField::Type type = fieldType(field++);
         if (type == StubField::Type::Limit) {
             return size;
         }
         size += StubField::sizeInBytes(type);
     }
 }
 
-void
-CacheIRStubInfo::copyStubData(ICStub* src, ICStub* dest) const
-{
-    uint8_t* srcBytes = reinterpret_cast<uint8_t*>(src);
-    uint8_t* destBytes = reinterpret_cast<uint8_t*>(dest);
-
-    size_t field = 0;
-    size_t offset = 0;
-    while (true) {
-        StubField::Type type = fieldType(field);
-        switch (type) {
-          case StubField::Type::RawWord:
-            *reinterpret_cast<uintptr_t*>(destBytes + offset) =
-                *reinterpret_cast<uintptr_t*>(srcBytes + offset);
-            break;
-          case StubField::Type::RawInt64:
-          case StubField::Type::DOMExpandoGeneration:
-            *reinterpret_cast<uint64_t*>(destBytes + offset) =
-                *reinterpret_cast<uint64_t*>(srcBytes + offset);
-            break;
-          case StubField::Type::Shape:
-            getStubField<ICStub, Shape*>(dest, offset).init(getStubField<ICStub, Shape*>(src, offset));
-            break;
-          case StubField::Type::JSObject:
-            getStubField<ICStub, JSObject*>(dest, offset).init(getStubField<ICStub, JSObject*>(src, offset));
-            break;
-          case StubField::Type::ObjectGroup:
-            getStubField<ICStub, ObjectGroup*>(dest, offset).init(getStubField<ICStub, ObjectGroup*>(src, offset));
-            break;
-          case StubField::Type::Symbol:
-            getStubField<ICStub, JS::Symbol*>(dest, offset).init(getStubField<ICStub, JS::Symbol*>(src, offset));
-            break;
-          case StubField::Type::String:
-            getStubField<ICStub, JSString*>(dest, offset).init(getStubField<ICStub, JSString*>(src, offset));
-            break;
-          case StubField::Type::Id:
-            getStubField<ICStub, jsid>(dest, offset).init(getStubField<ICStub, jsid>(src, offset));
-            break;
-          case StubField::Type::Value:
-            getStubField<ICStub, Value>(dest, offset).init(getStubField<ICStub, Value>(src, offset));
-            break;
-          case StubField::Type::Limit:
-            return; // Done.
-        }
-        field++;
-        offset += StubField::sizeInBytes(type);
-    }
-}
-
 template <typename T>
 static GCPtr<T>*
 AsGCPtr(uintptr_t* ptr)
 {
     return reinterpret_cast<GCPtr<T>*>(ptr);
 }
 
 uintptr_t
--- a/js/src/jit/CacheIRCompiler.h
+++ b/js/src/jit/CacheIRCompiler.h
@@ -978,18 +978,16 @@ class CacheIRStubInfo
     js::GCPtr<T>& getStubField(Stub* stub, uint32_t field) const;
 
     template <class T>
     js::GCPtr<T>& getStubField(ICStub* stub, uint32_t field) const {
         return getStubField<ICStub, T>(stub, field);
     }
 
     uintptr_t getStubRawWord(ICStub* stub, uint32_t field) const;
-
-    void copyStubData(ICStub* src, ICStub* dest) const;
 };
 
 template <typename T>
 void TraceCacheIRStub(JSTracer* trc, T* stub, const CacheIRStubInfo* stubInfo);
 
 void
 LoadTypedThingData(MacroAssembler& masm, TypedThingLayout layout, Register obj, Register result);
 
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -3297,16 +3297,20 @@ jit::TraceJitScripts(JSTracer* trc, JSSc
 {
     if (script->hasIonScript()) {
         jit::IonScript::Trace(trc, script->ionScript());
     }
 
     if (script->hasBaselineScript()) {
         jit::BaselineScript::Trace(trc, script->baselineScript());
     }
+
+    if (script->hasICScript()) {
+        script->icScript()->trace(trc);
+    }
 }
 
 bool
 jit::JitSupportsFloatingPoint()
 {
     return js::jit::MacroAssembler::SupportsFloatingPoint();
 }
 
--- a/js/src/jit/arm/SharedICHelpers-arm.h
+++ b/js/src/jit/arm/SharedICHelpers-arm.h
@@ -26,26 +26,23 @@ EmitRestoreTailCallReg(MacroAssembler& m
 
 inline void
 EmitRepushTailCallReg(MacroAssembler& masm)
 {
     // No-op on ARM because link register is always holding the return address.
 }
 
 inline void
-EmitCallIC(MacroAssembler& masm, CodeOffset* patchOffset, CodeOffset* callOffset)
+EmitCallIC(MacroAssembler& masm, const ICEntry* entry, CodeOffset* callOffset)
 {
-    // Move ICEntry offset into ICStubReg
-    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
-    *patchOffset = offset;
+    // Load stub pointer into ICStubReg.
+    masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
+                 ICStubReg);
 
-    // Load stub pointer into ICStubReg
-    masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
-
-    // Load stubcode pointer from BaselineStubEntry.
+    // Load stubcode pointer from the ICStub.
     // R2 won't be active when we call ICs, so we can use r0.
     MOZ_ASSERT(R2 == ValueOperand(r1, r0));
     masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);
 
     // Call the stubcode via a direct branch-and-link.
     masm.ma_blx(r0);
     *callOffset = CodeOffset(masm.currentOffset());
 }
--- a/js/src/jit/arm64/SharedICHelpers-arm64.h
+++ b/js/src/jit/arm64/SharedICHelpers-arm64.h
@@ -26,26 +26,23 @@ EmitRestoreTailCallReg(MacroAssembler& m
 
 inline void
 EmitRepushTailCallReg(MacroAssembler& masm)
 {
     // No-op on ARM because link register is always holding the return address.
 }
 
 inline void
-EmitCallIC(MacroAssembler& masm, CodeOffset* patchOffset, CodeOffset* callOffset)
+EmitCallIC(MacroAssembler& masm, const ICEntry* entry, CodeOffset* callOffset)
 {
-    // Move ICEntry offset into ICStubReg
-    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
-    *patchOffset = offset;
+    // Load stub pointer into ICStubReg.
+    masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
+                 ICStubReg);
 
-    // Load stub pointer into ICStubReg
-    masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
-
-    // Load stubcode pointer from BaselineStubEntry.
+    // Load stubcode pointer from the ICStub.
     // R2 won't be active when we call ICs, so we can use r0.
     MOZ_ASSERT(R2 == ValueOperand(r0));
     masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);
 
     // Call the stubcode via a direct branch-and-link.
     masm.Blr(x0);
     *callOffset = CodeOffset(masm.currentOffset());
 }
--- a/js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
+++ b/js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
@@ -38,26 +38,23 @@ EmitRestoreTailCallReg(MacroAssembler& m
 
 inline void
 EmitRepushTailCallReg(MacroAssembler& masm)
 {
     // No-op on MIPS because ra register is always holding the return address.
 }
 
 inline void
-EmitCallIC(MacroAssembler& masm, CodeOffset* patchOffset, CodeOffset* callOffset)
+EmitCallIC(MacroAssembler& masm, const ICEntry* entry, CodeOffset* callOffset)
 {
-    // Move ICEntry offset into ICStubReg.
-    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
-    *patchOffset = offset;
+    // Load stub pointer into ICStubReg.
+    masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
+                 ICStubReg);
 
-    // Load stub pointer into ICStubReg.
-    masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
-
-    // Load stubcode pointer from BaselineStubEntry.
+    // Load stubcode pointer from the ICStub.
     // R2 won't be active when we call ICs, so we can use it as scratch.
     masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), R2.scratchReg());
 
     // Call the stubcode via a direct jump-and-link
     masm.call(R2.scratchReg());
     *callOffset = CodeOffset(masm.currentOffset());
 }
 
--- a/js/src/jit/none/SharedICHelpers-none.h
+++ b/js/src/jit/none/SharedICHelpers-none.h
@@ -9,17 +9,17 @@
 
 namespace js {
 namespace jit {
 
 static const size_t ICStackValueOffset = 0;
 
 inline void EmitRestoreTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitRepushTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
-inline void EmitCallIC(MacroAssembler&, CodeOffset*, CodeOffset*) { MOZ_CRASH(); }
+inline void EmitCallIC(MacroAssembler&, const ICEntry*, CodeOffset*) { MOZ_CRASH(); }
 inline void EmitEnterTypeMonitorIC(MacroAssembler&, size_t v = 0) { MOZ_CRASH(); }
 inline void EmitReturnFromIC(MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitBaselineLeaveStubFrame(MacroAssembler&, bool v = false) { MOZ_CRASH(); }
 inline void EmitStubGuardFailure(MacroAssembler&) { MOZ_CRASH(); }
 
 template <typename T> inline void EmitPreBarrier(MacroAssembler&, T, MIRType) { MOZ_CRASH(); }
 
 } // namespace jit
--- a/js/src/jit/x64/SharedICHelpers-x64.h
+++ b/js/src/jit/x64/SharedICHelpers-x64.h
@@ -26,24 +26,20 @@ EmitRestoreTailCallReg(MacroAssembler& m
 
 inline void
 EmitRepushTailCallReg(MacroAssembler& masm)
 {
     masm.Push(ICTailCallReg);
 }
 
 inline void
-EmitCallIC(MacroAssembler& masm, CodeOffset* patchOffset, CodeOffset* callOffset)
+EmitCallIC(MacroAssembler& masm, const ICEntry* entry, CodeOffset* callOffset)
 {
-    // Move ICEntry offset into ICStubReg
-    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
-    *patchOffset = offset;
-
-    // Load stub pointer into ICStubReg
-    masm.loadPtr(Address(ICStubReg, (int32_t) ICEntry::offsetOfFirstStub()),
+    // Load stub pointer into ICStubReg.
+    masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
                  ICStubReg);
 
     // Call the stubcode.
     masm.call(Address(ICStubReg, ICStub::offsetOfStubCode()));
     *callOffset = CodeOffset(masm.currentOffset());
 }
 
 inline void
--- a/js/src/jit/x86/SharedICHelpers-x86.h
+++ b/js/src/jit/x86/SharedICHelpers-x86.h
@@ -26,28 +26,23 @@ EmitRestoreTailCallReg(MacroAssembler& m
 
 inline void
 EmitRepushTailCallReg(MacroAssembler& masm)
 {
     masm.Push(ICTailCallReg);
 }
 
 inline void
-EmitCallIC(MacroAssembler& masm, CodeOffset* patchOffset, CodeOffset* callOffset)
+EmitCallIC(MacroAssembler& masm, const ICEntry* entry, CodeOffset* callOffset)
 {
-    // Move ICEntry offset into ICStubReg
-    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
-    *patchOffset = offset;
-
-    // Load stub pointer into ICStubReg
-    masm.loadPtr(Address(ICStubReg, (int32_t) ICEntry::offsetOfFirstStub()),
+    // Load stub pointer into ICStubReg.
+    masm.loadPtr(AbsoluteAddress(entry).offset(ICEntry::offsetOfFirstStub()),
                  ICStubReg);
 
-    // Load stubcode pointer from BaselineStubEntry into ICTailCallReg
-    // ICTailCallReg will always be unused in the contexts where ICs are called.
+    // Call the stubcode.
     masm.call(Address(ICStubReg, ICStub::offsetOfStubCode()));
     *callOffset = CodeOffset(masm.currentOffset());
 }
 
 inline void
 EmitEnterTypeMonitorIC(MacroAssembler& masm,
                        size_t monitorStubOffset = ICMonitoredStub::offsetOfFirstMonitorStub())
 {
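
Every EmitCallIC above changes in the same way: because the ICScript, and therefore the address of each ICEntry, now exists before Baseline code is emitted, the compiler can bake the entry address into the instruction stream and load the first-stub pointer through an AbsoluteAddress, instead of emitting a placeholder immediate and patching the load after the IC entries had been copied into the BaselineScript. A conceptual sketch of that difference, with hypothetical types and no MacroAssembler involved:

#include <cstdint>
#include <vector>

struct FakeEntry { void* firstStub = nullptr; };
struct PatchSite { size_t immIndex; size_t entryIndex; };

// Old style: the entry address is unknown at emit time, so emit a dummy
// immediate, remember where it went, and patch it once the entries exist.
void EmitWithPatch(std::vector<uintptr_t>& imms, std::vector<PatchSite>& patches,
                   size_t entryIndex) {
    patches.push_back({imms.size(), entryIndex});
    imms.push_back(uintptr_t(-1));  // placeholder, fixed up later
}

void ApplyPatches(std::vector<uintptr_t>& imms, const std::vector<PatchSite>& patches,
                  const std::vector<FakeEntry>& entries) {
    for (const PatchSite& p : patches) {
        imms[p.immIndex] = reinterpret_cast<uintptr_t>(&entries[p.entryIndex]);
    }
}

// New style: the entries are already allocated, so the real address can be
// emitted directly and no patching pass is needed.
void EmitDirect(std::vector<uintptr_t>& imms, const std::vector<FakeEntry>& entries,
                size_t entryIndex) {
    imms.push_back(reinterpret_cast<uintptr_t>(&entries[entryIndex]));
}
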
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -1858,18 +1858,18 @@ SetObjectElementOperation(JSContext* cx,
     // anyway.
     TypeScript::MonitorAssign(cx, obj, id);
 
     if (obj->isNative() && JSID_IS_INT(id)) {
         uint32_t length = obj->as<NativeObject>().getDenseInitializedLength();
         int32_t i = JSID_TO_INT(id);
         if ((uint32_t)i >= length) {
             // Annotate script if provided with information (e.g. baseline)
-            if (script && script->hasBaselineScript() && IsSetElemPC(pc)) {
-                script->baselineScript()->noteHasDenseAdd(script->pcToOffset(pc));
+            if (script && script->hasICScript() && IsSetElemPC(pc)) {
+                script->icScript()->noteHasDenseAdd(script->pcToOffset(pc));
             }
         }
     }
 
     // Set the HadElementsAccess flag on the object if needed. This flag is
     // used to do more eager dictionary-mode conversion for objects that are
     // used as hashmaps. Set this flag only for objects with many properties,
     // to avoid unnecessary Shape changes.
--- a/js/src/vm/JSScript-inl.h
+++ b/js/src/vm/JSScript-inl.h
@@ -224,9 +224,16 @@ JSScript::trackRecordReplayProgress() co
     // scripts execute may depend on performed Ion optimizations (for example,
     // self hosted TypedObject logic), so they are ignored.
     return MOZ_UNLIKELY(mozilla::recordreplay::IsRecordingOrReplaying())
         && !runtimeFromAnyThread()->parentRuntime
         && !selfHosted()
         && mozilla::recordreplay::ShouldUpdateProgressCounter(filename());
 }
 
+inline js::jit::ICScript*
+JSScript::icScript() const
+{
+    MOZ_ASSERT(hasICScript());
+    return types_->icScript();
+}
+
 #endif /* vm_JSScript_inl_h */
--- a/js/src/vm/JSScript.cpp
+++ b/js/src/vm/JSScript.cpp
@@ -3629,17 +3629,17 @@ JSScript::finalize(FreeOp* fop)
     if (fop->runtime()->lcovOutput().isEnabled() && hasScriptName()) {
         realm()->lcovOutput.collectCodeCoverageInfo(realm(), this, getScriptName());
         destroyScriptName();
     }
 
     fop->runtime()->geckoProfiler().onScriptFinalized(this);
 
     if (types_) {
-        types_->destroy();
+        types_->destroy(zone());
     }
 
     jit::DestroyJitScripts(fop, this);
 
     destroyScriptCounts();
     destroyDebugScript(fop);
 
 #ifdef MOZ_VTUNE
--- a/js/src/vm/JSScript.h
+++ b/js/src/vm/JSScript.h
@@ -48,16 +48,17 @@ namespace JS {
 struct ScriptSourceInfo;
 template<typename UnitT> class SourceText;
 } // namespace JS
 
 namespace js {
 
 namespace jit {
     struct BaselineScript;
+    class ICScript;
     struct IonScriptCounts;
 } // namespace jit
 
 # define ION_DISABLED_SCRIPT ((js::jit::IonScript*)0x1)
 # define ION_COMPILING_SCRIPT ((js::jit::IonScript*)0x2)
 # define ION_PENDING_SCRIPT ((js::jit::IonScript*)0x3)
 
 # define BASELINE_DISABLED_SCRIPT ((js::jit::BaselineScript*)0x1)
@@ -2305,16 +2306,24 @@ class JSScript : public js::gc::TenuredC
         return baseline != BASELINE_DISABLED_SCRIPT;
     }
     js::jit::BaselineScript* baselineScript() const {
         MOZ_ASSERT(hasBaselineScript());
         return baseline;
     }
     inline void setBaselineScript(JSRuntime* rt, js::jit::BaselineScript* baselineScript);
 
+    inline js::jit::ICScript* icScript() const;
+
+    bool hasICScript() const {
+        // ICScript is stored in TypeScript so we have an ICScript iff we have a
+        // TypeScript.
+        return !!types_;
+    }
+
     void updateJitCodeRaw(JSRuntime* rt);
 
     static size_t offsetOfBaselineScript() {
         return offsetof(JSScript, baseline);
     }
     static size_t offsetOfIonScript() {
         return offsetof(JSScript, ion);
     }
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -7,26 +7,28 @@
 #include "vm/NativeObject-inl.h"
 
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/Casting.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/DebugOnly.h"
 
 #include "gc/Marking.h"
+#include "jit/BaselineIC.h"
 #include "js/CharacterEncoding.h"
 #include "js/Value.h"
 #include "vm/Debugger.h"
 #include "vm/TypedArrayObject.h"
 #include "vm/UnboxedObject.h"
 
 #include "gc/Nursery-inl.h"
 #include "vm/ArrayObject-inl.h"
 #include "vm/EnvironmentObject-inl.h"
 #include "vm/JSObject-inl.h"
+#include "vm/JSScript-inl.h"
 #include "vm/Shape-inl.h"
 #include "vm/TypeInference-inl.h"
 #include "vm/UnboxedObject-inl.h"
 
 using namespace js;
 
 using JS::AutoCheckCannotGC;
 using mozilla::ArrayLength;
@@ -2334,22 +2336,22 @@ GetExistingProperty(JSContext* cx,
 
     if (shape->hasDefaultGetter()) {
         return true;
     }
 
     {
         jsbytecode* pc;
         JSScript* script = cx->currentScript(&pc);
-        if (script && script->hasBaselineScript()) {
+        if (script && script->hasICScript()) {
             switch (JSOp(*pc)) {
               case JSOP_GETPROP:
               case JSOP_CALLPROP:
               case JSOP_LENGTH:
-                script->baselineScript()->noteAccessedGetter(script->pcToOffset(pc));
+                script->icScript()->noteAccessedGetter(script->pcToOffset(pc));
                 break;
               default:
                 break;
             }
         }
     }
 
     if (!allowGC) {
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -15,16 +15,17 @@
 #include "mozilla/Sprintf.h"
 
 #include <new>
 
 #include "jsapi.h"
 #include "builtin/String.h"
 
 #include "gc/HashUtil.h"
+#include "jit/BaselineIC.h"
 #include "jit/BaselineJIT.h"
 #include "jit/CompileInfo.h"
 #include "jit/Ion.h"
 #include "jit/IonAnalysis.h"
 #include "jit/JitRealm.h"
 #include "jit/OptimizationTracking.h"
 #include "js/MemoryMetrics.h"
 #include "js/UniquePtr.h"
@@ -3742,24 +3743,37 @@ js::TypeMonitorResult(JSContext* cx, JSS
 bool
 JSScript::makeTypes(JSContext* cx)
 {
     MOZ_ASSERT(!types_);
     cx->check(this);
 
     AutoEnterAnalysis enter(cx);
 
+    UniquePtr<jit::ICScript> icScript(jit::ICScript::create(cx, this));
+    if (!icScript) {
+        return false;
+    }
+
+    // We need to call prepareForDestruction on ICScript before we |delete| it.
+    auto prepareForDestruction = mozilla::MakeScopeExit([&] {
+        icScript->prepareForDestruction(cx->zone());
+    });
+
     unsigned count = TypeScript::NumTypeSets(this);
 
     size_t size = TypeScript::SizeIncludingTypeArray(count);
     auto typeScript = reinterpret_cast<TypeScript*>(cx->pod_calloc<uint8_t>(size));
     if (!typeScript) {
         return false;
     }
 
+    prepareForDestruction.release();
+    typeScript->icScript_ = std::move(icScript);
+
 #ifdef JS_CRASH_DIAGNOSTICS
     {
         StackTypeSet* typeArray = typeScript->typeArray();
         for (unsigned i = 0; i < count; i++) {
             typeArray[i].initMagic();
         }
     }
 #endif
@@ -4904,27 +4918,29 @@ void
 JSScript::maybeReleaseTypes()
 {
     if (!types_ || zone()->types.keepTypeScripts || hasBaselineScript()) {
         return;
     }
 
     MOZ_ASSERT(!hasIonScript());
 
-    types_->destroy();
+    types_->destroy(zone());
     types_ = nullptr;
 
     // Freeze constraints on stack type sets need to be regenerated the
     // next time the script is analyzed.
     clearFlag(MutableFlags::HasFreezeConstraints);
 }
 
 void
-TypeScript::destroy()
+TypeScript::destroy(Zone* zone)
 {
+    icScript_->prepareForDestruction(zone);
+
     js_delete(this);
 }
 
 void
 Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                              size_t* typePool,
                              size_t* regexpZone,
                              size_t* jitZone,
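
makeTypes now allocates the ICScript first and transfers ownership to the freshly allocated TypeScript; the MakeScopeExit guard ensures prepareForDestruction still runs if the TypeScript allocation fails, because fallback-stub memory may still be referenced from the store buffer and must not be freed before the next minor GC. A small self-contained sketch of that scope-exit idiom, using a hypothetical local guard with the same shape as mozilla::MakeScopeExit:

#include <cstdio>
#include <utility>

template <typename F>
class ScopeGuard {
    F func_;
    bool armed_ = true;

  public:
    explicit ScopeGuard(F func) : func_(std::move(func)) {}
    ~ScopeGuard() {
        if (armed_) {
            func_();
        }
    }
    void release() { armed_ = false; }  // disarm on the success path
};

bool MakeThing(bool secondAllocSucceeds) {
    std::puts("allocate icScript");

    // If anything below fails, run the dedicated teardown before returning.
    ScopeGuard cleanup([] { std::puts("prepareForDestruction + free icScript"); });

    if (!secondAllocSucceeds) {
        return false;  // cleanup fires here
    }

    cleanup.release();  // ownership handed over; normal destruction applies
    std::puts("typeScript now owns icScript");
    return true;
}
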
--- a/js/src/vm/TypeInference.h
+++ b/js/src/vm/TypeInference.h
@@ -32,16 +32,17 @@ namespace js {
 
 class TypeConstraint;
 class TypeZone;
 class CompilerConstraintList;
 class HeapTypeSetKey;
 
 namespace jit {
 
+class ICScript;
 struct IonScript;
 class TempAllocator;
 
 } // namespace jit
 
 // If there is an OOM while sweeping types, the type information is deoptimized
 // so that it stays correct (i.e. overapproximates the possible types in the
 // zone), but constraints might not have been triggered on the deoptimization
@@ -229,30 +230,39 @@ class TypeScript
     friend class ::JSScript;
 
     // The freeze constraints added to stack type sets will only directly
     // invalidate the script containing those stack type sets. This Vector
     // contains compilations that inlined this script, so we can invalidate
     // them as well.
     RecompileInfoVector inlinedCompilations_;
 
+    // ICScript and TypeScript have the same lifetimes, so we store a pointer to
+    // ICScript here to not increase sizeof(JSScript).
+    js::UniquePtr<js::jit::ICScript> icScript_;
+
     // Variable-size array
     StackTypeSet typeArray_[1];
 
   public:
     RecompileInfoVector& inlinedCompilations() {
         return inlinedCompilations_;
     }
     MOZ_MUST_USE bool addInlinedCompilation(RecompileInfo info) {
         if (!inlinedCompilations_.empty() && inlinedCompilations_.back() == info) {
             return true;
         }
         return inlinedCompilations_.append(info);
     }
 
+    jit::ICScript* icScript() const {
+        MOZ_ASSERT(icScript_);
+        return icScript_.get();
+    }
+
     /* Array of type sets for variables and JOF_TYPESET ops. */
     StackTypeSet* typeArray() const {
         // Ensure typeArray_ is the last data member of TypeScript.
         JS_STATIC_ASSERT(sizeof(TypeScript) ==
                          sizeof(typeArray_) + offsetof(TypeScript, typeArray_));
         return const_cast<StackTypeSet*>(typeArray_);
     }
 
@@ -309,19 +319,20 @@ class TypeScript
      */
     static bool FreezeTypeSets(CompilerConstraintList* constraints, JSScript* script,
                                TemporaryTypeSet** pThisTypes,
                                TemporaryTypeSet** pArgTypes,
                                TemporaryTypeSet** pBytecodeTypes);
 
     static void Purge(JSContext* cx, HandleScript script);
 
-    void destroy();
+    void destroy(Zone* zone);
 
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
+        // Note: icScript_ size is reported in jit::AddSizeOfBaselineData.
         return mallocSizeOf(this);
     }
 
 #ifdef DEBUG
     void printTypes(JSContext* cx, HandleScript script) const;
 #endif
 };
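
The ownership arrangement introduced here ties the ICScript's lifetime to the TypeScript's: the UniquePtr lives on TypeScript so sizeof(JSScript) does not grow, and JSScript only gains the inline hasICScript()/icScript() accessors. A sketch of that arrangement with hypothetical stand-in types:

#include <memory>

struct FakeICData { /* IC entries + fallback stub space */ };

struct FakeTypeData {
    std::unique_ptr<FakeICData> icData;  // same lifetime as the type data
    // ... the variable-length type-set array follows in the real TypeScript
};

struct FakeScript {
    FakeTypeData* types = nullptr;       // null until types are created

    bool hasICData() const { return !!types; }
    FakeICData* icData() const { return types->icData.get(); }
};
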
 
--- a/js/src/vm/UnboxedObject.cpp
+++ b/js/src/vm/UnboxedObject.cpp
@@ -12,16 +12,17 @@
 #include "jit/BaselineIC.h"
 #include "jit/ExecutableAllocator.h"
 #include "jit/JitCommon.h"
 #include "jit/Linker.h"
 
 #include "gc/Nursery-inl.h"
 #include "jit/MacroAssembler-inl.h"
 #include "vm/JSObject-inl.h"
+#include "vm/JSScript-inl.h"
 #include "vm/Shape-inl.h"
 #include "vm/TypeInference-inl.h"
 
 using mozilla::ArrayLength;
 using mozilla::PodCopy;
 
 using namespace js;
 
@@ -644,18 +645,18 @@ UnboxedLayout::makeNativeGroup(JSContext
 
         PlainObject* templateObject = &script->getObject(pc)->as<PlainObject>();
         replacementGroup->addDefiniteProperties(cx, templateObject->lastProperty());
 
         ObjectGroupRealm& realm = ObjectGroupRealm::get(group);
         realm.replaceAllocationSiteGroup(script, pc, JSProto_Object, replacementGroup);
 
         // Clear any baseline information at this opcode which might use the old group.
-        if (script->hasBaselineScript()) {
-            jit::ICEntry& entry = script->baselineScript()->icEntryFromPCOffset(script->pcToOffset(pc));
+        if (script->hasICScript()) {
+            jit::ICEntry& entry = script->icScript()->icEntryFromPCOffset(script->pcToOffset(pc));
             jit::ICFallbackStub* fallback = entry.fallbackStub();
             for (jit::ICStubIterator iter = fallback->beginChain(); !iter.atEnd(); iter++) {
                 iter.unlink(cx);
             }
             if (fallback->isNewObject_Fallback()) {
                 fallback->toNewObject_Fallback()->setTemplateObject(nullptr);
             } else if (fallback->isNewArray_Fallback()) {
                 fallback->toNewArray_Fallback()->setTemplateGroup(replacementGroup);